The app I've created using Android Studio keeps crashing and I don't know why. It's a weather app and I'm following Google's Developing Android Apps course. Here's the logcat:
07-20 18:19:45.740 16410-16434/com.alexander.sunshine E/ActivityThread﹕ Failed to find provider info for com.alexander.android.sunshine.app
07-20 18:19:45.748 16410-16434/com.alexander.sunshine E/AndroidRuntime﹕ FATAL EXCEPTION: AsyncTask #1
Process: com.alexander.sunshine, PID: 16410
java.lang.RuntimeException: An error occured while executing doInBackground()
at android.os.AsyncTask$3.done(AsyncTask.java:304)
at java.util.concurrent.FutureTask.finishCompletion(FutureTask.java:355)
at java.util.concurrent.FutureTask.setException(FutureTask.java:222)
at java.util.concurrent.FutureTask.run(FutureTask.java:242)
at android.os.AsyncTask$SerialExecutor$1.run(AsyncTask.java:231)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1112)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:587)
at java.lang.Thread.run(Thread.java:818)
Caused by: java.lang.NullPointerException: Attempt to invoke interface method 'boolean android.database.Cursor.moveToFirst()' on a null object reference
at com.alexander.sunshine.app.FetchWeatherTask.addLocation(FetchWeatherTask.java:109)
at com.alexander.sunshine.app.FetchWeatherTask.getWeatherDataFromJson(FetchWeatherTask.java:214)
at com.alexander.sunshine.app.FetchWeatherTask.doInBackground(FetchWeatherTask.java:414)
at com.alexander.sunshine.app.FetchWeatherTask.doInBackground(FetchWeatherTask.java:34)
at android.os.AsyncTask$2.call(AsyncTask.java:292)
at java.util.concurrent.FutureTask.run(FutureTask.java:237)
at android.os.AsyncTask$SerialExecutor$1.run(AsyncTask.java:231)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1112)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:587)
at java.lang.Thread.run(Thread.java:818)
And here's the FetchWeatherTask.java:
package com.alexander.sunshine.app;
import android.content.ContentUris;
import android.content.ContentValues;
import android.content.Context;
import android.content.SharedPreferences;
import android.database.Cursor;
import android.database.DatabaseUtils;
import android.net.Uri;
import android.os.AsyncTask;
import android.preference.PreferenceManager;
import android.text.format.Time;
import android.util.Log;
import android.widget.ArrayAdapter;
import com.alexander.sunshine.R;
import com.alexander.sunshine.app.data.WeatherContract;
import com.alexander.sunshine.app.data.WeatherContract.WeatherEntry;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Vector;
public class FetchWeatherTask extends AsyncTask<String, Void, String[]> {
private final String LOG_TAG = FetchWeatherTask.class.getSimpleName();
private ArrayAdapter<String> mForecastAdapter;
private final Context mContext;
public FetchWeatherTask(Context context, ArrayAdapter<String> forecastAdapter) {
mContext = context;
mForecastAdapter = forecastAdapter;
}
private boolean DEBUG = true;
/* The date/time conversion code is going to be moved outside the asynctask later,
* so for convenience we're breaking it out into its own method now.
*/
private String getReadableDateString(long time){
// Because the API returns a unix timestamp (measured in seconds),
// it must be converted to milliseconds in order to be converted to a valid date.
Date date = new Date(time);
SimpleDateFormat format = new SimpleDateFormat("E, MMM d");
return format.format(date).toString();
}
/**
* Prepare the weather high/lows for presentation.
*/
private String formatHighLows(double high, double low) {
// Data is fetched in Celsius by default.
// If user prefers to see in Fahrenheit, convert the values here.
// We do this rather than fetching in Fahrenheit so that the user can
// change this option without us having to re-fetch the data once
// we start storing the values in a database.
SharedPreferences sharedPrefs =
PreferenceManager.getDefaultSharedPreferences(mContext);
String unitType = sharedPrefs.getString(
mContext.getString(R.string.pref_units_key),
mContext.getString(R.string.pref_units_metric));
if (unitType.equals(mContext.getString(R.string.pref_units_imperial))) {
high = (high * 1.8) + 32;
low = (low * 1.8) + 32;
} else if (!unitType.equals(mContext.getString(R.string.pref_units_metric))) {
Log.d(LOG_TAG, "Unit type not found: " + unitType);
}
// For presentation, assume the user doesn't care about tenths of a degree.
long roundedHigh = Math.round(high);
long roundedLow = Math.round(low);
String highLowStr = roundedHigh + "/" + roundedLow;
return highLowStr;
}
/**
* Helper method to handle insertion of a new location in the weather database.
*
* @param locationSetting The location string used to request updates from the server.
* @param cityName A human-readable city name, e.g. "Mountain View"
* @param lat the latitude of the city
* @param lon the longitude of the city
* @return the row ID of the added location.
*/
long addLocation(String locationSetting, String cityName, double lat, double lon) {
long locationId;
// First, check if the location with this city name exists in the db
Cursor locationCursor = mContext.getContentResolver().query(
WeatherContract.LocationEntry.CONTENT_URI,
new String[]{WeatherContract.LocationEntry._ID},
WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING + " = ?",
new String[]{locationSetting},
null);
if (locationCursor.moveToFirst()) {
int locationIdIndex = locationCursor.getColumnIndex(WeatherContract.LocationEntry._ID);
locationId = locationCursor.getLong(locationIdIndex);
} else {
// Now that the content provider is set up, inserting rows of data is pretty simple.
// First create a ContentValues object to hold the data you want to insert.
ContentValues locationValues = new ContentValues();
// Then add the data, along with the corresponding name of the data type,
// so the content provider knows what kind of value is being inserted.
locationValues.put(WeatherContract.LocationEntry.COLUMN_CITY_NAME, cityName);
locationValues.put(WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING, locationSetting);
locationValues.put(WeatherContract.LocationEntry.COLUMN_COORD_LAT, lat);
locationValues.put(WeatherContract.LocationEntry.COLUMN_COORD_LONG, lon);
// Finally, insert location data into the database.
Uri insertedUri = mContext.getContentResolver().insert(
WeatherContract.LocationEntry.CONTENT_URI,
locationValues
);
// The resulting URI contains the ID for the row. Extract the locationId from the Uri.
locationId = ContentUris.parseId(insertedUri);
}
locationCursor.close();
// Wait, that worked? Yes!
return locationId;
}
/*
Students: This code will allow the FetchWeatherTask to continue to return the strings that
the UX expects so that we can continue to test the application even once we begin using
the database.
*/
String[] convertContentValuesToUXFormat(Vector<ContentValues> cvv) {
// return strings to keep UI functional for now
String[] resultStrs = new String[cvv.size()];
for ( int i = 0; i < cvv.size(); i++ ) {
ContentValues weatherValues = cvv.elementAt(i);
String highAndLow = formatHighLows(
weatherValues.getAsDouble(WeatherEntry.COLUMN_MAX_TEMP),
weatherValues.getAsDouble(WeatherEntry.COLUMN_MIN_TEMP));
resultStrs[i] = getReadableDateString(
weatherValues.getAsLong(WeatherEntry.COLUMN_DATE)) +
" - " + weatherValues.getAsString(WeatherEntry.COLUMN_SHORT_DESC) +
" - " + highAndLow;
}
return resultStrs;
}
/**
* Take the String representing the complete forecast in JSON Format and
* pull out the data we need to construct the Strings needed for the wireframes.
*
* Fortunately parsing is easy: constructor takes the JSON string and converts it
* into an Object hierarchy for us.
*/
private String[] getWeatherDataFromJson(String forecastJsonStr,
String locationSetting)
throws JSONException {
// Now we have a String representing the complete forecast in JSON Format.
// Fortunately parsing is easy: constructor takes the JSON string and converts it
// into an Object hierarchy for us.
// These are the names of the JSON objects that need to be extracted.
// Location information
final String OWM_CITY = "city";
final String OWM_CITY_NAME = "name";
final String OWM_COORD = "coord";
// Location coordinate
final String OWM_LATITUDE = "lat";
final String OWM_LONGITUDE = "lon";
// Weather information. Each day's forecast info is an element of the "list" array.
final String OWM_LIST = "list";
final String OWM_PRESSURE = "pressure";
final String OWM_HUMIDITY = "humidity";
final String OWM_WINDSPEED = "speed";
final String OWM_WIND_DIRECTION = "deg";
// All temperatures are children of the "temp" object.
final String OWM_TEMPERATURE = "temp";
final String OWM_MAX = "max";
final String OWM_MIN = "min";
final String OWM_WEATHER = "weather";
final String OWM_DESCRIPTION = "main";
final String OWM_WEATHER_ID = "id";
try {
JSONObject forecastJson = new JSONObject(forecastJsonStr);
JSONArray weatherArray = forecastJson.getJSONArray(OWM_LIST);
JSONObject cityJson = forecastJson.getJSONObject(OWM_CITY);
String cityName = cityJson.getString(OWM_CITY_NAME);
JSONObject cityCoord = cityJson.getJSONObject(OWM_COORD);
double cityLatitude = cityCoord.getDouble(OWM_LATITUDE);
double cityLongitude = cityCoord.getDouble(OWM_LONGITUDE);
long locationId = addLocation(locationSetting, cityName, cityLatitude, cityLongitude);
// Insert the new weather information into the database
Vector<ContentValues> cVVector = new Vector<ContentValues>(weatherArray.length());
// OWM returns daily forecasts based upon the local time of the city that is being
// asked for, which means that we need to know the GMT offset to translate this data
// properly.
// Since this data is also sent in-order and the first day is always the
// current day, we're going to take advantage of that to get a nice
// normalized UTC date for all of our weather.
Time dayTime = new Time();
dayTime.setToNow();
// we start at the day returned by local time. Otherwise this is a mess.
int julianStartDay = Time.getJulianDay(System.currentTimeMillis(), dayTime.gmtoff);
// now we work exclusively in UTC
dayTime = new Time();
for(int i = 0; i < weatherArray.length(); i++) {
// These are the values that will be collected.
long dateTime;
double pressure;
int humidity;
double windSpeed;
double windDirection;
double high;
double low;
String description;
int weatherId;
// Get the JSON object representing the day
JSONObject dayForecast = weatherArray.getJSONObject(i);
// Cheating to convert this to UTC time, which is what we want anyhow
dateTime = dayTime.setJulianDay(julianStartDay+i);
pressure = dayForecast.getDouble(OWM_PRESSURE);
humidity = dayForecast.getInt(OWM_HUMIDITY);
windSpeed = dayForecast.getDouble(OWM_WINDSPEED);
windDirection = dayForecast.getDouble(OWM_WIND_DIRECTION);
// Description is in a child array called "weather", which is 1 element long.
// That element also contains a weather code.
JSONObject weatherObject =
dayForecast.getJSONArray(OWM_WEATHER).getJSONObject(0);
description = weatherObject.getString(OWM_DESCRIPTION);
weatherId = weatherObject.getInt(OWM_WEATHER_ID);
// Temperatures are in a child object called "temp". Try not to name variables
// "temp" when working with temperature. It confuses everybody.
JSONObject temperatureObject = dayForecast.getJSONObject(OWM_TEMPERATURE);
high = temperatureObject.getDouble(OWM_MAX);
low = temperatureObject.getDouble(OWM_MIN);
ContentValues weatherValues = new ContentValues();
weatherValues.put(WeatherEntry.COLUMN_LOC_KEY, locationId);
weatherValues.put(WeatherEntry.COLUMN_DATE, dateTime);
weatherValues.put(WeatherEntry.COLUMN_HUMIDITY, humidity);
weatherValues.put(WeatherEntry.COLUMN_PRESSURE, pressure);
weatherValues.put(WeatherEntry.COLUMN_WIND_SPEED, windSpeed);
weatherValues.put(WeatherEntry.COLUMN_DEGREES, windDirection);
weatherValues.put(WeatherEntry.COLUMN_MAX_TEMP, high);
weatherValues.put(WeatherEntry.COLUMN_MIN_TEMP, low);
weatherValues.put(WeatherEntry.COLUMN_SHORT_DESC, description);
weatherValues.put(WeatherEntry.COLUMN_WEATHER_ID, weatherId);
cVVector.add(weatherValues);
}
// add to database
if ( cVVector.size() > 0 ) {
ContentValues[] cvArray = new ContentValues[cVVector.size()];
cVVector.toArray(cvArray);
mContext.getContentResolver().bulkInsert(WeatherEntry.CONTENT_URI, cvArray);
}
// Sort order: Ascending, by date.
String sortOrder = WeatherEntry.COLUMN_DATE + " ASC";
Uri weatherForLocationUri = WeatherEntry.buildWeatherLocationWithStartDate(
locationSetting, System.currentTimeMillis());
// Students: Uncomment the next lines to display what you stored in the bulkInsert
Cursor cur = mContext.getContentResolver().query(weatherForLocationUri,
null, null, null, sortOrder);
cVVector = new Vector<ContentValues>(cur.getCount());
if ( cur.moveToFirst() ) {
do {
ContentValues cv = new ContentValues();
DatabaseUtils.cursorRowToContentValues(cur, cv);
cVVector.add(cv);
} while (cur.moveToNext());
}
Log.d(LOG_TAG, "FetchWeatherTask Complete. " + cVVector.size() + " Inserted");
String[] resultStrs = convertContentValuesToUXFormat(cVVector);
return resultStrs;
} catch (JSONException e) {
Log.e(LOG_TAG, e.getMessage(), e);
e.printStackTrace();
}
return null;
}
@Override
protected String[] doInBackground(String... params) {
// If there's no zip code, there's nothing to look up. Verify size of params.
if (params.length == 0) {
return null;
}
String locationQuery = params[0];
// These two need to be declared outside the try/catch
// so that they can be closed in the finally block.
HttpURLConnection urlConnection = null;
BufferedReader reader = null;
// Will contain the raw JSON response as a string.
String forecastJsonStr = null;
String format = "json";
String units = "metric";
int numDays = 14;
try {
// Construct the URL for the OpenWeatherMap query
// Possible parameters are avaiable at OWM's forecast API page, at
// http://openweathermap.org/API#forecast
final String FORECAST_BASE_URL =
"http://api.openweathermap.org/data/2.5/forecast/daily?";
final String QUERY_PARAM = "q";
final String FORMAT_PARAM = "mode";
final String UNITS_PARAM = "units";
final String DAYS_PARAM = "cnt";
Uri builtUri = Uri.parse(FORECAST_BASE_URL).buildUpon()
.appendQueryParameter(QUERY_PARAM, params[0])
.appendQueryParameter(FORMAT_PARAM, format)
.appendQueryParameter(UNITS_PARAM, units)
.appendQueryParameter(DAYS_PARAM, Integer.toString(numDays))
.build();
URL url = new URL(builtUri.toString());
// Create the request to OpenWeatherMap, and open the connection
urlConnection = (HttpURLConnection) url.openConnection();
urlConnection.setRequestMethod("GET");
urlConnection.connect();
// Read the input stream into a String
InputStream inputStream = urlConnection.getInputStream();
StringBuffer buffer = new StringBuffer();
if (inputStream == null) {
// Nothing to do.
return null;
}
reader = new BufferedReader(new InputStreamReader(inputStream));
String line;
while ((line = reader.readLine()) != null) {
// Since it's JSON, adding a newline isn't necessary (it won't affect parsing)
// But it does make debugging a *lot* easier if you print out the completed
// buffer for debugging.
buffer.append(line + "\n");
}
if (buffer.length() == 0) {
// Stream was empty. No point in parsing.
return null;
}
forecastJsonStr = buffer.toString();
} catch (IOException e) {
Log.e(LOG_TAG, "Error ", e);
// If the code didn't successfully get the weather data, there's no point in attempting
// to parse it.
return null;
} finally {
if (urlConnection != null) {
urlConnection.disconnect();
}
if (reader != null) {
try {
reader.close();
} catch (final IOException e) {
Log.e(LOG_TAG, "Error closing stream", e);
}
}
}
try {
return getWeatherDataFromJson(forecastJsonStr, locationQuery);
} catch (JSONException e) {
Log.e(LOG_TAG, e.getMessage(), e);
e.printStackTrace();
}
// This will only happen if there was an error getting or parsing the forecast.
return null;
}
@Override
protected void onPostExecute(String[] result) {
if (result != null && mForecastAdapter != null) {
mForecastAdapter.clear();
for(String dayForecastStr : result) {
mForecastAdapter.add(dayForecastStr);
}
// New data is back from the server. Hooray!
}
}
}
As the stack trace says, you're trying to call moveToFirst() on a null reference. locationCursor is the only object you're calling that method on, so it must be null.
The docs for query() say that it can return null, so you should null-check that object, and you should also check your invocation of query() to understand why it's returning null.
Clearly, in your addLocation method, the locationCursor you get by calling Cursor locationCursor = mContext.getContentResolver().query(...) is null, which causes the crash when you call locationCursor.moveToFirst() just after that.
I suggest checking out this problem's accepted answer. The problem might be that the resolver can't find your provider at all (the "Failed to find provider info" line in your logcat points that way), in which case query() returns null.
In any case, you should always check whether the Cursor is null before calling methods on it, and handle that case somehow, for example by alerting the user.
The method could look like this:
long addLocation(String locationSetting, String cityName, double lat, double lon) {
long locationId;
// First, check if the location with this city name exists in the db
Cursor locationCursor = mContext.getContentResolver().query(
WeatherContract.LocationEntry.CONTENT_URI,
new String[]{WeatherContract.LocationEntry._ID},
WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING + " = ?",
new String[]{locationSetting},
null);
if (locationCursor != null) {
if (locationCursor.moveToFirst()) {
int locationIdIndex = locationCursor.getColumnIndex(WeatherContract.LocationEntry._ID);
locationId = locationCursor.getLong(locationIdIndex);
locationCursor.close();
return locationId;
}
}
// Now that the content provider is set up, inserting rows of data is pretty simple.
// First create a ContentValues object to hold the data you want to insert.
ContentValues locationValues = new ContentValues();
// Then add the data, along with the corresponding name of the data type,
// so the content provider knows what kind of value is being inserted.
locationValues.put(WeatherContract.LocationEntry.COLUMN_CITY_NAME, cityName);
locationValues.put(WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING, locationSetting);
locationValues.put(WeatherContract.LocationEntry.COLUMN_COORD_LAT, lat);
locationValues.put(WeatherContract.LocationEntry.COLUMN_COORD_LONG, lon);
// Finally, insert location data into the database.
Uri insertedUri = mContext.getContentResolver().insert(
WeatherContract.LocationEntry.CONTENT_URI,
locationValues
);
// The resulting URI contains the ID for the row. Extract the locationId from the Uri.
locationId = ContentUris.parseId(insertedUri);
locationCursor.close();
// Wait, that worked? Yes!
return locationId;
}
Hi, I am writing a web server to be hosted locally that will have latitude and longitude posted in the URL/URI from an Android device; these will be used as search criteria in an SQL SELECT query to retrieve the 5 closest train stations.
I have made the code work with hard-coded longitude and latitude, but now I need to add the functionality of them being supplied dynamically from the Android device using the POST/GET functions. Unfortunately I have never used GET/POST, so I don't know where to start.
Below is my code from all classes in the web server. As said, it all works hard-coded but now needs to accept input from an Android device and return the same expected results. Thanks.
public class WebServer {
static String jArray = "";
public static void main(String[] args) {
try{
HttpServer server = HttpServer.create(new InetSocketAddress(8080),0);
server.createContext("/",new HttpHandler(){
public void handle(HttpExchange he) throws IOException{
try {
jArray = sqlConnector.train(jArray);
} catch (Throwable e) {
e.printStackTrace();
}
BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(he.getResponseBody()));
System.out.println("Processing Request");
he.sendResponseHeaders(200, 0);
String output = "<html><head></head><body><p>" + jArray + "</p></body></html>";
bw.write(output);
bw.close();
}
});
server.start();
System.out.println("Started up . . .");
}
catch (IOException ioe){
System.err.println("problems Starting Webserver: " + ioe);
}
}
}
public class sqlConnector {
public static String train(String jArray) throws Exception{
PreparedStatement s = null;
try
{
Connection c = DriverManager.getConnection("jdbc:sqlite:C:/Users/Colin/trainstations.db");
s = c.prepareStatement("SELECT Latitude, Longitude, StationName,( 3959 * acos(cos(radians(53.4355)) * cos(radians(Latitude)) * cos(radians(Longitude) - radians(-3.0508)) + sin(radians(53.4355)) * sin(radians(Latitude )))) AS distance FROM stations ORDER BY distance ASC LIMIT 0,5;");
ResultSet rs = s.executeQuery();
jArray = jsonConverter.convertResultSetIntoJSON(rs, jArray);
}
catch (SQLException se)
{
se.printStackTrace();
}
return jArray;
}
}
public class jsonConverter {
public static String convertResultSetIntoJSON(ResultSet rs, String jArray) throws Exception {
JSONArray jsonArray = new JSONArray();
while (rs.next()) {
int total_rows = rs.getMetaData().getColumnCount();
JSONObject obj = new JSONObject();
for (int i = 0; i < total_rows; i++) {
String columnName = rs.getMetaData().getColumnLabel(i + 1).toString();
Object columnValue = rs.getObject(i + 1);
obj.put(columnName, columnValue);
}
jsonArray.put(obj);
}
jArray = jsonArray.toString();
return jArray;
}
}
I am currently connected to another web server that hosts the same data and is fully functional; after the port number, its format is as follows:
/stations?lat=" + lat + "&lng=" + lng);
where lat and lng are my variables taken using GPS.
The process would be like this:
1) Parse the parameters from the query string he.getRequestURI().getQuery()
/**
* returns the url parameters in a map
* @param query
* @return map
*/
public static Map<String, String> queryToMap(String query){
Map<String, String> result = new HashMap<String, String>();
for (String param : query.split("&")) {
String pair[] = param.split("=");
if (pair.length>1) {
result.put(pair[0], pair[1]);
}else{
result.put(pair[0], "");
}
}
return result;
}
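For example, inside your handle() method you could pull the coordinates out of the request like this (a rough sketch, not tested; it assumes the /stations?lat=...&lng=... format you mentioned and the queryToMap helper above):
public void handle(HttpExchange he) throws IOException {
    // parse ?lat=...&lng=... from the request URI
    Map<String, String> params = queryToMap(he.getRequestURI().getQuery());
    double lat = Double.parseDouble(params.get("lat"));
    double lng = Double.parseDouble(params.get("lng"));
    String jArray = "";
    try {
        // pass the coordinates through to the SQL query (see step 2 below)
        jArray = sqlConnector.train(jArray, lat, lng);
    } catch (Throwable e) {
        e.printStackTrace();
    }
    he.sendResponseHeaders(200, 0);
    BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(he.getResponseBody()));
    bw.write("<html><head></head><body><p>" + jArray + "</p></body></html>");
    bw.close();
}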
2) Pass these parameters into your select method (note that long is a reserved word in Java, so use lng):
public static String train(String jArray, double lat, double lng) throws Exception{
3) Use it in your statement
s = c.prepareStatement("SELECT Latitude, Longitude, StationName,
( 3959 * acos(cos(radians(?)) * cos(radians(Latitude))
* cos(radians(Longitude) - radians(?)) + sin(radians(?))
* sin(radians(Latitude ))))
AS distance FROM stations ORDER BY distance ASC LIMIT 0,5;");
s.setDouble(1, lat);
s.setDouble(2, lng);
s.setDouble(3, lat);
And finally
4) Fix your code.
use a Database connection pool instead of connecting to the database for every call
use try-with-resources to automatically close the ResultSet, PreparedStatement, and Connection (even in case of errors), as in the sketch below
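A minimal try-with-resources version of the train method might look like this (a sketch only; the SQL and connection string are taken from your code above):
public static String train(String jArray, double lat, double lng) throws Exception {
    String sql = "SELECT Latitude, Longitude, StationName,"
            + " (3959 * acos(cos(radians(?)) * cos(radians(Latitude))"
            + " * cos(radians(Longitude) - radians(?)) + sin(radians(?))"
            + " * sin(radians(Latitude)))) AS distance"
            + " FROM stations ORDER BY distance ASC LIMIT 0,5";
    // the resources below are closed automatically, even if an exception is thrown
    try (Connection c = DriverManager.getConnection("jdbc:sqlite:C:/Users/Colin/trainstations.db");
         PreparedStatement s = c.prepareStatement(sql)) {
        s.setDouble(1, lat);
        s.setDouble(2, lng);
        s.setDouble(3, lat);
        try (ResultSet rs = s.executeQuery()) {
            return jsonConverter.convertResultSetIntoJSON(rs, jArray);
        }
    }
}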
Hope that helps as a rough guideline :-)
This post is about a map-reduce implementation suggested for my previous question: "How to optimize scan of 1 huge file / table in Hive to confirm/check if lat long point is contained in a wkt geometry shape".
I am not well-versed in writing Java programs for map-reduce; I mainly use Hive, Pig, or Spark to develop in the Hadoop ecosystem. To give some background on the task at hand: I am trying to associate every latitude/longitude ping with its corresponding ZIP postal code. I have a WKT multi-polygon shape file (500 MB) with all the ZIP information. I have loaded this in Hive and can do a join using ST_Contains(polygon, point). However, it takes very long to complete. To overcome this bottleneck I am trying to leverage the example from ESRI ("https://github.com/Esri/gis-tools-for-hadoop/tree/master/samples/point-in-polygon-aggregation-mr") by building a quad tree index for searching a point derived from lat-long in a polygon.
I have managed to write the code, but it clogs up the Java heap memory of the cluster. Any suggestions on improving the code or looking at a different approach would be greatly appreciated:
Error message:
Error: Java heap space
Container killed by the ApplicationMaster.
Container killed on request. Exit code is 143
Container exited with a non-zero exit code 143
My code:
public class MapperClass extends Mapper<LongWritable, Text, Text, IntWritable> {
// column indices for values in the text file
int longitudeIndex;
int latitudeIndex;
int wktZip;
int wktGeom;
int wktLineCount;
int wktStateID;
// in boundaries.wkt, the label for the polygon is "wkt"
//creating ArrayList to hold details of the file
ArrayList<ZipPolyClass> nodes = new ArrayList<ZipPolyClass>();
String labelAttribute;
EsriFeatureClass featureClass;
SpatialReference spatialReference;
QuadTree quadTree;
QuadTreeIterator quadTreeIter;
BufferedReader csvWkt;
// class to store all the values from wkt file and calculate geometryFromWKT
public class ZipPolyClass {
public String zipCode;
public String wktPoly;
public String stateID;
public int indexJkey;
public Geometry wktGeomObj;
public ZipPolyClass(int ijk, String z, String w, String s ){
zipCode = z;
wktPoly = w;
stateID = s;
indexJkey = ijk;
wktGeomObj = GeometryEngine.geometryFromWkt(wktPoly, 0, Geometry.Type.Unknown);
}
}
//building quadTree Index from WKT multiPolygon and creating an iterator
private void buildQuadTree(){
quadTree = new QuadTree(new Envelope2D(-180, -90, 180, 90), 8);
Envelope envelope = new Envelope();
int j=0;
while(j<nodes.size()){
nodes.get(j).wktGeomObj.queryEnvelope(envelope);
quadTree.insert(j, new Envelope2D(envelope.getXMin(), envelope.getYMin(), envelope.getXMax(), envelope.getYMax()));
j++; // advance to the next feature; without this the loop never terminates
}
quadTreeIter = quadTree.getIterator();
}
/**
* Query the quadtree for the feature containing the given point
*
* @param pt point as longitude, latitude
* @return index to feature in featureClass or -1 if not found
*/
private int queryQuadTree(Point pt)
{
// reset iterator to the quadrant envelope that contains the point passed
quadTreeIter.resetIterator(pt, 0);
int elmHandle = quadTreeIter.next();
while (elmHandle >= 0){
int featureIndex = quadTree.getElement(elmHandle);
// we know the point and this feature are in the same quadrant, but we need to make sure the feature
// actually contains the point
if (GeometryEngine.contains(nodes.get(featureIndex).wktGeomObj, pt, spatialReference)){
return featureIndex;
}
elmHandle = quadTreeIter.next();
}
// feature not found
return -1;
}
/**
* Sets up mapper with filter geometry provided as argument[0] to the jar
*/
@Override
public void setup(Context context)
{
Configuration config = context.getConfiguration();
spatialReference = SpatialReference.create(4326);
// first pull values from the configuration
String featuresPath = config.get("sample.features.input");
//get column reference from driver class
wktZip = config.getInt("sample.features.col.zip", 0);
wktGeom = config.getInt("sample.features.col.geometry", 18);
wktStateID = config.getInt("sample.features.col.stateID", 3);
latitudeIndex = config.getInt("samples.csvdata.columns.lat", 5);
longitudeIndex = config.getInt("samples.csvdata.columns.long", 6);
FSDataInputStream iStream = null;
try {
// load the text WKT file provided as argument 0
FileSystem hdfs = FileSystem.get(config);
iStream = hdfs.open(new Path(featuresPath));
BufferedReader br = new BufferedReader(new InputStreamReader(iStream));
String wktLine ;
int i=0;
while((wktLine = br.readLine()) != null){
String [] val = wktLine.split("\\|");
String qtZip = val[wktZip];
String poly = val[wktGeom];
String stID = val[wktStateID];
ZipPolyClass zpc = new ZipPolyClass(i, qtZip, poly, stID);
nodes.add(i,zpc);
i++; // increment in the loop before end
}
}
catch (Exception e)
{
e.printStackTrace();
}
finally
{
if (iStream != null)
{
try {
iStream.close();
} catch (IOException e) { }
}
}
// build a quadtree of our features for fast queries
if (!nodes.isEmpty()) {
buildQuadTree();
}
}
@Override
public void map(LongWritable key, Text val, Context context)
throws IOException, InterruptedException {
/*
* The TextInputFormat we set in the configuration, by default, splits a text file line by line.
* The key is the byte offset to the first character in the line. The value is the text of the line.
*/
String line = val.toString();
String [] values = line.split(",");
// get lat long from file and convert to float
float latitude = Float.parseFloat(values[latitudeIndex]);
float longitude = Float.parseFloat(values[longitudeIndex]);
// Create our Point directly from longitude and latitude
Point point = new Point(longitude, latitude);
int featureIndex = queryQuadTree(point);
// Each map only processes one record at a time, so we start out with our count
// as 1. Since we have a distinct record file we will not run reducer
IntWritable one = new IntWritable(1);
if (featureIndex >= 0){
String zipTxt =nodes.get(featureIndex).zipCode;
String stateIDTxt = nodes.get(featureIndex).stateID;
String latTxt = values[latitudeIndex];
String longTxt = values[longitudeIndex];
String pointTxt = point.toString();
String name;
name = zipTxt+"\t"+stateIDTxt+"\t"+latTxt+"\t"+longTxt+ "\t" +pointTxt;
context.write(new Text(name), one);
} else {
context.write(new Text("*Outside Feature Set"), one);
}
}
}
I was able to resolve the out-of-memory issue by modifying the ArrayList<classObject> to just hold an ArrayList<geometry> instead.
Creating a class object (around 50k of them) to hold each row of the text file consumed all the Java heap memory. After this change the code ran fine even in a 1-node virtual sandbox. I was able to crunch around 40 million rows in around 6 minutes.
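A rough sketch of that change inside setup(), reusing the variable names from the code above (this is illustrative, not the exact code I ended up with):
// keep only the parsed Geometry plus the small string fields, instead of one
// heavyweight ZipPolyClass object (raw WKT string included) per row
ArrayList<Geometry> geometries = new ArrayList<Geometry>();
ArrayList<String> zipCodes = new ArrayList<String>();
ArrayList<String> stateIds = new ArrayList<String>();
String wktLine;
while ((wktLine = br.readLine()) != null) {
    String[] val = wktLine.split("\\|");
    zipCodes.add(val[wktZip]);
    stateIds.add(val[wktStateID]);
    // parse the WKT once and drop the raw string immediately
    geometries.add(GeometryEngine.geometryFromWkt(val[wktGeom], 0, Geometry.Type.Unknown));
}
queryQuadTree() and map() then index into geometries, zipCodes and stateIds by the same position instead of going through a node object.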
I have a Crystal Report that was written using a complex SQL query, and I'm trying to invoke it using the Crystal Reports Java API. This report has a Command object associated with it.
I load the report and set the connection parameters.
Then I try to set the connection information to the current JDBC profile, meaning the test environment credentials.
I get an exception. I tried with version 11 and version 12; neither of them seems to work.
I'm getting the exception when I invoke the following piece of code. This piece of code works just fine with reports that have no "Command" SQL.
try{
clientDoc.getDatabaseController().setTableLocation(
origTable, newTable);
}catch(Exception ex){
ex.printStackTrace();
}
See below for the entire code. Please reply if anyone knows how to work around this.
private static void changeDataSource(ReportClientDocument clientDoc,
String reportName, String tableName, String username,
String password, String connectionURL, String driverName,
String jndiName) throws ReportSDKException {
PropertyBag propertyBag = null;
IConnectionInfo connectionInfo = null;
ITable origTable = null;
ITable newTable = null;
// Declare variables to hold ConnectionInfo values.
// Below is the list of values required to switch to use a JDBC/JNDI
// connection
String TRUSTED_CONNECTION = "false";
String SERVER_TYPE = "JDBC (JNDI)";
String USE_JDBC = "true";
String DATABASE_DLL = "crdb_jdbc.dll";
String JNDI_OPTIONAL_NAME = jndiName;
String CONNECTION_URL = connectionURL;
String DATABASE_CLASS_NAME = driverName;
// Declare variables to hold database User Name and Password values
String DB_USER_NAME = username;
String DB_PASSWORD = password;
System.out.println("Trusted_Connection:" + TRUSTED_CONNECTION);
System.out.println("Server Type:" + SERVER_TYPE);
System.out.println("Use JDBC:" + USE_JDBC);
System.out.println("Database DLL:" + DATABASE_DLL);
System.out.println("JNDIOptionalName:" + JNDI_OPTIONAL_NAME);
System.out.println("Connection URL:" + CONNECTION_URL);
System.out.println("Database Class Name:" + DATABASE_CLASS_NAME);
System.out.println("DB_USER_NAME:" + DB_USER_NAME);
System.out.println("DB_PASSWORD:" + DB_PASSWORD);
// Obtain collection of tables from this database controller
if (reportName == null || reportName.equals("")) {
Tables tables = clientDoc.getDatabaseController().getDatabase()
.getTables();
for (int i = 0; i < tables.size(); i++) {
origTable = tables.getTable(i);
if (tableName == null || origTable.getName().equals(tableName)) {
newTable = (ITable) origTable;
newTable.setQualifiedName(origTable.getAlias());
connectionInfo = newTable.getConnectionInfo();
// Set new table connection property attributes
propertyBag = new PropertyBag();
// Overwrite any existing properties with updated values
propertyBag.put("Trusted_Connection", TRUSTED_CONNECTION);
propertyBag.put("Server Type", SERVER_TYPE);
propertyBag.put("Use JDBC", USE_JDBC);
propertyBag.put("Database DLL", DATABASE_DLL);
propertyBag.put("JNDIOptionalName", JNDI_OPTIONAL_NAME);
propertyBag.put("Connection URL", CONNECTION_URL);
propertyBag.put("Database Class Name", DATABASE_CLASS_NAME);
connectionInfo.setAttributes(propertyBag);
connectionInfo.setUserName(DB_USER_NAME);
connectionInfo.setPassword(DB_PASSWORD);
// Update the table information
try{
clientDoc.getDatabaseController().setTableLocation(
origTable, newTable);
}catch(Exception ex){
ex.printStackTrace();
}
}
}
}
// Next loop through all the subreports and pass in the same
// information. You may consider
// creating a separate method which accepts
if (reportName == null || !(reportName.equals(""))) {
IStrings subNames = clientDoc.getSubreportController()
.getSubreportNames();
for (int subNum = 0; subNum < subNames.size(); subNum++) {
Tables tables = clientDoc.getSubreportController()
.getSubreport(subNames.getString(subNum))
.getDatabaseController().getDatabase().getTables();
for (int i = 0; i < tables.size(); i++) {
origTable = tables.getTable(i);
if (tableName == null
|| origTable.getName().equals(tableName)) {
newTable = (ITable) origTable;
newTable.setQualifiedName(origTable.getAlias());
// Change connection information properties
connectionInfo = newTable.getConnectionInfo();
// Set new table connection property attributes
propertyBag = new PropertyBag();
// Overwrite any existing properties with updated values
propertyBag.put("Trusted_Connection",
TRUSTED_CONNECTION);
propertyBag.put("Server Type", SERVER_TYPE);
propertyBag.put("Use JDBC", USE_JDBC);
propertyBag.put("Database DLL", DATABASE_DLL);
propertyBag.put("JNDIOptionalName", JNDI_OPTIONAL_NAME);
propertyBag.put("Connection URL", CONNECTION_URL);
propertyBag.put("Database Class Name",
DATABASE_CLASS_NAME);
connectionInfo.setAttributes(propertyBag);
connectionInfo.setUserName(DB_USER_NAME);
connectionInfo.setPassword(DB_PASSWORD);
// Update the table information
clientDoc.getSubreportController()
.getSubreport(subNames.getString(subNum))
.getDatabaseController()
.setTableLocation(origTable, newTable);
}
}
}
}
}
Add the following after this line of yours:
connectionInfo.setPassword(DB_PASSWORD);
newTable.setConnectionInfo(connectionInfo);
//This will add connection parameters to the new table
Instead of
clientDoc.getDatabaseController().setTableLocation(origTable, newTable);
use this:
clientDoc.getDatabaseController ().setTableLocation (newTable, tables.getTable(i));
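Putting the two changes together, the relevant block would read roughly like this (a sketch based on the snippets above, not tested against a live report):
connectionInfo.setUserName(DB_USER_NAME);
connectionInfo.setPassword(DB_PASSWORD);
// push the connection parameters onto the replacement table before relocating it
newTable.setConnectionInfo(connectionInfo);
// pass the new table first when updating the table location
clientDoc.getDatabaseController().setTableLocation(newTable, tables.getTable(i));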
I'm working on a GridSim project in Java (Eclipse). I have found a network flow program which works only for a one-to-one connection between the sender and the receiver. If the same user (sender) wishes to send a message to any other receiver, the program does not work. Similarly, if a receiver wishes to send a message to two senders, it does not work. Here I'm including all the Java files for this work. In order to run the program, we need to include the external .jar file paths in the project. The gridsim.jar and simjava2.jar files can be downloaded from http://sourceforge.net/projects/gridsim/
The following are the programs. The main program is FlowNetEx01.java
package network.flow.example01;
import gridsim.*;
import gridsim.net.*;
import gridsim.net.flow.*;
import java.util.*;
// Test Driver class for this example
public class FlowNetEx01
{
// Creates main() to run this example
public static void main(String[] args)
{
System.out.println("Starting network example ...");
try
{
int num_user = 4; // number of grid users
Calendar calendar = Calendar.getInstance();
boolean trace_flag = false; // mean trace GridSim events
System.out.println("Initializing GridSim package");
// It is essential to set the network type before calling GridSim.init()
GridSim.initNetworkType(GridSimTags.NET_FLOW_LEVEL);
GridSim.init(num_user, calendar, trace_flag);
// In this example, the topology is:
// user(s) --10Mb/s-- r1 --1.5Mb/s-- r2 --10Mb/s-- GridResource(s)
Router r1 = new FlowRouter("router1", trace_flag); // router 1
Router r2 = new FlowRouter("router2", trace_flag); // router 2
String sender1 = "user1";
String receipient1 = "test1";
String sender2 = "user2";
String receipient2 = "test2";
// these entities are the senders
FlowNetUser user1 = new FlowNetUser(sender1, receipient2, 5.0);
FlowNetUser user2 = new FlowNetUser(sender2, receipient1, 20.0);
// these entities are the receipients
FlowTest test1 = new FlowTest(receipient1, sender2);
FlowTest test2 = new FlowTest(receipient2, sender1);
// The schedulers are redundant and will be stripped out soon
FIFOScheduler userSched1 = new FIFOScheduler("NetUserSched_0");
r1.attachHost(user1, userSched1);
FIFOScheduler userSched2 = new FIFOScheduler("NetUserSched_1");
r1.attachHost(user2, userSched2);
FIFOScheduler testSched1 = new FIFOScheduler("FlowTestSched_0");
r2.attachHost(test1, testSched1);
FIFOScheduler testSched2 = new FIFOScheduler("FlowTestSched_1");
r2.attachHost(test2, testSched2);
//////////////////////////////////////////
// Second step: Creates a physical link
double baud_rate = 1572864; // bits/sec (baud) [1.5Mb/s]
double propDelay = 300; // propagation delay in millisecond
int mtu = Integer.MAX_VALUE; // max. transmission unit in byte
Link link = new FlowLink("r1_r2_link", baud_rate, propDelay, mtu);
FIFOScheduler r1Sched = new FIFOScheduler("r1_Sched");
FIFOScheduler r2Sched = new FIFOScheduler("r2_Sched");
r1.attachRouter(r2, link, r1Sched, r2Sched);
//////////////////////////////////////////
// Final step: Starts the simulation
GridSim.startGridSimulation();
System.out.println("\nFinish network example ...");
}
catch (Exception e)
{
e.printStackTrace();
System.err.print(e.toString());
System.out.println("Unwanted errors happen");
}
}
} // end class
Program-2:
package network.flow.example01;
import gridsim.*;
import gridsim.net.*;
import gridsim.net.flow.*;
import eduni.simjava.*;
import java.util.*;
public class FlowNetUser extends GridSim
{
private int myID_; // my entity ID
private String name_; // my entity name
private String destName_; // destination name
private int destID_; // destination id
private double wait_; // Delay until I begin sending
public static final int SEND_MSG = 1;
public static final int ACK_MSG = 2;
public FlowNetUser(String name, String destName, Link link, double wait) throws Exception
{
super(name, link);
// get this entity name from Sim_entity
this.name_ = super.get_name();
// get this entity ID from Sim_entity
this.myID_ = super.get_id();
// get the destination entity name
this.destName_ = destName;
// get the waiting time before sending
this.wait_ = wait;
}
public FlowNetUser(String name, String destName, double wait) throws Exception
{
// 10,485,760 baud = 10Mb/s
super(name, new FlowLink(name+"_link",10485760,450,Integer.MAX_VALUE));
// get this entity name from Sim_entity
this.name_ = super.get_name();
// get this entity ID from Sim_entity
this.myID_ = super.get_id();
// get the destination entity name
destName_ = destName;
// get the waiting time before sending
this.wait_ = wait;
}
public void body()
{
int packetSize = 524288000; // packet size in bytes [5MB]
//int packetSize = 52428800; // packet size in bytes [50MB]
//int packetSize = 524288000; // packet size in bytes [500MB]
//int packetSize = 5242880000; // packet size in bytes [5000MB]
int size = 3; // number of packets sent
int i = 0;
// get the destination entity ID
this.destID_ = GridSim.getEntityId(destName_);
//super.sim_pause(this.wait_);
this.gridSimHold(this.wait_);
// sends messages over the other side of the link
for (i = 0; i < size; i++)
{
String msg = "Message_" + i;
IO_data data = new IO_data(msg, packetSize, destID_);
System.out.println(name_ + ".body(): Sending " + msg +
", at time = " + GridSim.clock() );
// sends through Output buffer of this entity
super.send(super.output, GridSimTags.SCHEDULE_NOW,
GridSimTags.FLOW_SUBMIT, data);
//super.sim_pause();
super.sim_pause(10.0);
//this.gridSimHold((Math.random()*10)+1.0);
}
// get the ack back
Object obj = null;
for (i = 0; i < size; i++)
{
// waiting for incoming event in the Input buffer
obj = super.receiveEventObject();
System.out.println(name_ + ".body(): Receives Ack for " + obj);
}
// Wait for other FlowNetUser instances to finish
this.gridSimHold(1000.0);
super.send(destID_, GridSimTags.SCHEDULE_NOW,
GridSimTags.END_OF_SIMULATION);
// shut down I/O ports
shutdownUserEntity();
terminateIOEntities();
System.out.println(this.name_ + ":%%%% Exiting body() at time " +
GridSim.clock() );
}
} // end class
Program-3:
package network.flow.example01;
import java.util.*;
import gridsim.*;
import gridsim.net.*;
import gridsim.net.flow.*;
import gridsim.util.SimReport;
import eduni.simjava.*;
public class FlowTest extends GridSim
{
private int myID_; // my entity ID
private String name_; // my entity name
private String destName_; // destination name
private int destID_; // destination id
private SimReport report_; // logs every activity
public FlowTest(String name, String destName, Link link) throws Exception
{
super(name, link);
// get this entity name from Sim_entity
this.name_ = super.get_name();
// get this entity ID from Sim_entity
this.myID_ = super.get_id();
// get the destination entity name
this.destName_ = destName;
// logs every activity. It will automatically create name.csv file
report_ = new SimReport(name);
report_.write("Creates " + name);
}
public FlowTest(String name, String destName) throws Exception
{
// 10,485,760 baud = 10Mb/s
super(name, new FlowLink(name+"_link",10485760,250,Integer.MAX_VALUE));
// get this entity name from Sim_entity
this.name_ = super.get_name();
// get this entity ID from Sim_entity
this.myID_ = super.get_id();
// get the destination entity name
this.destName_ = destName;
// logs every activity. It will automatically create name.csv file
report_ = new SimReport(name);
report_.write("Creates " + name);
}
public void body()
{
// get the destination entity ID
this.destID_ = GridSim.getEntityId(destName_);
int packetSize = 1500; // packet size in bytes
Sim_event ev = new Sim_event(); // an event
// a loop waiting for incoming events
while ( Sim_system.running() )
{
// get the next event from the Input buffer
super.sim_get_next(ev);
// if an event denotes end of simulation
if (ev.get_tag() == GridSimTags.END_OF_SIMULATION)
{
System.out.println();
write(super.get_name() + ".body(): exiting ...");
break;
}
// if an event denotes another event type
else if (ev.get_tag() == GridSimTags.FLOW_SUBMIT)
{
System.out.println();
write(super.get_name() + ".body(): receive " +
ev.get_data() + ", at time = " + GridSim.clock());
// No need for an ack, it is handled in FlowBuffer now on our behalf
// sends back an ack
IO_data data = new IO_data(ev.get_data(), packetSize, destID_);
write(name_ + ".body(): Sending back " +
ev.get_data() + ", at time = " + GridSim.clock() );
// sends through Output buffer of this entity
super.send(super.output, GridSimTags.SCHEDULE_NOW,
GridSimTags.FLOW_ACK, data);
}
else if (ev.get_tag() == GridSimTags.INFOPKT_SUBMIT)
{
processPingRequest(ev);
}
}
// shut down I/O ports
shutdownUserEntity();
terminateIOEntities();
// don't forget to close the file
if (report_ != null) {
report_.finalWrite();
}
System.out.println(this.name_ + ":%%%% Exiting body() at time " +
GridSim.clock() );
}
private void processPingRequest(Sim_event ev)
{
InfoPacket pkt = (InfoPacket) ev.get_data();
pkt.setTag(GridSimTags.INFOPKT_RETURN);
pkt.setDestID( pkt.getSrcID() );
// sends back to the sender
super.send(super.output, GridSimTags.SCHEDULE_NOW,
GridSimTags.INFOPKT_RETURN,
new IO_data(pkt,pkt.getSize(),pkt.getSrcID()) );
}
private void write(String msg)
{
System.out.println(msg);
if (report_ != null) {
report_.write(msg);
}
}
} // end class
After running these programs, can someone tell us how to extend them with the functionality I mentioned at the beginning?
Personal experience ... GridSim 5.2 is potentially buggy.
The examples are dated, written for versions < 4.0, and demonstrate only fairly simple scenarios.
Using version 5.2: according to the API docs every simulation should have at least one TopRegionalRC. This appends a UniqueID to the filename and records the location of the file in two hashmaps, one for the filename and the other for the file attributes. Now the event filename used for the lookup remains unchanged, so the lookup fails when it is compared to fileattrmap.name_. Consequently, the waiting-for-ack block is never executed when performing an addmaster operation.
Fix: since the UniqueID is returned by the initial request to the CTLG, it can be appended to the filename for the subsequent event requiring the lookup. Alternatively, add the filename to fileattrmap and the filename+uniqueid to fileattrmap, then test for both in the lookup.
Also, GridSimTags uses -1 to signal END_OF_SIMULATION, but this conflicts with the Advanced Reservation (AR) block of tags, which also uses negative numbers. GridSimTags has a routine to check for duplicates, but its use is optional and it does not apply to DataGridTags. I created a reverse map for ease of debugging, added validation to ensure no duplicates occur, and deprecated the GridSimTags method.
I am now wrestling with the DataGrid user tasks, which do not seem to create events, and I am also concerned that the delay operations are not effective.
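Regarding the reverse map of tags mentioned above, a minimal sketch of the duplicate check might look like this (my own illustration using reflection, not GridSim code; it assumes the tag classes expose their tags as public static int fields, as GridSimTags and DataGridTags do):
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.HashMap;
import java.util.Map;

public final class TagReverseMap {
    // Builds a value -> name map from the public static int fields of the given
    // tag classes, failing fast if two tags share the same numeric value.
    public static Map<Integer, String> build(Class<?>... tagClasses) throws IllegalAccessException {
        Map<Integer, String> reverse = new HashMap<Integer, String>();
        for (Class<?> tagClass : tagClasses) {
            for (Field f : tagClass.getFields()) {
                if (f.getType() == int.class && Modifier.isStatic(f.getModifiers())) {
                    int value = f.getInt(null);
                    String name = tagClass.getSimpleName() + "." + f.getName();
                    String previous = reverse.put(value, name);
                    if (previous != null) {
                        throw new IllegalStateException(
                            "Duplicate tag value " + value + ": " + previous + " and " + name);
                    }
                }
            }
        }
        return reverse;
    }
}
Calling TagReverseMap.build(GridSimTags.class, DataGridTags.class) then fails as soon as two tag constants collide, and the returned map is handy for printing tag names while debugging.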
I have a collection of raw text in a table in a database, and I need to replace some words in this collection using a set of words.
I put all the terms to be replaced and their substitutes in a text file as below:
min=admin
lelet=lambat
lemot=lambat
nii=nih
ntu=itu
and so on.
I have successfully initialized a File and a Scanner variable to read the collection of terms and their substitutes.
I loop over all the dataset rows and save the raw text in a string.
In the same loop:
I loop over the whole term collection, save each row to a string named 'pattern', and split the pattern into two strings named 'term' and 'replacer'.
In this loop I initialize a new string whose value is the string from the dataset modified by replaceAll(term, replacer).
End of loop over the term collection.
Then I insert the new string into another table in the database.
End of loop over the dataset.
I did it manually as below:
replaceAll("min","admin")
and it works, but it's really tedious to code it manually for almost 2000 terms to be replaced.
Has anyone ever faced this kind of problem?
I really need help now, desperate :(
package sentimenrepo;
import javax.swing.*;
import java.sql.*;
import java.io.*;
//import java.util.HashMap;
import java.util.Scanner;
//import java.util.Map;
/**
*
* @author herman
*/
public class synonimReplaceV2 extends SwingWorker {
protected Object doInBackground() throws Exception {
new skripsisentimen.sentimenttwitter().setVisible(true);
Integer row = 0;
File synonimV2 = new File("synV2/catatan_kata_sinonim.txt");
String newTweet = "";
DB db = new DB();
Connection conn = db.dbConnect("jdbc:mysql://localhost:3306/tweet", "root", "");
try{
Statement select = conn.createStatement();
select.executeQuery("select * from synonimtweet");
ResultSet RS = select.getResultSet();
Scanner scSynV2 = new Scanner(synonimV2);
while(RS.next()){
row++;
String no = RS.getString("no");
String tweet = " "+ RS.getString("tweet");
String published = RS.getString("published");
String label = RS.getString("label");
clean2 cleanv2 = new clean2();
newTweet = cleanv2.cleanTweet(tweet);
try{
Statement insert = conn.createStatement();
insert.executeUpdate("INSERT INTO synonimtweet_v2(no,tweet,published,label) values('"
+no+"','"+newTweet+"','"+published+"','"+label+"')");
String current = skripsisentimen.sentimenttwitter.txtAreaResult.getText();
skripsisentimen.sentimenttwitter.txtAreaResult.setText(current+"\n"+row+"original : "+tweet+"\n"+newTweet+"\n______________________\n");
skripsisentimen.sentimenttwitter.lblStat.setText(row+" tweet read");
skripsisentimen.sentimenttwitter.txtAreaResult.setCaretPosition(skripsisentimen.sentimenttwitter.txtAreaResult.getText().length() - 1);
}catch(Exception e){
skripsisentimen.sentimenttwitter.lblStat.setText(e.getMessage());
}
skripsisentimen.sentimenttwitter.lblStat.setText(e.getMessage());
}
}catch(Exception e){
skripsisentimen.sentimenttwitter.lblStat.setText(e.getMessage());
}
return row;
}
class clean2{
public clean2(){}
public String cleanTweet(String tweet){
File synonimV2 = new File("synV2/catatan_kata_sinonim.txt");
String pattern = "";
String term = "";
String replacer = "";
String newTweet="";
try{
Scanner scSynV2 = new Scanner(synonimV2);
while(scSynV2.hasNext()){
pattern = scSynV2.next();
term = pattern.split("=")[0];
replacer = pattern.split("=")[1];
newTweet = tweet.replace(term, replacer);
}
}catch(Exception e){
e.printStackTrace();
}
System.out.println(newTweet+"\n"+tweet);
return newTweet;
}
}
}
Update
I've just realized that the code actually works, but only for the first row in the database; the second row and onwards stand still. Here is the newest code I've built:
public class synonimReplaceV2 extends SwingWorker {
protected Object doInBackground() throws Exception {
new skripsisentimen.sentimenttwitter().setVisible(true);
Integer row = 0;
String newTweet = "";
DB db = new DB();
Connection conn = db.dbConnect("jdbc:mysql://localhost:3306/tweet", "root", "");
try{
Statement select = conn.createStatement();
select.executeQuery("select * from synonimtweet limit 2,10");
ResultSet RS = select.getResultSet();
FileReader readSyn = new FileReader("synV2/catatan_kata_sinonim.txt");
BufferedReader buffSyn = new BufferedReader(readSyn);
while(RS.next()){
row++;
String no = RS.getString("no");
String tweet = " "+ RS.getString("tweet");
String published = RS.getString("published");
String label = RS.getString("label");
String pattern = "";
while((pattern=buffSyn.readLine())!=null){
String patternTerm = pattern.split("=")[0];
String patternSubs = pattern.split("=")[1];
tweet = tweet.replaceAll("\\s"+patternTerm, patternSubs);
}
try{
Statement insert = conn.createStatement();
insert.executeUpdate("INSERT INTO synonimtweet_v2(no,tweet,published,label) values('"
+no+"','"+tweet+"','"+published+"','"+label+"')");
String current = skripsisentimen.sentimenttwitter.txtAreaResult.getText();
skripsisentimen.sentimenttwitter.txtAreaResult.setText(current+"\n"+row+"original : "+tweet+"\n"+newTweet+"\n______________________\n");
skripsisentimen.sentimenttwitter.lblStat.setText(row+" tweet read");
skripsisentimen.sentimenttwitter.txtAreaResult.setCaretPosition(skripsisentimen.sentimenttwitter.txtAreaResult.getText().length() - 1);
}catch(Exception e){
skripsisentimen.sentimenttwitter.lblStat.setText(e.getMessage());
}
}
}catch(Exception e){
skripsisentimen.sentimenttwitter.lblStat.setText(e.getMessage());
// System.out.println(e.getMessage());
}
Thread.sleep(100);
return row;
}
}
Opening the synonym file and iterating over 2,000 lines for every row in your ResultSet is a bit wasteful.
Load your synonyms into an in-memory Map once, keyed by unique misspelt term, then do a lookup on the map for every row in your result set, and replace as necessary.
Let us use both solutions to build a single solution for you:
First, you create a HashMap with all your keys:
public static HashMap<String, String> getMap() {
//your version would read from the file
HashMap<String,String> myMap=new HashMap<String,String>();
myMap.put("min", "admin");
myMap.put("lelet", "lambat");
myMap.put("lemot", "lambat");
myMap.put("nii", "nih");
myMap.put("ntu", "itu");
return(myMap);
}
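If you want the map loaded from your catatan_kata_sinonim.txt file instead of hard-coded entries, a sketch might look like this (it assumes one term=replacement pair per line, as in your example, and needs the java.io imports):
public static HashMap<String, String> getMapFromFile(String path) throws IOException {
    HashMap<String, String> myMap = new HashMap<String, String>();
    BufferedReader reader = new BufferedReader(new FileReader(path));
    String line;
    while ((line = reader.readLine()) != null) {
        // expect lines of the form "term=replacement"; skip anything else
        String[] pair = line.split("=");
        if (pair.length == 2) {
            myMap.put(pair[0].trim(), pair[1].trim());
        }
    }
    reader.close();
    return myMap;
}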
Second, you create a pattern that contains all the keys in your hashmap:
public static String getPattern(HashMap<String,String> mapReplacement) {
String pattern="";
for (String s : mapReplacement.keySet()) {
if (!pattern.isEmpty()) {
pattern=pattern+"|";
}
pattern=pattern+s;
}
return(pattern);
}
Next, you can create a cleanTweet method that uses both structures you created:
public static String cleanTweet(String tweet, Pattern pattern,HashMap<String, String> myMap) {
String newTweet=tweet;
Matcher matcher = pattern.matcher(newTweet);
int start=0;
while (matcher.find()) {
String key=matcher.group();
String replacement=myMap.get(key);
if (replacement!=null) {
newTweet=newTweet.replace(key, replacement );
}
}
return(newTweet);
}
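Finally, in your doInBackground loop you would build the map and the pattern once, and then call cleanTweet for every row, roughly like this (a sketch, untested; it needs the java.util.regex.Pattern and Matcher imports):
// build the replacement map and the combined pattern once, before the ResultSet loop
HashMap<String, String> myMap = getMap();
Pattern pattern = Pattern.compile(getPattern(myMap));
while (RS.next()) {
    String tweet = " " + RS.getString("tweet");
    // apply every replacement in one pass instead of re-reading the synonym file per row
    String newTweet = cleanTweet(tweet, pattern, myMap);
    // ... insert newTweet into synonimtweet_v2 as before ...
}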
This might require some tweaking to perfect (I only tested a few cases), but the point is that you iterate a single time over your keys and then iterate only over your tweets.
I hope it helps.
I didn't try it, but it seems to me that you've almost got it - just replace this line:
newTweet = tweet.replace(term, replacer);
with this:
tweet = tweet.replaceAll(term, replacer);
As you're not using newTweet any more, return tweet:
return tweet;
You should also delete the newTweet declaration.
Also, you shouldn't use a Scanner to read lines. Use a FileReader instead.
Thanks folks,
I've found the answer to why the code was not working:
the txt file containing the terms and their substitutes should be re-initialized each time the program reads a row from the database.
The code would be like this:
public class synonimReplaceV2 extends SwingWorker {
protected Object doInBackground() throws Exception {
new skripsisentimen.sentimenttwitter().setVisible(true);
Integer row = 0;
String newTweet = "";
DB db = new DB();
Connection conn = db.dbConnect("jdbc:mysql://localhost:3306/tweet", "root", "");
try{
Statement select = conn.createStatement();
select.executeQuery("select * from synonimtweet limit 2,10");
ResultSet RS = select.getResultSet();
while(RS.next()){
row++;
FileReader readSyn = new FileReader("synV2/catatan_kata_sinonim.txt");
BufferedReader buffSyn = new BufferedReader(readSyn);
String no = RS.getString("no");
String tweet = " "+ RS.getString("tweet");
String published = RS.getString("published");
String label = RS.getString("label");
String pattern = "";
while((pattern=buffSyn.readLine())!=null){
String patternTerm = pattern.split("=")[0];
String patternSubs = pattern.split("=")[1];
tweet = tweet.replaceAll("\\s"+patternTerm, patternSubs);
}
try{
Statement insert = conn.createStatement();
insert.executeUpdate("INSERT INTO synonimtweet_v2(no,tweet,published,label) values('"
+no+"','"+tweet+"','"+published+"','"+label+"')");
String current = skripsisentimen.sentimenttwitter.txtAreaResult.getText();
skripsisentimen.sentimenttwitter.txtAreaResult.setText(current+"\n"+row+"original : "+tweet+"\n"+newTweet+"\n______________________\n");
skripsisentimen.sentimenttwitter.lblStat.setText(row+" tweet read");
skripsisentimen.sentimenttwitter.txtAreaResult.setCaretPosition(skripsisentimen.sentimenttwitter.txtAreaResult.getText().length() - 1);
}catch(Exception e){
skripsisentimen.sentimenttwitter.lblStat.setText(e.getMessage());
}
}
}catch(Exception e){
skripsisentimen.sentimenttwitter.lblStat.setText(e.getMessage());
// System.out.println(e.getMessage());
}
Thread.sleep(100);
return row;
}
}
But I actually want to apply the code rlinden posted above, and I can't figure out how to call the cleanTweet function.