| Unnamed: 0 (int64, 0–2.05k) | func (string, 27–124k chars) | target (bool, 2 classes) | project (string, 39–117 chars) |
---|---|---|---|
0 | public interface AbbreviationService {
/**
* Gets the available abbreviations for a string. If no abbreviations
* are found, the returned abbreviations consist of one phrase per
* word in the original string, each with a single alternative: the
* word unchanged.
*
* @param s the string to abbreviate
* @return the available abbreviations
*/
Abbreviations getAbbreviations(String s);
} | false | tableViews_src_main_java_gov_nasa_arc_mct_abbreviation_AbbreviationService.java |
1 | public interface Abbreviations {
/**
* Gets the original string for which the available abbreviations have
* been calculated.
*
* @return the original string
*/
String getValue();
/**
* Gets the phrases into which the original string has been divided as
* possible abbreviations were found. The phrases, in order, comprise
* all words of the original string.
*
* @return a list of phrases that can be abbreviated
*/
List<String> getPhrases();
/**
* Gets the available abbreviations for a phrase. The list is always
* nonempty, since the first element is the phrase unchanged.
*
* @param phrase the phrase to abbreviate, which may be a single word
* @return a list of possible abbreviations for the phrase
*/
List<String> getAbbreviations(String phrase);
} | false | tableViews_src_main_java_gov_nasa_arc_mct_abbreviation_Abbreviations.java |
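A minimal usage sketch for the two interfaces above; it assumes an `AbbreviationService` instance is supplied from elsewhere (in MCT this arrives via OSGi), and the input string and outputs in the comments are illustrative:

```java
import java.util.List;

import gov.nasa.arc.mct.abbreviation.AbbreviationService;
import gov.nasa.arc.mct.abbreviation.Abbreviations;

public class AbbreviationUsageSketch {
    // "service" is assumed to be injected by the OSGi container.
    static void printAlternatives(AbbreviationService service) {
        Abbreviations abbreviations = service.getAbbreviations("Fiber Optic MDM System");
        // The phrases partition the original string, in order.
        for (String phrase : abbreviations.getPhrases()) {
            // The first element is always the phrase itself.
            List<String> alternatives = abbreviations.getAbbreviations(phrase);
            System.out.println(phrase + " -> " + alternatives);
        }
    }
}
```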
2 | public class AbbreviationServiceImpl implements AbbreviationService {
private static final Logger logger = LoggerFactory.getLogger(AbbreviationServiceImpl.class);
private static final String ABBREVIATIONS_FILE_PROPERTY = "abbreviations-file";
private AbbreviationsManager manager;
/**
* Activates the service implementation. A map of properties is
* used to configure the service.
*
* @param context the component context for this service
*/
public void activate(ComponentContext context) {
@SuppressWarnings("unchecked")
Dictionary<String,String> properties = context.getProperties();
// This property will always be present, according to OSGi 4.1 Compendium
// Specification section 112.6.
String componentName = properties.get("component.name");
String abbreviationsFilePath = properties.get(ABBREVIATIONS_FILE_PROPERTY);
Properties abbreviationsProperties = null;
if (abbreviationsFilePath == null) {
logger.warn("{}: no configuration value for {} - no abbreviations will be available.", componentName, ABBREVIATIONS_FILE_PROPERTY);
} else {
InputStream in = findFile(abbreviationsFilePath);
if (in == null) {
logger.warn("{}: abbreviations file <{}> not found - no abbreviations will be available.", componentName, abbreviationsFilePath);
} else {
try {
abbreviationsProperties = new Properties();
abbreviationsProperties.load(in);
} catch (IOException ex) {
logger.warn("{}: error loading abbreviations file <{}> - no abbreviations will be available.", componentName, abbreviationsFilePath);
abbreviationsProperties = null;
}
}
}
if (abbreviationsProperties == null) {
abbreviationsProperties = new Properties();
}
manager = new AbbreviationsManager(abbreviationsProperties);
}
/**
* Looks up a file given a path. The file is looked up first relative to the
* current directory. If not found, a matching resource within the bundle is
* tried. If neither method works, null is returned to indicate that the file
* could not be found.
*
* @param path a relative or absolute pathname, or a resource name from within the bundle
* @return an input stream for reading from the file, or null if the file could not be found
*/
InputStream findFile(String path) {
// 1. Try to find using the file path, which may be absolute or
// relative to the current directory.
File f = new File(path);
if (f.isFile() && f.canRead()) {
try {
return new FileInputStream(f);
} catch (Exception ex) {
// ignore, for now
}
}
// 2. Try to find a resource in the bundle. This return value may be null,
// if no resource is found matching the path.
return getClass().getResourceAsStream(path);
}
@Override
public Abbreviations getAbbreviations(String s) {
return manager.getAbbreviations(s);
}
} | false | tableViews_src_main_java_gov_nasa_arc_mct_abbreviation_impl_AbbreviationServiceImpl.java |
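Since `findFile` is package-private, a sketch of its two-step resolution order assumes placement in the same package; the paths are illustrative and mirror the test data provider below:

```java
import java.io.InputStream;

// Assumed to live in the same package as AbbreviationServiceImpl,
// since findFile is package-private.
public class FindFileSketch {
    public static void main(String[] args) {
        AbbreviationServiceImpl abbrev = new AbbreviationServiceImpl();
        // 1. A relative or absolute file path, resolved against the current directory.
        InputStream fromFile = abbrev.findFile("src/test/data/abbreviations.properties");
        // 2. A bundle/classpath resource, absolute ("/...") or relative to the class's package.
        InputStream fromResource = abbrev.findFile("/test-abbreviations.properties");
        System.out.println("file: " + (fromFile != null) + ", resource: " + (fromResource != null));
    }
}
```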
3 | public class AbbreviationServiceImplTest {
@Mock private ComponentContext context;
AbbreviationServiceImpl abbrev;
Dictionary<String, String> properties;
@BeforeMethod
public void init() {
MockitoAnnotations.initMocks(this);
abbrev = new AbbreviationServiceImpl();
properties = new Hashtable<String, String>();
properties.put("component.name", "AbbreviationService");
when(context.getProperties()).thenReturn(properties);
}
@Test
public void testActivateGoodFile() {
properties.put("abbreviations-file", "/default-abbreviations.properties");
abbrev.activate(context);
Abbreviations abbreviations = abbrev.getAbbreviations("Fiber Optic MDM System");
assertEquals(abbreviations.getPhrases().size(), 3);
assertEquals(abbreviations.getPhrases().get(0), "Fiber Optic");
assertEquals(abbreviations.getPhrases().get(1), "MDM");
assertEquals(abbreviations.getPhrases().get(2), "System");
}
@Test
public void testActivateNoFileProperty() {
abbrev.activate(context);
Abbreviations abbreviations = abbrev.getAbbreviations("Fiber Optic MDM System");
assertEquals(abbreviations.getPhrases().size(), 4);
assertEquals(abbreviations.getPhrases().get(0), "Fiber");
assertEquals(abbreviations.getPhrases().get(1), "Optic");
assertEquals(abbreviations.getPhrases().get(2), "MDM");
assertEquals(abbreviations.getPhrases().get(3), "System");
}
@Test
public void testActivateNonexistentAbbreviationsFile() {
properties.put("abbreviations-file", "/file-does-not-exist.properties");
abbrev.activate(context);
Abbreviations abbreviations = abbrev.getAbbreviations("Fiber Optic MDM System");
assertEquals(abbreviations.getPhrases().size(), 4);
assertEquals(abbreviations.getPhrases().get(0), "Fiber");
assertEquals(abbreviations.getPhrases().get(1), "Optic");
assertEquals(abbreviations.getPhrases().get(2), "MDM");
assertEquals(abbreviations.getPhrases().get(3), "System");
}
@Test(dataProvider="findFileTests")
public void testFindFile(String path, String fileProperty) throws IOException {
InputStream in;
Properties p;
p = new Properties();
in = abbrev.findFile(path);
assertNotNull(in);
p.load(in);
assertEquals(p.getProperty("file"), fileProperty);
}
@DataProvider(name = "findFileTests")
public Object[][] getFindFileTests() {
return new Object[][] {
// A file path
{ "src/test/data/abbreviations.properties", "in file system" },
// A resource in the bundle using an absolute name
{ "/test-abbreviations.properties", "root of bundle" },
// A resource in the bundle using a relative name
{ "package-abbreviations.properties", "in bundle package" },
};
}
} | false | tableViews_src_test_java_gov_nasa_arc_mct_abbreviation_impl_AbbreviationServiceImplTest.java |
4 | public class AbbreviationsImpl implements Abbreviations {
private String value;
private List<String> phrases = new ArrayList<String>();
private Map<String, List<String>> abbreviations = new HashMap<String, List<String>>();
/**
* Create an abbreviation object.
* @param value original text
*/
protected AbbreviationsImpl(String value) {
this.value = value;
}
@Override
public List<String> getAbbreviations(String phrase) {
return abbreviations.get(phrase);
}
@Override
public List<String> getPhrases() {
return phrases;
}
@Override
public String getValue() {
return value;
}
/**
* Add a set of abbreviations to a phrase.
* @param phrase the phrase to be abbreviated
* @param alternatives the phrase's abbreviations
*/
protected void addPhrase(String phrase, List<String> alternatives) {
phrases.add(phrase);
abbreviations.put(phrase, alternatives);
}
} | false | tableViews_src_main_java_gov_nasa_arc_mct_abbreviation_impl_AbbreviationsImpl.java |
5 | public class AbbreviationsManager {
/** A regular expression used to separate alternative abbreviations. (\s == any whitespace) */
private static final Pattern ABBREVIATION_SEPARATOR = Pattern.compile("\\s*\\|\\s*");
/** A regular expression used to separate words. */
private static final Pattern WORD_SEPARATOR = Pattern.compile("\\s+");
private Map<String, List<String>> abbreviations = new HashMap<String, List<String>>();
/**
* Creates a new abbreviations manager configured with a set of abbreviation
* properties. Abbreviation properties are of the form:
* <pre>
* phrase = alt1 | alt2 | ...
* </pre>
* Whitespace around the "=" and "|" separators is removed. The phrase is
* converted to lower case, but the alternatives are used verbatim.
*
* @param abbreviationProperties the abbreviation properties
*/
public AbbreviationsManager(Properties abbreviationProperties) {
@SuppressWarnings("unchecked")
Enumeration<String> e = (Enumeration<String>) abbreviationProperties.propertyNames();
while (e.hasMoreElements()) {
String phrase = e.nextElement();
String lcPhrase = phrase.toLowerCase();
String[] alternatives = ABBREVIATION_SEPARATOR.split(abbreviationProperties.getProperty(phrase).trim());
List<String> abbreviationsForPhrase = new ArrayList<String>(Arrays.asList(alternatives));
Collections.sort(abbreviationsForPhrase, new Comparator<String>() {
@Override
public int compare(String o1, String o2) {
return o1.length() - o2.length();
}
});
abbreviations.put(lcPhrase, abbreviationsForPhrase);
}
}
/**
* Gets the alternative abbreviations for a phrase. The original phrase is always
* the first alternative returned. If no abbreviations are found for the phrase, returns
* a list with one element, the original phrase. The phrase is converted to lower case
* before looking up its alternatives.
*
* @param phrase the phrase to abbreviate
* @return a list of alternative abbreviations, with the original phrase as the first element
*/
public List<String> getAlternatives(String phrase) {
List<String> result = new ArrayList<String>();
result.add(phrase);
List<String> alternatives = abbreviations.get(phrase.toLowerCase());
if (alternatives != null) {
result.addAll(alternatives);
}
return result;
}
/**
* Finds the phrases within a string that can be abbreviated, and returns
* a structure with those phrases and the alternatives for each phrase.
* A phrase is a sequence of one or more words in the original string, where
* words are delimited by whitespace. At each point in the original string,
* the longest phrase for which there are abbreviations is found.
*
* @param s the string to find abbreviations for
* @return a structure describing the available abbreviations
*/
public Abbreviations getAbbreviations(String s) {
AbbreviationsImpl abbrev = new AbbreviationsImpl(s);
List<String> phrases = getPhrasesWithAbbreviations(s);
for (String phrase : phrases) {
abbrev.addPhrase(phrase, getAlternatives(phrase));
}
return abbrev;
}
/**
* Constructs a partition of a string into phrases, along word boundaries,
* where each phrase has one or more alternative abbreviations, and each
* phrase is the longest match against the abbreviations at that position
* in the original string.
*
* @param s the original string to partition into phrases
* @return a list of phrases
*/
private List<String> getPhrasesWithAbbreviations(String s) {
int phraseStart = 0;
List<String> phrasesWithAbbreviations = new ArrayList<String>();
Matcher wordBoundary = WORD_SEPARATOR.matcher(s);
while (phraseStart < s.length()) {
int phraseLength = getLongestPhraseLength(s.substring(phraseStart));
phrasesWithAbbreviations.add(s.substring(phraseStart, phraseStart + phraseLength));
if (wordBoundary.find(phraseStart + phraseLength)) {
phraseStart = wordBoundary.end();
} else {
phraseStart = s.length();
}
}
return phrasesWithAbbreviations;
}
/**
* Finds the longest phrase within a string that has abbreviations. The first word
* is always a possibility, even if no alternatives exist for that word.
*
* @param s the string for which to find the longest phrase with alternatives
* @return the length of the longest phrase with alternative abbreviations
*/
private int getLongestPhraseLength(String s) {
// If the entire string matches, then it is obviously the longest matching phrase.
if (abbreviations.containsKey(s.toLowerCase())) {
return s.length();
}
Matcher wordBoundary = WORD_SEPARATOR.matcher(s);
if (!wordBoundary.find()) {
// No word boundaries found. Entire string is only possible phrase.
return s.length();
}
// First word is always an abbreviation candidate, perhaps with no
// alternatives but itself.
int longestMatchLength = wordBoundary.start();
while (wordBoundary.find()) {
if (abbreviations.containsKey(s.substring(0, wordBoundary.start()).toLowerCase())) {
longestMatchLength = wordBoundary.start();
}
}
return longestMatchLength;
}
} | false | tableViews_src_main_java_gov_nasa_arc_mct_abbreviation_impl_AbbreviationsManager.java |
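A sketch of the longest-match partitioning, using the same abbreviation properties as the tests below (`fiber optic = F/O`, `system = Sys`); expected outputs are shown in comments:

```java
import java.util.Properties;

import gov.nasa.arc.mct.abbreviation.Abbreviations;
import gov.nasa.arc.mct.abbreviation.impl.AbbreviationsManager;

public class PartitionSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("fiber optic", "F/O");
        props.setProperty("system", "Sys");
        AbbreviationsManager manager = new AbbreviationsManager(props);
        Abbreviations abbrev = manager.getAbbreviations("Fiber Optic MDM System");
        // Longest match at each position wins: [Fiber Optic, MDM, System]
        System.out.println(abbrev.getPhrases());
        // Original phrase first, then its alternatives: [Fiber Optic, F/O]
        System.out.println(abbrev.getAbbreviations("Fiber Optic"));
    }
}
```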
6 | Collections.sort(abbreviationsForPhrase, new Comparator<String>() {
@Override
public int compare(String o1, String o2) {
return o1.length() - o2.length();
}
}); | false | tableViews_src_main_java_gov_nasa_arc_mct_abbreviation_impl_AbbreviationsManager.java |
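Because `Collections.sort` is stable, alternatives of equal length keep the left-to-right order in which they were specified in the properties file; a hypothetical property illustrates the resulting order:

```java
// Given: Amperes = Amps | A | aa | bbbb | a | aaaa
// After the stable sort by length: [A, a, aa, Amps, bbbb, aaaa]
// getAlternatives("amperes") then prepends the query phrase:
// [amperes, A, a, aa, Amps, bbbb, aaaa]
```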
7 | public class AbbreviationsManagerTest {
private Properties defaultProperties;
@BeforeMethod
public void init() {
defaultProperties = new Properties();
defaultProperties.setProperty("fiber optic", "F/O");
defaultProperties.setProperty("system", "Sys");
}
@Test
public void testGetAlternatives() {
AbbreviationsManager manager;
Properties props;
List<String> alternatives;
props = new Properties();
manager = new AbbreviationsManager(props);
alternatives = manager.getAlternatives("volts");
assertEquals(alternatives.size(), 1);
assertEquals(alternatives.get(0), "volts");
props = new Properties();
props.setProperty("Volts", "V"); // Note that lookup should be case insensitive.
manager = new AbbreviationsManager(props);
alternatives = manager.getAlternatives("volts");
assertEquals(alternatives.size(), 2);
assertEquals(alternatives.get(0), "volts"); // Matches the case of getAbbreviations() argument.
assertEquals(alternatives.get(1), "V");
props = new Properties();
props.setProperty("Amperes", "Amps | A | aa | bbbb | a | aaaa ");
manager = new AbbreviationsManager(props);
alternatives = manager.getAlternatives("amperes");
assertEquals(alternatives.size(), 7);
assertEquals(alternatives.get(0), "amperes"); // Must match in case to getAbbreviations() argument.
assertEquals(alternatives.get(1), "A");
assertEquals(alternatives.get(2), "a");
assertEquals(alternatives.get(3), "aa");
assertEquals(alternatives.get(4), "Amps"); // same length items are in left to right specified order
assertEquals(alternatives.get(5), "bbbb");
assertEquals(alternatives.get(6), "aaaa");
}
@Test(dataProvider="getAbbreviationsTests")
public void testGetAbbreviations(String s, String[] expectedPhrases) {
AbbreviationsManager manager = new AbbreviationsManager(defaultProperties);
Abbreviations abbrev = manager.getAbbreviations(s);
assertEquals(abbrev.getValue(), s);
assertEquals(abbrev.getPhrases().size(), expectedPhrases.length);
for (int i=0; i<abbrev.getPhrases().size(); ++i) {
String phrase = abbrev.getPhrases().get(i);
assertEquals(phrase, expectedPhrases[i]);
List<String> alternatives = abbrev.getAbbreviations(phrase);
List<String> expectedAlternatives = manager.getAlternatives(abbrev.getPhrases().get(i));
assertTrue(alternatives.size() >= 1);
assertEquals(alternatives.size(), expectedAlternatives.size());
assertEquals(alternatives.get(0), abbrev.getPhrases().get(i));
}
}
@DataProvider(name="getAbbreviationsTests")
private Object[][] getGetAbbreviationsTests() {
return new Object[][] {
{ "System", new String[] { "System" } }, // One word in abbreviations map
{ "MDM", new String[] { "MDM" } }, // One word not in abbreviations map
{ "Fiber Optic", new String[] { "Fiber Optic" } }, // Exact phrase in abbreviations map
// Some longer tests.
{ "Junk1 Junk2 Junk3", new String[] { "Junk1", "Junk2", "Junk3" } }, // No matches
{ "Fiber Optic MDM System", new String[] { "Fiber Optic", "MDM", "System" } },
};
}
} | false | tableViews_src_test_java_gov_nasa_arc_mct_abbreviation_impl_AbbreviationsManagerTest.java |
8 | public class LabelAbbreviationsTest {
@Test
public void getAbbreviation() throws SecurityException, NoSuchFieldException, IllegalArgumentException, IllegalAccessException {
AbbreviationsImpl availableAbbreviations = new AbbreviationsImpl("value");
availableAbbreviations.addPhrase("Amps", Collections.singletonList("A"));
availableAbbreviations.addPhrase("BCA1", Collections.<String>emptyList());
availableAbbreviations.addPhrase("Ch1", Collections.<String>emptyList());
availableAbbreviations.addPhrase("Serial", Collections.<String>emptyList());
AbbreviationSettings aSettings = new AbbreviationSettings("fullLabel", availableAbbreviations, new LabelAbbreviations());
String abbreviatedLabel = aSettings.getAbbreviatedLabel();
Assert.assertEquals(abbreviatedLabel, "Amps BCA1 Ch1 Serial");
LabelAbbreviations available2 = aSettings.getAbbreviations();
Assert.assertEquals(available2.getAbbreviation("BCA1"), "BCA1");
Assert.assertEquals(available2.getAbbreviation("Amps"), "Amps");
// Change the state of the control panel via currentAbbreviations
LabelAbbreviations currentAbbreviations = new LabelAbbreviations();
currentAbbreviations.addAbbreviation("Amps", "A | a | Amp");
currentAbbreviations.addAbbreviation("BCA1", "B | bca1");
currentAbbreviations.addAbbreviation("CAT", "C");
currentAbbreviations.addAbbreviation("DOG", "D");
currentAbbreviations.addAbbreviation("Ace", "ace");
currentAbbreviations.addAbbreviation("Abb", "a");
currentAbbreviations.addAbbreviation("Rabbit", "R");
AbbreviationSettings a2Settings = new AbbreviationSettings("fullLabel", availableAbbreviations, currentAbbreviations);
LabelAbbreviations available2afterSelect = a2Settings.getAbbreviations();
Assert.assertEquals(available2afterSelect.getAbbreviation("BCA1"), "B | bca1");
Assert.assertEquals(available2afterSelect.getAbbreviation("Amps"), "A | a | Amp");
Map<String, String> map = getAbbreviations(currentAbbreviations);
Assert.assertEquals(map.size(), 7);
}
private Map<String, String> getAbbreviations(
LabelAbbreviations currentAbbreviations) throws SecurityException, NoSuchFieldException, IllegalArgumentException, IllegalAccessException {
Field f = currentAbbreviations.getClass().getDeclaredField("abbreviations"); //NoSuchFieldException
f.setAccessible(true);
@SuppressWarnings("unchecked")
Map<String, String> map = (HashMap<String,String>) f.get(currentAbbreviations); //IllegalAccessException
return map;
}
} | false | tableViews_src_test_java_gov_nasa_arc_mct_abbreviation_impl_LabelAbbreviationsTest.java |
9 | public class BufferFullException extends Exception {
private static final long serialVersionUID = 2028815233703151762L;
/**
* Default constructor.
*/
public BufferFullException() {
super();
}
/**
* Overloaded constructor with single message argument.
* @param msg - Message to display buffer full exception.
*/
public BufferFullException(String msg) {
super(msg);
}
} | false | mctcore_src_main_java_gov_nasa_arc_mct_api_feed_BufferFullException.java |
10 | public interface DataArchive extends FeedDataArchive {
/**
* Return the level of service of this data archive.
*
* @return the level of service of this data archive
*/
public LOS getLOS();
} | false | mctcore_src_main_java_gov_nasa_arc_mct_api_feed_DataArchive.java |
11 | public interface DataProvider extends FeedAggregator {
/**
* An enum which defines the various levels of service (LOS) a data provider can provide.
*
*/
public static enum LOS {
/** Fast enum. */
fast,
/** Medium enum. */
medium,
/** Slow enum.*/
slow
}
/**
* Returns a map of data for each feed. This allows the data to be queried in batch and improves performance.
* @param feedIDs to retrieve data for
* @param startTime the start time of the return data set.
* @param endTime the end time of the return data set.
* @param timeUnit the time unit of startTime and endTime parameters.
* @return map of data for the specified feeds. Each entry in the map has data
* with a timestamp that is >= startTime and <= endTime, ordered by time.
*/
public Map<String, SortedMap<Long, Map<String, String>>> getData(Set<String> feedIDs, long startTime, long endTime, TimeUnit timeUnit);
/**
* Check if a request can be fully serviced by a data provider.
* @param feedID feed that is requested
* @param startTime start time of the request
* @param timeUnit the time unit of startTime
* @return true if a request can be fully serviced by a data provider.
*/
public boolean isFullyWithinTimeSpan(String feedID, long startTime, TimeUnit timeUnit);
/**
* Return the level of service that a data retrieval provider can provide.
* @return the level of service.
*/
public LOS getLOS();
} | false | mctcore_src_main_java_gov_nasa_arc_mct_api_feed_DataProvider.java |
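A sketch of a batch query against a `DataProvider`; the provider instance and the feed ID `"feed-1"` are assumptions for illustration:

```java
import java.util.Collections;
import java.util.Map;
import java.util.SortedMap;
import java.util.concurrent.TimeUnit;

import gov.nasa.arc.mct.api.feed.DataProvider;

public class DataProviderSketch {
    static void query(DataProvider provider, long startTime, long endTime) {
        Map<String, SortedMap<Long, Map<String, String>>> data =
                provider.getData(Collections.singleton("feed-1"), startTime, endTime,
                        TimeUnit.MILLISECONDS);
        SortedMap<Long, Map<String, String>> feedData = data.get("feed-1");
        if (feedData != null && !feedData.isEmpty()) {
            // Timestamps fall within [startTime, endTime] and are ordered by time.
            System.out.println("earliest sample at " + feedData.firstKey());
        }
    }
}
```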
12 | public static enum LOS {
/** Fast enum. */
fast,
/** Medium enum. */
medium,
/** Slow enum.*/
slow
} | false | mctcore_src_main_java_gov_nasa_arc_mct_api_feed_DataProvider.java |
13 | public interface FeedAggregator {
/**
* Returns a map of data for each feed. This allows the data to be queried in batch and improves performance.
* @param feedIDs to retrieve data for
* @param timeUnit the time unit of startTime and endTime parameters.
* @param startTime the start time of the return data set.
* @param endTime the end time of the return data set.
* @return map of data for the specified feeds. Each entry in the map has data
* with a timestamp that is >= startTime and < endTime.
*/
public Map<String, List<Map<String, String>>> getData(Set<String> feedIDs, TimeUnit timeUnit, long startTime, long endTime);
} | false | mctcore_src_main_java_gov_nasa_arc_mct_api_feed_FeedAggregator.java |
14 | public interface FeedDataArchive {
/**
* This API puts data into the archive.
*
* @param feedID the ID of the feed whose data should be archived.
* @param timeUnit the time unit of the timestamp of each data record that is put into the
* archive.
* @param entries a map from timestamp to data record.
* @throws BufferFullException if the buffer is full.
*/
public void putData(String feedID, TimeUnit timeUnit, Map<Long, Map<String, String>> entries) throws BufferFullException;
/**
* This API puts a single data record into the archive.
* @param feedID the ID of the feed whose data should be archived.
* @param timeUnit the time unit of the timestamp of the data record.
* @param time the timestamp of the data record.
* @param value the data record to be saved in the archive for the given time.
* @throws BufferFullException if the buffer is full.
*/
public void putData(String feedID, TimeUnit timeUnit, long time, Map<String, String> value) throws BufferFullException;
/**
* This method accepts a batch of data and invokes the callback once all the data has
* been persisted.
* @param value a map from feed ID to a map from timestamp to data record.
* @param timeUnit the time unit of the timestamp of each data record that is put into the
* archive.
* @param callback the callback to execute when the data has been committed to the repository.
* @throws BufferFullException if the buffer is full.
*/
public void putData(Map<String,Map<Long, Map<String, String>>> value, TimeUnit timeUnit, Runnable callback) throws BufferFullException;
/**
* Reset the Feed Aggregator so that the content provided by the Feed Aggregator starts from the very beginning.
*/
public void reset();
} | false | mctcore_src_main_java_gov_nasa_arc_mct_api_feed_FeedDataArchive.java |
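A sketch of writing one record through `FeedDataArchive`; the feed ID and record content are illustrative:

```java
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import gov.nasa.arc.mct.api.feed.BufferFullException;
import gov.nasa.arc.mct.api.feed.FeedDataArchive;

public class ArchiveSketch {
    static void putOneRecord(FeedDataArchive archive) {
        Map<String, String> record = new HashMap<String, String>();
        record.put("value", "42"); // illustrative record content
        try {
            archive.putData("feed-1", TimeUnit.MILLISECONDS, System.currentTimeMillis(), record);
        } catch (BufferFullException e) {
            // The archive rejected the write; a caller might retry later or reset().
        }
    }
}
```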
15 | public class OptimisticLockException extends RuntimeException {
private static final long serialVersionUID = 1L;
/**
* Creates a new instance based on an existing exception.
* @param e root exception
*/
public OptimisticLockException(Exception e) {
super(e);
}
} | false | mctcore_src_main_java_gov_nasa_arc_mct_api_persistence_OptimisticLockException.java |
16 | public interface PersistenceService {
/**
* This method is invoked when starting a set of related persistence operations in the current thread. If the underlying
* persistence implementation is a database this will likely start a transaction. This method will
* generally only be used from code that operates outside the scope of an action, for example an action
* that does some processing in the background.
*/
void startRelatedOperations();
/**
* This method is invoked when completing a set of related persistence operations. This method must
* be invoked following {@link #startRelatedOperations()} and only a single time.
* @param save true if the operations should be saved, false if they should be discarded.
*/
void completeRelatedOperations(boolean save);
/**
* Checks if <code>tagId</code> is used on at least one component in the database.
* @param tagId tag ID
* @return true if there are components tagged with <code>tagId</code>; false, otherwise.
*/
boolean hasComponentsTaggedBy(String tagId);
/**
* Returns the component with the specified external key and component type.
* @param externalKey to use for search criteria
* @param componentType to use with external key
* @param <T> type of component
* @return instance of component with the given type or null if the component cannot be found.
*/
<T extends AbstractComponent> T getComponent(String externalKey, Class<T> componentType);
/**
* Returns the component with the specified external key and component type.
* @param externalKey to use for search criteria
* @param componentType to use with external key
* @return instance of component with the given type or null if the component cannot
* be found.
*/
AbstractComponent getComponent(String externalKey, String componentType);
} | false | mctcore_src_main_java_gov_nasa_arc_mct_api_persistence_PersistenceService.java |
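The start/complete contract above suggests a try/finally pattern; a sketch, where the work inside the try block is a placeholder:

```java
import gov.nasa.arc.mct.api.persistence.PersistenceService;

public class RelatedOperationsSketch {
    static void runBatch(PersistenceService persistence) {
        persistence.startRelatedOperations();
        boolean save = false;
        try {
            // ... perform the related persistence operations here ...
            save = true;
        } finally {
            // Must follow startRelatedOperations() and be invoked exactly once.
            persistence.completeRelatedOperations(save);
        }
    }
}
```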
17 | public interface DataBufferEnv {
public int getNumOfBufferPartitions();
public int getCurrentBufferPartition();
public long getBufferTime();
public long getBufferPartitionOverlap();
public int previousBufferPartition(int currentPartition);
public int getConcurrencyDegree();
public int getBufferWriteThreadPoolSize();
public void closeAndRestartEnvironment();
public void restartEnvironment(boolean isReadOnly);
public int nextBufferPartition();
public DataBufferEnv advanceBufferPartition();
public Object clone();
public Object cloneMetaBuffer();
public Properties getConfigProperties();
public LOS getLOS();
public void flush();
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_config_DataBufferEnv.java |
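The partition methods are implemented as modular ring arithmetic in each concrete environment below; a standalone sketch of that arithmetic:

```java
public class PartitionRingSketch {
    // Mirrors nextBufferPartition()/previousBufferPartition(int) in the implementations.
    static int next(int current, int partitions) {
        return (current + 1) % partitions;
    }
    static int previous(int current, int partitions) {
        return current == 0 ? partitions - 1 : current - 1;
    }
    public static void main(String[] args) {
        // With 4 partitions the indices form a ring: 0 -> 1 -> 2 -> 3 -> 0
        System.out.println(next(3, 4));     // 0
        System.out.println(previous(0, 4)); // 3
    }
}
```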
18 | public final class DiskBufferEnv implements DataBufferEnv, Cloneable {
private static final Logger LOGGER = LoggerFactory.getLogger(DiskBufferEnv.class);
private static final String META_DATABASE_PATH = "metaBuffer";
private static final String META_DATABASE_NAME = "meta";
private static enum STATE {
unInitialized, initializing, initialized;
}
private Environment dbufferEnv;
private STATE state = STATE.unInitialized;
private final Properties prop;
private volatile long bufferTimeMills;
private long evictorRecurrMills;
private File envHome;
private final int concurrency;
private final int bufferWriteThreadPoolSize;
private final int numOfBufferPartitions;
private final int currentBufferPartition;
private final long partitionOverlapMillis;
private final long metaRefreshMillis;
private TransactionConfig txnConfig;
private CursorConfig cursorConfig;
private DiskQuotaHelper diskQuotaHelper;
private static Properties loadDefaultPropertyFile() {
Properties prop = new Properties();
InputStream is = null;
try {
is = ClassLoader.getSystemResourceAsStream("properties/feed.properties");
prop.load(is);
} catch (Exception e) {
LOGGER.error("Cannot initialized DataBufferEnv properties", e);
} finally {
if (is != null) {
try {
is.close();
} catch (IOException ioe) {
// ignore exception
}
}
}
return prop;
}
public String getErrorMsg() {
return diskQuotaHelper.getErrorMsg();
}
public DiskBufferEnv(Properties prop) {
if (prop == null) {
prop = loadDefaultPropertyFile();
}
this.prop = prop;
this.currentBufferPartition = 0;
File bufferHome = new File(FilepathReplacer.substitute(getPropertyWithPrecedence(prop, "buffer.disk.loc")));
if (!bufferHome.exists()) {
bufferHome.mkdirs();
}
envHome = new File(bufferHome, META_DATABASE_PATH);
if (!envHome.exists()) {
envHome.mkdirs();
}
concurrency = Integer.parseInt(prop.getProperty("buffer.concurrency"));
evictorRecurrMills = Long.parseLong(prop.getProperty("buffer.evictor.recurrMills"));
bufferWriteThreadPoolSize = Integer.parseInt(prop.getProperty("buffer.write.threadPool.size"));
numOfBufferPartitions = Integer.parseInt(prop.getProperty("buffer.partitions"));
bufferTimeMills = Long.parseLong(prop.getProperty("buffer.time.millis"));
metaRefreshMillis = Long.parseLong(prop.getProperty("meta.buffer.refresh.millis"));
if (bufferTimeMills > numOfBufferPartitions) {
bufferTimeMills = bufferTimeMills / numOfBufferPartitions;
}
partitionOverlapMillis = Long.parseLong(prop.getProperty("buffer.partition.overlap.millis"));
diskQuotaHelper = new DiskQuotaHelper(prop, bufferHome);
this.state = STATE.initializing;
setup(false);
}
public DiskBufferEnv(Properties prop, int currentBufferPartition) {
if (prop == null) {
prop = loadDefaultPropertyFile();
}
this.prop = prop;
this.currentBufferPartition = currentBufferPartition;
File bufferHome = new File(FilepathReplacer.substitute(getPropertyWithPrecedence(prop, "buffer.disk.loc")));
if (!bufferHome.exists()) {
bufferHome.mkdirs();
}
envHome = new File(bufferHome, String.valueOf(currentBufferPartition));
if (!envHome.exists()) {
envHome.mkdirs();
}
concurrency = Integer.parseInt(prop.getProperty("buffer.concurrency"));
evictorRecurrMills = Long.parseLong(prop.getProperty("buffer.evictor.recurrMills"));
bufferWriteThreadPoolSize = Integer.parseInt(prop.getProperty("buffer.write.threadPool.size"));
numOfBufferPartitions = Integer.parseInt(prop.getProperty("buffer.partitions"));
bufferTimeMills = Long.parseLong(prop.getProperty("buffer.time.millis"));
bufferTimeMills = bufferTimeMills / numOfBufferPartitions;
partitionOverlapMillis = Long.parseLong(prop.getProperty("buffer.partition.overlap.millis"));
metaRefreshMillis = Long.parseLong(prop.getProperty("meta.buffer.refresh.millis"));
diskQuotaHelper = new DiskQuotaHelper(prop, bufferHome);
this.state = STATE.initializing;
setup(false);
}
private void setup(boolean readOnly) {
assertState(STATE.initializing);
// Instantiate an environment configuration object
EnvironmentConfig envConfig = new EnvironmentConfig();
envConfig.setSharedCache(true);
String cachePercent = prop.getProperty("bdb.cache.percent");
if (cachePercent != null) {
envConfig.setCachePercent(Integer.parseInt(cachePercent));
}
// Configure the environment for the read-only state as identified by
// the readOnly parameter on this method call.
envConfig.setReadOnly(readOnly);
// If the environment is opened for write, then we want to be able to
// create the environment if it does not exist.
envConfig.setAllowCreate(true);
envConfig.setConfigParam(EnvironmentConfig.CHECKPOINTER_BYTES_INTERVAL, "40000000");
envConfig.setTransactional(false);
envConfig.setDurability(Durability.COMMIT_NO_SYNC);
envConfig.setConfigParam(EnvironmentConfig.ENV_RUN_CLEANER, Boolean.FALSE.toString());
envConfig.setConfigParam(EnvironmentConfig.ENV_IS_LOCKING, Boolean.FALSE.toString());
setupConfig();
// Instantiate the Environment. This opens it and also possibly
// creates it.
try {
dbufferEnv = new Environment(envHome, envConfig);
state = STATE.initialized;
} catch (DatabaseException de) {
LOGGER.error("DatabaseException in setup", de);
state = STATE.unInitialized;
}
}
private void setupConfig() {
txnConfig = new TransactionConfig();
txnConfig.setReadUncommitted(true);
txnConfig.setDurability(Durability.COMMIT_NO_SYNC);
cursorConfig = new CursorConfig();
cursorConfig.setReadUncommitted(true);
}
private String getPropertyWithPrecedence(Properties localProps, String key) {
String systemProp = System.getProperty(key);
return systemProp != null ? systemProp.trim() : localProps.getProperty(key, "unset").trim();
}
public Database openMetaDiskStore() throws DatabaseException {
assertState(STATE.initialized);
DatabaseConfig dbConfig = new DatabaseConfig();
dbConfig.setAllowCreate(true);
dbConfig.setSortedDuplicates(false);
dbConfig.setTransactional(false);
Database diskStore = dbufferEnv.openDatabase(null, META_DATABASE_NAME, dbConfig);
return diskStore;
}
public Database openDiskStore(String dbName, SecondaryKeyCreator... keyCreators) throws DatabaseException {
assertState(STATE.initialized);
DatabaseConfig dbConfig = new DatabaseConfig();
dbConfig.setAllowCreate(true);
dbConfig.setSortedDuplicates(false);
dbConfig.setTransactional(false);
Database diskStore = dbufferEnv.openDatabase(null, dbName, dbConfig);
int i=0;
for (SecondaryKeyCreator keyCreator : keyCreators) {
SecondaryConfig secDbConfig = new SecondaryConfig();
secDbConfig.setKeyCreator(keyCreator);
secDbConfig.setAllowCreate(true);
secDbConfig.setSortedDuplicates(true);
secDbConfig.setTransactional(false);
// Perform the actual open
String secDbName = dbName + i;
dbufferEnv.openSecondaryDatabase(null, secDbName, diskStore, secDbConfig);
i++;
}
return diskStore;
}
public boolean isDiskBufferFull() {
return diskQuotaHelper.isDiskBufferFull();
}
public Transaction beginTransaction() throws DatabaseException {
assertState(STATE.initialized);
TransactionConfig txnConfig = new TransactionConfig();
txnConfig.setReadUncommitted(true);
return dbufferEnv.beginTransaction(null, txnConfig);
}
public SecondaryCursor openSecondaryCursor(Transaction txn, Database database, int index) throws DatabaseException {
List<SecondaryDatabase> secDbs = database.getSecondaryDatabases();
assert secDbs.size() == 2;
SecondaryDatabase secDb = secDbs.get(index);
SecondaryCursor mySecCursor = secDb.openCursor(txn, cursorConfig);
return mySecCursor;
}
public void removeEnvironment() throws DatabaseException {
dbufferEnv.cleanLog();
dbufferEnv.close();
deleteDatabaseFile(currentBufferPartition);
this.state = STATE.unInitialized;
}
public void closeEnvironment() throws DatabaseException {
dbufferEnv.cleanLog();
dbufferEnv.close();
this.state = STATE.unInitialized;
}
public void removeAndCloseAllDiskStores() throws DatabaseException {
List<String> dbNames = dbufferEnv.getDatabaseNames();
for (String dbName : dbNames) {
try {
dbufferEnv.removeDatabase(null, dbName);
} catch (DatabaseException de) {
continue;
}
}
closeEnvironment();
}
public void closeDatabase(Database database) throws DatabaseException {
if (database == null) { return; }
List<SecondaryDatabase> secDbs = database.getSecondaryDatabases();
for (Database secDb : secDbs) {
secDb.close();
}
database.close();
}
public void closeAndRestartEnvironment() throws DatabaseException {
boolean isReadOnly = dbufferEnv.getConfig().getReadOnly();
removeAndCloseAllDiskStores();
restartEnvironment(isReadOnly);
}
public void restartEnvironment(boolean isReadOnly) throws DatabaseException {
state = STATE.initializing;
setup(isReadOnly);
}
public int getConcurrencyDegree() {
return concurrency;
}
public int getBufferWriteThreadPoolSize() {
return bufferWriteThreadPoolSize;
}
public long getBufferTime() {
return bufferTimeMills;
}
public long getEvictorRecurr() {
return evictorRecurrMills;
}
public int getNumOfBufferPartitions() {
return numOfBufferPartitions;
}
public void setBufferTime(long bufferTimeMills) {
this.bufferTimeMills = bufferTimeMills;
}
public long getBufferPartitionOverlap() {
return partitionOverlapMillis;
}
public int getCurrentBufferPartition() {
return currentBufferPartition;
}
public DataBufferEnv advanceBufferPartition() {
int nextBufferPartition = nextBufferPartition();
deleteDatabaseFile(nextBufferPartition);
DiskBufferEnv newBufferEnv = new DiskBufferEnv(prop, (this.currentBufferPartition + 1) % numOfBufferPartitions);
return newBufferEnv;
}
private void deleteDatabaseFile(int partitionNo) {
File parentDir = this.envHome.getParentFile();
File nextBufferPartitionDir = new File(parentDir, String.valueOf(partitionNo));
if (nextBufferPartitionDir.exists()) {
if (nextBufferPartitionDir.isDirectory()) {
File[] files = nextBufferPartitionDir.listFiles();
for (File f: files) {
f.delete();
}
}
nextBufferPartitionDir.delete();
}
}
public int nextBufferPartition() {
return (this.currentBufferPartition+1)%numOfBufferPartitions;
}
public int previousBufferPartition(int currentPartition) {
int i = currentPartition;
if (i == 0) {
i = this.numOfBufferPartitions-1;
} else {
i--;
}
return i;
}
public long getMetaRefresh() {
return this.metaRefreshMillis;
}
@Override
public Object clone() {
return new DiskBufferEnv(prop, 0);
}
@Override
public Object cloneMetaBuffer() {
return new DiskBufferEnv(prop);
}
private void assertState(STATE expectedState) {
assert this.state == expectedState;
}
@Override
public Properties getConfigProperties() {
return this.prop;
}
public void flush() {
this.dbufferEnv.sync();
}
@Override
public LOS getLOS() {
return LOS.medium;
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_config_DiskBufferEnv.java |
19 | private static enum STATE {
unInitialized, initializing, initialized;
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_config_DiskBufferEnv.java |
20 | class DiskQuotaHelper {
private static final Logger LOGGER = LoggerFactory.getLogger(DiskQuotaHelper.class);
private double usableSpaceAvailableInPercentage = 0;
private double freeSpaceAvailableInPercentage = 0;
private double totalSpaceInMB = 0;
private double freeSpaceInMB = 0;
private double usableSpaceInMB = 0;
private int bufferMinDiskSpaceAvailableInMB = 10;
private int bufferMinDiskSpaceAvailableInPercentage = 1;
public String DISK_SPACE_PERCENTAGE_ERROR_MSG = "bufferMinDiskSpaceAvailableInMB = "
+ bufferMinDiskSpaceAvailableInMB + " bufferMinDiskSpaceAvailableInPercentage= " + bufferMinDiskSpaceAvailableInPercentage + "%";
public DiskQuotaHelper(Properties prop, File bufferHome) {
bufferMinDiskSpaceAvailableInMB = Integer.parseInt(prop.getProperty("buffer.min.disk.space.megabytes"));
bufferMinDiskSpaceAvailableInPercentage = Integer.parseInt(prop.getProperty("buffer.min.percentage.disk.space"));
DISK_SPACE_PERCENTAGE_ERROR_MSG = "Disk space for MCT Buffer is <= "
+ bufferMinDiskSpaceAvailableInMB + " MB or Total free disk space available is <= " + bufferMinDiskSpaceAvailableInPercentage + "%";
printAvailableDiskSpace("bufferHome from properties", bufferHome);
}
private void printAvailableDiskSpace(String fileNameDesignation, File filePartition) {
// NOTE: usable disk space available to the JVM is never more than free disk space.
LOGGER.info("*** Disk Partition '" + fileNameDesignation + "' at path:"+ filePartition.getAbsolutePath()+" ***");
// Total disk space in bytes for the volume partition containing this abstract pathname.
long totalSpace = filePartition.getTotalSpace();
totalSpaceInMB = totalSpace /1024 /1024;
// An accurate estimate of the bytes free and available to this JVM on the
// volume. This may return the same result as getFreeSpace() on some platforms.
long usableSpace = filePartition.getUsableSpace();
usableSpaceInMB = usableSpace /1024 /1024;
// The total unallocated bytes on the volume.
long freeSpace = filePartition.getFreeSpace();
freeSpaceInMB = freeSpace /1024 /1024;
LOGGER.info("MCT property specifying Min Disk Space Available (in MB): " + bufferMinDiskSpaceAvailableInMB );
LOGGER.info("MCT property specifying Min Disk Space Available (in Percentage): " + bufferMinDiskSpaceAvailableInPercentage );
LOGGER.info("total Space In MB: " + totalSpaceInMB + " MB");
LOGGER.info("usable Space In MB: " + usableSpaceInMB + " MB");
LOGGER.info("free Space In MB: " + freeSpaceInMB + " MB");
if (totalSpaceInMB > 0) {
usableSpaceAvailableInPercentage = (usableSpaceInMB / totalSpaceInMB) * 100;
freeSpaceAvailableInPercentage = (freeSpaceInMB / totalSpaceInMB) * 100;
LOGGER.info("Calculated Usable Space Available (in Percentage): " + usableSpaceAvailableInPercentage + " %");
LOGGER.info("Calculated Free Space Available (in Percentage): " + freeSpaceAvailableInPercentage + " %");
} else {
LOGGER.info("filePartition.getTotalSpace() reported: " + totalSpace);
}
String m = String.format("The disc is full when: " +
"\n usableSpaceAvailableInPercentage (%.1f) <= bufferMinDiskSpaceAvailableInPercentage (%d), or \n " +
"usableSpaceInMB (%.1f) <= bufferMinDiskSpaceAvailableInMB (%d), or \n " +
"freeSpaceInMB (%.1f) <= bufferMinDiskSpaceAvailableInMB (%d) \n" +
"***",
usableSpaceAvailableInPercentage, bufferMinDiskSpaceAvailableInPercentage,
usableSpaceInMB, bufferMinDiskSpaceAvailableInMB,
freeSpaceInMB, bufferMinDiskSpaceAvailableInMB);
LOGGER.info(m);
}
public String getErrorMsg() {
return ("<HTML>" + DISK_SPACE_PERCENTAGE_ERROR_MSG
+ "<BR>Total Disk Space (in MB): " + totalSpaceInMB
+ "<BR>JVM Usable Disk Space Available (in MB): " + usableSpaceInMB
+ "<BR>System Free Disk Space Availble (in MB): " + freeSpaceInMB
+ "<BR>Percentage JVM Usable Disk Space Available: " + usableSpaceAvailableInPercentage
+ "%<BR>Percentage System Free Disk Space Available: " + freeSpaceAvailableInPercentage + "%</HTML>");
}
public boolean isDiskBufferFull() {
return (usableSpaceAvailableInPercentage <= bufferMinDiskSpaceAvailableInPercentage) ||
(usableSpaceInMB <= bufferMinDiskSpaceAvailableInMB) ||
(freeSpaceInMB <= bufferMinDiskSpaceAvailableInMB);
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_config_DiskQuotaHelper.java |
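A sketch of configuring and querying the quota helper; the class is package-private, so same-package placement is assumed, and the property values and directory are illustrative:

```java
import java.io.File;
import java.util.Properties;

// Assumed to live in the same package as the package-private DiskQuotaHelper.
public class QuotaSketch {
    public static void main(String[] args) {
        Properties prop = new Properties();
        prop.setProperty("buffer.min.disk.space.megabytes", "10");
        prop.setProperty("buffer.min.percentage.disk.space", "1");
        DiskQuotaHelper quota = new DiskQuotaHelper(prop, new File(System.getProperty("java.io.tmpdir")));
        if (quota.isDiskBufferFull()) {
            System.err.println(quota.getErrorMsg()); // HTML-formatted diagnostic
        }
    }
}
```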
21 | public class FastDiskBufferEnv implements DataBufferEnv, Cloneable {
private static final Logger LOGGER = LoggerFactory.getLogger(FastDiskBufferEnv.class);
private static final String META_DATABASE_PATH = "metaBuffer";
private static final String META_DATABASE_NAME = "meta";
private static enum STATE {
unInitialized, initializing, initialized;
}
private Environment dbufferEnv;
private STATE state = STATE.unInitialized;
private final Properties prop;
private volatile long bufferTimeMills;
private long evictorRecurrMills;
private File envHome;
private final int concurrency;
private final int bufferWriteThreadPoolSize;
private final int numOfBufferPartitions;
private final int currentBufferPartition;
private final long partitionOverlapMillis;
private final long metaRefreshMillis;
private TransactionConfig txnConfig;
private CursorConfig cursorConfig;
private List<EntityStore> openStores = new LinkedList<EntityStore>();
private DiskQuotaHelper diskQuotaHelper;
private static Properties loadDefaultPropertyFile() {
Properties prop = new Properties();
InputStream is = null;
try {
is = ClassLoader.getSystemResourceAsStream("properties/feed.properties");
prop.load(is);
} catch (Exception e) {
LOGGER.error("Cannot initialized DataBufferEnv properties", e);
} finally {
if (is != null) {
try {
is.close();
} catch (IOException ioe) {
// ignore exception
}
}
}
return prop;
}
public FastDiskBufferEnv(Properties prop) {
if (prop == null) {
prop = loadDefaultPropertyFile();
}
this.prop = prop;
this.currentBufferPartition = 0;
File bufferHome = new File(FilepathReplacer.substitute(getPropertyWithPrecedence(prop, "buffer.disk.loc")));
if (!bufferHome.exists()) {
bufferHome.mkdirs();
}
envHome = new File(bufferHome, META_DATABASE_PATH);
if (!envHome.exists()) {
envHome.mkdirs();
}
concurrency = Integer.parseInt(prop.getProperty("buffer.concurrency"));
evictorRecurrMills = Long.parseLong(prop.getProperty("buffer.evictor.recurrMills"));
bufferWriteThreadPoolSize = Integer.parseInt(prop.getProperty("buffer.write.threadPool.size"));
numOfBufferPartitions = Integer.parseInt(prop.getProperty("buffer.partitions"));
bufferTimeMills = Long.parseLong(prop.getProperty("buffer.time.millis"));
metaRefreshMillis = Long.parseLong(prop.getProperty("meta.buffer.refresh.millis"));
if (bufferTimeMills > numOfBufferPartitions) {
bufferTimeMills = bufferTimeMills / numOfBufferPartitions;
}
partitionOverlapMillis = Long.parseLong(prop.getProperty("buffer.partition.overlap.millis"));
diskQuotaHelper = new DiskQuotaHelper(prop, bufferHome);
this.state = STATE.initializing;
setup(false);
}
public FastDiskBufferEnv(Properties prop, int currentBufferPartition) {
if (prop == null) {
prop = loadDefaultPropertyFile();
}
this.prop = prop;
this.currentBufferPartition = currentBufferPartition;
File bufferHome = new File(FilepathReplacer.substitute(getPropertyWithPrecedence(prop, "buffer.disk.loc")));
if (!bufferHome.exists()) {
bufferHome.mkdirs();
}
envHome = new File(bufferHome, String.valueOf(currentBufferPartition));
if (!envHome.exists()) {
envHome.mkdirs();
}
concurrency = Integer.parseInt(prop.getProperty("buffer.concurrency"));
evictorRecurrMills = Long.parseLong(prop.getProperty("buffer.evictor.recurrMills"));
bufferWriteThreadPoolSize = Integer.parseInt(prop.getProperty("buffer.write.threadPool.size"));
numOfBufferPartitions = Integer.parseInt(prop.getProperty("buffer.partitions"));
bufferTimeMills = Long.parseLong(prop.getProperty("buffer.time.millis"));
bufferTimeMills = bufferTimeMills / numOfBufferPartitions;
partitionOverlapMillis = Long.parseLong(prop.getProperty("buffer.partition.overlap.millis"));
metaRefreshMillis = Long.parseLong(prop.getProperty("meta.buffer.refresh.millis"));
diskQuotaHelper = new DiskQuotaHelper(prop, bufferHome);
this.state = STATE.initializing;
setup(false);
}
private void setup(boolean readOnly) {
assertState(STATE.initializing);
// Instantiate an environment configuration object
EnvironmentConfig envConfig = new EnvironmentConfig();
envConfig.setSharedCache(true);
String cachePercent = prop.getProperty("bdb.cache.percent");
if (cachePercent != null) {
envConfig.setCachePercent(Integer.parseInt(cachePercent));
}
// Configure the environment for the read-only state as identified by
// the readOnly parameter on this method call.
envConfig.setReadOnly(readOnly);
// If the environment is opened for write, then we want to be able to
// create the environment if it does not exist.
envConfig.setAllowCreate(!readOnly);
envConfig.setConfigParam(EnvironmentConfig.CHECKPOINTER_BYTES_INTERVAL, "40000000");
envConfig.setTransactional(false);
envConfig.setDurability(Durability.COMMIT_NO_SYNC);
envConfig.setConfigParam(EnvironmentConfig.ENV_RUN_CLEANER, Boolean.FALSE.toString());
envConfig.setConfigParam(EnvironmentConfig.ENV_IS_LOCKING, Boolean.FALSE.toString());
setupConfig();
// Instantiate the Environment. This opens it and also possibly
// creates it.
try {
dbufferEnv = new Environment(envHome, envConfig);
state = STATE.initialized;
} catch (DatabaseException de) {
LOGGER.error("DatabaseException in setup", de);
state = STATE.unInitialized;
}
}
private void setupConfig() {
txnConfig = new TransactionConfig();
txnConfig.setReadUncommitted(true);
txnConfig.setDurability(Durability.COMMIT_NO_SYNC);
cursorConfig = new CursorConfig();
cursorConfig.setReadUncommitted(true);
}
public boolean isDiskBufferFull() {
return diskQuotaHelper.isDiskBufferFull();
}
public String getErrorMsg() {
return diskQuotaHelper.getErrorMsg();
}
private String getPropertyWithPrecedence(Properties localProps, String key) {
String systemProp = System.getProperty(key);
return systemProp != null ? systemProp.trim() : localProps.getProperty(key, "unset").trim();
}
public EntityStore openMetaDiskStore() throws DatabaseException {
assertState(STATE.initialized);
StoreConfig storeConfig = new StoreConfig();
storeConfig.setAllowCreate(true);
storeConfig.setDeferredWrite(true);
storeConfig.setTransactional(false);
ClassLoader originalClassloader = Thread.currentThread().getContextClassLoader();
try {
Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
return new EntityStore(dbufferEnv, META_DATABASE_NAME, storeConfig);
} finally {
Thread.currentThread().setContextClassLoader(originalClassloader);
}
}
public EntityStore openDiskStore(String dbName) throws DatabaseException {
assertState(STATE.initialized);
StoreConfig storeConfig = new StoreConfig();
storeConfig.setAllowCreate(true);
storeConfig.setDeferredWrite(true);
storeConfig.setTransactional(false);
EntityStore store = new EntityStore(dbufferEnv, dbName, storeConfig);
openStores.add(store);
return store;
}
public void removeEnvironment() throws DatabaseException {
dbufferEnv.cleanLog();
dbufferEnv.close();
deleteDatabaseFile(currentBufferPartition);
this.state = STATE.unInitialized;
}
public void closeEnvironment() throws DatabaseException {
dbufferEnv.cleanLog();
dbufferEnv.close();
this.state = STATE.unInitialized;
}
public void removeAndCloseAllDiskStores() throws DatabaseException {
for (EntityStore store: openStores) {
store.close();
}
openStores.clear();
removeEnvironment();
}
public void closeDatabase(EntityStore store) throws DatabaseException {
if (store == null) { return; }
store.close();
openStores.remove(store);
}
public void closeAndRestartEnvironment() throws DatabaseException {
boolean isReadOnly = dbufferEnv.getConfig().getReadOnly();
removeAndCloseAllDiskStores();
restartEnvironment(isReadOnly);
}
public void restartEnvironment(boolean isReadOnly) throws DatabaseException {
state = STATE.initializing;
setup(isReadOnly);
}
public int getConcurrencyDegree() {
return concurrency;
}
public int getBufferWriteThreadPoolSize() {
return bufferWriteThreadPoolSize;
}
public long getBufferTime() {
return bufferTimeMills;
}
public long getEvictorRecurr() {
return evictorRecurrMills;
}
public int getNumOfBufferPartitions() {
return numOfBufferPartitions;
}
public void setBufferTime(long bufferTimeMills) {
this.bufferTimeMills = bufferTimeMills;
}
public long getBufferPartitionOverlap() {
return partitionOverlapMillis;
}
public int getCurrentBufferPartition() {
return currentBufferPartition;
}
public DataBufferEnv advanceBufferPartition() {
int nextBufferPartition = nextBufferPartition();
deleteDatabaseFile(nextBufferPartition);
FastDiskBufferEnv newBufferEnv = new FastDiskBufferEnv(prop, (this.currentBufferPartition + 1) % numOfBufferPartitions);
return newBufferEnv;
}
private void deleteDatabaseFile(int partitionNo) {
File parentDir = this.envHome.getParentFile();
File nextBufferPartitionDir = new File(parentDir, String.valueOf(partitionNo));
if (nextBufferPartitionDir.exists()) {
if (nextBufferPartitionDir.isDirectory()) {
File[] files = nextBufferPartitionDir.listFiles();
for (File f: files) {
f.delete();
}
}
nextBufferPartitionDir.delete();
}
}
public int nextBufferPartition() {
return (this.currentBufferPartition+1)%numOfBufferPartitions;
}
public int previousBufferPartition(int currentPartition) {
int i = currentPartition;
if (i == 0) {
i = this.numOfBufferPartitions-1;
} else {
i--;
}
return i;
}
public long getMetaRefresh() {
return this.metaRefreshMillis;
}
@Override
public Object clone() {
return new FastDiskBufferEnv(prop, 0);
}
@Override
public Object cloneMetaBuffer() {
return new FastDiskBufferEnv(prop);
}
private void assertState(STATE expectedState) {
assert this.state == expectedState;
}
@Override
public Properties getConfigProperties() {
return this.prop;
}
public void flush() {
this.dbufferEnv.sync();
}
@Override
public LOS getLOS() {
return LOS.medium;
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_config_FastDiskBufferEnv.java |
22 | private static enum STATE {
unInitialized, initializing, initialized;
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_config_FastDiskBufferEnv.java |
23 | public class MemoryBufferEnv implements DataBufferEnv, Cloneable {
private static final Logger LOGGER = LoggerFactory.getLogger(MemoryBufferEnv.class);
private static Properties loadDefaultPropertyFile() {
Properties prop = new Properties();
InputStream is = null;
try {
is = ClassLoader.getSystemResourceAsStream("properties/feed.properties");
prop.load(is);
} catch (Exception e) {
LOGGER.error("Cannot initialized DataBufferEnv properties", e);
} finally {
if (is != null) {
try {
is.close();
} catch (IOException ioe) {
// ignore exception
}
}
}
return prop;
}
private final Properties prop;
private final long bufferTimeMillis;
private final int numOfBufferPartitions;
private int currentBufferPartition;
public MemoryBufferEnv(Properties prop) {
if (prop == null) {
prop = loadDefaultPropertyFile();
}
this.prop = prop;
numOfBufferPartitions = Integer.parseInt(prop.getProperty("memory.buffer.partition"));
long bufferTime = Long.parseLong(prop.getProperty("memory.buffer.time.millis"));
if (bufferTime > numOfBufferPartitions) {
bufferTimeMillis = bufferTime / numOfBufferPartitions;
} else {
bufferTimeMillis = bufferTime;
}
this.currentBufferPartition = 0;
}
public MemoryBufferEnv(Properties prop, int currentBufferPartition) {
if (prop == null) {
prop = loadDefaultPropertyFile();
}
this.prop = prop;
this.currentBufferPartition = currentBufferPartition;
numOfBufferPartitions = Integer.parseInt(prop.getProperty("memory.buffer.partition"));
bufferTimeMillis = Long.parseLong(prop.getProperty("memory.buffer.time.millis"));
}
@Override
public final long getBufferPartitionOverlap() {
return 0;
}
@Override
public long getBufferTime() {
return bufferTimeMillis;
}
@Override
public int getCurrentBufferPartition() {
return currentBufferPartition;
}
@Override
public int getNumOfBufferPartitions() {
return this.numOfBufferPartitions;
}
@Override
public int nextBufferPartition() {
return (this.currentBufferPartition+1)%numOfBufferPartitions;
}
@Override
public int previousBufferPartition(int currentPartition) {
int i = currentPartition;
if (i == 0) {
i = this.numOfBufferPartitions-1;
} else {
i--;
}
return i;
}
@Override
public DataBufferEnv advanceBufferPartition() {
int nextBufferPartition = nextBufferPartition();
MemoryBufferEnv newBufferEnv = new MemoryBufferEnv(prop, nextBufferPartition);
return newBufferEnv;
}
@Override
public void closeAndRestartEnvironment() {
this.currentBufferPartition = 0;
}
@Override
public int getBufferWriteThreadPoolSize() {
return 1;
}
@Override
public int getConcurrencyDegree() {
return 1;
}
@Override
public void restartEnvironment(boolean isReadOnly) {
this.currentBufferPartition = 0;
}
@Override
public Object clone() {
return new MemoryBufferEnv(prop);
}
@Override
public Object cloneMetaBuffer() {
return new MemoryBufferEnv(prop);
}
@Override
public Properties getConfigProperties() {
return this.prop;
}
@Override
public LOS getLOS() {
return LOS.fast;
}
@Override
public void flush() {
// No-op: the in-memory buffer has nothing to flush.
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_config_MemoryBufferEnv.java |
24 | public class NetworkBufferConstants {
public final static String HTTP_PROTOCOL = "http://";
public final static char DELIMITER = '/';
public final static char PORT_DELIMITER = ':';
public final static String GET_DATA_COMMAND = "requestData";
public final static String FEED_ID_PARAMETER = "feeds";
public final static String START_TIME_PARAMETER = "startTime";
public final static String END_TIME_PARAMETER = "endTime";
public final static char PARAMETER_DELIMITER = ',';
public final static String constructURL(String host, int port, String command) {
StringBuilder sb = new StringBuilder(HTTP_PROTOCOL);
sb.append(host);
sb.append(PORT_DELIMITER);
sb.append(port);
sb.append(DELIMITER);
sb.append(command);
sb.append(DELIMITER);
return sb.toString();
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_config_NetworkBufferConstants.java |
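For example, the builder yields a command URL ending in a trailing delimiter; the host and port here are illustrative:

```java
public class UrlSketch {
    public static void main(String[] args) {
        String url = NetworkBufferConstants.constructURL("buffer.example", 8080,
                NetworkBufferConstants.GET_DATA_COMMAND);
        // -> "http://buffer.example:8080/requestData/"
        // Query parameters such as FEED_ID_PARAMETER, START_TIME_PARAMETER and
        // END_TIME_PARAMETER are presumably appended by the caller.
        System.out.println(url);
    }
}
```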
25 | public class NetworkBufferEnv implements DataBufferEnv, Cloneable {
private static final Logger LOGGER = LoggerFactory.getLogger(NetworkBufferEnv.class);
private static Properties loadDefaultPropertyFile() {
Properties prop = new Properties();
InputStream is = null;
try {
is = ClassLoader.getSystemResourceAsStream("properties/feed.properties");
prop.load(is);
} catch (Exception e) {
LOGGER.error("Cannot initialized DataBufferEnv properties", e);
} finally {
if (is != null) {
try {
is.close();
} catch (IOException ioe) {
// ignore exception
}
}
}
return prop;
}
private final Properties prop;
private final int numOfBufferPartitions;
private int currentBufferPartition;
private final String networkBufferServerHost;
private final int networkBufferServerPort;
public NetworkBufferEnv(Properties prop) {
if (prop == null) {
prop = loadDefaultPropertyFile();
}
this.prop = prop;
numOfBufferPartitions = Integer.parseInt(prop.getProperty("network.buffer.partition"));
networkBufferServerHost = prop.getProperty("network.buffer.server.host");
networkBufferServerPort = Integer.parseInt(prop.getProperty("network.buffer.server.port"));
this.currentBufferPartition = 0;
}
public NetworkBufferEnv(Properties prop, int currentBufferPartition) {
if (prop == null) {
prop = loadDefaultPropertyFile();
}
this.prop = prop;
this.currentBufferPartition = currentBufferPartition;
numOfBufferPartitions = Integer.parseInt(prop.getProperty("network.buffer.partition"));
networkBufferServerHost = prop.getProperty("network.buffer.server.host");
networkBufferServerPort = Integer.parseInt(prop.getProperty("network.buffer.server.port"));
}
@Override
public final long getBufferPartitionOverlap() {
return 0;
}
@Override
public long getBufferTime() {
return -1;
}
@Override
public int getCurrentBufferPartition() {
return currentBufferPartition;
}
@Override
public int getNumOfBufferPartitions() {
return this.numOfBufferPartitions;
}
@Override
public int nextBufferPartition() {
return (this.currentBufferPartition+1)%numOfBufferPartitions;
}
@Override
public int previousBufferPartition(int currentPartition) {
int i = currentPartition;
if (i == 0) {
i = this.numOfBufferPartitions-1;
} else {
i--;
}
return i;
}
@Override
public DataBufferEnv advanceBufferPartition() {
int nextBufferPartition = nextBufferPartition();
NetworkBufferEnv newBufferEnv = new NetworkBufferEnv(prop, nextBufferPartition);
return newBufferEnv;
}
@Override
public void closeAndRestartEnvironment() {
this.currentBufferPartition = 0;
}
@Override
public int getBufferWriteThreadPoolSize() {
return 1;
}
@Override
public int getConcurrencyDegree() {
return 1;
}
@Override
public void restartEnvironment(boolean isReadOnly) {
this.currentBufferPartition = 0;
}
@Override
public Object clone() {
return new NetworkBufferEnv(prop);
}
@Override
public Object cloneMetaBuffer() {
return new NetworkBufferEnv(prop);
}
public String getNetworkBufferServerHost() {
return networkBufferServerHost;
}
public int getNetworkBufferServerPort() {
return networkBufferServerPort;
}
@Override
public Properties getConfigProperties() {
return this.prop;
}
@Override
public LOS getLOS() {
return LOS.medium;
}
@Override
public void flush() {
// no-op: this environment only describes a remote buffer, so there is nothing to flush
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_config_NetworkBufferEnv.java |
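NetworkBufferEnv pulls exactly three keys out of its Properties; if either numeric key is missing, Integer.parseInt throws, so the bundled feed.properties must define all three. A minimal sketch of configuring the environment programmatically (all values are placeholders):

import java.util.Properties;

public final class NetworkBufferEnvDemo {
    public static void main(String[] args) {
        Properties p = new Properties();
        p.setProperty("network.buffer.partition", "4");           // partition count
        p.setProperty("network.buffer.server.host", "localhost"); // placeholder host
        p.setProperty("network.buffer.server.port", "8080");      // placeholder port
        NetworkBufferEnv env = new NetworkBufferEnv(p);
        System.out.println(env.getNumOfBufferPartitions());   // 4
        System.out.println(env.getNetworkBufferServerHost()); // localhost
    }
}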
26 | final class DataValue {
private final Map<String, String> data;
DataValue(Map<String, String> data) {
this.data = data;
}
Map<String, String> getData() {
return data;
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_disk_internal_DataValue.java |
27 | class DataValueTupleBinding extends TupleBinding<DataValue> {
@Override
public void objectToEntry(DataValue dv, TupleOutput to) {
Map<String, String> data = dv.getData();
for (String key: data.keySet()) {
String value = data.get(key);
to.writeString(key);
to.writeString(value);
}
}
@Override
public DataValue entryToObject(TupleInput ti) {
Map<String, String> data = new HashMap<String, String>();
while (ti.available() > 0) {
String key = ti.readString();
String value = ti.readString();
data.put(key, value);
}
return new DataValue(data);
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_disk_internal_DataValueTupleBinding.java |
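DataValueTupleBinding flattens the map as alternating key/value strings and relies on TupleInput.available() to find the end. A round-trip sketch using the Berkeley DB JE tuple API (a sketch only; it assumes the demo lives in the same package, since DataValue and the binding are package-private):

import com.sleepycat.bind.tuple.TupleInput;
import com.sleepycat.bind.tuple.TupleOutput;
import java.util.HashMap;
import java.util.Map;

public class DataValueBindingDemo {
    public static void main(String[] args) {
        Map<String, String> sample = new HashMap<String, String>();
        sample.put("value", "42");
        sample.put("status", "ok");
        DataValueTupleBinding binding = new DataValueTupleBinding();
        TupleOutput out = new TupleOutput();
        binding.objectToEntry(new DataValue(sample), out);
        TupleInput in = new TupleInput(out.getBufferBytes(), 0, out.getBufferLength());
        DataValue restored = binding.entryToObject(in);
        System.out.println(restored.getData()); // same entries as 'sample'
    }
}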
28 | public class FastDiskDataBufferHelper implements DataBufferHelper {
public final PartitionDataBuffer newPartitionBuffer(int partitionNo) {
return new PartitionFastDiskBuffer(partitionNo);
}
public final PartitionDataBuffer newPartitionBuffer(DataBufferEnv env) {
assert env instanceof FastDiskBufferEnv;
return new PartitionFastDiskBuffer((FastDiskBufferEnv)env);
}
@Override
public MetaDataBuffer newMetaDataBuffer(DataBufferEnv env) {
if (env == null) {
return new MetaDiskBuffer();
}
assert env instanceof FastDiskBufferEnv;
return new MetaDiskBuffer((FastDiskBufferEnv)env);
}
@Override
public DataBufferEnv newMetaDataBufferEnv(Properties prop) {
return new FastDiskBufferEnv(prop);
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_disk_internal_FastDiskDataBufferHelper.java |
29 | public class FeedIDKeyCreator implements SecondaryKeyCreator {
private TupleBinding<?> keyBinding;
FeedIDKeyCreator(TupleBinding<?> keyBinding) {
this.keyBinding = keyBinding;
}
@Override
public boolean createSecondaryKey(SecondaryDatabase secDb, DatabaseEntry keyEntry, DatabaseEntry valueEntry,
DatabaseEntry resultEntry) throws DatabaseException {
KeyValue kv = KeyValue.class.cast(keyBinding.entryToObject(keyEntry));
String feedID = kv.getFeedID();
TupleOutput to = new TupleOutput();
to.writeString(feedID);
resultEntry.setData(to.getBufferBytes());
return true;
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_disk_internal_FeedIDKeyCreator.java |
30 | final class KeyValue {
private final String feedID;
private final long timestamp;
KeyValue(String feedID, long timestamp) {
this.feedID = feedID;
this.timestamp = timestamp;
}
String getFeedID() {
return feedID;
}
long getTimestamp() {
return timestamp;
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_disk_internal_KeyValue.java |
31 | class KeyValueTupleBinding extends TupleBinding<KeyValue> {
@Override
public void objectToEntry(KeyValue kv, TupleOutput to) {
to.writeLong(kv.getTimestamp());
to.writeString(kv.getFeedID());
}
@Override
public KeyValue entryToObject(TupleInput ti) {
long timestamp = ti.readLong();
String feedID = ti.readString();
return new KeyValue(feedID, timestamp);
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_disk_internal_KeyValueTupleBinding.java |
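Note the serialization order: timestamp first, then feedID. JE's tuple format is designed so that unsigned byte-by-byte comparison matches the natural order of the written fields, so keys produced by this binding sort primarily by timestamp; feed-based lookups go through the secondary index built by FeedIDKeyCreator instead. A small sketch of the effect (same package assumed; the ordering claim is in the comments):

import com.sleepycat.bind.tuple.TupleOutput;

public class KeyOrderDemo {
    public static void main(String[] args) {
        KeyValueTupleBinding binding = new KeyValueTupleBinding();
        TupleOutput a = new TupleOutput();
        binding.objectToEntry(new KeyValue("feedB", 100L), a);
        TupleOutput b = new TupleOutput();
        binding.objectToEntry(new KeyValue("feedA", 200L), b);
        // Under the default byte-wise key comparator, entry 'a' sorts before
        // entry 'b': the leading timestamp 100 < 200 decides, not the feed ID.
    }
}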
32 | public class MetaDiskBuffer extends MetaDataBuffer {
private static final Logger LOGGER = LoggerFactory.getLogger(MetaDiskBuffer.class);
private FastDiskBufferEnv metaEnv;
private EntityStore metaDatabase;
private final Timer updateTimer;
public MetaDiskBuffer() {
this(new FastDiskBufferEnv(null));
}
public MetaDiskBuffer(FastDiskBufferEnv env) {
super(env);
metaEnv = env;
metaDatabase = metaEnv.openMetaDiskStore();
loadAllPartitionsInformation();
long metaRefreshTime = metaEnv.getMetaRefresh();
if (metaRefreshTime == -1) {
updateTimer = null;
} else {
updateTimer = new Timer("Meta Data Buffer Update timer");
updateTimer.schedule(new TimerTask() {
@Override
public void run() {
for (int i = 0; i < partitionMetaDatas.length; i++) {
if (partitionMetaDatas[i] != null) {
writePartitionMetaData(i);
}
}
}
}, metaRefreshTime, metaRefreshTime);
}
}
private PrimaryIndex<Integer, PartitionMetaData> getMetaStoreIndex() {
ClassLoader originalClassloader = Thread.currentThread().getContextClassLoader();
try {
Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
return metaDatabase.getPrimaryIndex(Integer.class, PartitionMetaData.class);
} finally {
Thread.currentThread().setContextClassLoader(originalClassloader);
}
}
private void loadAllPartitionsInformation() {
try {
PrimaryIndex<Integer, PartitionMetaData> pi = getMetaStoreIndex();
if (pi.count() == 0) {
writeCurrentBufferPartition(0);
return;
}
EntityCursor<PartitionMetaData> piCursor = pi.entities();
try {
for (PartitionMetaData pObj : piCursor) {
partitionMetaDatas[pObj.getPartitionId()] = pObj;
if (pObj.isCurrentPartition()) {
this.currentPartition = pObj.getPartitionId();
}
}
} finally {
if (piCursor != null) {
piCursor.close();
}
}
} catch (Exception e) {
LOGGER.error("Exception in loadAllPartitionInformation", e);
}
}
public PartitionMetaData removePartitionMetaData(int bufferPartition) {
PartitionMetaData pObj = super.removePartitionMetaData(bufferPartition);
if (pObj == null) { return null; }
try {
getMetaStoreIndex().delete(pObj.getPartitionId());
} catch (Exception e) {
LOGGER.error("Exception in getData", e);
} finally {
metaEnv.flush();
LOGGER.info("Removing partition {} timestamp", bufferPartition);
}
return pObj;
}
public Set<String> resetPartitionMetaData(int bufferPartition) {
Set<String> rolloverFeedIDs = super.resetPartitionMetaData(bufferPartition);
PartitionMetaData pObj = getPartitionMetaData(bufferPartition);
if (pObj != null) {
try {
getMetaStoreIndex().putNoReturn(pObj);
} catch (Exception e) {
LOGGER.error("Exception in getData", e);
} finally {
metaEnv.flush();
LOGGER.info("Removing partition {} timestamp", bufferPartition);
}
}
return rolloverFeedIDs;
}
@Override
public void writePartitionMetaData(int bufferPartition) {
PartitionMetaData pObj = getPartitionMetaData(bufferPartition);
if (pObj == null) {
return;
}
try {
getMetaStoreIndex().putNoReturn(pObj);
} catch (Exception e) {
LOGGER.error("Exception in getData", e);
} finally {
metaEnv.flush();
LOGGER.debug("Putting start time and end time of partition {}", bufferPartition);
}
}
@Override
public void writeCurrentBufferPartition(int newCurrentBufferPartition) {
PartitionMetaData existingPartitionMetaData = getPartitionMetaData(this.currentPartition);
PartitionMetaData newPartitionMetaData = getPartitionMetaData(newCurrentBufferPartition);
if (existingPartitionMetaData != null) {
existingPartitionMetaData.setCurrentPartition(false);
}
if (newPartitionMetaData == null) {
newPartitionMetaData = new PartitionMetaData(newCurrentBufferPartition);
synchronized(this) {
this.partitionMetaDatas[newCurrentBufferPartition] = newPartitionMetaData;
}
}
newPartitionMetaData.setCurrentPartition(true);
boolean failed = false;
try {
if (existingPartitionMetaData != null) {
getMetaStoreIndex().putNoReturn(existingPartitionMetaData);
}
getMetaStoreIndex().putNoReturn(newPartitionMetaData);
} catch (Exception e) {
LOGGER.error("Exception in getData", e);
failed = true;
} finally {
if (!failed) {
metaEnv.flush();
this.currentPartition = newCurrentBufferPartition;
LOGGER.info("moving to partition {}", newCurrentBufferPartition);
}
}
}
public void close() {
metaEnv.closeDatabase(metaDatabase);
super.close();
}
public void closeDatabase() {
metaEnv.closeDatabase(metaDatabase);
super.closeDatabase();
}
public void restart() {
int numOfBufferPartitions = metaEnv.getNumOfBufferPartitions();
for (int i=0; i<numOfBufferPartitions; i++) {
removePartitionMetaData(i);
}
super.restart();
writeCurrentBufferPartition(0);
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_disk_internal_MetaDiskBuffer.java |
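getMetaStoreIndex wraps the index lookup in a context-classloader swap so the JE Direct Persistence Layer can resolve entity classes loaded by this OSGi bundle. The same pattern, pulled out as a reusable helper (hypothetical name; a sketch rather than project code):

import java.util.concurrent.Callable;

public final class WithContextClassLoader {
    // Runs 'action' with 'cl' installed as the thread context classloader,
    // always restoring the original loader afterwards.
    public static <T> T run(ClassLoader cl, Callable<T> action) throws Exception {
        Thread t = Thread.currentThread();
        ClassLoader original = t.getContextClassLoader();
        t.setContextClassLoader(cl);
        try {
            return action.call();
        } finally {
            t.setContextClassLoader(original);
        }
    }
}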
33 | updateTimer.schedule(new TimerTask() {
@Override
public void run() {
for (int i = 0; i < partitionMetaDatas.length; i++) {
if (partitionMetaDatas[i] != null) {
writePartitionMetaData(i);
}
}
}
}, metaRefreshTime, metaRefreshTime); | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_disk_internal_MetaDiskBuffer.java |
34 | class PartitionDataValueTupleBinding extends TupleBinding<PartitionTimestamps> {
@Override
public void objectToEntry(PartitionTimestamps dv, TupleOutput to) {
to.writeLong(dv.getStartTimestamp());
to.writeLong(dv.getEndTimestamp());
}
@Override
public PartitionTimestamps entryToObject(TupleInput ti) {
long startTimestamp;
long endTimestamp;
startTimestamp = ti.readLong();
endTimestamp = ti.readLong();
return new PartitionTimestamps(startTimestamp, endTimestamp);
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_disk_internal_PartitionDataValueTupleBinding.java |
35 | public class PartitionFastDiskBuffer implements PartitionDataBuffer {
private static final Logger LOGGER = LoggerFactory.getLogger(PartitionFastDiskBuffer.class);
private static final Logger READ_PERF_LOGGER = LoggerFactory
.getLogger("gov.nasa.arc.mct.performance.fastDisk.partitionbuffer.read");
private static final Logger WRITE_PERF_LOGGER = LoggerFactory
.getLogger("gov.nasa.arc.mct.performance.fastDisk.partitionbuffer.write");
private static final class TimeStampComparator implements Comparator<Long>, Serializable {
private static final long serialVersionUID = -665810351953536404L;
@Override
public int compare(Long o1, Long o2) {
return o1.compareTo(o2);
}
}
private static final Comparator<Long> TIMESTAMP_COMPARATOR = new TimeStampComparator();
private final EntityStore[] databases;
private final FastDiskBufferEnv env;
/**
* Mask value for indexing into segments. The upper bits of a key's hash
* code are used to choose the segment.
*/
private final int segmentMask;
/**
* Shift value for indexing within segments.
*/
private final int segmentShift;
private static final ThreadFactory tf = new ThreadFactory() {
@Override
public Thread newThread(Runnable r) {
Thread t = Executors.defaultThreadFactory().newThread(r);
t.setContextClassLoader(getClass().getClassLoader());
return t;
}
};
private static final ExecutorService writeThreads = new ThreadPoolExecutor(4, 4,
10L, TimeUnit.SECONDS,
new LinkedBlockingQueue<Runnable>(),
tf);
private static final ExecutorService readThreads = new ThreadPoolExecutor(0, 10,
10L, TimeUnit.SECONDS,
new LinkedBlockingQueue<Runnable>(),
tf);
private volatile boolean active;
public PartitionFastDiskBuffer(int partitionNumber) {
this(new FastDiskBufferEnv(null, partitionNumber));
}
public PartitionFastDiskBuffer(FastDiskBufferEnv env) {
this.env = env;
int concurrencyLevel = env.getConcurrencyDegree();
// Determine the degree of concurrency which is a power of 2 and closest
// to what the user has indicated. For instance, if user specifies a
// degree of concurrency of 5, the degree of concurrency we will be
// using will be 8. This will allow a fairer hashing. This algorithm is
// copied from java.util.concurrent.ConcurrentHashMap.
int sshift = 0;
int ssize = 1;
while (ssize < concurrencyLevel) {
++sshift;
ssize <<= 1;
}
segmentShift = 32 - sshift;
this.segmentMask = ssize - 1;
this.databases = new EntityStore[ssize];
setupDatabasePartition(env);
this.active = true;
}
private synchronized void setupDatabasePartition(FastDiskBufferEnv env) {
for (int i = 0; i < databases.length; i++) {
try {
this.databases[i] = env.openDiskStore(String.valueOf(i));
} catch (DatabaseException e) {
databases[i] = null;
}
}
}
@SuppressWarnings("unchecked")
private Set<String>[] groupInputFeeds(Set<String> feedIDs) {
Set<String>[] groupFeeds = new Set[databases.length];
for (int i = 0; i < groupFeeds.length; i++) {
groupFeeds[i] = new HashSet<String>();
}
for (String feedID : feedIDs) {
int segmentIndex = hash(feedID.hashCode());
groupFeeds[segmentIndex].add(feedID);
}
return groupFeeds;
}
@SuppressWarnings("unchecked")
public Map<String, SortedMap<Long, Map<String, String>>> getData(Set<String> feedIDs, final TimeUnit timeUnit,
final long startTime, final long endTime) {
final ElapsedTimer timer = new ElapsedTimer();
timer.startInterval();
final Set<String>[] groupFeeds = groupInputFeeds(feedIDs);
final Map<String, SortedMap<Long, Map<String, String>>>[] dataSlices = new Map[groupFeeds.length];
final CountDownLatch readLatch = new CountDownLatch(groupFeeds.length);
for (int i = 0; i < groupFeeds.length; i++) {
final int dataIndex = i;
Runnable r = new Runnable() {
@Override
public void run() {
try {
Map<String, SortedMap<Long, Map<String, String>>> dataSlice = getData(databases[dataIndex],
groupFeeds[dataIndex], timeUnit, startTime, endTime);
if (dataSlice != null) {
dataSlices[dataIndex] = dataSlice;
}
} finally {
readLatch.countDown();
}
}
};
readThreads.execute(r);
}
try {
readLatch.await();
} catch (InterruptedException e) {
LOGGER.warn("Internal error during getData thread", e);
}
Map<String, SortedMap<Long, Map<String, String>>> returnedData = new HashMap<String, SortedMap<Long, Map<String, String>>>();
for (int i = 0; i < dataSlices.length; i++) {
Map<String, SortedMap<Long, Map<String, String>>> dataSlice = dataSlices[i];
if (dataSlice != null) {
returnedData.putAll(dataSlice);
}
}
timer.stopInterval();
READ_PERF_LOGGER.debug("time to get 1 partition Data for {} feeds: {}", feedIDs.size(), timer
.getIntervalInMillis());
return returnedData;
}
@SuppressWarnings("unchecked")
@Override
public Map<String, SortedMap<Long, Map<String, String>>> getLastData(Set<String> feedIDs, final TimeUnit timeUnit, final long startTime, final long endTime) {
final ElapsedTimer timer = new ElapsedTimer();
timer.startInterval();
final Set<String>[] groupFeeds = groupInputFeeds(feedIDs);
final Map<String, SortedMap<Long, Map<String, String>>>[] dataSlices = new Map[groupFeeds.length];
final CountDownLatch latch = new CountDownLatch(groupFeeds.length);
for (int i = 0; i < groupFeeds.length; i++) {
final int dataIndex = i;
Runnable r = new Runnable() {
@Override
public void run() {
try {
Map<String, SortedMap<Long, Map<String, String>>> dataSlice = getLastData(databases[dataIndex],
groupFeeds[dataIndex], timeUnit, startTime, endTime);
if (dataSlice != null) {
dataSlices[dataIndex] = dataSlice;
}
} finally {
latch.countDown();
}
}
};
readThreads.execute(r);
}
try {
latch.await();
} catch (InterruptedException e) {
LOGGER.warn("Internal error during getLastData thread", e);
}
Map<String, SortedMap<Long, Map<String, String>>> returnedData = new HashMap<String, SortedMap<Long, Map<String, String>>>();
for (int i = 0; i < dataSlices.length; i++) {
Map<String, SortedMap<Long, Map<String, String>>> dataSlice = dataSlices[i];
if (dataSlice != null) {
returnedData.putAll(dataSlice);
}
}
timer.stopInterval();
READ_PERF_LOGGER.debug("time to get 1 partition last Data for {} feeds: {}", feedIDs.size(), timer
.getIntervalInMillis());
return returnedData;
}
private Map<String, SortedMap<Long, Map<String, String>>> getLastData(EntityStore db, Set<String> feedIDs, TimeUnit timeUnit, long startTime, long endTime) {
Map<String, SortedMap<Long, Map<String, String>>> returnedData = new HashMap<String, SortedMap<Long, Map<String, String>>>();
startTime = TimeUnit.NANOSECONDS.convert(startTime, timeUnit);
endTime = TimeUnit.NANOSECONDS.convert(endTime, timeUnit);
PersistentBufferObjectAccess pObjectAccess = new PersistentBufferObjectAccess(db);
PersistentBufferKey startKey = new PersistentBufferKey();
PersistentBufferKey endKey = new PersistentBufferKey();
for (String feedID : feedIDs) {
startKey.feedID = feedID;
startKey.timestamp = startTime;
endKey.feedID = feedID;
endKey.timestamp = endTime;
EntityCursor<PersistentBufferObject> piCursor = pObjectAccess.pIdx.entities(startKey, true, endKey, true);
try {
PersistentBufferObject pObj = piCursor.last();
SortedMap<Long, Map<String, String>> data = new TreeMap<Long, Map<String, String>>(TIMESTAMP_COMPARATOR);
returnedData.put(feedID, data);
if (pObj != null)
data.put(pObj.getKey().timestamp, pObj.getData());
} catch (DatabaseException e) {
LOGGER.error("Exception in getLastData", e);
} finally {
piCursor.close();
}
}
return returnedData;
}
private Map<String, SortedMap<Long, Map<String, String>>> getData(EntityStore db, Set<String> feedIDs,
TimeUnit timeUnit, long startTime, long endTime) {
Map<String, SortedMap<Long, Map<String, String>>> returnedData = new HashMap<String, SortedMap<Long, Map<String, String>>>();
startTime = TimeUnit.NANOSECONDS.convert(startTime, timeUnit);
endTime = TimeUnit.NANOSECONDS.convert(endTime, timeUnit);
PersistentBufferObjectAccess pObjectAccess = new PersistentBufferObjectAccess(db);
PersistentBufferKey startKey = new PersistentBufferKey();
PersistentBufferKey endKey = new PersistentBufferKey();
for (String feedID : feedIDs) {
startKey.feedID = feedID;
startKey.timestamp = startTime;
endKey.feedID = feedID;
endKey.timestamp = endTime;
EntityCursor<PersistentBufferObject> piCursor = pObjectAccess.pIdx.entities(startKey, true, endKey, true);
try {
for (PersistentBufferObject pObj : piCursor) {
SortedMap<Long, Map<String, String>> data = returnedData.get(feedID);
if (data == null) {
data = new TreeMap<Long, Map<String, String>>(TIMESTAMP_COMPARATOR);
returnedData.put(feedID, data);
}
data.put(pObj.getKey().timestamp, pObj.getData());
}
} catch (DatabaseException e) {
LOGGER.error("Exception in getData", e);
} finally {
piCursor.close();
}
}
return returnedData;
}
@SuppressWarnings("unchecked")
private Map<String, Map<Long, Map<String, String>>>[] groupInputDataByFeed(
Map<String, Map<Long, Map<String, String>>> value) {
Map[] groupInputData = new Map[databases.length];
for (int i = 0; i < groupInputData.length; i++) {
groupInputData[i] = new HashMap<String, Map<Long, Map<String, String>>>();
}
for (Entry<String, Map<Long, Map<String, String>>> entry : value.entrySet()) {
int segmentIndex = hash(entry.getKey().hashCode());
groupInputData[segmentIndex].put(entry.getKey(), entry.getValue());
}
return (Map<String, Map<Long, Map<String, String>>>[]) groupInputData;
}
public Map<String, PartitionTimestamps> putData(Map<String, Map<Long, Map<String, String>>> value, final TimeUnit timeUnit) throws BufferFullException {
final ElapsedTimer timer = new ElapsedTimer();
timer.startInterval();
final Map<String, Map<Long, Map<String, String>>>[] groupData = groupInputDataByFeed(value);
final Map<String, PartitionTimestamps> timestamps = new HashMap<String, PartitionTimestamps>();
final AtomicBoolean bufferFull = new AtomicBoolean(false);
final CountDownLatch latch = new CountDownLatch(groupData.length);
for (int i = 0; i < groupData.length; i++) {
final int dataIndex = i;
Runnable r = new Runnable() {
@Override
public void run() {
try {
for (Entry<String, Map<Long, Map<String, String>>> feedData : groupData[dataIndex].entrySet()) {
PartitionTimestamps timeStamp = null;
try {
timeStamp = putData(null, feedData.getKey(), databases[dataIndex], timeUnit, feedData.getValue());
} catch (BufferFullException e) {
bufferFull.compareAndSet(false, true);
}
if (timeStamp == null) {
break;
} else {
timestamps.put(feedData.getKey(), timeStamp);
}
}
} finally {
latch.countDown();
}
}
};
writeThreads.execute(r);
}
try {
latch.await();
} catch (InterruptedException e) {
LOGGER.warn("Internal error during putData thread", e);
}
if (bufferFull.get()) {
throw new BufferFullException(env.getErrorMsg());
}
timer.stopInterval();
WRITE_PERF_LOGGER.debug("Time to write {} feeds: {}", value.size(), timer.getIntervalInMillis());
return timestamps;
}
@Override
public void putData(Map<String, Map<Long, Map<String, String>>> value, final TimeUnit timeUnit, final MetaDataBuffer metadata, final int metadataIndex) throws BufferFullException {
final ElapsedTimer timer = new ElapsedTimer();
timer.startInterval();
final Map<String, Map<Long, Map<String, String>>>[] groupData = groupInputDataByFeed(value);
final AtomicBoolean bufferFull = new AtomicBoolean(false);
final CountDownLatch latch = new CountDownLatch(groupData.length);
for (int i = 0; i < groupData.length; i++) {
final int dataIndex = i;
Runnable r = new Runnable() {
@Override
public void run() {
try {
for (Entry<String, Map<Long, Map<String, String>>> feedData : groupData[dataIndex].entrySet()) {
PartitionTimestamps timeStamp = null;
try {
timeStamp = putData(null, feedData.getKey(), databases[dataIndex], timeUnit, feedData.getValue());
} catch (BufferFullException e) {
bufferFull.compareAndSet(false, true);
}
if (timeStamp == null) {
break;
} else {
metadata.updatePartitionMetaData(metadataIndex, feedData.getKey(), timeStamp.getStartTimestamp(), timeStamp.getEndTimestamp());
}
}
} finally {
latch.countDown();
}
}
};
writeThreads.execute(r);
}
try {
latch.await();
} catch (InterruptedException e) {
LOGGER.warn("Internal error during putData thread", e);
}
if (bufferFull.get()) {
throw new BufferFullException(env.getErrorMsg());
}
timer.stopInterval();
WRITE_PERF_LOGGER.debug("Time to write {} feeds: {}", value.size(), timer.getIntervalInMillis());
}
private PartitionTimestamps putData(Transaction txn, String feedID, EntityStore db, TimeUnit timeUnit,
Map<Long, Map<String, String>> entries) throws BufferFullException {
long largestTime = 0;
long smallestTime = 0;
try {
PersistentBufferObjectAccess pObjAccess = new PersistentBufferObjectAccess(db);
for (Long time : entries.keySet()) {
try {
Map<String, String> value = entries.get(time);
time = TimeUnit.NANOSECONDS.convert(time, timeUnit);
LOGGER.debug("Putting data for feed {} with time {}", feedID, time);
if (time.longValue() > largestTime) {
largestTime = time.longValue();
}
if (smallestTime == 0) {
smallestTime = time.longValue();
} else if (time.longValue() < smallestTime) {
smallestTime = time.longValue();
}
PersistentBufferObject pObj = new PersistentBufferObject();
pObj.setKey(new PersistentBufferKey(feedID, time.longValue()));
pObj.setData(value);
pObjAccess.pIdx.putNoReturn(pObj);
} catch (DatabaseException de) {
largestTime = -1;
LOGGER.error("Putting data for feed {} failed", feedID, de);
if (env.isDiskBufferFull()) {
LOGGER.error("[PartitionFastDiskBuffer]: " + env.getErrorMsg());
throw new BufferFullException();
}
break;
}
}
} catch (DatabaseException de) {
largestTime = -1;
LOGGER.error("Putting data for feed {} failed", feedID, de);
if (env.isDiskBufferFull()) {
LOGGER.error("[PartitionFastDiskBuffer]: " + env.getErrorMsg());
throw new BufferFullException();
}
}
return new PartitionTimestamps(smallestTime, largestTime);
}
private int hash(int h) {
// Spread bits to regularize both segment and index locations,
// using variant of single-word Wang/Jenkins hash.
h += (h << 15) ^ 0xffffcd7d;
h ^= (h >>> 10);
h += (h << 3);
h ^= (h >>> 6);
h += (h << 2) + (h << 14);
int i = h ^ (h >>> 16);
return ((i >>> segmentShift) & segmentMask);
}
public synchronized void removeBuffer() {
for (int i = 0; i < databases.length; i++) {
try {
if (databases[i] != null) {
env.closeDatabase(databases[i]);
databases[i] = null;
}
} catch (DatabaseException de) {
LOGGER.debug("DatabaseException in closeBuffer", de);
}
}
env.removeEnvironment();
}
public synchronized void closeBuffer() {
this.env.flush();
for (int i = 0; i < databases.length; i++) {
try {
if (databases[i] != null) {
env.closeDatabase(databases[i]);
databases[i] = null;
}
} catch (DatabaseException de) {
LOGGER.debug("DatabaseException in closeBuffer", de);
}
}
env.closeEnvironment();
}
public synchronized boolean isClosed() {
for (int i = 0; i < databases.length; i++) {
if (databases[i] != null) {
return false;
}
}
return true;
}
private void closeDatabases() {
for (int i = 0; i < databases.length; i++) {
try {
if (databases[i] != null) {
env.closeDatabase(databases[i]);
databases[i] = null;
}
} catch (DatabaseException de) {
LOGGER.debug("DatabaseException in closeBuffer", de);
}
}
}
public synchronized void resetBuffer() {
closeDatabases();
env.closeAndRestartEnvironment();
setupDatabasePartition(env);
}
public void inactive() {
active = false;
}
public boolean isActive() {
return active;
}
public DataBufferEnv getBufferEnv() {
return env;
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_disk_internal_PartitionFastDiskBuffer.java |
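The PartitionFastDiskBuffer constructor rounds the configured concurrency degree up to the next power of two so segment selection can use a bit mask instead of a modulo, as the comment notes was borrowed from java.util.concurrent.ConcurrentHashMap. The sizing math in isolation (demo values; mirrors the constructor above):

public final class SegmentSizingDemo {
    public static void main(String[] args) {
        int concurrencyLevel = 5; // e.g. from getConcurrencyDegree()
        int sshift = 0;
        int ssize = 1;
        while (ssize < concurrencyLevel) { // round up to a power of two
            ++sshift;
            ssize <<= 1;
        }
        int segmentShift = 32 - sshift; // upper hash bits select the segment
        int segmentMask = ssize - 1;    // '& mask' replaces '% ssize'
        System.out.println(ssize);        // 8 -> eight EntityStore segments
        System.out.println(segmentShift); // 29
        System.out.println(segmentMask);  // 7
    }
}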
36 | private static final ThreadFactory tf = new ThreadFactory() {
@Override
public Thread newThread(Runnable r) {
Thread t = Executors.defaultThreadFactory().newThread(r);
t.setContextClassLoader(getClass().getClassLoader());
return t;
}
}; | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_disk_internal_PartitionFastDiskBuffer.java |
37 | Runnable r = new Runnable() {
@Override
public void run() {
try {
Map<String, SortedMap<Long, Map<String, String>>> dataSlice = getData(databases[dataIndex],
groupFeeds[dataIndex], timeUnit, startTime, endTime);
if (dataSlice != null) {
dataSlices[dataIndex] = dataSlice;
}
} finally {
readLatch.countDown();
}
}
}; | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_disk_internal_PartitionFastDiskBuffer.java |
38 | Runnable r = new Runnable() {
@Override
public void run() {
try {
Map<String, SortedMap<Long, Map<String, String>>> dataSlice = getLastData(databases[dataIndex],
groupFeeds[dataIndex], timeUnit, startTime, endTime);
if (dataSlice != null) {
dataSlices[dataIndex] = dataSlice;
}
} finally {
latch.countDown();
}
}
}; | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_disk_internal_PartitionFastDiskBuffer.java |
39 | Runnable r = new Runnable() {
@Override
public void run() {
try {
for (Entry<String, Map<Long, Map<String, String>>> feedData : groupData[dataIndex].entrySet()) {
PartitionTimestamps timeStamp = null;
try {
timeStamp = putData(null, feedData.getKey(), databases[dataIndex], timeUnit, feedData.getValue());
} catch (BufferFullException e) {
bufferFull.compareAndSet(false, true);
}
if (timeStamp == null) {
break;
} else {
timestamps.put(feedData.getKey(), timeStamp);
}
}
} finally {
latch.countDown();
}
}
}; | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_disk_internal_PartitionFastDiskBuffer.java |
40 | Runnable r = new Runnable() {
@Override
public void run() {
try {
for (Entry<String, Map<Long, Map<String, String>>> feedData : groupData[dataIndex].entrySet()) {
PartitionTimestamps timeStamp = null;
try {
timeStamp = putData(null, feedData.getKey(), databases[dataIndex], timeUnit, feedData.getValue());
} catch (BufferFullException e) {
bufferFull.compareAndSet(false, true);
}
if (timeStamp == null) {
break;
} else {
metadata.updatePartitionMetaData(metadataIndex, feedData.getKey(), timeStamp.getStartTimestamp(), timeStamp.getEndTimestamp());
}
}
} finally {
latch.countDown();
}
}
}; | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_disk_internal_PartitionFastDiskBuffer.java |
41 | private static final class TimeStampComparator implements Comparator<Long>, Serializable {
private static final long serialVersionUID = -665810351953536404L;
@Override
public int compare(Long o1, Long o2) {
return o1.compareTo(o2);
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_disk_internal_PartitionFastDiskBuffer.java |
42 | @Persistent
public final class PartitionTimestamps implements Cloneable {
private long startTimestamp;
private long endTimestamp;
public PartitionTimestamps() {
//
}
public PartitionTimestamps(long startTimestamp, long endTimestamp) {
this.startTimestamp = startTimestamp;
this.endTimestamp = endTimestamp;
}
public long getStartTimestamp() {
return startTimestamp;
}
public void setStartTimestamp(long startTimestamp) {
this.startTimestamp = startTimestamp;
}
public long getEndTimestamp() {
return endTimestamp;
}
public void setEndTimestamp(long endTimestamp) {
this.endTimestamp = endTimestamp;
}
@Override
public PartitionTimestamps clone() {
try {
return (PartitionTimestamps) super.clone();
} catch (CloneNotSupportedException e) {
throw new RuntimeException(e); // should never happen
}
}
public void merge(long aStartTimestamp, long aEndTimestamp) {
startTimestamp = Math.min(startTimestamp, aStartTimestamp);
endTimestamp = Math.max(endTimestamp, aEndTimestamp);
}
public void merge(PartitionTimestamps timeStamp) {
merge(timeStamp.startTimestamp, timeStamp.endTimestamp);
}
@Override
public String toString() {
return "[" + startTimestamp + ", " + endTimestamp + "]";
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_disk_internal_PartitionTimestamps.java |
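merge simply widens the interval with min/max. One caveat worth noting: an instance from the no-argument constructor starts as [0, 0], so merging real (positive) timestamps into it keeps the spurious zero start. A brief illustration:

public final class PartitionTimestampsDemo {
    public static void main(String[] args) {
        PartitionTimestamps a = new PartitionTimestamps(100L, 200L);
        a.merge(new PartitionTimestamps(50L, 150L));
        System.out.println(a); // [50, 200]

        PartitionTimestamps fresh = new PartitionTimestamps(); // [0, 0]
        fresh.merge(100L, 200L);
        System.out.println(fresh); // [0, 200] -- the zero start survives min()
    }
}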
43 | @Persistent
public class PersistentBufferKey {
@KeyField(1) String feedID;
@KeyField(2) Long timestamp;
public PersistentBufferKey() {
//
}
public PersistentBufferKey(String feedID, Long timestamp) {
this.feedID = feedID;
this.timestamp = timestamp;
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_disk_internal_PersistentBufferKey.java |
44 | @Entity
public class PersistentBufferObject {
@PrimaryKey
private PersistentBufferKey key;
private Map<String, String> data;
public PersistentBufferKey getKey() {
return key;
}
public void setKey(PersistentBufferKey key) {
this.key = key;
}
public Map<String, String> getData() {
return data;
}
public void setData(Map<String, String> data) {
this.data = data;
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_disk_internal_PersistentBufferObject.java |
45 | class PersistentBufferObjectAccess {
PrimaryIndex<PersistentBufferKey, PersistentBufferObject> pIdx;
public PersistentBufferObjectAccess(EntityStore store) throws DatabaseException {
pIdx = store.getPrimaryIndex(PersistentBufferKey.class, PersistentBufferObject.class);
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_disk_internal_PersistentBufferObjectAccess.java |
46 | final class TimestampKeyCreator implements SecondaryKeyCreator {
private TupleBinding<?> keyBinding;
TimestampKeyCreator(TupleBinding<?> keyBinding) {
this.keyBinding = keyBinding;
}
@Override
public boolean createSecondaryKey(SecondaryDatabase secDb, DatabaseEntry keyEntry, DatabaseEntry valueEntry,
DatabaseEntry resultEntry) throws DatabaseException {
KeyValue kv = KeyValue.class.cast(keyBinding.entryToObject(keyEntry));
long timestamp = kv.getTimestamp();
TupleOutput to = new TupleOutput();
to.writeLong(timestamp);
resultEntry.setData(to.getBufferBytes());
return true;
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_disk_internal_TimestampKeyCreator.java |
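Key creators like TimestampKeyCreator and FeedIDKeyCreator take effect when the secondary database is opened with them set on its config. A sketch of the registration against the JE base API (same package assumed; the database name and the already-open env/primaryDb handles are placeholders):

import com.sleepycat.je.Database;
import com.sleepycat.je.DatabaseException;
import com.sleepycat.je.Environment;
import com.sleepycat.je.SecondaryConfig;
import com.sleepycat.je.SecondaryDatabase;

public final class SecondaryIndexSetup {
    static SecondaryDatabase openTimestampIndex(Environment env, Database primaryDb)
            throws DatabaseException {
        SecondaryConfig config = new SecondaryConfig();
        config.setAllowCreate(true);
        config.setSortedDuplicates(true); // many keys can share one timestamp
        config.setKeyCreator(new TimestampKeyCreator(new KeyValueTupleBinding()));
        return env.openSecondaryDatabase(null, "byTimestamp", primaryDb, config);
    }
}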
47 | public class BufferFullTest {
private FeedAggregatorService feedAggregatorService;
private Vector<DataArchive> dataArchives;
private MockLogger mockLogger;
private File bufferLocation;
@BeforeMethod
public void startup() throws Exception {
Properties prop = new Properties();
prop.load(ClassLoader.getSystemResourceAsStream("properties/testFeed2.properties"));
prop.put("buffer.partitions", "2");
prop.put("buffer.time.millis", "-1");
bufferLocation = File.createTempFile("mct-buffer", "");
bufferLocation.delete();
bufferLocation.mkdir();
prop.put("buffer.disk.loc", bufferLocation.toString());
feedAggregatorService = new FeedAggregatorService(prop);
dataArchives = getDataArchives();
dataArchives.clear();
dataArchives.add(new MockBuffer(false));
dataArchives.add(new MockBuffer(true));
mockLogger = new MockLogger();
setMockLogger();
}
@AfterMethod
public void reset() {
delete(bufferLocation);
}
private void delete(File f) {
if (f.isDirectory()) {
for (File f2 : f.listFiles()) {
delete(f2);
}
}
f.delete();
}
@Test
public void testBufferFull() throws InterruptedException {
Map<String, String> testData = new HashMap<String, String>();
testData.put("key1", "value1");
Assert.assertEquals(dataArchives.size(), 2);
Assert.assertFalse(mockLogger.errorLogged);
feedAggregatorService.putData("testFeed", TimeUnit.MILLISECONDS, System.currentTimeMillis(), testData);
Thread.sleep(5000);
Assert.assertEquals(dataArchives.size(), 1);
Assert.assertEquals(dataArchives.get(0).getLOS(), LOS.fast);
Assert.assertTrue(mockLogger.errorLogged);
}
private void setMockLogger() throws Exception {
Field f = feedAggregatorService.getClass().getDeclaredField("LOGGER");
f.setAccessible(true);
f.set(null, mockLogger);
}
@SuppressWarnings("unchecked")
private Vector<DataArchive> getDataArchives() throws Exception {
Field f = feedAggregatorService.getClass().getDeclaredField("dataArchives");
f.setAccessible(true);
return (Vector<DataArchive>)f.get(feedAggregatorService);
}
private static class MockLogger implements Logger {
private static final long serialVersionUID = 531417069158028639L;
private boolean errorLogged = false;
@Override
public void debug(String arg0) {
// TODO Auto-generated method stub
}
@Override
public void debug(String arg0, Object arg1) {
// TODO Auto-generated method stub
}
@Override
public void debug(String arg0, Object[] arg1) {
// TODO Auto-generated method stub
}
@Override
public void debug(String arg0, Throwable arg1) {
// TODO Auto-generated method stub
}
@Override
public void debug(Marker arg0, String arg1) {
// TODO Auto-generated method stub
}
@Override
public void debug(String arg0, Object arg1, Object arg2) {
// TODO Auto-generated method stub
}
@Override
public void debug(Marker arg0, String arg1, Object arg2) {
// TODO Auto-generated method stub
}
@Override
public void debug(Marker arg0, String arg1, Object[] arg2) {
// TODO Auto-generated method stub
}
@Override
public void debug(Marker arg0, String arg1, Throwable arg2) {
// TODO Auto-generated method stub
}
@Override
public void debug(Marker arg0, String arg1, Object arg2, Object arg3) {
// TODO Auto-generated method stub
}
@Override
public void error(String arg0) {
this.errorLogged = true;
}
@Override
public void error(String arg0, Object arg1) {
// TODO Auto-generated method stub
}
@Override
public void error(String arg0, Object[] arg1) {
// TODO Auto-generated method stub
}
@Override
public void error(String arg0, Throwable arg1) {
// TODO Auto-generated method stub
}
@Override
public void error(Marker arg0, String arg1) {
// TODO Auto-generated method stub
}
@Override
public void error(String arg0, Object arg1, Object arg2) {
// TODO Auto-generated method stub
}
@Override
public void error(Marker arg0, String arg1, Object arg2) {
// TODO Auto-generated method stub
}
@Override
public void error(Marker arg0, String arg1, Object[] arg2) {
// TODO Auto-generated method stub
}
@Override
public void error(Marker arg0, String arg1, Throwable arg2) {
// TODO Auto-generated method stub
}
@Override
public void error(Marker arg0, String arg1, Object arg2, Object arg3) {
// TODO Auto-generated method stub
}
@Override
public String getName() {
// TODO Auto-generated method stub
return null;
}
@Override
public void info(String arg0) {
// TODO Auto-generated method stub
}
@Override
public void info(String arg0, Object arg1) {
// TODO Auto-generated method stub
}
@Override
public void info(String arg0, Object[] arg1) {
// TODO Auto-generated method stub
}
@Override
public void info(String arg0, Throwable arg1) {
// TODO Auto-generated method stub
}
@Override
public void info(Marker arg0, String arg1) {
// TODO Auto-generated method stub
}
@Override
public void info(String arg0, Object arg1, Object arg2) {
// TODO Auto-generated method stub
}
@Override
public void info(Marker arg0, String arg1, Object arg2) {
// TODO Auto-generated method stub
}
@Override
public void info(Marker arg0, String arg1, Object[] arg2) {
// TODO Auto-generated method stub
}
@Override
public void info(Marker arg0, String arg1, Throwable arg2) {
// TODO Auto-generated method stub
}
@Override
public void info(Marker arg0, String arg1, Object arg2, Object arg3) {
// TODO Auto-generated method stub
}
@Override
public boolean isDebugEnabled() {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean isDebugEnabled(Marker arg0) {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean isErrorEnabled() {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean isErrorEnabled(Marker arg0) {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean isInfoEnabled() {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean isInfoEnabled(Marker arg0) {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean isTraceEnabled() {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean isTraceEnabled(Marker arg0) {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean isWarnEnabled() {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean isWarnEnabled(Marker arg0) {
// TODO Auto-generated method stub
return false;
}
@Override
public void trace(String arg0) {
// TODO Auto-generated method stub
}
@Override
public void trace(String arg0, Object arg1) {
// TODO Auto-generated method stub
}
@Override
public void trace(String arg0, Object[] arg1) {
// TODO Auto-generated method stub
}
@Override
public void trace(String arg0, Throwable arg1) {
// TODO Auto-generated method stub
}
@Override
public void trace(Marker arg0, String arg1) {
// TODO Auto-generated method stub
}
@Override
public void trace(String arg0, Object arg1, Object arg2) {
// TODO Auto-generated method stub
}
@Override
public void trace(Marker arg0, String arg1, Object arg2) {
// TODO Auto-generated method stub
}
@Override
public void trace(Marker arg0, String arg1, Object[] arg2) {
// TODO Auto-generated method stub
}
@Override
public void trace(Marker arg0, String arg1, Throwable arg2) {
// TODO Auto-generated method stub
}
@Override
public void trace(Marker arg0, String arg1, Object arg2, Object arg3) {
// TODO Auto-generated method stub
}
@Override
public void warn(String arg0) {
// TODO Auto-generated method stub
}
@Override
public void warn(String arg0, Object arg1) {
// TODO Auto-generated method stub
}
@Override
public void warn(String arg0, Object[] arg1) {
// TODO Auto-generated method stub
}
@Override
public void warn(String arg0, Throwable arg1) {
// TODO Auto-generated method stub
}
@Override
public void warn(Marker arg0, String arg1) {
// TODO Auto-generated method stub
}
@Override
public void warn(String arg0, Object arg1, Object arg2) {
// TODO Auto-generated method stub
}
@Override
public void warn(Marker arg0, String arg1, Object arg2) {
// TODO Auto-generated method stub
}
@Override
public void warn(Marker arg0, String arg1, Object[] arg2) {
// TODO Auto-generated method stub
}
@Override
public void warn(Marker arg0, String arg1, Throwable arg2) {
// TODO Auto-generated method stub
}
@Override
public void warn(Marker arg0, String arg1, Object arg2, Object arg3) {
// TODO Auto-generated method stub
}
}
private static class MockBuffer implements DataArchive, DataProvider {
private boolean bufferFull;
MockBuffer(boolean bufferFull) {
this.bufferFull = bufferFull;
}
@Override
public LOS getLOS() {
if (bufferFull) {
return LOS.medium;
}
return LOS.fast;
}
@Override
public void putData(String feedID, TimeUnit timeUnit, Map<Long, Map<String, String>> entries)
throws BufferFullException {
if (bufferFull) {
throw new BufferFullException("Test buffer full.");
}
}
@Override
public void putData(String feedID, TimeUnit timeUnit, long time, Map<String, String> value)
throws BufferFullException {
if (bufferFull) {
throw new BufferFullException("Test buffer full.");
}
}
@Override
public void putData(Map<String, Map<Long, Map<String, String>>> value, TimeUnit timeUnit, Runnable callback)
throws BufferFullException {
if (bufferFull) {
throw new BufferFullException("Test buffer full.");
}
}
@Override
public void reset() {
// TODO Auto-generated method stub
}
@Override
public Map<String, SortedMap<Long, Map<String, String>>> getData(Set<String> feedIDs, long startTime,
long endTime, TimeUnit timeUnit) {
// TODO Auto-generated method stub
return null;
}
@Override
public boolean isFullyWithinTimeSpan(String feedID, long startTime, TimeUnit timeUnit) {
// TODO Auto-generated method stub
return false;
}
@Override
public Map<String, List<Map<String, String>>> getData(Set<String> feedIDs, TimeUnit timeUnit, long startTime,
long endTime) {
// TODO Auto-generated method stub
return null;
}
}
} | false | timeSequenceFeedAggregator_src_test_java_gov_nasa_arc_mct_buffer_internal_BufferFullTest.java |
48 | private static class MockBuffer implements DataArchive, DataProvider {
private boolean bufferFull;
MockBuffer(boolean bufferFull) {
this.bufferFull = bufferFull;
}
@Override
public LOS getLOS() {
if (bufferFull) {
return LOS.medium;
}
return LOS.fast;
}
@Override
public void putData(String feedID, TimeUnit timeUnit, Map<Long, Map<String, String>> entries)
throws BufferFullException {
if (bufferFull) {
throw new BufferFullException("Test buffer full.");
}
}
@Override
public void putData(String feedID, TimeUnit timeUnit, long time, Map<String, String> value)
throws BufferFullException {
if (bufferFull) {
throw new BufferFullException("Test buffer full.");
}
}
@Override
public void putData(Map<String, Map<Long, Map<String, String>>> value, TimeUnit timeUnit, Runnable callback)
throws BufferFullException {
if (bufferFull) {
throw new BufferFullException("Test buffer full.");
}
}
@Override
public void reset() {
// TODO Auto-generated method stub
}
@Override
public Map<String, SortedMap<Long, Map<String, String>>> getData(Set<String> feedIDs, long startTime,
long endTime, TimeUnit timeUnit) {
// TODO Auto-generated method stub
return null;
}
@Override
public boolean isFullyWithinTimeSpan(String feedID, long startTime, TimeUnit timeUnit) {
// TODO Auto-generated method stub
return false;
}
@Override
public Map<String, List<Map<String, String>>> getData(Set<String> feedIDs, TimeUnit timeUnit, long startTime,
long endTime) {
// TODO Auto-generated method stub
return null;
}
} | false | timeSequenceFeedAggregator_src_test_java_gov_nasa_arc_mct_buffer_internal_BufferFullTest.java |
49 | private static class MockLogger implements Logger {
private static final long serialVersionUID = 531417069158028639L;
private boolean errorLogged = false;
@Override
public void debug(String arg0) {
// TODO Auto-generated method stub
}
@Override
public void debug(String arg0, Object arg1) {
// TODO Auto-generated method stub
}
@Override
public void debug(String arg0, Object[] arg1) {
// TODO Auto-generated method stub
}
@Override
public void debug(String arg0, Throwable arg1) {
// TODO Auto-generated method stub
}
@Override
public void debug(Marker arg0, String arg1) {
// TODO Auto-generated method stub
}
@Override
public void debug(String arg0, Object arg1, Object arg2) {
// TODO Auto-generated method stub
}
@Override
public void debug(Marker arg0, String arg1, Object arg2) {
// TODO Auto-generated method stub
}
@Override
public void debug(Marker arg0, String arg1, Object[] arg2) {
// TODO Auto-generated method stub
}
@Override
public void debug(Marker arg0, String arg1, Throwable arg2) {
// TODO Auto-generated method stub
}
@Override
public void debug(Marker arg0, String arg1, Object arg2, Object arg3) {
// TODO Auto-generated method stub
}
@Override
public void error(String arg0) {
this.errorLogged = true;
}
@Override
public void error(String arg0, Object arg1) {
// TODO Auto-generated method stub
}
@Override
public void error(String arg0, Object[] arg1) {
// TODO Auto-generated method stub
}
@Override
public void error(String arg0, Throwable arg1) {
// TODO Auto-generated method stub
}
@Override
public void error(Marker arg0, String arg1) {
// TODO Auto-generated method stub
}
@Override
public void error(String arg0, Object arg1, Object arg2) {
// TODO Auto-generated method stub
}
@Override
public void error(Marker arg0, String arg1, Object arg2) {
// TODO Auto-generated method stub
}
@Override
public void error(Marker arg0, String arg1, Object[] arg2) {
// TODO Auto-generated method stub
}
@Override
public void error(Marker arg0, String arg1, Throwable arg2) {
// TODO Auto-generated method stub
}
@Override
public void error(Marker arg0, String arg1, Object arg2, Object arg3) {
// TODO Auto-generated method stub
}
@Override
public String getName() {
// TODO Auto-generated method stub
return null;
}
@Override
public void info(String arg0) {
// TODO Auto-generated method stub
}
@Override
public void info(String arg0, Object arg1) {
// TODO Auto-generated method stub
}
@Override
public void info(String arg0, Object[] arg1) {
// TODO Auto-generated method stub
}
@Override
public void info(String arg0, Throwable arg1) {
// TODO Auto-generated method stub
}
@Override
public void info(Marker arg0, String arg1) {
// TODO Auto-generated method stub
}
@Override
public void info(String arg0, Object arg1, Object arg2) {
// TODO Auto-generated method stub
}
@Override
public void info(Marker arg0, String arg1, Object arg2) {
// TODO Auto-generated method stub
}
@Override
public void info(Marker arg0, String arg1, Object[] arg2) {
// TODO Auto-generated method stub
}
@Override
public void info(Marker arg0, String arg1, Throwable arg2) {
// TODO Auto-generated method stub
}
@Override
public void info(Marker arg0, String arg1, Object arg2, Object arg3) {
// TODO Auto-generated method stub
}
@Override
public boolean isDebugEnabled() {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean isDebugEnabled(Marker arg0) {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean isErrorEnabled() {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean isErrorEnabled(Marker arg0) {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean isInfoEnabled() {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean isInfoEnabled(Marker arg0) {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean isTraceEnabled() {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean isTraceEnabled(Marker arg0) {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean isWarnEnabled() {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean isWarnEnabled(Marker arg0) {
// TODO Auto-generated method stub
return false;
}
@Override
public void trace(String arg0) {
// TODO Auto-generated method stub
}
@Override
public void trace(String arg0, Object arg1) {
// TODO Auto-generated method stub
}
@Override
public void trace(String arg0, Object[] arg1) {
// TODO Auto-generated method stub
}
@Override
public void trace(String arg0, Throwable arg1) {
// TODO Auto-generated method stub
}
@Override
public void trace(Marker arg0, String arg1) {
// TODO Auto-generated method stub
}
@Override
public void trace(String arg0, Object arg1, Object arg2) {
// TODO Auto-generated method stub
}
@Override
public void trace(Marker arg0, String arg1, Object arg2) {
// TODO Auto-generated method stub
}
@Override
public void trace(Marker arg0, String arg1, Object[] arg2) {
// TODO Auto-generated method stub
}
@Override
public void trace(Marker arg0, String arg1, Throwable arg2) {
// TODO Auto-generated method stub
}
@Override
public void trace(Marker arg0, String arg1, Object arg2, Object arg3) {
// TODO Auto-generated method stub
}
@Override
public void warn(String arg0) {
// TODO Auto-generated method stub
}
@Override
public void warn(String arg0, Object arg1) {
// TODO Auto-generated method stub
}
@Override
public void warn(String arg0, Object[] arg1) {
// TODO Auto-generated method stub
}
@Override
public void warn(String arg0, Throwable arg1) {
// TODO Auto-generated method stub
}
@Override
public void warn(Marker arg0, String arg1) {
// TODO Auto-generated method stub
}
@Override
public void warn(String arg0, Object arg1, Object arg2) {
// TODO Auto-generated method stub
}
@Override
public void warn(Marker arg0, String arg1, Object arg2) {
// TODO Auto-generated method stub
}
@Override
public void warn(Marker arg0, String arg1, Object[] arg2) {
// TODO Auto-generated method stub
}
@Override
public void warn(Marker arg0, String arg1, Throwable arg2) {
// TODO Auto-generated method stub
}
@Override
public void warn(Marker arg0, String arg1, Object arg2, Object arg3) {
// TODO Auto-generated method stub
}
} | false | timeSequenceFeedAggregator_src_test_java_gov_nasa_arc_mct_buffer_internal_BufferFullTest.java |
50 | public final class CODataBuffer extends DataBuffer implements DataArchive, DataProvider {
private static final Logger LOGGER = LoggerFactory.getLogger(CODataBuffer.class);
private static final Logger PERF_READ_LOGGER = LoggerFactory
.getLogger("gov.nasa.arc.mct.performance.read.codbuffer");
private static final Logger PERF_WRITE_LOGGER = LoggerFactory
.getLogger("gov.nasa.arc.mct.performance.write.codbuffer");
private static final Logger PERF_LOGGER = LoggerFactory
.getLogger("gov.nasa.arc.mct.performance.codbuffer");
CODataBuffer(DataBufferEnv env, DataBufferHelper partitionBufferFactory) {
super(env, partitionBufferFactory);
}
@Override
protected void setupPartitionBuffers(DataBufferEnv env, DataBufferHelper partitionBufferFactory) {
for (int i=0; i<partitionDataBuffers.length; i++) {
this.partitionDataBuffers[i] = new AtomicReference<PartitionDataBuffer>();
}
if (env == null) {
for (int i=0; i<partitionDataBuffers.length; i++) {
this.partitionDataBuffers[i].set(partitionBufferFactory.newPartitionBuffer(i));
if (i == metaDataBuffer.getCurrentPartition()) {
this.currentParition = this.partitionDataBuffers[i].get();
} else {
this.partitionDataBuffers[i].get().inactive();
}
}
} else {
this.currentParition = partitionBufferFactory.newPartitionBuffer(env);
this.partitionDataBuffers[currentParition.getBufferEnv().getCurrentBufferPartition()].set(currentParition);
}
}
private final static class FeedRequestContext {
private final String feedID;
private final boolean getLastDataIfNeeded;
public FeedRequestContext(String feedID, boolean getLastDataIfNeeded) {
this.feedID = feedID;
this.getLastDataIfNeeded = getLastDataIfNeeded;
}
@Override
public boolean equals(Object obj) {
if (obj == null || ! (obj instanceof FeedRequestContext)) { return false; }
return feedID.equals(FeedRequestContext.class.cast(obj).feedID);
}
@Override
public int hashCode() {
return feedID.hashCode();
}
@Override
public String toString() {
return ("feedID: " + feedID + ", lastDataRequired: " + getLastDataIfNeeded);
}
}
@SuppressWarnings("unchecked")
private Map<String, FeedRequestContext>[] mapFeedsToPartitions(Set<String> feedIDs, long startTime, long endTime, TimeUnit timeUnit) {
Map<String, FeedRequestContext>[] partitionFeeds = new Map[this.currentParition.getBufferEnv().getNumOfBufferPartitions()];
int startPartition = this.currentParition.getBufferEnv().getCurrentBufferPartition();
int i = startPartition;
do {
Map<String, FeedRequestContext> feedsForThisPartition = null;
for (Iterator<String> it = feedIDs.iterator(); it.hasNext(); ) {
String feedID = it.next();
if (metaDataBuffer.hasFeed(i, feedID)) {
feedsForThisPartition = partitionFeeds[i];
if (feedsForThisPartition == null) {
feedsForThisPartition = new HashMap<String, FeedRequestContext>();
partitionFeeds[i] = feedsForThisPartition;
}
FeedRequestContext frc = null;
if (metaDataBuffer.isFullyWithinTimeSpan(i, feedID, timeUnit, startTime)) {
frc = new FeedRequestContext(feedID, true);
it.remove();
} else {
frc = new FeedRequestContext(feedID, false);
}
feedsForThisPartition.put(feedID, frc);
}
}
i = this.currentParition.getBufferEnv().previousBufferPartition(i);
} while (i != startPartition);
return partitionFeeds;
}
@Override
public Map<String, SortedMap<Long, Map<String, String>>> getData(Set<String> feedIDs, long startTime, long endTime,
TimeUnit timeUnit) {
Set<String> requestFeedIDs = new HashSet<String>(feedIDs);
Map<String, FeedRequestContext>[] partitionFeeds = mapFeedsToPartitions(requestFeedIDs, startTime, endTime, timeUnit);
synchronized (movePartitionLock) {
if (reset) return Collections.emptyMap();
}
Map<String, SortedMap<Long, Map<String, String>>> aggregateData = new HashMap<String, SortedMap<Long, Map<String,String>>>();
for (int i=0; i< partitionFeeds.length; i++) {
Map<String, FeedRequestContext> partitionFeed = partitionFeeds[i];
if (partitionFeed != null) {
PartitionDataBuffer partitionBuffer = this.partitionDataBuffers[i].get();
Map<String, SortedMap<Long, Map<String, String>>> data = getData(partitionBuffer, partitionFeed, timeUnit, startTime, endTime);
for (Entry<String, SortedMap<Long, Map<String, String>>> entry: data.entrySet()) {
SortedMap<Long, Map<String, String>> cumulativeData = aggregateData.get(entry.getKey());
if (cumulativeData != null) {
cumulativeData.putAll(entry.getValue());
} else {
aggregateData.put(entry.getKey(), entry.getValue());
}
}
}
}
return aggregateData;
}
@Override
public Map<String, List<Map<String, String>>> getData(Set<String> feedIDs, TimeUnit timeUnit, long startTime,
long endTime) {
Set<String> requestFeedIDs = new HashSet<String>(feedIDs);
Map<String, FeedRequestContext>[] partitionFeeds = mapFeedsToPartitions(requestFeedIDs, startTime, endTime, timeUnit);
synchronized (movePartitionLock) {
if (reset) return Collections.emptyMap();
}
Map<String, List<Map<String, String>>> aggregateData = new HashMap<String, List<Map<String,String>>>();
for (int i=0; i<partitionFeeds.length; i++) {
Map<String, FeedRequestContext> partitionFeed = partitionFeeds[i];
if (partitionFeed != null) {
PartitionDataBuffer partitionBuffer = this.partitionDataBuffers[i].get();
Map<String, SortedMap<Long, Map<String, String>>> data = getData(partitionBuffer, partitionFeed, timeUnit, startTime, endTime);
for (Entry<String, SortedMap<Long, Map<String, String>>> entry: data.entrySet()) {
List<Map<String, String>> cumulativeData = aggregateData.get(entry.getKey());
if (cumulativeData != null) {
cumulativeData.addAll(0, entry.getValue().values());
} else {
aggregateData.put(entry.getKey(), new LinkedList<Map<String, String>>(entry.getValue().values()));
}
}
}
}
return aggregateData;
}
private Map<String, SortedMap<Long, Map<String, String>>> getData(PartitionDataBuffer partitionDataBuffer, Map<String, FeedRequestContext> feedRequestContexts, TimeUnit timeUnit,
long startTime, long endTime) {
synchronized (movePartitionLock) {
if (reset) return Collections.emptyMap();
while (moveParitionInProgress) {
try {
movePartitionLock.wait();
} catch (InterruptedException e) {
// ignore
}
}
readInProgress = true;
}
try {
final ElapsedTimer timer = new ElapsedTimer();
timer.startInterval();
Map<String, SortedMap<Long, Map<String, String>>> returnedData = partitionDataBuffer.getData(feedRequestContexts.keySet(), timeUnit, startTime, endTime);
PERF_READ_LOGGER.debug("Get Regular Data feeds: {} from partition: {}", returnedData, partitionDataBuffer.getBufferEnv().getCurrentBufferPartition());
for (Iterator<Entry<String, FeedRequestContext>> it = feedRequestContexts.entrySet().iterator(); it.hasNext(); ) {
Entry<String, FeedRequestContext> entry = it.next();
String feedID = entry.getKey();
SortedMap<Long, Map<String, String>> data = returnedData.get(feedID);
boolean needPrevPoint = true;
if (data != null && !data.isEmpty()) {
long firstPointTS = data.firstKey();
needPrevPoint = firstPointTS > TimeUnit.NANOSECONDS.convert(startTime, timeUnit);
}
if (!entry.getValue().getLastDataIfNeeded || !needPrevPoint) {
it.remove();
}
}
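// For the feeds still needing it, fetch the single most recent point before
// the window start and splice it into the returned data.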
if (!feedRequestContexts.isEmpty()) {
Set<String> feedIDs = feedRequestContexts.keySet();
Map<String, SortedMap<Long, Map<String, String>>> lastData = partitionDataBuffer.getLastData(feedIDs,
timeUnit, 0, startTime);
for (Entry<String, SortedMap<Long, Map<String, String>>> entry : lastData.entrySet()) {
String feedID = entry.getKey();
SortedMap<Long, Map<String, String>> data = entry.getValue();
if (data != null && !data.isEmpty()) {
SortedMap<Long, Map<String, String>> feedData = returnedData.get(feedID);
if (feedData == null) {
feedData = new TreeMap<Long, Map<String, String>>();
returnedData.put(feedID, feedData);
}
Long ts = data.firstKey();
feedData.put(ts, data.get(ts));
}
}
PERF_READ_LOGGER.debug("Get Last Data feeds: {} from partition: {} ", returnedData, partitionDataBuffer.getBufferEnv().getCurrentBufferPartition());
}
timer.stopInterval();
LOGGER.debug("time to get Data for feeds {}: {}", feedRequestContexts, timer.getIntervalInMillis());
return returnedData;
} finally {
synchronized (movePartitionLock) {
readInProgress = false;
movePartitionLock.notifyAll();
}
}
}
@Override
public void putData(String feedID, TimeUnit timeUnit, Map<Long, Map<String, String>> entries) throws BufferFullException {
synchronized (movePartitionLock) {
if (reset) return;
}
final ElapsedTimer timer = new ElapsedTimer();
timer.startInterval();
Map<String, Map<Long, Map<String, String>>> feedDataToPut = new HashMap<String, Map<Long,Map<String,String>>>();
feedDataToPut.put(feedID, entries);
int i = this.currentParition.getBufferEnv().getCurrentBufferPartition();
int startPartition = i;
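// Fan the write out to every active partition, starting at the current one;
// during a partition overlap two partitions are active and both receive the
// data. The loop stops at the first missing or inactive partition.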
do {
PartitionDataBuffer partitionBuffer = this.partitionDataBuffers[i].get();
if (partitionBuffer == null || !partitionBuffer.isActive()) {
break;
}
LOGGER.debug("Putting in partition {}", i);
Map<String, PartitionTimestamps> timeStamps = putData(partitionBuffer, feedDataToPut, timeUnit);
if (timeStamps != null) {
metaDataBuffer.updatePartitionMetaData(partitionBuffer.getBufferEnv().getCurrentBufferPartition(), timeStamps);
}
i = (i + 1) % this.currentParition.getBufferEnv().getNumOfBufferPartitions();
} while (i != startPartition);
timer.stopInterval();
PERF_LOGGER.debug("Time to save data for feed {}: {}", feedID, timer.getIntervalInMillis());
}
private void putData(PartitionDataBuffer partitionBuffer, Map<String, Map<Long, Map<String, String>>> value, TimeUnit timeUnit, MetaDataBuffer metadata, int metadataIndex) throws BufferFullException {
synchronized (movePartitionLock) {
if (reset) return;
while (moveParitionInProgress) {
try {
movePartitionLock.wait();
} catch (InterruptedException e) {
// ignore
}
}
writeInProgress = true;
}
if (this.partitionDataBuffers[partitionBuffer.getBufferEnv().getCurrentBufferPartition()].get() == null) {
return;
}
try {
partitionBuffer.putData(value, timeUnit, metadata, metadataIndex);
} finally {
synchronized (movePartitionLock) {
writeInProgress = false;
movePartitionLock.notifyAll();
}
}
}
private Map<String, PartitionTimestamps> putData(PartitionDataBuffer partitionBuffer, Map<String, Map<Long, Map<String, String>>> value, TimeUnit timeUnit) throws BufferFullException {
synchronized (movePartitionLock) {
if (reset) return null;
while (moveParitionInProgress) {
try {
movePartitionLock.wait();
} catch (InterruptedException e) {
// ignore
}
}
writeInProgress = true;
}
if (this.partitionDataBuffers[partitionBuffer.getBufferEnv().getCurrentBufferPartition()].get() == null) {
return null;
}
try {
return partitionBuffer.putData(value, timeUnit);
} finally {
synchronized (movePartitionLock) {
writeInProgress = false;
movePartitionLock.notifyAll();
}
}
}
@Override
public void putData(Map<String, Map<Long, Map<String, String>>> value, TimeUnit timeUnit,
Runnable callback) throws BufferFullException {
PERF_WRITE_LOGGER.debug("COD Putting data for {} feeds", value);
synchronized (movePartitionLock) {
if (reset) return;
}
final ElapsedTimer timer = new ElapsedTimer();
timer.startInterval();
int i = this.currentParition.getBufferEnv().getCurrentBufferPartition();
int startPartition = i;
do {
PartitionDataBuffer partitionBuffer = this.partitionDataBuffers[i].get();
if (partitionBuffer == null || !partitionBuffer.isActive()) {
break;
}
LOGGER.debug("Putting in partition {}", i);
putData(partitionBuffer, value, timeUnit, metaDataBuffer, i);
timer.stopInterval();
PERF_LOGGER.debug("Time to save data for {} feeds: {}", value.size(), timer
.getIntervalInMillis());
i = (i + 1) % currentParition.getBufferEnv().getNumOfBufferPartitions();
} while (i != startPartition);
if (callback != null) {
callback.run();
}
}
@Override
public void putData(String feedID, TimeUnit timeUnit, long time, Map<String, String> value) throws BufferFullException {
synchronized (movePartitionLock) {
if (reset) return;
}
final ElapsedTimer timer = new ElapsedTimer();
timer.startInterval();
Map<Long, Map<String, String>> dataToPut = new HashMap<Long, Map<String, String>>();
dataToPut.put(Long.valueOf(time), value);
Map<String, Map<Long, Map<String, String>>> feedDataToPut = new HashMap<String, Map<Long,Map<String,String>>>();
feedDataToPut.put(feedID, dataToPut);
int i = this.currentParition.getBufferEnv().getCurrentBufferPartition();
int startPartition = i;
do {
PartitionDataBuffer partitionBuffer = this.partitionDataBuffers[i].get();
if (partitionBuffer == null || !partitionBuffer.isActive()) {
break;
}
LOGGER.debug("Putting in partition {}", i);
Map<String, PartitionTimestamps> timeStamps = putData(partitionBuffer, feedDataToPut, timeUnit);
if (timeStamps != null) {
metaDataBuffer.updatePartitionMetaData(partitionBuffer.getBufferEnv().getCurrentBufferPartition(), timeStamps);
}
i = (i + 1) % this.currentParition.getBufferEnv().getNumOfBufferPartitions();
} while (i != startPartition);
timer.stopInterval();
PERF_LOGGER.debug("Time to save data for feed {}: {}", feedID, timer.getIntervalInMillis());
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_internal_CODataBuffer.java |
51 | private final static class FeedRequestContext {
private final String feedID;
private final boolean getLastDataIfNeeded;
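// Equality and hashing are by feedID only, so at most one request context
// per feed survives when contexts are used as set members or map keys.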
public FeedRequestContext(String feedID, boolean getLastDataIfNeeded) {
this.feedID = feedID;
this.getLastDataIfNeeded = getLastDataIfNeeded;
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof FeedRequestContext)) { return false; } // instanceof already rejects null
return feedID.equals(((FeedRequestContext) obj).feedID);
}
@Override
public int hashCode() {
return feedID.hashCode();
}
@Override
public String toString() {
return ("feedID: " + feedID + ", lastDataRequired: " + getLastDataIfNeeded);
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_internal_CODataBuffer.java |
52 | public class CODataBufferTest {
private DataBuffer codataBuffer;
private String testFeedID1 = "TestPui1";
private String testFeedID2 = "TestPui2";
private File bufferLocation;
@BeforeMethod
public void setup() throws IOException {
Properties prop = new Properties();
prop.load(ClassLoader.getSystemResourceAsStream("properties/testFeed.properties"));
prop.put("buffer.partitions", "2");
prop.put("buffer.time.millis", "-1");
bufferLocation = File.createTempFile("mct-buffer", "");
bufferLocation.delete();
bufferLocation.mkdir();
prop.put("buffer.disk.loc", bufferLocation.toString());
DataBufferFactory.reset();
codataBuffer = DataBufferFactory.getFastDiskDataBuffer(prop);
if (codataBuffer.isDataBufferClose()) {
codataBuffer.reset();
}
}
@AfterMethod
public void reset() {
if (codataBuffer != null) {
codataBuffer.closeBuffer();
}
DataBufferFactory.reset();
delete(bufferLocation);
}
private void delete(File f) {
if (f.isDirectory()) {
for (File f2 : f.listFiles()) {
delete(f2);
}
}
f.delete();
}
@Test
public void CODReadTest() throws Exception {
int currentPartition = getCurrentBufferPartition(codataBuffer);
Assert.assertEquals(currentPartition, 0);
Map<String, String> value = new HashMap<String, String>();
value.put("value", "1.3");
value.put("status", "ok");
long time = System.currentTimeMillis();
long nanotime = TimeUnit.NANOSECONDS.convert(time, TimeUnit.MILLISECONDS);
codataBuffer.putData(testFeedID1, TimeUnit.MILLISECONDS, time, value);
codataBuffer.prepareForNextPartition();
currentPartition = getCurrentBufferPartition(codataBuffer);
Assert.assertEquals(currentPartition, 0);
codataBuffer.moveToNextPartition();
currentPartition = getCurrentBufferPartition(codataBuffer);
Assert.assertEquals(currentPartition, 1);
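// The requested window lies entirely after the stored sample; a COD buffer
// is still expected to return the latest preceding value.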
List<Map<String, String>> returnData = codataBuffer.getData(Collections.singleton(testFeedID1), TimeUnit.NANOSECONDS, nanotime+1000,
nanotime + 2000).get(testFeedID1);
Assert.assertEquals(returnData.size(), 1);
Map<String, String> returnValue = returnData.get(0);
Assert.assertNotSame(returnValue, value);
assertHasSameValue(returnValue, value);
}
@Test
public void prevPointTest() throws Exception {
int currentPartition = getCurrentBufferPartition(codataBuffer);
Assert.assertEquals(currentPartition, 0);
Map<String, String> value = new HashMap<String, String>();
value.put("value", "1.3");
value.put("status", "ok");
long time = System.currentTimeMillis();
long nanotime = TimeUnit.NANOSECONDS.convert(time, TimeUnit.MILLISECONDS);
codataBuffer.putData(testFeedID1, TimeUnit.MILLISECONDS, time, value);
Assert.assertEquals(getCurrentBufferPartition(codataBuffer), 0);
Map<String, String> value2 = new HashMap<String, String>();
value2.put("value", "1.4");
value2.put("status", "ok");
codataBuffer.putData(testFeedID1, TimeUnit.MILLISECONDS, time + 1, value2);
Assert.assertEquals(getCurrentBufferPartition(codataBuffer), 0);
List<Map<String, String>> returnData = codataBuffer.getData(Collections.singleton(testFeedID1), TimeUnit.NANOSECONDS, nanotime + 2000000,
nanotime + 3000000).get(testFeedID1);
Assert.assertEquals(returnData.size(), 1);
Map<String, String> returnValue = returnData.get(0);
Assert.assertNotSame(returnValue, value2);
assertHasSameValue(returnValue, value2);
}
@Test
public void noNextPointTest() throws Exception {
int currentPartition = getCurrentBufferPartition(codataBuffer);
Assert.assertEquals(currentPartition, 0);
Map<String, String> value = new HashMap<String, String>();
value.put("value", "1.3");
value.put("status", "ok");
long time = System.currentTimeMillis();
long nanotime = TimeUnit.NANOSECONDS.convert(time, TimeUnit.MILLISECONDS);
codataBuffer.putData(testFeedID1, TimeUnit.MILLISECONDS, time, value);
List<Map<String, String>> returnData = codataBuffer.getData(Collections.singleton(testFeedID1), TimeUnit.NANOSECONDS, nanotime-2000,
nanotime - 1000).get(testFeedID1);
Assert.assertNull(returnData);
}
@Test
public void putDataTimeRangeTest() throws Exception {
long time = System.currentTimeMillis();
Map<String, String> value = new HashMap<String, String>();
value.put("value", "1.3");
value.put("status", "ok");
Map<Long, Map<String, String>> feedData1 = new HashMap<Long, Map<String, String>>();
feedData1.put(time, value);
Map<String, String> value2 = new HashMap<String, String>();
value2.put("value", "1.4");
value2.put("status", "ok");
Map<Long, Map<String, String>> feedData2 = new HashMap<Long, Map<String, String>>();
feedData2.put(time + 100, value2);
Map<String, Map<Long, Map<String, String>>> data = new HashMap<String, Map<Long, Map<String, String>>>();
data.put(testFeedID1, feedData1);
data.put(testFeedID2, feedData2);
codataBuffer.putData(data, TimeUnit.MILLISECONDS, null);
long nanotime = TimeUnit.NANOSECONDS.convert(time, TimeUnit.MILLISECONDS);
long nanotime2 = TimeUnit.NANOSECONDS.convert(time + 100, TimeUnit.MILLISECONDS);
Assert.assertEquals(codataBuffer.metaDataBuffer.getStartTimestamp(0, testFeedID1), nanotime);
Assert.assertEquals(codataBuffer.metaDataBuffer.getEndTimestamp(0, testFeedID1), nanotime);
Assert.assertEquals(codataBuffer.metaDataBuffer.getStartTimestamp(0, testFeedID2), nanotime2);
Assert.assertEquals(codataBuffer.metaDataBuffer.getEndTimestamp(0, testFeedID2), nanotime2);
}
private void assertHasSameValue(Map<String, String> actualValue, Map<String, String> expectedValue) {
Assert.assertEquals(actualValue.size(), expectedValue.size());
for (String key : actualValue.keySet()) {
Assert.assertEquals(actualValue.get(key), expectedValue.get(key));
}
}
private int getCurrentBufferPartition(DataBuffer dataBuffer) throws Exception {
Field f = DataBuffer.class.getDeclaredField("currentParition");
f.setAccessible(true);
PartitionDataBuffer currentPartitionBuffer = (PartitionDataBuffer) f.get(dataBuffer);
return currentPartitionBuffer.getBufferEnv().getCurrentBufferPartition();
}
} | false | timeSequenceFeedAggregator_src_test_java_gov_nasa_arc_mct_buffer_internal_CODataBufferTest.java |
53 | public class DataBuffer implements DataArchive, DataProvider {
private static final Logger LOGGER = LoggerFactory.getLogger(DataBuffer.class);
private static final Logger PERF_LOGGER = LoggerFactory
.getLogger("gov.nasa.arc.mct.performance.buffer");
protected final AtomicReference<PartitionDataBuffer>[] partitionDataBuffers;
protected MetaDataBuffer metaDataBuffer;
protected volatile PartitionDataBuffer currentParition;
protected DataBufferEvictor evictor;
protected final Object movePartitionLock = new Object();
protected final Object resetLock = new Object();
protected boolean readInProgress = false;
protected boolean writeInProgress = false;
protected boolean moveParitionInProgress = false;
protected volatile boolean reset = false;
protected volatile boolean prepareNewPartitionInProgress = false;
protected final DataBufferHelper dataBufferHelper;
@SuppressWarnings("unchecked")
DataBuffer(DataBufferEnv env, DataBufferHelper partitionBufferFactory) {
this.dataBufferHelper = partitionBufferFactory;
if (env == null) {
metaDataBuffer = partitionBufferFactory.newMetaDataBuffer(null);
} else {
metaDataBuffer = partitionBufferFactory.newMetaDataBuffer(partitionBufferFactory.newMetaDataBufferEnv(env.getConfigProperties()));
}
this.partitionDataBuffers = new AtomicReference[metaDataBuffer.getNumOfPartitions()];
setupPartitionBuffers(env, partitionBufferFactory);
startEvictor();
}
protected void setupPartitionBuffers(DataBufferEnv env, DataBufferHelper partitionBufferFactory) {
PartitionDataBuffer partitionBuffer;
if (env == null) {
partitionBuffer = partitionBufferFactory.newPartitionBuffer(metaDataBuffer.getCurrentPartition());
} else {
partitionBuffer = partitionBufferFactory.newPartitionBuffer(env);
}
this.currentParition = partitionBuffer;
DataBufferEnv currentEnv = currentParition.getBufferEnv();
for (int i=0; i<partitionDataBuffers.length; i++) {
this.partitionDataBuffers[i] = new AtomicReference<PartitionDataBuffer>();
}
this.partitionDataBuffers[currentEnv.getCurrentBufferPartition()].set(currentParition);
}
private void startEvictor() {
DataBufferEnv currentEnv = currentParition.getBufferEnv();
if (currentEnv.getNumOfBufferPartitions() > 1 && currentEnv.getBufferTime() != -1) {
this.evictor = new DataBufferEvictor(this, currentEnv.getBufferTime()
- currentEnv.getBufferPartitionOverlap(), currentEnv.getBufferPartitionOverlap());
evictor.schedule();
}
}
@Override
public boolean isFullyWithinTimeSpan(String feedID, long startTime, TimeUnit timeUnit) {
int startPartition = this.currentParition.getBufferEnv().getCurrentBufferPartition();
int i = startPartition;
do {
if (metaDataBuffer.isFullyWithinTimeSpan(i, feedID, timeUnit, startTime)) {
return true;
}
i = this.currentParition.getBufferEnv().previousBufferPartition(i);
} while (i != startPartition);
return false;
}
/**
* Returns true if the entire request can be satisfied for all feeds.
* @param partition partition index
* @param feedIDs feed IDs
* @param startTime start time
* @param timeUnit unit of time for startTime
* @return true if the partition fully covers the span from startTime for every feed
*/
private boolean isFullyWithinTimeSpan(int partition, Set<String> feedIDs, long startTime, TimeUnit timeUnit) {
for (String feedID : feedIDs) {
if(!metaDataBuffer.isFullyWithinTimeSpan(partition, feedID, timeUnit, startTime)) {
return false;
}
}
return true;
}
private boolean isWithinTimeSpan(int partition, Set<String> feedIDs, TimeUnit timeUnit, long startTime, long endTime) {
for (String feedID : feedIDs) {
if(metaDataBuffer.isWithinTimeSpan(partition, feedID, timeUnit, startTime, endTime)) {
return true;
}
}
return false;
}
@Override
public Map<String, SortedMap<Long, Map<String, String>>> getData(Set<String> feedIDs, long startTime, long endTime,
TimeUnit timeUnit) {
synchronized (movePartitionLock) {
if (reset) return Collections.emptyMap();
}
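// Scan partitions backwards in time from the current one, merging per-feed
// results, and stop early once a partition fully covers the requested span
// for every feed.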
Map<String, SortedMap<Long, Map<String, String>>> aggregateData = new HashMap<String, SortedMap<Long, Map<String,String>>>();
int startPartition = this.currentParition.getBufferEnv().getCurrentBufferPartition();
int i = startPartition;
do {
PartitionDataBuffer partitionBuffer = this.partitionDataBuffers[i].get();
if (partitionBuffer != null && isWithinTimeSpan(i, feedIDs, timeUnit, startTime, endTime)) {
Map<String, SortedMap<Long, Map<String, String>>> data = getData(partitionBuffer, feedIDs, timeUnit, startTime, endTime);
for (Entry<String, SortedMap<Long, Map<String, String>>> entry: data.entrySet()) {
SortedMap<Long, Map<String, String>> cumulativeData = aggregateData.get(entry.getKey());
if (cumulativeData != null) {
cumulativeData.putAll(entry.getValue());
} else {
aggregateData.put(entry.getKey(), entry.getValue());
}
}
if (isFullyWithinTimeSpan(i, feedIDs, startTime, timeUnit)) {
break;
}
}
i = this.currentParition.getBufferEnv().previousBufferPartition(i);
} while (i != startPartition);
return aggregateData;
}
@Override
public Map<String, List<Map<String, String>>> getData(Set<String> feedIDs, TimeUnit timeUnit, long startTime,
long endTime) {
synchronized (movePartitionLock) {
if (reset) return Collections.emptyMap();
}
Map<String, List<Map<String, String>>> aggregateData = new HashMap<String, List<Map<String,String>>>();
int startPartition = this.currentParition.getBufferEnv().getCurrentBufferPartition();
int i = startPartition;
do {
PartitionDataBuffer partitionBuffer = this.partitionDataBuffers[i].get();
if (partitionBuffer != null && isWithinTimeSpan(i, feedIDs, timeUnit, startTime, endTime)) {
Map<String, SortedMap<Long, Map<String, String>>> data = getData(partitionBuffer, feedIDs, timeUnit, startTime, endTime);
for (Entry<String, SortedMap<Long, Map<String, String>>> entry: data.entrySet()) {
List<Map<String, String>> cumulativeData = aggregateData.get(entry.getKey());
if (cumulativeData != null) {
cumulativeData.addAll(0, entry.getValue().values());
} else {
aggregateData.put(entry.getKey(), new LinkedList<Map<String, String>>(entry.getValue().values()));
}
}
if (isFullyWithinTimeSpan(i, feedIDs, startTime, timeUnit)) {
break;
}
}
i = this.currentParition.getBufferEnv().previousBufferPartition(i);
} while (i != startPartition);
return aggregateData;
}
private Map<String, SortedMap<Long, Map<String, String>>> getData(PartitionDataBuffer partitionDataBuffer, Set<String> feedIDs, TimeUnit timeUnit,
long startTime, long endTime) {
synchronized (movePartitionLock) {
if (reset) return Collections.emptyMap();
while (moveParitionInProgress) {
try {
movePartitionLock.wait();
} catch (InterruptedException e) {
// ignore
}
}
readInProgress = true;
}
Set<String> searchFeedIDS = new HashSet<String>(feedIDs);
try {
final ElapsedTimer timer = new ElapsedTimer();
timer.startInterval();
Map<String, SortedMap<Long, Map<String, String>>> returnedData = partitionDataBuffer.getData(searchFeedIDS, timeUnit, startTime, endTime);
timer.stopInterval();
LOGGER.debug("time to get Data for feeds {}: {}", feedIDs, timer.getIntervalInMillis());
return returnedData;
} finally {
synchronized (movePartitionLock) {
readInProgress = false;
movePartitionLock.notifyAll();
}
}
}
@Override
public void putData(String feedID, TimeUnit timeUnit, Map<Long, Map<String, String>> entries) throws BufferFullException {
synchronized (movePartitionLock) {
if (reset) return;
}
final ElapsedTimer timer = new ElapsedTimer();
timer.startInterval();
Map<String, Map<Long, Map<String, String>>> feedDataToPut = new HashMap<String, Map<Long,Map<String,String>>>();
feedDataToPut.put(feedID, entries);
int i = this.currentParition.getBufferEnv().getCurrentBufferPartition();
int startPartition = i;
do {
PartitionDataBuffer partitionBuffer = this.partitionDataBuffers[i].get();
if (partitionBuffer == null || !partitionBuffer.isActive()) {
break;
}
LOGGER.debug("Putting in partition {}", i);
Map<String, PartitionTimestamps> timeStamps = putData(partitionBuffer, feedDataToPut, timeUnit);
if (timeStamps != null) {
metaDataBuffer.updatePartitionMetaData(partitionBuffer.getBufferEnv().getCurrentBufferPartition(), timeStamps);
}
i = (i + 1) % this.currentParition.getBufferEnv().getNumOfBufferPartitions();
} while (i != startPartition);
timer.stopInterval();
PERF_LOGGER.debug("Time to save data for feed {}: {}", feedID, timer.getIntervalInMillis());
}
private Map<String, PartitionTimestamps> putData(PartitionDataBuffer partitionBuffer, Map<String, Map<Long, Map<String, String>>> value, TimeUnit timeUnit) throws BufferFullException {
synchronized (movePartitionLock) {
if (reset) return null;
while (moveParitionInProgress) {
try {
movePartitionLock.wait();
} catch (InterruptedException e) {
// ignore
}
}
writeInProgress = true;
}
if (this.partitionDataBuffers[partitionBuffer.getBufferEnv().getCurrentBufferPartition()].get() == null) {
return null;
}
try {
return partitionBuffer.putData(value, timeUnit);
} finally {
synchronized (movePartitionLock) {
writeInProgress = false;
movePartitionLock.notifyAll();
}
}
}
@Override
public void putData(Map<String, Map<Long, Map<String, String>>> value, TimeUnit timeUnit,
Runnable callback) throws BufferFullException {
PERF_LOGGER.debug("Putting data for {} feeds", value.size());
synchronized (movePartitionLock) {
if (reset) return;
}
final ElapsedTimer timer = new ElapsedTimer();
timer.startInterval();
int i = this.currentParition.getBufferEnv().getCurrentBufferPartition();
int startPartition = i;
do {
PartitionDataBuffer partitionBuffer = this.partitionDataBuffers[i].get();
if (partitionBuffer == null || !partitionBuffer.isActive()) {
break;
}
LOGGER.debug("Putting in partition {}", i);
Map<String, PartitionTimestamps> timeStamps = putData(partitionBuffer, value, timeUnit);
if (timeStamps != null) {
metaDataBuffer.updatePartitionMetaData(partitionBuffer.getBufferEnv().getCurrentBufferPartition(), timeStamps);
}
timer.stopInterval();
PERF_LOGGER.debug("Time to save data for {} feeds: {}", value.size(), timer
.getIntervalInMillis());
i = (i + 1) % currentParition.getBufferEnv().getNumOfBufferPartitions();
} while (i != startPartition);
if (callback != null) {
callback.run();
}
}
@Override
public void putData(String feedID, TimeUnit timeUnit, long time, Map<String, String> value) throws BufferFullException {
synchronized (movePartitionLock) {
if (reset) return;
}
final ElapsedTimer timer = new ElapsedTimer();
timer.startInterval();
Map<Long, Map<String, String>> dataToPut = new HashMap<Long, Map<String, String>>();
dataToPut.put(Long.valueOf(time), value);
Map<String, Map<Long, Map<String, String>>> feedDataToPut = new HashMap<String, Map<Long,Map<String,String>>>();
feedDataToPut.put(feedID, dataToPut);
int i = this.currentParition.getBufferEnv().getCurrentBufferPartition();
int startPartition = i;
do {
PartitionDataBuffer partitionBuffer = this.partitionDataBuffers[i].get();
if (partitionBuffer == null || !partitionBuffer.isActive()) {
break;
}
LOGGER.debug("Putting in partition {}", i);
Map<String, PartitionTimestamps> timeStamps = putData(partitionBuffer, feedDataToPut, timeUnit);
if (timeStamps != null) {
metaDataBuffer.updatePartitionMetaData(partitionBuffer.getBufferEnv().getCurrentBufferPartition(), timeStamps);
}
i = (i + 1) % this.currentParition.getBufferEnv().getNumOfBufferPartitions();
} while (i != startPartition);
timer.stopInterval();
PERF_LOGGER.debug("Time to save data for feed {}: {}", feedID, timer.getIntervalInMillis());
}
public int getConcurrencyDegree() {
return this.currentParition.getBufferEnv().getConcurrencyDegree();
}
public int getBufferWriteThreadPoolSize() {
return this.currentParition.getBufferEnv().getBufferWriteThreadPoolSize();
}
@Override
public void reset() {
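// Two-phase quiesce: first wait out in-flight moves/reads/writes and raise
// the reset flag so new operations bail out early, then wait under resetLock
// for any partition preparation to finish before tearing the buffers down.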
synchronized (movePartitionLock) {
while (moveParitionInProgress || writeInProgress || readInProgress) {
try {
movePartitionLock.wait();
} catch (InterruptedException e) {
// ignore
}
}
reset = true;
}
synchronized(resetLock) {
while (prepareNewPartitionInProgress) {
try {
resetLock.wait();
} catch (InterruptedException e) {
// ignore
}
}
}
try {
if (evictor != null) {
evictor.cancel();
}
DataBufferEnv env = currentParition.getBufferEnv();
for (int j = 0; j < this.partitionDataBuffers.length; j++) {
if (partitionDataBuffers[j].get() != null) {
partitionDataBuffers[j].get().removeBuffer();
partitionDataBuffers[j].set(null);
}
}
if (this.metaDataBuffer != null) {
this.metaDataBuffer.restart();
}
DataBufferEnv currentEnv = (DataBufferEnv)env.clone();
assert currentEnv != null : "Current DataBufferEnv should not be null.";
assert dataBufferHelper != null : "DataBufferHelper should not be null.";
PartitionDataBuffer partitionBuffer = dataBufferHelper.newPartitionBuffer(currentEnv);
this.currentParition = partitionBuffer;
this.partitionDataBuffers[currentEnv.getCurrentBufferPartition()].set(currentParition);
startEvictor();
} finally {
synchronized(movePartitionLock) {
reset = false;
}
}
}
public void closeBuffer() {
if (evictor != null) {
evictor.cancel();
evictor = null;
}
for (int j = 0; j < this.partitionDataBuffers.length; j++) {
if (partitionDataBuffers[j].get() != null) {
partitionDataBuffers[j].get().closeBuffer();
partitionDataBuffers[j].set(null);
}
}
if (this.metaDataBuffer != null) {
this.metaDataBuffer.close();
}
}
private synchronized void closeBuffer(PartitionDataBuffer partitionBuffer) {
partitionBuffer.removeBuffer();
}
public boolean isDataBufferClose() {
return this.currentParition.isClosed();
}
public boolean isAllDataBuffersClose() {
for (int i=0; i<this.partitionDataBuffers.length; i++) {
PartitionDataBuffer partitionBuffer = partitionDataBuffers[i].get();
if (partitionBuffer != null && !partitionBuffer.isClosed()) {
return false;
}
}
return true;
}
public void prepareForNextPartition() {
synchronized(resetLock) {
if (reset) { return; }
prepareNewPartitionInProgress = true;
}
try {
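// Recycle the oldest partition: capture each rolled-over feed's last value
// from the buffer being evicted and seed it into the fresh partition, so the
// most recent point per feed survives eviction.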
int newBufferPartition = this.currentParition.getBufferEnv().nextBufferPartition();
PartitionDataBuffer toBeClosedBuffer = this.partitionDataBuffers[newBufferPartition].get();
Map<String, SortedMap<Long, Map<String, String>>> rollOverData = null;
if (toBeClosedBuffer != null) {
Set<String> rollOverFeedIDs = metaDataBuffer.resetPartitionMetaData(newBufferPartition);
if (!rollOverFeedIDs.isEmpty()) {
rollOverData = toBeClosedBuffer.getLastData(rollOverFeedIDs, TimeUnit.NANOSECONDS, 0, Long.MAX_VALUE);
}
}
closeBuffer(toBeClosedBuffer);
}
DataBufferEnv newBufferEnv = this.currentParition.getBufferEnv().advanceBufferPartition();
PartitionDataBuffer newPartitionBuffer = dataBufferHelper.newPartitionBuffer(newBufferEnv);
if (rollOverData != null) {
Map<String, Map<Long, Map<String, String>>> data = new HashMap<String, Map<Long,Map<String,String>>>();
for (Entry<String, SortedMap<Long, Map<String, String>>> entry: rollOverData.entrySet()) {
Map<Long, Map<String, String>> feedData = new HashMap<Long, Map<String,String>>(entry.getValue());
data.put(entry.getKey(), feedData);
}
try {
Map<String, PartitionTimestamps> timeStamps = putData(newPartitionBuffer, data, TimeUnit.NANOSECONDS);
if (timeStamps != null) {
metaDataBuffer.updatePartitionMetaData(newBufferPartition, timeStamps);
}
} catch (BufferFullException e) {
LOGGER.error("Buffer full during prepareForNextPartition", e);
}
}
this.partitionDataBuffers[newBufferEnv.getCurrentBufferPartition()].set(newPartitionBuffer);
} finally {
synchronized(resetLock) {
prepareNewPartitionInProgress = false;
resetLock.notifyAll();
}
}
}
public void moveToNextPartition() {
int nextBufferPartition = this.currentParition.getBufferEnv().nextBufferPartition();
int currentBufferPartition = this.currentParition.getBufferEnv().getCurrentBufferPartition();
PartitionDataBuffer toBeInActiveBuffer = this.partitionDataBuffers[currentBufferPartition].get();
metaDataBuffer.writeCurrentBufferPartition(nextBufferPartition);
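// Block new reads/writes and wait for in-flight ones to drain before
// swapping the current-partition pointer.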
synchronized (movePartitionLock) {
if (reset) { return; }
while (readInProgress || writeInProgress) {
try {
movePartitionLock.wait();
} catch (InterruptedException e) {
// ignore
}
}
moveParitionInProgress = true;
}
try {
this.currentParition = this.partitionDataBuffers[nextBufferPartition].get();
} finally {
synchronized (movePartitionLock) {
moveParitionInProgress = false;
movePartitionLock.notifyAll();
}
}
metaDataBuffer.writePartitionMetaData(currentBufferPartition);
if (toBeInActiveBuffer != null) {
toBeInActiveBuffer.getBufferEnv().flush();
toBeInActiveBuffer.inactive();
} else {
LOGGER.warn("PartitionDataBuffer object should not be null!");
LOGGER.warn("currentBufferPartition={}, nextBufferPartition={}", currentBufferPartition, nextBufferPartition);
}
}
@Override
public LOS getLOS() {
return this.currentParition.getBufferEnv().getLOS();
}
DataBufferEvictor getEvictor() {
return this.evictor;
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_internal_DataBuffer.java |
54 | final class DataBufferEvictor extends Timer {
private static final Logger logger = LoggerFactory.getLogger(DataBufferEvictor.class);
private final long evictMillis;
private final long switchMillis;
private final DataBuffer dataBuffer;
DataBufferEvictor(DataBuffer dataBuffer, long evictMillis, long switchMillis) {
super("DataBuffer Evictor", true);
this.evictMillis = evictMillis;
this.switchMillis = switchMillis;
this.dataBuffer = dataBuffer;
}
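// The evictor alternates two one-shot tasks: evictMillis after scheduling,
// the next partition is prepared (roll-over data copied in); switchMillis
// later the buffer moves to it. A task that fails reschedules itself.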
void schedule() {
super.schedule(newPrepareTask(), evictMillis);
}
private TimerTask newPrepareTask() {
return new TimerTask() {
@Override
public void run() {
try {
dataBuffer.prepareForNextPartition();
schedule(newMoveTask(), switchMillis);
} catch(Exception e) {
logger.error(e.toString(), e);
schedule(newPrepareTask(), evictMillis);
}
}
};
}
private TimerTask newMoveTask() {
return new TimerTask() {
@Override
public void run() {
try {
dataBuffer.moveToNextPartition();
schedule(newPrepareTask(), evictMillis);
} catch(Exception e) {
logger.error(e.toString(), e);
schedule(newMoveTask(), switchMillis);
}
}
};
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_internal_DataBufferEvictor.java |
57 | public class DataBufferEvictorTest {
private DataBuffer dataBuffer;
private File bufferLocation;
@BeforeMethod
public void setup() throws IOException {
DataBufferFactory.reset();
Properties prop = new Properties();
prop.load(ClassLoader.getSystemResourceAsStream("properties/testFeed.properties"));
prop.put("buffer.partitions", "3");
prop.put("buffer.time.millis", "12000");
prop.put("buffer.partition.overlap.millis", "1000");
bufferLocation = File.createTempFile("mct-buffer", "");
bufferLocation.delete();
bufferLocation.mkdir();
prop.put("buffer.disk.loc", bufferLocation.toString());
dataBuffer = DataBufferFactory.getFastDiskDataBuffer(prop);
if (dataBuffer.isDataBufferClose()) {
dataBuffer.reset();
}
}
@AfterMethod
public void reset() {
if (dataBuffer != null) {
dataBuffer.closeBuffer();
}
DataBufferFactory.reset();
delete(bufferLocation);
}
private void delete(File f) {
if (f.isDirectory()) {
for (File f2 : f.listFiles()) {
delete(f2);
}
}
f.delete();
}
@Test
public void switchPartitionsTest() throws Exception {
int currentPartition = getCurrentBufferPartition();
Assert.assertEquals(currentPartition, 0);
Thread.sleep(5000);
currentPartition = getCurrentBufferPartition();
Assert.assertEquals(currentPartition, 1);
Thread.sleep(5000);
currentPartition = getCurrentBufferPartition();
Assert.assertEquals(currentPartition, 2);
Thread.sleep(5000);
currentPartition = getCurrentBufferPartition();
Assert.assertEquals(currentPartition, 0);
}
@Test
public void testExceptions() {
Properties prop = new Properties();
prop.put("memory.buffer.partition", "2");
prop.put("memory.buffer.time.millis", "100");
final CountDownLatch latch = new CountDownLatch(2);
DataBufferEnv env = new MemoryBufferEnv(prop);
DataBufferHelper partitionBufferFactory = new MemoryDataBufferHelper();
DataBuffer mockBuffer = new DataBuffer(env, partitionBufferFactory) {
int prepareCount;
int moveCount;
@Override
public void prepareForNextPartition() {
prepareCount++;
if (prepareCount == 1) {
throw new RuntimeException("This exception is normal.");
} else if (prepareCount == 2) {
latch.countDown();
}
}
@Override
public void moveToNextPartition() {
moveCount++;
if (moveCount == 1) {
throw new RuntimeException("This exception is normal.");
} else if (moveCount == 2) {
latch.countDown();
}
}
};
try {
Assert.assertTrue(latch.await(1, TimeUnit.SECONDS), "Evictor did not reschedule after exceptions");
} catch (InterruptedException e) {
Assert.fail("Evictor failed");
}
mockBuffer.closeBuffer();
}
private int getCurrentBufferPartition() throws Exception {
Field f = DataBuffer.class.getDeclaredField("currentParition");
f.setAccessible(true);
PartitionFastDiskBuffer currentPartitionBuffer = (PartitionFastDiskBuffer)f.get(dataBuffer);
return currentPartitionBuffer.getBufferEnv().getCurrentBufferPartition();
}
} | false | timeSequenceFeedAggregator_src_test_java_gov_nasa_arc_mct_buffer_internal_DataBufferEvictorTest.java |
59 | public class DataBufferFactory {
private final static AtomicBoolean fastDiskBufferInitializeToken = new AtomicBoolean(false);
private final static AtomicBoolean memoryBufferInitializeToken = new AtomicBoolean(false);
private static volatile DataBuffer fastDiskDataBuffer;
private static volatile DataBuffer memoryDataBuffer;
private final static DataBufferHelper fastDiskBufferHelper = new FastDiskDataBufferHelper();
private final static DataBufferHelper memoryBufferHelper = new MemoryDataBufferHelper();
private DataBufferFactory() {
//
}
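// Lazily creates process-wide singletons; the AtomicBoolean tokens let the
// already-initialized fast path skip synchronization. reset() is
// package-private and intended for tests.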
public static DataBuffer getMemoryDataBuffer(Properties prop) {
if (!memoryBufferInitializeToken.get()) {
synchronized(DataBufferFactory.class) {
if (memoryDataBuffer == null) {
memoryDataBuffer = new CODataBuffer(new MemoryBufferEnv(prop), memoryBufferHelper);
}
}
memoryBufferInitializeToken.compareAndSet(false, true);
}
return memoryDataBuffer;
}
public static DataBuffer getFastDiskDataBuffer(Properties prop) {
if (!fastDiskBufferInitializeToken.get()) {
synchronized(DataBufferFactory.class) {
if (fastDiskDataBuffer == null) {
fastDiskDataBuffer = new CODataBuffer(new FastDiskBufferEnv(prop), fastDiskBufferHelper);
}
}
fastDiskBufferInitializeToken.compareAndSet(false, true);
}
return fastDiskDataBuffer;
}
static void reset() {
fastDiskDataBuffer = null;
memoryDataBuffer = null;
fastDiskBufferInitializeToken.set(false);
memoryBufferInitializeToken.set(false);
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_internal_DataBufferFactory.java |
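A minimal usage sketch for the factory above (not part of the original sources). It assumes a class placed in the same package as DataBuffer and DataBufferFactory, and uses only the three property keys the tests override (buffer.partitions, buffer.time.millis, buffer.disk.loc); the tests additionally load a base properties/testFeed.properties file, so a real configuration likely needs more keys.
import java.io.File;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
public class DataBufferUsageSketch {
public static void main(String[] args) throws Exception {
Properties prop = new Properties();
prop.put("buffer.partitions", "2"); // rotate across two partitions
prop.put("buffer.time.millis", "-1"); // -1 disables the evictor (manual rotation only)
File loc = File.createTempFile("mct-buffer", "");
loc.delete();
loc.mkdir();
prop.put("buffer.disk.loc", loc.toString());
DataBuffer buffer = DataBufferFactory.getFastDiskDataBuffer(prop);
Map<String, String> sample = new HashMap<String, String>();
sample.put("value", "1.3");
sample.put("status", "ok");
long now = System.currentTimeMillis();
buffer.putData("TestPui1", TimeUnit.MILLISECONDS, now, sample);
// Read the sample back over a window that contains it.
List<Map<String, String>> points = buffer.getData(Collections.singleton("TestPui1"),
TimeUnit.MILLISECONDS, now, now + 100).get("TestPui1");
System.out.println(points);
buffer.closeBuffer();
}
}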
60 | public interface DataBufferHelper {
public PartitionDataBuffer newPartitionBuffer(int partitionNo);
public PartitionDataBuffer newPartitionBuffer(DataBufferEnv env);
public MetaDataBuffer newMetaDataBuffer(DataBufferEnv env);
public DataBufferEnv newMetaDataBufferEnv(Properties prop);
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_internal_DataBufferHelper.java |
61 | public class DataBufferResetTest {
private DataBuffer dataBuffer;
private String testFeedID1 = "TestPui1";
private File bufferLocation;
@BeforeMethod
public void setup() throws IOException {
Properties prop = new Properties();
prop.load(ClassLoader.getSystemResourceAsStream("properties/testFeed.properties"));
prop.put("buffer.partitions", "1");
prop.put("buffer.time.millis", "-1");
bufferLocation = File.createTempFile("mct-buffer", "");
bufferLocation.delete();
bufferLocation.mkdir();
prop.put("buffer.disk.loc", bufferLocation.toString());
dataBuffer = DataBufferFactory.getFastDiskDataBuffer(prop);
if (dataBuffer.isDataBufferClose()) {
dataBuffer.reset();
}
}
@AfterMethod
public void reset() {
if (dataBuffer != null) {
dataBuffer.closeBuffer();
}
DataBufferFactory.reset();
delete(bufferLocation);
}
private void delete(File f) {
if (f.isDirectory()) {
for (File f2 : f.listFiles()) {
delete(f2);
}
}
f.delete();
}
@Test
public void resetTest() throws BufferFullException {
Map<String, String> value = new HashMap<String, String>();
value.put("value", "1.3");
value.put("status", "ok");
long time = System.currentTimeMillis();
long nanotime = TimeUnit.NANOSECONDS.convert(time, TimeUnit.MILLISECONDS);
dataBuffer.putData(testFeedID1, TimeUnit.MILLISECONDS, time, value);
List<Map<String, String>> returnData = dataBuffer.getData(Collections.singleton(testFeedID1), TimeUnit.NANOSECONDS, nanotime,
nanotime + 100).get(testFeedID1);
Assert.assertEquals(returnData.size(), 1);
Map<String, String> returnValue = returnData.get(0);
Assert.assertNotSame(returnValue, value);
assertHasSameValue(returnValue, value);
dataBuffer.reset();
returnData = dataBuffer.getData(Collections.singleton(testFeedID1), TimeUnit.NANOSECONDS, nanotime,
nanotime + 100).get(testFeedID1);
Assert.assertNull(returnData);
value = new HashMap<String, String>();
value.put("value", "1.4");
value.put("status", "true");
time = System.currentTimeMillis();
nanotime = TimeUnit.NANOSECONDS.convert(time, TimeUnit.MILLISECONDS);
dataBuffer.putData(testFeedID1, TimeUnit.MILLISECONDS, time, value);
returnData = dataBuffer.getData(Collections.singleton(testFeedID1), TimeUnit.NANOSECONDS, nanotime,
nanotime + 100).get(testFeedID1);
Assert.assertEquals(returnData.size(), 1);
returnValue = returnData.get(0);
Assert.assertNotSame(returnValue, value);
assertHasSameValue(returnValue, value);
}
private void assertHasSameValue(Map<String, String> actualValue, Map<String, String> expectedValue) {
Assert.assertEquals(actualValue.size(), expectedValue.size());
for (String key : actualValue.keySet()) {
Assert.assertEquals(actualValue.get(key), expectedValue.get(key));
}
}
} | false | timeSequenceFeedAggregator_src_test_java_gov_nasa_arc_mct_buffer_internal_DataBufferResetTest.java |
62 | public class DataBufferTest {
private DataBuffer dataBuffer;
private String testFeedID1 = "TestPui1";
private String testFeedID2 = "TestPui2";
private File bufferLocation;
@BeforeMethod
public void setup() throws IOException {
Properties prop = new Properties();
prop.load(ClassLoader.getSystemResourceAsStream("properties/testFeed.properties"));
prop.put("buffer.partitions", "1");
prop.put("buffer.time.millis", "-1");
bufferLocation = File.createTempFile("mct-buffer", "");
bufferLocation.delete();
bufferLocation.mkdir();
prop.put("buffer.disk.loc", bufferLocation.toString());
dataBuffer = DataBufferFactory.getFastDiskDataBuffer(prop);
if (dataBuffer.isDataBufferClose()) {
dataBuffer.reset();
}
}
@AfterMethod
public void reset() {
if (dataBuffer != null) {
dataBuffer.closeBuffer();
}
DataBufferFactory.reset();
delete(bufferLocation);
}
private void delete(File f) {
if (f.isDirectory()) {
for (File f2 : f.listFiles()) {
delete(f2);
}
}
f.delete();
}
@Test
public void fastLOSTest() {
Assert.assertEquals(dataBuffer.getLOS(), LOS.medium);
}
@Test
public void putSingleDataTest() throws BufferFullException {
Map<String, String> value = new HashMap<String, String>();
value.put("value", "1.3");
value.put("status", "ok");
long time = System.currentTimeMillis();
long nanotime = TimeUnit.NANOSECONDS.convert(time, TimeUnit.MILLISECONDS);
dataBuffer.putData(testFeedID1, TimeUnit.MILLISECONDS, time, value);
List<Map<String, String>> returnData = dataBuffer.getData(Collections.singleton(testFeedID1),
TimeUnit.NANOSECONDS, nanotime, nanotime + 100).get(testFeedID1);
Assert.assertEquals(returnData.size(), 1);
Map<String, String> returnValue = returnData.get(0);
Assert.assertNotSame(returnValue, value);
assertHasSameValue(returnValue, value);
}
@Test
public void putBulkValueTest() throws BufferFullException {
Map<String, String> value = new HashMap<String, String>();
value.put("value", "1.3");
value.put("status", "ok");
long time = System.currentTimeMillis();
long nanotime = TimeUnit.NANOSECONDS.convert(time, TimeUnit.MILLISECONDS);
final AtomicInteger callbackCount = new AtomicInteger(0);
Runnable r = new Runnable() {
public void run() {
callbackCount.incrementAndGet();
}
};
Map<String, Map<Long, Map<String, String>>> bulkValue = new HashMap<String, Map<Long, Map<String, String>>>();
Map<Long, Map<String, String>> aValue = new HashMap<Long, Map<String, String>>();
aValue.put(time, value);
bulkValue.put(testFeedID1, aValue);
dataBuffer.putData(bulkValue, TimeUnit.MILLISECONDS, r);
List<Map<String, String>> returnData = dataBuffer.getData(Collections.singleton(testFeedID1),
TimeUnit.NANOSECONDS, nanotime, nanotime + 100).get(testFeedID1);
Assert.assertEquals(returnData.size(), 1);
Map<String, String> returnValue = returnData.get(0);
Assert.assertNotSame(returnValue, value);
assertHasSameValue(returnValue, value);
}
@Test
public void notExactTimestampTest() throws BufferFullException {
Map<String, String> value = new HashMap<String, String>();
value.put("value", "1.3");
value.put("status", "ok");
long time = System.nanoTime();
dataBuffer.putData(testFeedID1, TimeUnit.NANOSECONDS, time, value);
List<Map<String, String>> returnData = dataBuffer.getData(Collections.singleton(testFeedID1),
TimeUnit.NANOSECONDS, time - 100, time + 100).get(testFeedID1);
Assert.assertEquals(returnData.size(), 1);
Map<String, String> returnValue = returnData.get(0);
Assert.assertNotSame(returnValue, value);
assertHasSameValue(returnValue, value);
}
@Test
public void multipleFeedsTest() throws BufferFullException {
Map<String, String> value1 = new HashMap<String, String>();
value1.put("value", "1.3");
value1.put("status", "ok");
long time1 = System.currentTimeMillis();
dataBuffer.putData(testFeedID1, TimeUnit.MILLISECONDS, time1, value1);
Map<String, String> value2 = new HashMap<String, String>();
value2.put("value", "2.3");
value2.put("status", "ok2");
long time2 = System.currentTimeMillis();
dataBuffer.putData(testFeedID2, TimeUnit.MILLISECONDS, time2, value2);
List<Map<String, String>> returnData = dataBuffer.getData(Collections.singleton(testFeedID1),
TimeUnit.MILLISECONDS, time1, time1 + 100).get(testFeedID1);
Assert.assertEquals(returnData.size(), 1);
Map<String, String> returnValue = returnData.get(0);
Assert.assertNotSame(returnValue, value1);
assertHasSameValue(returnValue, value1);
returnData = dataBuffer.getData(Collections.singleton(testFeedID2), TimeUnit.MILLISECONDS, time2, time2 + 100).get(testFeedID2);
Assert.assertEquals(returnData.size(), 1);
returnValue = returnData.get(0);
Assert.assertNotSame(returnValue, value2);
assertHasSameValue(returnValue, value2);
}
@Test
public void isFullyWithinTimeSpanTest() throws BufferFullException {
Map<String, String> value1 = new HashMap<String, String>();
value1.put("value", "1.3");
value1.put("status", "ok");
long time1 = System.currentTimeMillis();
dataBuffer.putData(testFeedID1, TimeUnit.MILLISECONDS, time1, value1);
Map<String, String> value2 = new HashMap<String, String>();
value2.put("value", "2.3");
value2.put("status", "ok2");
long time2 = time1 + 100;
dataBuffer.putData(testFeedID2, TimeUnit.MILLISECONDS, time2, value2);
Map<String, String> value3 = new HashMap<String, String>();
value3.put("value", "2.3");
value3.put("status", "ok2");
long time3 = time1 + 200;
dataBuffer.putData(testFeedID1, TimeUnit.MILLISECONDS, time3, value3);
Map<String, String> value4 = new HashMap<String, String>();
value4.put("value", "2.3");
value4.put("status", "ok2");
long time4 = time1 + 300;
dataBuffer.putData(testFeedID2, TimeUnit.MILLISECONDS, time4, value4);
Assert.assertTrue(dataBuffer.isFullyWithinTimeSpan(testFeedID1, time1, TimeUnit.MILLISECONDS));
Assert.assertFalse(dataBuffer.isFullyWithinTimeSpan(testFeedID1, time1 - 1, TimeUnit.MILLISECONDS));
Assert.assertTrue(dataBuffer.isFullyWithinTimeSpan(testFeedID2, time2, TimeUnit.MILLISECONDS));
Assert.assertFalse(dataBuffer.isFullyWithinTimeSpan(testFeedID2, time2 - 1, TimeUnit.MILLISECONDS));
}
@Test
public void multipleFeedsSameTimeTest() throws BufferFullException {
Map<String, String> value1 = new HashMap<String, String>();
value1.put("value", "1.3");
value1.put("status", "ok");
long time = System.currentTimeMillis();
dataBuffer.putData(testFeedID1, TimeUnit.MILLISECONDS, time, value1);
Map<String, String> value2 = new HashMap<String, String>();
value2.put("value", "2.3");
value2.put("status", "ok2");
dataBuffer.putData(testFeedID2, TimeUnit.MILLISECONDS, time, value2);
List<Map<String, String>> returnData = dataBuffer.getData(Collections.singleton(testFeedID1),
TimeUnit.MILLISECONDS, time, time + 100).get(testFeedID1);
Assert.assertEquals(returnData.size(), 1);
Map<String, String> returnValue = returnData.get(0);
Assert.assertNotSame(returnValue, value1);
assertHasSameValue(returnValue, value1);
returnData = dataBuffer.getData(Collections.singleton(testFeedID2), TimeUnit.MILLISECONDS, time, time + 100).get(testFeedID2);
Assert.assertEquals(returnData.size(), 1);
returnValue = returnData.get(0);
Assert.assertNotSame(returnValue, value2);
assertHasSameValue(returnValue, value2);
}
@Test
public void putMultipleDataTest() throws InterruptedException, BufferFullException {
Map<Long, Map<String, String>> data = new HashMap<Long, Map<String, String>>();
Map<String, String> value1 = new HashMap<String, String>();
value1.put("value", "1.3");
value1.put("status", "ok");
long time1 = System.currentTimeMillis();
data.put(time1, value1);
Thread.sleep(200);
Map<String, String> value2 = new HashMap<String, String>();
value2.put("value", "1.4");
value2.put("status", "not-ok");
long time2 = System.currentTimeMillis();
data.put(time2, value2);
dataBuffer.putData(testFeedID1, TimeUnit.MILLISECONDS, data);
List<Map<String, String>> returnData = dataBuffer.getData(Collections.singleton(testFeedID1),
TimeUnit.MILLISECONDS, time1, time1 + 100).get(testFeedID1);
Assert.assertEquals(returnData.size(), 1);
Map<String, String> returnValue = returnData.get(0);
Assert.assertNotSame(returnValue, value1);
assertHasSameValue(returnValue, value1);
returnData = dataBuffer.getData(Collections.singleton(testFeedID1), TimeUnit.MILLISECONDS, time1, time2 + 100).get(testFeedID1);
Assert.assertEquals(returnData.size(), 2);
returnValue = returnData.get(0);
Assert.assertNotSame(returnValue, value1);
assertHasSameValue(returnValue, value1);
returnValue = returnData.get(1);
Assert.assertNotSame(returnValue, value2);
assertHasSameValue(returnValue, value2);
}
@Test (enabled=false)
public void longRunningTest() throws InterruptedException, BrokenBarrierException {
final CyclicBarrier barrier = new CyclicBarrier(3);
final PutDataRunnable putDataTask = new PutDataRunnable(barrier);
final GetDataRunnable getDataTask = new GetDataRunnable(barrier);
final Thread t1 = new Thread(putDataTask);
final Thread t2 = new Thread(getDataTask);
t1.start();
t2.start();
Timer timer = new Timer();
TimerTask task = new TimerTask() {
@Override
public void run() {
putDataTask.interrupt();
getDataTask.interrupt();
}
};
timer.schedule(task, 6000);
barrier.await();
Assert.assertTrue(getDataTask.isPassed());
}
private void assertHasSameValue(Map<String, String> actualValue,
Map<String, String> expectedValue) {
Assert.assertEquals(actualValue.size(), expectedValue.size());
for (String key : actualValue.keySet()) {
Assert.assertEquals(actualValue.get(key), expectedValue.get(key));
}
}
private final class GetDataRunnable implements Runnable {
private boolean stop = false;
private CyclicBarrier barrier;
private boolean assertPassed = true;
public GetDataRunnable(CyclicBarrier barrier) {
this.barrier = barrier;
}
@Override
public void run() {
List<Map<String, String>> readData = new LinkedList<Map<String, String>>();
long startTime = System.currentTimeMillis();
long endTime = startTime;
int oldValue = -1;
while (!stop) {
List<Map<String, String>> data = dataBuffer.getData(Collections.singleton(testFeedID1),
TimeUnit.MILLISECONDS, startTime, endTime).get(testFeedID1);
if (data == null) {
data = Collections.emptyList();
}
int index = 0;
for (Map<String, String> record : data) {
int value = Integer.parseInt(record.get("value"));
long time = Long.parseLong(record.get("time"));
assertPassed = assertPassed && time <= endTime; // with COD behavior the first returned point may precede the requested start time
if (index++ > 0) {
assertPassed = assertPassed && time >= startTime;
}
assertPassed = assertPassed && (value >= oldValue);
if (!assertPassed) {
stop = true;
}
oldValue = value;
}
readData.addAll(data);
startTime = endTime;
endTime = System.currentTimeMillis();
}
try {
barrier.await();
} catch (InterruptedException e) {
throw new AssertionError(e);
} catch (BrokenBarrierException e) {
throw new AssertionError(e);
}
}
public void interrupt() {
this.stop = true;
}
public boolean isPassed() {
return assertPassed;
}
}
private final class PutDataRunnable implements Runnable {
private boolean stop = false;
private CyclicBarrier barrier;
public PutDataRunnable(CyclicBarrier barrier) {
this.barrier = barrier;
}
@Override
public void run() {
int i = 0;
while (!stop) {
Map<String, String> value = new HashMap<String, String>();
long time = System.currentTimeMillis();
value.put("value", String.valueOf(i));
value.put("time", String.valueOf(time));
try {
dataBuffer.putData(testFeedID1, TimeUnit.MILLISECONDS, time, value);
i++;
Thread.sleep(100);
} catch (InterruptedException e) {
// ignore interrupt
} catch (BufferFullException e1) {
e1.printStackTrace();
}
}
try {
barrier.await();
} catch (InterruptedException e) {
throw new AssertionError(e);
} catch (BrokenBarrierException e) {
throw new AssertionError(e);
}
}
public void interrupt() {
stop = true;
}
}
} | false | timeSequenceFeedAggregator_src_test_java_gov_nasa_arc_mct_buffer_internal_DataBufferTest.java |
67 | public class FeedAggregatorServiceTest {
private DataBuffer dataBuffer;
private DataBuffer memoryBuffer;
private FeedAggregatorService feedAggregatorService;
private String testFeedID1 = "TestPui1";
private String testFeedID2 = "TestPui2";
private File bufferLocation;
@Mock
private DataProvider mockDataProvider;
private List<Map<String, String>> data;
@SuppressWarnings("unchecked")
@BeforeMethod
public void setup() throws IOException {
Properties prop = new Properties();
prop.load(ClassLoader.getSystemResourceAsStream("properties/testFeed.properties"));
prop.put("buffer.partitions", "1");
prop.put("buffer.time.millis", "-1");
bufferLocation = File.createTempFile("mct-buffer", "");
bufferLocation.delete();
Assert.assertTrue(bufferLocation.mkdir());
prop.put("buffer.disk.loc", bufferLocation.toString());
dataBuffer = DataBufferFactory.getFastDiskDataBuffer(prop);
if (dataBuffer.isDataBufferClose()) {
dataBuffer.reset();
}
memoryBuffer = DataBufferFactory.getMemoryDataBuffer(prop);
feedAggregatorService = new FeedAggregatorService(prop);
MockitoAnnotations.initMocks(this);
Mockito.when(mockDataProvider.getLOS()).thenReturn(LOS.slow);
data = new ArrayList<Map<String,String>>();
Map<String, String> value1 = new HashMap<String, String>();
value1.put("value", "1.3");
value1.put("status", "ok");
data.add(value1);
Map<String, String> value2 = new HashMap<String, String>();
value2.put("value", "1.4");
value2.put("status", "ok");
data.add(value2);
Map<String, SortedMap<Long, Map<String, String>>> mapData = new HashMap<String, SortedMap<Long, Map<String,String>>>();
SortedMap<Long, Map<String, String>> sortedTimeData = new TreeMap<Long, Map<String,String>>();
sortedTimeData.put(System.currentTimeMillis(), value1);
sortedTimeData.put(System.currentTimeMillis()+100, value2);
mapData.put(testFeedID1, sortedTimeData);
Mockito.when(mockDataProvider.getData(Mockito.anySet(), Mockito.anyLong(), Mockito.anyLong(), Mockito.<TimeUnit>any())).thenReturn(mapData);
}
@AfterMethod
public void reset() {
if (dataBuffer != null) {
dataBuffer.closeBuffer();
}
if (memoryBuffer != null) {
memoryBuffer.closeBuffer();
}
DataBufferFactory.reset();
delete(bufferLocation);
}
private void delete(File f) {
if (f.isDirectory()) {
for (File f2 : f.listFiles()) {
delete(f2);
}
}
f.delete();
}
@Test(groups="feedAggregatorServiceTest")
public void multipleDataProvidersTest() {
long time = System.currentTimeMillis();
List<Map<String, String>> returnData = feedAggregatorService.getData(Collections.singleton(testFeedID1), TimeUnit.MILLISECONDS, time, time).get(testFeedID1);
Assert.assertNull(returnData);
feedAggregatorService.addDataProvider(mockDataProvider);
returnData = feedAggregatorService.getData(Collections.singleton(testFeedID1), TimeUnit.MILLISECONDS, time, time+100).get(testFeedID1);
Assert.assertEquals(returnData.size(), 2);
assertHasSameValue(returnData.get(0), data.get(0));
assertHasSameValue(returnData.get(1), data.get(1));
}
@Test(groups="feedAggregatorServiceTest")
public void multipleDataProvidersTest2() throws InterruptedException {
Map<String, String> value = new HashMap<String, String>();
value.put("value", "1.3");
value.put("status", "ok");
long time = System.currentTimeMillis();
feedAggregatorService.putData(testFeedID2, TimeUnit.MILLISECONDS, time, value);
feedAggregatorService.addDataProvider(mockDataProvider);
Set<String> feedIDs = new HashSet<String>();
feedIDs.add(testFeedID1);
feedIDs.add(testFeedID2);
Map<String, List<Map<String, String>>> returnData = feedAggregatorService.getData(feedIDs, TimeUnit.MILLISECONDS, time - 10000, time);
Assert.assertEquals(returnData.size(), 2);
List<Map<String, String>> data1 = returnData.get(testFeedID1);
Assert.assertEquals(data1.size(), 2);
assertHasSameValue(data1.get(0), data.get(0));
assertHasSameValue(data1.get(1), data.get(1));
List<Map<String, String>> data2 = returnData.get(testFeedID2);
Assert.assertEquals(data2.size(), 1);
assertHasSameValue(data2.get(0), value);
}
@Test(groups="feedAggregatorServiceTest")
public void putSingleTest() throws InterruptedException {
Map<String, String> value = new HashMap<String, String>();
value.put("value", "1.3");
value.put("status", "ok");
long time = System.currentTimeMillis();
feedAggregatorService.putData(testFeedID1, TimeUnit.MILLISECONDS, time, value);
Thread.sleep(1000);
List<Map<String, String>> returnData = feedAggregatorService.getData(Collections.singleton(testFeedID1), TimeUnit.MILLISECONDS, time, time+100).get(testFeedID1);
Assert.assertEquals(returnData.size(), 1);
Map<String, String> returnValue = returnData.get(0);
Assert.assertNotSame(returnValue, value);
assertHasSameValue(returnValue, value);
}
private void assertHasSameValue(Map<String, String> actualValue, Map<String, String> expectedValue) {
Assert.assertEquals(actualValue.size(), expectedValue.size());
for (String key : actualValue.keySet()) {
Assert.assertEquals(actualValue.get(key), expectedValue.get(key));
}
}
} | false | timeSequenceFeedAggregator_src_test_java_gov_nasa_arc_mct_buffer_internal_FeedAggregatorServiceTest.java |
68 | public class MemoryBufferResetTest {
private DataBuffer dataBuffer;
private String testFeedID1 = "TestPui1";
@BeforeMethod
public void setup() throws IOException {
Properties prop = new Properties();
prop.load(ClassLoader.getSystemResourceAsStream("properties/testFeed.properties"));
prop.put("buffer.partitions", "1");
prop.put("buffer.time.millis", "-1");
dataBuffer = DataBufferFactory.getMemoryDataBuffer(prop);
if (dataBuffer.isDataBufferClose()) {
dataBuffer.reset();
}
}
@AfterMethod
public void reset() {
if (dataBuffer != null) {
dataBuffer.reset();
}
DataBufferFactory.reset();
}
@Test
public void resetTest() throws BufferFullException {
Map<String, String> value = new HashMap<String, String>();
value.put("value", "1.3");
value.put("status", "ok");
long time = System.currentTimeMillis();
long nanotime = TimeUnit.NANOSECONDS.convert(time, TimeUnit.MILLISECONDS);
dataBuffer.putData(testFeedID1, TimeUnit.MILLISECONDS, time, value);
List<Map<String, String>> returnData = dataBuffer.getData(Collections.singleton(testFeedID1), TimeUnit.NANOSECONDS, nanotime,
nanotime + 100).get(testFeedID1);
Assert.assertEquals(returnData.size(), 1);
Map<String, String> returnValue = returnData.get(0);
Assert.assertNotSame(returnValue, value);
assertHasSameValue(returnValue, value);
dataBuffer.reset();
returnData = dataBuffer.getData(Collections.singleton(testFeedID1), TimeUnit.NANOSECONDS, nanotime,
nanotime + 100).get(testFeedID1);
Assert.assertNull(returnData);
value = new HashMap<String, String>();
value.put("value", "1.4");
value.put("status", "true");
time = System.currentTimeMillis();
nanotime = TimeUnit.NANOSECONDS.convert(time, TimeUnit.MILLISECONDS);
dataBuffer.putData(testFeedID1, TimeUnit.MILLISECONDS, time, value);
returnData = dataBuffer.getData(Collections.singleton(testFeedID1), TimeUnit.NANOSECONDS, nanotime,
nanotime + 100).get(testFeedID1);
Assert.assertEquals(returnData.size(), 1);
returnValue = returnData.get(0);
Assert.assertNotSame(returnValue, value);
assertHasSameValue(returnValue, value);
}
private void assertHasSameValue(Map<String, String> actualValue, Map<String, String> expectedValue) {
Assert.assertEquals(actualValue.size(), expectedValue.size());
for (String key : actualValue.keySet()) {
Assert.assertEquals(actualValue.get(key), expectedValue.get(key));
}
}
} | false | timeSequenceFeedAggregator_src_test_java_gov_nasa_arc_mct_buffer_internal_MemoryBufferResetTest.java |
69 | public class MemoryDataBufferTest {
private DataBuffer dataBuffer;
private String testFeedID1 = "TestPui1";
private String testFeedID2 = "TestPui2";
private MemoryBufferEnv env;
@BeforeMethod
public void setup() throws IOException {
Properties prop = new Properties();
prop.load(ClassLoader.getSystemResourceAsStream("properties/testFeed.properties"));
prop.put("memory.buffer.partition", "1");
prop.put("memory.buffer.time.millis", "-1");
env = new MemoryBufferEnv(prop);
dataBuffer = new DataBuffer(env, new MemoryDataBufferHelper());
if (dataBuffer.isDataBufferClose()) {
dataBuffer.reset();
}
}
@AfterMethod
public void reset() {
if (dataBuffer != null) {
dataBuffer.reset();
}
DataBufferFactory.reset();
}
@Test
public void fastLOSTest() {
Assert.assertEquals(dataBuffer.getLOS(), LOS.fast);
}
@Test
public void putSingleDataTest() throws BufferFullException {
Map<String, String> value = new HashMap<String, String>();
value.put("value", "1.3");
value.put("status", "ok");
long time = System.currentTimeMillis();
long nanotime = TimeUnit.NANOSECONDS.convert(time, TimeUnit.MILLISECONDS);
dataBuffer.putData(testFeedID1, TimeUnit.MILLISECONDS, time, value);
List<Map<String, String>> returnData = dataBuffer.getData(Collections.singleton(testFeedID1),
TimeUnit.NANOSECONDS, nanotime, nanotime + 100).get(testFeedID1);
Assert.assertEquals(returnData.size(), 1);
Map<String, String> returnValue = returnData.get(0);
Assert.assertNotSame(returnValue, value);
assertHasSameValue(returnValue, value);
}
@Test
public void putBulkValueTest() throws BufferFullException {
Map<String, String> value = new HashMap<String, String>();
value.put("value", "1.3");
value.put("status", "ok");
long time = System.currentTimeMillis();
long nanotime = TimeUnit.NANOSECONDS.convert(time, TimeUnit.MILLISECONDS);
final AtomicInteger callbackCount = new AtomicInteger(0);
Runnable r = new Runnable() {
public void run() {
callbackCount.incrementAndGet();
}
};
Map<String, Map<Long, Map<String, String>>> bulkValue = new HashMap<String, Map<Long, Map<String, String>>>();
Map<Long, Map<String, String>> aValue = new HashMap<Long, Map<String, String>>();
aValue.put(time, value);
bulkValue.put(testFeedID1, aValue);
dataBuffer.putData(bulkValue, TimeUnit.MILLISECONDS, r);
List<Map<String, String>> returnData = dataBuffer.getData(Collections.singleton(testFeedID1),
TimeUnit.NANOSECONDS, nanotime, nanotime + 100).get(testFeedID1);
Assert.assertEquals(returnData.size(), 1);
Map<String, String> returnValue = returnData.get(0);
Assert.assertNotSame(returnValue, value);
assertHasSameValue(returnValue, value);
}
@Test
public void notExactTimestampTest() throws BufferFullException {
Map<String, String> value = new HashMap<String, String>();
value.put("value", "1.3");
value.put("status", "ok");
long time = System.nanoTime();
dataBuffer.putData(testFeedID1, TimeUnit.NANOSECONDS, time, value);
List<Map<String, String>> returnData = dataBuffer.getData(Collections.singleton(testFeedID1),
TimeUnit.NANOSECONDS, time - 100, time + 100).get(testFeedID1);
Assert.assertEquals(returnData.size(), 1);
Map<String, String> returnValue = returnData.get(0);
Assert.assertNotSame(returnValue, value);
assertHasSameValue(returnValue, value);
}
@Test
public void multipleFeedsTest() throws BufferFullException {
Map<String, String> value1 = new HashMap<String, String>();
value1.put("value", "1.3");
value1.put("status", "ok");
long time1 = System.currentTimeMillis();
dataBuffer.putData(testFeedID1, TimeUnit.MILLISECONDS, time1, value1);
Map<String, String> value2 = new HashMap<String, String>();
value2.put("value", "2.3");
value2.put("status", "ok2");
long time2 = System.currentTimeMillis();
dataBuffer.putData(testFeedID2, TimeUnit.MILLISECONDS, time2, value2);
List<Map<String, String>> returnData = dataBuffer.getData(Collections.singleton(testFeedID1),
TimeUnit.MILLISECONDS, time1, time1 + 100).get(testFeedID1);
Assert.assertEquals(returnData.size(), 1);
Map<String, String> returnValue = returnData.get(0);
Assert.assertNotSame(returnValue, value1);
assertHasSameValue(returnValue, value1);
returnData = dataBuffer.getData(Collections.singleton(testFeedID2), TimeUnit.MILLISECONDS, time2, time2 + 100).get(testFeedID2);
Assert.assertEquals(returnData.size(), 1);
returnValue = returnData.get(0);
Assert.assertNotSame(returnValue, value2);
assertHasSameValue(returnValue, value2);
}
@Test
public void multipleFeedsSameTimeTest() throws BufferFullException {
Map<String, String> value1 = new HashMap<String, String>();
value1.put("value", "1.3");
value1.put("status", "ok");
long time = System.currentTimeMillis();
dataBuffer.putData(testFeedID1, TimeUnit.MILLISECONDS, time, value1);
Map<String, String> value2 = new HashMap<String, String>();
value2.put("value", "2.3");
value2.put("status", "ok2");
dataBuffer.putData(testFeedID2, TimeUnit.MILLISECONDS, time, value2);
List<Map<String, String>> returnData = dataBuffer.getData(Collections.singleton(testFeedID1),
TimeUnit.MILLISECONDS, time, time + 100).get(testFeedID1);
Assert.assertEquals(returnData.size(), 1);
Map<String, String> returnValue = returnData.get(0);
Assert.assertNotSame(returnValue, value1);
assertHasSameValue(returnValue, value1);
returnData = dataBuffer.getData(Collections.singleton(testFeedID2), TimeUnit.MILLISECONDS, time, time + 100).get(testFeedID2);
Assert.assertEquals(returnData.size(), 1);
returnValue = returnData.get(0);
Assert.assertNotSame(returnValue, value2);
assertHasSameValue(returnValue, value2);
}
@Test
public void putMultipleDataTest() throws InterruptedException, BufferFullException {
Map<Long, Map<String, String>> data = new HashMap<Long, Map<String, String>>();
Map<String, String> value1 = new HashMap<String, String>();
value1.put("value", "1.3");
value1.put("status", "ok");
long time1 = System.currentTimeMillis();
data.put(time1, value1);
Thread.sleep(200);
Map<String, String> value2 = new HashMap<String, String>();
value2.put("value", "1.4");
value2.put("status", "not-ok");
long time2 = System.currentTimeMillis();
data.put(time2, value2);
dataBuffer.putData(testFeedID1, TimeUnit.MILLISECONDS, data);
List<Map<String, String>> returnData = dataBuffer.getData(Collections.singleton(testFeedID1),
TimeUnit.MILLISECONDS, time1, time1 + 100).get(testFeedID1);
Assert.assertEquals(returnData.size(), 1);
Map<String, String> returnValue = returnData.get(0);
Assert.assertNotSame(returnValue, value1);
assertHasSameValue(returnValue, value1);
returnData = dataBuffer.getData(Collections.singleton(testFeedID1), TimeUnit.MILLISECONDS, time1, time2 + 100).get(testFeedID1);
Assert.assertEquals(returnData.size(), 2);
returnValue = returnData.get(0);
Assert.assertNotSame(returnValue, value1);
assertHasSameValue(returnValue, value1);
returnValue = returnData.get(1);
Assert.assertNotSame(returnValue, value2);
assertHasSameValue(returnValue, value2);
}
@Test
public void longRunningTest() throws InterruptedException, BrokenBarrierException {
final CyclicBarrier barrier = new CyclicBarrier(3);
final PutDataRunnable putDataTask = new PutDataRunnable(barrier);
final GetDataRunnable getDataTask = new GetDataRunnable(barrier);
final Thread t1 = new Thread(putDataTask);
final Thread t2 = new Thread(getDataTask);
t1.start();
t2.start();
Timer timer = new Timer();
TimerTask task = new TimerTask() {
@Override
public void run() {
putDataTask.interrupt();
getDataTask.interrupt();
}
};
timer.schedule(task, 2000);
barrier.await();
Assert.assertTrue(getDataTask.isPassed());
}
@Test
public void getLastDataSubrangeTest() throws BufferFullException {
PartitionMemoryBuffer buffer = new PartitionMemoryBuffer(env);
Map<Long, Map<String, String>> feedData = new HashMap<Long, Map<String, String>>();
Map<String, String> value = new HashMap<String, String>();
value.put("value", "1.3");
value.put("status", "ok");
long time = System.nanoTime();
feedData.put(time, value);
Map<String, String> value2 = new HashMap<String, String>();
value2.put("value", "1.4");
value2.put("status", "ok");
feedData.put(time + 100, value2);
Map<String, String> value3 = new HashMap<String, String>();
value3.put("value", "1.5");
value3.put("status", "ok");
feedData.put(time + 200, value3);
Map<String, String> value4 = new HashMap<String, String>();
value4.put("value", "1.6");
value4.put("status", "ok");
feedData.put(time + 300, value4);
Map<String, Map<Long, Map<String, String>>> data = new HashMap<String, Map<Long, Map<String, String>>>();
data.put(testFeedID1, feedData);
buffer.putData(data, TimeUnit.NANOSECONDS);
SortedMap<Long, Map<String, String>> returnData = buffer.getLastData(Collections.singleton(testFeedID1),
TimeUnit.NANOSECONDS, time + 50, time + 250).get(testFeedID1);
Assert.assertEquals(returnData.size(), 1);
Map<String, String> returnValue = returnData.get(time + 200);
Assert.assertNotSame(returnValue, value3);
assertHasSameValue(returnValue, value3);
}
private void assertHasSameValue(Map<String, String> actualValue,
Map<String, String> expectedValue) {
Assert.assertEquals(actualValue.size(), expectedValue.size());
for (String key : actualValue.keySet()) {
Assert.assertEquals(actualValue.get(key), expectedValue.get(key));
}
}
private final class GetDataRunnable implements Runnable {
private volatile boolean stop = false; // volatile: written by the timer thread via interrupt(), read in run()
private CyclicBarrier barrier;
private boolean assertPassed = true;
public GetDataRunnable(CyclicBarrier barrier) {
this.barrier = barrier;
}
@Override
public void run() {
List<Map<String, String>> readData = new LinkedList<Map<String, String>>();
long startTime = System.currentTimeMillis();
long endTime = startTime;
int oldValue = -1;
while (!stop) {
List<Map<String, String>> data = dataBuffer.getData(Collections.singleton(testFeedID1),
TimeUnit.MILLISECONDS, startTime, endTime).get(testFeedID1);
if (data == null) {
data = Collections.emptyList();
}
for (Map<String, String> record : data) {
int value = Integer.parseInt(record.get("value"));
long time = Long.parseLong(record.get("time"));
assertPassed = assertPassed && (time >= startTime && time <= endTime);
assertPassed = assertPassed && (value >= oldValue);
if (!assertPassed) {
stop = true;
}
oldValue = value;
}
readData.addAll(data);
startTime = endTime;
endTime = System.currentTimeMillis();
}
try {
barrier.await();
} catch (InterruptedException e) {
throw new AssertionError(e);
} catch (BrokenBarrierException e) {
throw new AssertionError(e);
}
}
public void interrupt() {
this.stop = true;
}
public boolean isPassed() {
return assertPassed;
}
}
private final class PutDataRunnable implements Runnable {
private volatile boolean stop = false; // volatile: written by the timer thread via interrupt(), read in run()
private CyclicBarrier barrier;
public PutDataRunnable(CyclicBarrier barrier) {
this.barrier = barrier;
}
@Override
public void run() {
int i = 0;
while (!stop) {
Map<String, String> value = new HashMap<String, String>();
long time = System.currentTimeMillis();
value.put("value", String.valueOf(i));
value.put("time", String.valueOf(time));
try {
dataBuffer.putData(testFeedID1, TimeUnit.MILLISECONDS, time, value);
i++;
Thread.sleep(100);
} catch (InterruptedException e) {
// ignore interrupt
} catch (BufferFullException e) {
// buffer full: drop this sample and keep producing
}
}
try {
barrier.await();
} catch (InterruptedException e) {
throw new AssertionError(e);
} catch (BrokenBarrierException e) {
throw new AssertionError(e);
}
}
public void interrupt() {
stop = true;
}
}
} | false | timeSequenceFeedAggregator_src_test_java_gov_nasa_arc_mct_buffer_internal_MemoryDataBufferTest.java |
70 | Runnable r = new Runnable() {
public void run() {
callbackCount.incrementAndGet();
}
}; | false | timeSequenceFeedAggregator_src_test_java_gov_nasa_arc_mct_buffer_internal_MemoryDataBufferTest.java |
71 | TimerTask task = new TimerTask() {
@Override
public void run() {
putDataTask.interrupt();
getDataTask.interrupt();
}
}; | false | timeSequenceFeedAggregator_src_test_java_gov_nasa_arc_mct_buffer_internal_MemoryDataBufferTest.java |
72 | private final class GetDataRunnable implements Runnable {
private volatile boolean stop = false; // volatile: written by the timer thread via interrupt(), read in run()
private CyclicBarrier barrier;
private boolean assertPassed = true;
public GetDataRunnable(CyclicBarrier barrier) {
this.barrier = barrier;
}
@Override
public void run() {
List<Map<String, String>> readData = new LinkedList<Map<String, String>>();
long startTime = System.currentTimeMillis();
long endTime = startTime;
int oldValue = -1;
while (!stop) {
List<Map<String, String>> data = dataBuffer.getData(Collections.singleton(testFeedID1),
TimeUnit.MILLISECONDS, startTime, endTime).get(testFeedID1);
if (data == null) {
data = Collections.emptyList();
}
for (Map<String, String> record : data) {
int value = Integer.parseInt(record.get("value"));
long time = Long.parseLong(record.get("time"));
assertPassed = assertPassed && (time >= startTime && time <= endTime);
assertPassed = assertPassed && (value >= oldValue);
if (!assertPassed) {
stop = true;
}
oldValue = value;
}
readData.addAll(data);
startTime = endTime;
endTime = System.currentTimeMillis();
}
try {
barrier.await();
} catch (InterruptedException e) {
throw new AssertionError(e);
} catch (BrokenBarrierException e) {
throw new AssertionError(e);
}
}
public void interrupt() {
this.stop = true;
}
public boolean isPassed() {
return assertPassed;
}
} | false | timeSequenceFeedAggregator_src_test_java_gov_nasa_arc_mct_buffer_internal_MemoryDataBufferTest.java |
73 | private final class PutDataRunnable implements Runnable {
private volatile boolean stop = false; // volatile: written by the timer thread via interrupt(), read in run()
private CyclicBarrier barrier;
public PutDataRunnable(CyclicBarrier barrier) {
this.barrier = barrier;
}
@Override
public void run() {
int i = 0;
while (!stop) {
Map<String, String> value = new HashMap<String, String>();
long time = System.currentTimeMillis();
value.put("value", String.valueOf(i));
value.put("time", String.valueOf(time));
try {
dataBuffer.putData(testFeedID1, TimeUnit.MILLISECONDS, time, value);
i++;
Thread.sleep(100);
} catch (InterruptedException e) {
// ignore interrupt
} catch (BufferFullException e) {
// buffer full: drop this sample and keep producing
}
}
try {
barrier.await();
} catch (InterruptedException e) {
throw new AssertionError(e);
} catch (BrokenBarrierException e) {
throw new AssertionError(e);
}
}
public void interrupt() {
stop = true;
}
} | false | timeSequenceFeedAggregator_src_test_java_gov_nasa_arc_mct_buffer_internal_MemoryDataBufferTest.java |
74 | public class MetaDataBuffer {
private static final int NULL_TIMESTAMP = -1;
protected volatile PartitionMetaData[] partitionMetaDatas;
protected volatile int currentPartition = 0;
private DataBufferEnv metaEnv;
public MetaDataBuffer(DataBufferEnv metaEnv) {
this.metaEnv = metaEnv;
this.partitionMetaDatas = new PartitionMetaData[metaEnv.getNumOfBufferPartitions()];
}
protected synchronized PartitionMetaData getPartitionMetaData(int bufferPartition) {
return partitionMetaDatas[bufferPartition];
}
public int getNumOfPartitions() {
return metaEnv.getNumOfBufferPartitions();
}
public int getCurrentPartition() {
return currentPartition;
}
public synchronized long getStartTimestamp(int bufferPartition, String feedID) {
PartitionMetaData partitionMetaData = partitionMetaDatas[bufferPartition];
if (partitionMetaData == null) {
return NULL_TIMESTAMP;
} else {
return partitionMetaData.getStartTimestamp(feedID);
}
}
public synchronized long getEndTimestamp(int bufferPartition, String feedID) {
PartitionMetaData partitionMetaData = partitionMetaDatas[bufferPartition];
if (partitionMetaData == null) {
return NULL_TIMESTAMP;
} else {
return partitionMetaData.getEndTimestamp(feedID);
}
}
public boolean isWithinTimeSpan(int bufferPartition, String feedID, TimeUnit timeunit, long startTime, long endTime) {
long startTimeInNanos = TimeUnit.NANOSECONDS.convert(startTime, timeunit);
long endTimeInNanos = TimeUnit.NANOSECONDS.convert(endTime, timeunit);
synchronized (this) {
PartitionMetaData partitionMetaData = partitionMetaDatas[bufferPartition];
if (partitionMetaData == null) {
return false;
}
return partitionMetaData.isWithinTimeSpan(feedID, startTimeInNanos, endTimeInNanos);
}
}
public boolean isFullyWithinTimeSpan(int bufferPartition, String feedID, TimeUnit timeunit, long startTime) {
long startTimeInNanos = TimeUnit.NANOSECONDS.convert(startTime, timeunit);
synchronized (this) {
PartitionMetaData partitionMetaData = partitionMetaDatas[bufferPartition];
if (partitionMetaData == null) {
return false;
}
return partitionMetaData.isFullyWithinTimeSpan(feedID, startTimeInNanos);
}
}
public synchronized PartitionMetaData removePartitionMetaData(int bufferPartition) {
PartitionMetaData partitionMetaData = this.partitionMetaDatas[bufferPartition];
this.partitionMetaDatas[bufferPartition] = null;
return partitionMetaData;
}
public synchronized Set<String> resetPartitionMetaData(int bufferPartition) {
PartitionMetaData partitionMetaData = this.partitionMetaDatas[bufferPartition];
Set<String> rowoverFeedIDs = getRowoverFeedIDs(bufferPartition);
if (partitionMetaData != null) {
partitionMetaData.reset();
partitionMetaData.addFeeds(rowoverFeedIDs);
}
return rowoverFeedIDs;
}
private Set<String> getRowoverFeedIDs(int bufferPartition) {
PartitionMetaData targetMetaData = getPartitionMetaData(bufferPartition);
if (targetMetaData == null) { return Collections.emptySet(); }
Set<String> targetFeedIDs = new HashSet<String>(targetMetaData.getFeeds());
int i = bufferPartition;
i = metaEnv.previousBufferPartition(i);
while (i != bufferPartition) {
PartitionMetaData metaData = getPartitionMetaData(i);
if (metaData == null) { break; }
Set<String> feedIDs = metaData.getFeeds();
targetFeedIDs.removeAll(feedIDs);
i = metaEnv.previousBufferPartition(i);
}
return targetFeedIDs;
}
public synchronized boolean hasFeed(int bufferPartition, String feedID) {
PartitionMetaData partitionMetaData = partitionMetaDatas[bufferPartition];
if (partitionMetaData == null) {
return false;
} else {
return partitionMetaData.hasFeed(feedID);
}
}
// synchronized, like the per-feed overload below, to avoid a check-then-act race on partitionMetaDatas
public synchronized void updatePartitionMetaData(int bufferPartition, Map<String, PartitionTimestamps> timeStamps) {
PartitionMetaData partitionMetaData = partitionMetaDatas[bufferPartition];
if (partitionMetaData == null) {
partitionMetaData = new PartitionMetaData(bufferPartition);
partitionMetaData.setTimeStamp(timeStamps);
this.partitionMetaDatas[bufferPartition] = partitionMetaData;
} else {
partitionMetaData.updateTimestamp(timeStamps);
}
}
public synchronized void updatePartitionMetaData(int bufferPartition, String feedID, long startTime, long endTime) {
PartitionMetaData partitionMetaData = partitionMetaDatas[bufferPartition];
if (partitionMetaData == null) {
partitionMetaData = new PartitionMetaData(bufferPartition);
this.partitionMetaDatas[bufferPartition] = partitionMetaData;
}
partitionMetaData.updateTimestamp(feedID, startTime, endTime);
}
public void writePartitionMetaData(int bufferPartition) {
}
public void writeCurrentBufferPartition(int newCurrentBufferPartition) {
}
public void restart() {
metaEnv.restartEnvironment(false);
partitionMetaDatas = new PartitionMetaData[metaEnv.getNumOfBufferPartitions()];
}
public void close() {
partitionMetaDatas = new PartitionMetaData[metaEnv.getNumOfBufferPartitions()];
metaEnv = null;
}
public void closeAndRestartEnvironment() {
partitionMetaDatas = new PartitionMetaData[metaEnv.getNumOfBufferPartitions()];
metaEnv.closeAndRestartEnvironment();
}
public void closeDatabase() {
partitionMetaDatas = new PartitionMetaData[metaEnv.getNumOfBufferPartitions()];
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_internal_MetaDataBuffer.java |
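A minimal sketch of this bookkeeping in use. Package names in the imports are inferred from the file paths in this listing, and MemoryBufferEnv(null) is assumed to yield usable defaults, as MemoryDataBufferHelper.newMetaDataBuffer(null) later in this listing implies.

import java.util.concurrent.TimeUnit;
import gov.nasa.arc.mct.buffer.internal.MetaDataBuffer;         // package inferred from file path
import gov.nasa.arc.mct.buffer.memory.internal.MemoryBufferEnv; // package inferred from file path

public class MetaDataBufferSketch {
    public static void main(String[] args) {
        MetaDataBuffer meta = new MetaDataBuffer(new MemoryBufferEnv(null));
        // record that partition 0 holds feed "TestPui1" data spanning [100, 200] nanoseconds
        meta.updatePartitionMetaData(0, "TestPui1", 100L, 200L);
        System.out.println(meta.hasFeed(0, "TestPui1"));           // true
        System.out.println(meta.getStartTimestamp(0, "TestPui1")); // 100
        // overlap query: bounds are converted to nanoseconds internally
        System.out.println(meta.isWithinTimeSpan(0, "TestPui1", TimeUnit.NANOSECONDS, 150L, 300L)); // true
    }
}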
75 | public interface PartitionDataBuffer {
public Map<String, SortedMap<Long, Map<String, String>>> getData(Set<String> feedIDs, TimeUnit timeUnit, long startTime, long endTime);
/**
* Similar to {@link #getData(Set, TimeUnit, long, long)}, but returns only the last point in the range for each feed.
*/
public Map<String, SortedMap<Long, Map<String, String>>> getLastData(Set<String> feedIDs, TimeUnit timeUnit, long startTime, long endTime);
public Map<String, PartitionTimestamps> putData(Map<String, Map<Long, Map<String, String>>> value, final TimeUnit timeUnit) throws BufferFullException;
public void putData(Map<String, Map<Long, Map<String, String>>> value, final TimeUnit timeUnit, MetaDataBuffer metadata, int metadataIndex) throws BufferFullException;
public boolean isActive();
public void removeBuffer();
public void closeBuffer();
public boolean isClosed();
public void inactive();
public DataBufferEnv getBufferEnv();
public void resetBuffer();
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_internal_PartitionDataBuffer.java |
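To make the contract concrete, a put/get round trip against PartitionMemoryBuffer, the in-memory implementation shown later in this listing; package names are again inferred from the file paths and are an assumption.

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.SortedMap;
import java.util.concurrent.TimeUnit;
import gov.nasa.arc.mct.buffer.internal.PartitionDataBuffer;           // inferred package
import gov.nasa.arc.mct.buffer.memory.internal.PartitionMemoryBuffer;  // inferred package

public class PartitionBufferRoundTripSketch {
    public static void main(String[] args) throws Exception {
        PartitionDataBuffer buffer = new PartitionMemoryBuffer(0); // partition number 0
        long now = System.nanoTime();
        Map<String, String> sample = new HashMap<String, String>();
        sample.put("value", "1.3");
        // putData takes feedID -> (timestamp -> datum)
        Map<Long, Map<String, String>> byTime = new HashMap<Long, Map<String, String>>();
        byTime.put(Long.valueOf(now), sample);
        Map<String, Map<Long, Map<String, String>>> bulk =
            new HashMap<String, Map<Long, Map<String, String>>>();
        bulk.put("TestPui1", byTime);
        buffer.putData(bulk, TimeUnit.NANOSECONDS);
        // getData returns feedID -> sorted (timestamp -> datum) within [startTime, endTime]
        SortedMap<Long, Map<String, String>> got = buffer.getData(
            Collections.singleton("TestPui1"), TimeUnit.NANOSECONDS, now, now + 1).get("TestPui1");
        System.out.println(got); // one entry keyed by 'now'
        buffer.closeBuffer();
    }
}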
76 | @Entity
public final class PartitionMetaData {
@PrimaryKey
private Integer partitionId;
private Map<String, PartitionTimestamps> timestamps = new HashMap<String, PartitionTimestamps>();
private volatile boolean currentPartition;
public PartitionMetaData() {
}
public PartitionMetaData(int partitionId) {
this.partitionId = partitionId;
}
public int getPartitionId() {
return this.partitionId;
}
public void setCurrentPartition(boolean currentPartition) {
this.currentPartition = currentPartition;
}
public boolean isCurrentPartition() {
return this.currentPartition;
}
public void updateTimestamp(String feedID, long startTime, long endTime) {
PartitionTimestamps ts = timestamps.get(feedID);
if (ts == null) {
timestamps.put(feedID, new PartitionTimestamps(startTime,endTime));
} else {
ts.merge(startTime,endTime);
}
}
public void updateTimestamp(String feedID, PartitionTimestamps originalTs) {
PartitionTimestamps ts = timestamps.get(feedID);
if (ts == null) {
timestamps.put(feedID, originalTs);
} else {
ts.merge(originalTs);
}
}
public void setTimeStamp(Map<String, PartitionTimestamps> ts) {
timestamps.clear();
for (String feedID : ts.keySet()) {
timestamps.put(feedID, ts.get(feedID).clone());
}
}
public boolean hasFeed(String feedID) {
return timestamps.containsKey(feedID);
}
public void addFeeds(Set<String> feedIDs) {
for (String feedID : feedIDs) {
if(!timestamps.containsKey(feedID)) {
timestamps.put(feedID, null);
}
}
}
public Set<String> getFeeds() {
return timestamps.keySet();
}
public void reset() {
timestamps.clear();
}
public long getStartTimestamp(String feedID) {
PartitionTimestamps ts = timestamps.get(feedID);
if (ts != null) {
return ts.getStartTimestamp();
}
return -1;
}
public long getEndTimestamp(String feedID) {
PartitionTimestamps ts = timestamps.get(feedID);
if (ts != null) {
return ts.getEndTimestamp();
}
return -1;
}
public boolean isWithinTimeSpan(String feedID, long startTime, long endTime) {
PartitionTimestamps timeStamp = timestamps.get(feedID);
if(timeStamp == null) {
return false;
}
long start = timeStamp.getStartTimestamp();
long end = timeStamp.getEndTimestamp();
return (startTime <= end || end == -1) && endTime >= start;
}
public boolean isFullyWithinTimeSpan(String feedID, long startTime) {
PartitionTimestamps timeStamp = timestamps.get(feedID);
if (timeStamp == null) {
return false;
}
if (startTime >= timeStamp.getStartTimestamp()) {
return true;
}
return false;
}
public void updateTimestamp(Map<String, PartitionTimestamps> ts) {
for (Entry<String, PartitionTimestamps> entry : ts.entrySet()) {
String feedID = entry.getKey();
PartitionTimestamps newTS = entry.getValue();
PartitionTimestamps timeStamp = timestamps.get(feedID);
if (timeStamp == null) {
timestamps.put(feedID, newTS.clone());
continue;
}
timeStamp.merge(newTS);
}
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_internal_PartitionMetaData.java |
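A sketch of the merge semantics above, assuming PartitionTimestamps.merge (not shown in this listing) widens the stored range to cover both spans, which is how MetaDataBuffer relies on it.

// PartitionMetaData is assumed importable from gov.nasa.arc.mct.buffer.internal.
public class PartitionMetaDataSketch {
    public static void main(String[] args) {
        PartitionMetaData meta = new PartitionMetaData(0);
        meta.updateTimestamp("TestPui1", 100L, 200L);
        meta.updateTimestamp("TestPui1", 150L, 400L); // assumed to merge to [100, 400]
        System.out.println(meta.getStartTimestamp("TestPui1"));            // 100
        System.out.println(meta.getEndTimestamp("TestPui1"));              // 400
        System.out.println(meta.isWithinTimeSpan("TestPui1", 350L, 500L)); // true: overlaps [100, 400]
        System.out.println(meta.isFullyWithinTimeSpan("TestPui1", 120L));  // true: 120 >= 100
    }
}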
77 | public class ReadMultipleMemoryPartitionsTest {
private DataBuffer dataBuffer;
private String testFeedID1 = "TestPui1";
@BeforeMethod
public void setup() throws IOException {
Properties prop = new Properties();
prop.load(ClassLoader.getSystemResourceAsStream("properties/testFeed.properties"));
prop.put("memory.buffer.partition", "2");
prop.put("memory.buffer.time.millis", "-1");
dataBuffer = DataBufferFactory.getMemoryDataBuffer(prop);
if (dataBuffer.isDataBufferClose()) {
dataBuffer.reset();
}
}
@AfterMethod
public void reset() {
if (dataBuffer != null) {
dataBuffer.reset();
}
DataBufferFactory.reset();
}
@Test
public void readMultiplePartitionsTest() throws InterruptedException, BufferFullException {
long time0 = System.currentTimeMillis();
Thread.sleep(3000);
Map<String, String> value11 = new HashMap<String, String>();
value11.put("value", "1.3");
value11.put("status", "ok");
long time11 = System.currentTimeMillis();
dataBuffer.putData(testFeedID1, TimeUnit.MILLISECONDS, time11, value11);
Thread.sleep(3000);
Map<String, String> value12 = new HashMap<String, String>();
value12.put("value", "1.4");
value12.put("status", "ok");
long time12 = System.currentTimeMillis();
dataBuffer.putData(testFeedID1, TimeUnit.MILLISECONDS, time12, value12);
List<Map<String, String>> data = dataBuffer.getData(Collections.singleton(testFeedID1), TimeUnit.MILLISECONDS, time11, time12).get(testFeedID1);
Assert.assertNotNull(data);
Assert.assertEquals(data.size(), 2);
Assert.assertEquals(data.get(0), value11);
Assert.assertEquals(data.get(1), value12);
dataBuffer.prepareForNextPartition();
dataBuffer.moveToNextPartition();
Thread.sleep(3000);
Map<String, String> value21 = new HashMap<String, String>();
value21.put("value", "2.3");
value21.put("status", "ok");
long time21 = System.currentTimeMillis();
dataBuffer.putData(testFeedID1, TimeUnit.MILLISECONDS, time21, value21);
Thread.sleep(3000);
Map<String, String> value22 = new HashMap<String, String>();
value22.put("value", "2.4");
value22.put("status", "ok");
long time22 = System.currentTimeMillis();
dataBuffer.putData(testFeedID1, TimeUnit.MILLISECONDS, time22, value22);
List<Map<String, String>> data2 = dataBuffer.getData(Collections.singleton(testFeedID1), TimeUnit.MILLISECONDS, time0, time22).get(testFeedID1);
Assert.assertNotNull(data2);
Assert.assertEquals(data2.size(), 4);
}
} | false | timeSequenceFeedAggregator_src_test_java_gov_nasa_arc_mct_buffer_internal_ReadMultipleMemoryPartitionsTest.java |
78 | public class ReadMultiplePartitionsTest {
private DataBuffer dataBuffer;
private String testFeedID1 = "TestPui1";
private File bufferLocation;
@BeforeMethod
public void setup() throws IOException {
Properties prop = new Properties();
prop.load(ClassLoader.getSystemResourceAsStream("properties/testFeed.properties"));
prop.put("buffer.partitions", "3");
prop.put("buffer.time.millis", "-1");
bufferLocation = File.createTempFile("mct-buffer", "");
bufferLocation.delete();
Assert.assertTrue(bufferLocation.mkdir());
prop.put("buffer.disk.loc", bufferLocation.toString());
dataBuffer = DataBufferFactory.getFastDiskDataBuffer(prop);
if (dataBuffer.isDataBufferClose()) {
dataBuffer.reset();
}
}
@AfterMethod
public void reset() {
if (dataBuffer != null) {
dataBuffer.closeBuffer();
}
DataBufferFactory.reset();
delete(bufferLocation);
}
private void delete(File f) {
if (f.isDirectory()) {
for (File f2 : f.listFiles()) {
delete(f2);
}
}
f.delete();
}
@Test
public void readMultiplePartitionsTest() throws InterruptedException, BufferFullException {
long time0 = System.currentTimeMillis();
Thread.sleep(5000);
Map<String, String> value11 = new HashMap<String, String>();
value11.put("value", "1.3");
value11.put("status", "ok");
long time11 = System.currentTimeMillis();
dataBuffer.putData(testFeedID1, TimeUnit.MILLISECONDS, time11, value11);
Thread.sleep(5000);
Map<String, String> value12 = new HashMap<String, String>();
value12.put("value", "1.4");
value12.put("status", "ok");
long time12 = System.currentTimeMillis();
dataBuffer.putData(testFeedID1, TimeUnit.MILLISECONDS, time12, value12);
List<Map<String, String>> data = dataBuffer.getData(Collections.singleton(testFeedID1), TimeUnit.MILLISECONDS, time11, time12).get(testFeedID1);
Assert.assertNotNull(data);
Assert.assertEquals(data.size(), 2);
Assert.assertEquals(data.get(0), value11);
Assert.assertEquals(data.get(1), value12);
dataBuffer.prepareForNextPartition();
dataBuffer.moveToNextPartition();
Thread.sleep(5000);
Map<String, String> value21 = new HashMap<String, String>();
value21.put("value", "2.3");
value21.put("status", "ok");
long time21 = System.currentTimeMillis();
dataBuffer.putData(testFeedID1, TimeUnit.MILLISECONDS, time21, value21);
Thread.sleep(5000);
Map<String, String> value22 = new HashMap<String, String>();
value22.put("value", "2.4");
value22.put("status", "ok");
long time22 = System.currentTimeMillis();
dataBuffer.putData(testFeedID1, TimeUnit.MILLISECONDS, time22, value22);
List<Map<String, String>> data2 = dataBuffer.getData(Collections.singleton(testFeedID1), TimeUnit.MILLISECONDS, time0, time22).get(testFeedID1);
Assert.assertNotNull(data2);
Assert.assertEquals(data2.size(), 4);
}
} | false | timeSequenceFeedAggregator_src_test_java_gov_nasa_arc_mct_buffer_internal_ReadMultiplePartitionsTest.java |
79 | public class SwitchBetweenParitionsTest {
private DataBuffer dataBuffer;
private File bufferLocation;
@BeforeMethod
public void setup() throws IOException {
Properties prop = new Properties();
prop.load(ClassLoader.getSystemResourceAsStream("properties/testFeed.properties"));
prop.put("buffer.partitions", "3");
prop.put("buffer.time.millis", "-1");
bufferLocation = File.createTempFile("mct-buffer", "");
bufferLocation.delete();
Assert.assertTrue(bufferLocation.mkdir());
prop.put("buffer.disk.loc", bufferLocation.toString());
dataBuffer = DataBufferFactory.getFastDiskDataBuffer(prop);
if (dataBuffer.isDataBufferClose()) {
dataBuffer.reset();
}
}
@AfterMethod
public void reset() {
if (dataBuffer != null) {
dataBuffer.closeBuffer();
}
DataBufferFactory.reset();
delete(bufferLocation);
}
private void delete(File f) {
if (f.isDirectory()) {
for (File f2 : f.listFiles()) {
delete(f2);
}
}
f.delete();
}
@Test
public void switchPartitionsTest() throws Exception {
int currentPartition = getCurrentBufferPartition();
Assert.assertEquals(currentPartition, 0);
dataBuffer.prepareForNextPartition();
currentPartition = getCurrentBufferPartition();
Assert.assertEquals(currentPartition, 0);
dataBuffer.moveToNextPartition();
currentPartition = getCurrentBufferPartition();
Assert.assertEquals(currentPartition, 1);
dataBuffer.prepareForNextPartition();
dataBuffer.moveToNextPartition();
currentPartition = getCurrentBufferPartition();
Assert.assertEquals(currentPartition, 2);
dataBuffer.prepareForNextPartition();
dataBuffer.moveToNextPartition();
currentPartition = getCurrentBufferPartition();
Assert.assertEquals(currentPartition, 0);
}
private int getCurrentBufferPartition() throws Exception {
Field f = DataBuffer.class.getDeclaredField("currentParition");
f.setAccessible(true);
PartitionFastDiskBuffer currentPartitionBuffer = (PartitionFastDiskBuffer)f.get(dataBuffer);
return currentPartitionBuffer.getBufferEnv().getCurrentBufferPartition();
}
} | false | timeSequenceFeedAggregator_src_test_java_gov_nasa_arc_mct_buffer_internal_SwitchBetweenParitionsTest.java |
80 | public class MemoryDataBufferHelper implements DataBufferHelper {
@Override
public MetaDataBuffer newMetaDataBuffer(DataBufferEnv env) {
if (env == null) {
return new MetaDataBuffer(new MemoryBufferEnv(null));
}
return new MetaDataBuffer(env);
}
@Override
public PartitionDataBuffer newPartitionBuffer(int partitionNo) {
return new PartitionMemoryBuffer(partitionNo);
}
@Override
public PartitionDataBuffer newPartitionBuffer(DataBufferEnv env) {
assert env instanceof MemoryBufferEnv;
return new PartitionMemoryBuffer((MemoryBufferEnv)env);
}
@Override
public DataBufferEnv newMetaDataBufferEnv(Properties prop) {
return new MemoryBufferEnv(prop);
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_memory_internal_MemoryDataBufferHelper.java |
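Condensed from MemoryDataBufferTest.setup() above, this is how environment, helper, and DataBuffer compose. The test invokes the DataBuffer constructor from its own package, so this sketch may likewise need to live in gov.nasa.arc.mct.buffer.internal (an assumption).

import java.util.Properties;

public class MemoryBufferAssemblySketch {
    static DataBuffer newMemoryDataBuffer() {
        Properties prop = new Properties();
        prop.put("memory.buffer.partition", "1");    // one partition (key taken from the test)
        prop.put("memory.buffer.time.millis", "-1"); // disable time-based rollover
        MemoryBufferEnv env = new MemoryBufferEnv(prop);
        return new DataBuffer(env, new MemoryDataBufferHelper());
    }
}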
81 | public class PartitionMemoryBuffer implements PartitionDataBuffer {
private static final Logger LOGGER = LoggerFactory.getLogger(PartitionMemoryBuffer.class);
private static final Logger READ_PERF_LOGGER = LoggerFactory.getLogger("gov.nasa.arc.mct.performance.memory.partitionbuffer.read");
private static final Logger WRITE_PERF_LOGGER = LoggerFactory.getLogger("gov.nasa.arc.mct.performance.memory.partitionbuffer.write");
private static final class TimeStampComparator implements Comparator<Long>, Serializable {
private static final long serialVersionUID = -665810351953536404L;
@Override
public int compare(Long o1, Long o2) {
return o1.compareTo(o2);
}
}
private static final Comparator<Long> TIMESTAMP_COMPARATOR = new TimeStampComparator();
private volatile Map<String, TreeMap<Long, Map<String, String>>> cachedData = new HashMap<String, TreeMap<Long, Map<String, String>>>();
private volatile SoftReference<Map<String, TreeMap<Long, Map<String, String>>>> claimableCachedData = null;
private final MemoryBufferEnv env;
private volatile boolean active;
public PartitionMemoryBuffer(int partitionNumber) {
this(new MemoryBufferEnv(null, partitionNumber));
this.active = true;
READ_PERF_LOGGER.debug("Newing memory partition {}", partitionNumber);
}
public PartitionMemoryBuffer(MemoryBufferEnv env) {
this.env = env;
this.active = true;
}
@Override
public void removeBuffer() {
cachedData = null;
this.env.closeAndRestartEnvironment();
}
@Override
public void closeBuffer() {
removeBuffer();
}
@Override
public DataBufferEnv getBufferEnv() {
return this.env;
}
private Map<String, TreeMap<Long, Map<String, String>>> getCachedData() {
Map<String, TreeMap<Long, Map<String, String>>> returnedCachedData = null;
if (cachedData != null) {
returnedCachedData = cachedData;
} else if (claimableCachedData != null) {
returnedCachedData = claimableCachedData.get();
}
if (returnedCachedData == null) {
returnedCachedData = Collections.emptyMap();
}
return returnedCachedData;
}
@Override
public Map<String, SortedMap<Long, Map<String, String>>> getLastData(Set<String> feedIDs, TimeUnit timeUnit, long startTime, long endTime) {
final ElapsedTimer timer = new ElapsedTimer();
timer.startInterval();
Map<String, TreeMap<Long, Map<String, String>>> cachedData = getCachedData();
Map<String, SortedMap<Long, Map<String, String>>> returnedData = new HashMap<String, SortedMap<Long, Map<String, String>>>();
for (String feedID : feedIDs) {
synchronized (this) {
TreeMap<Long, Map<String, String>> feedCachedData = cachedData.get(feedID);
if (feedCachedData == null) {
continue;
}
long start = TimeUnit.NANOSECONDS.convert(startTime, timeUnit);
long end = TimeUnit.NANOSECONDS.convert(endTime, timeUnit);
Entry<Long, Map<String, String>> feedSearchedData = feedCachedData.subMap(start, true, end, true).lastEntry();
if (feedSearchedData != null) {
SortedMap<Long, Map<String, String>> feedData = new TreeMap<Long, Map<String, String>>();
feedData.put(feedSearchedData.getKey(), feedSearchedData.getValue());
returnedData.put(feedID, feedData);
}
}
}
timer.stopInterval();
READ_PERF_LOGGER.debug("Time to get {} feeds from memory: {} from partition " + this.env.getCurrentBufferPartition(), feedIDs.size(), timer.getIntervalInMillis());
return returnedData;
}
@Override
public Map<String, SortedMap<Long, Map<String, String>>> getData(Set<String> feedIDs, TimeUnit timeUnit, long startTime,
long endTime) {
final ElapsedTimer timer = new ElapsedTimer();
timer.startInterval();
Map<String, TreeMap<Long, Map<String, String>>> cachedData = getCachedData();
Map<String, SortedMap<Long, Map<String, String>>> returnedData = new HashMap<String, SortedMap<Long, Map<String, String>>>();
startTime = TimeUnit.NANOSECONDS.convert(startTime, timeUnit);
endTime = TimeUnit.NANOSECONDS.convert(endTime, timeUnit);
for (String feedID : feedIDs) {
synchronized (this) {
TreeMap<Long, Map<String, String>> feedCachedData = cachedData.get(feedID);
if (feedCachedData == null) {
continue;
}
Map<Long, Map<String, String>> feedSearchedData = feedCachedData.subMap(startTime, true, endTime, true);
if (feedSearchedData != null && !feedSearchedData.isEmpty()) {
SortedMap<Long, Map<String, String>> feedData = new TreeMap<Long, Map<String, String>>();
feedData.putAll(feedSearchedData);
returnedData.put(feedID, feedData);
}
}
}
timer.stopInterval();
READ_PERF_LOGGER.debug("Time to get {} feeds from memory: {} from partition " + this.env.getCurrentBufferPartition(), feedIDs.size(), timer.getIntervalInMillis());
return returnedData;
}
@Override
public void inactive() {
claimableCachedData = new SoftReference<Map<String,TreeMap<Long,Map<String,String>>>>(cachedData);
cachedData = null;
this.active = false;
}
@Override
public boolean isActive() {
return this.active;
}
@Override
public boolean isClosed() {
return cachedData == null;
}
@Override
public Map<String, PartitionTimestamps> putData(Map<String, Map<Long, Map<String, String>>> value, TimeUnit timeUnit) {
final ElapsedTimer timer = new ElapsedTimer();
timer.startInterval();
Map<String, PartitionTimestamps> timestamps = new HashMap<String, PartitionTimestamps>();
Map<String, TreeMap<Long, Map<String, String>>> cachedData = getCachedData();
for (Entry<String, Map<Long, Map<String, String>>> entry : value.entrySet()) {
String feedID = entry.getKey();
long largestTime = 0;
long smallestTime = 0;
synchronized (this) {
TreeMap<Long, Map<String, String>> cachedFeedData = cachedData.get(feedID);
if (cachedFeedData == null) {
cachedFeedData = new TreeMap<Long, Map<String, String>>(TIMESTAMP_COMPARATOR);
cachedData.put(feedID, cachedFeedData);
}
for (Entry<Long, Map<String, String>> feedData : entry.getValue().entrySet()) {
Long time = feedData.getKey();
time = TimeUnit.NANOSECONDS.convert(time, timeUnit);
LOGGER.debug("Putting data for feed {} with time {}", feedID, time);
if (time.longValue() > largestTime) {
largestTime = time.longValue();
}
if (smallestTime == 0) {
smallestTime = time.longValue();
} else if (time.longValue() < smallestTime) {
smallestTime = time.longValue();
}
Map<String, String> clonedFeedData = new HashMap<String, String>(feedData.getValue());
cachedFeedData.put(time, clonedFeedData);
}
}
timestamps.put(feedID, new PartitionTimestamps(smallestTime, largestTime));
}
timer.stopInterval();
WRITE_PERF_LOGGER.debug("Time to write {} feeds: {} from partition " + this.env.getCurrentBufferPartition(), value.size(), timer.getIntervalInMillis());
return timestamps;
}
@Override
public void putData(Map<String, Map<Long, Map<String, String>>> value, TimeUnit timeUnit, MetaDataBuffer metadata, int metadataIndex) {
final ElapsedTimer timer = new ElapsedTimer();
timer.startInterval();
Map<String, TreeMap<Long, Map<String, String>>> cachedData = getCachedData();
for (Entry<String, Map<Long, Map<String, String>>> entry : value.entrySet()) {
String feedID = entry.getKey();
long largestTime = 0;
long smallestTime = 0;
synchronized (this) {
TreeMap<Long, Map<String, String>> cachedFeedData = cachedData.get(feedID);
if (cachedFeedData == null) {
cachedFeedData = new TreeMap<Long, Map<String, String>>(TIMESTAMP_COMPARATOR);
cachedData.put(feedID, cachedFeedData);
}
for (Entry<Long, Map<String, String>> feedData : entry.getValue().entrySet()) {
Long time = feedData.getKey();
time = TimeUnit.NANOSECONDS.convert(time, timeUnit);
LOGGER.debug("Putting data for feed {} with time {}", feedID, time);
if (time.longValue() > largestTime) {
largestTime = time.longValue();
}
if (smallestTime == 0) {
smallestTime = time.longValue();
} else if (time.longValue() < smallestTime) {
smallestTime = time.longValue();
}
Map<String, String> clonedFeedData = new HashMap<String, String>(feedData.getValue());
cachedFeedData.put(time, clonedFeedData);
}
}
metadata.updatePartitionMetaData(metadataIndex, feedID, smallestTime, largestTime);
}
timer.stopInterval();
if (WRITE_PERF_LOGGER.isDebugEnabled()) {
WRITE_PERF_LOGGER.debug("Time to write {} feeds: {} from partition " + this.env.getCurrentBufferPartition(), value.size(), timer.getIntervalInMillis());
}
}
@Override
public void resetBuffer() {
// cachedData is null once inactive() or removeBuffer() has run; guard against that
if (cachedData != null) {
cachedData.clear();
}
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_memory_internal_PartitionMemoryBuffer.java |
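A sketch of the active/inactive lifecycle: inactive() parks the cache behind a SoftReference, so reads can keep succeeding until the JVM reclaims the map under memory pressure, which makes the final read below GC-dependent rather than guaranteed.

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;

public class PartitionLifecycleSketch {
    public static void main(String[] args) throws Exception {
        PartitionMemoryBuffer buffer = new PartitionMemoryBuffer(0);
        long now = System.nanoTime();
        Map<Long, Map<String, String>> byTime = new HashMap<Long, Map<String, String>>();
        byTime.put(Long.valueOf(now), Collections.singletonMap("value", "1.3"));
        Map<String, Map<Long, Map<String, String>>> bulk =
            new HashMap<String, Map<Long, Map<String, String>>>();
        bulk.put("TestPui1", byTime);
        buffer.putData(bulk, TimeUnit.NANOSECONDS);
        buffer.inactive();                      // cache now held only by a SoftReference
        System.out.println(buffer.isActive());  // false
        // may still return the datum until the GC clears the SoftReference
        System.out.println(buffer.getData(Collections.singleton("TestPui1"),
            TimeUnit.NANOSECONDS, now, now + 1));
    }
}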
82 | private static final class TimeStampComparator implements Comparator<Long>, Serializable {
private static final long serialVersionUID = -665810351953536404L;
@Override
public int compare(Long o1, Long o2) {
return o1.compareTo(o2);
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_memory_internal_PartitionMemoryBuffer.java |
83 | public class ElapsedTimer {
private long overallTime;
private long elapsedTimeStart;
private long elapsedTimeStop;
private long intervals;
public ElapsedTimer() {
overallTime = 0;
intervals = 0;
}
public void startInterval() {
elapsedTimeStart = getCurrentTime();
elapsedTimeStop = 0;
}
public void stopInterval() {
elapsedTimeStop = getCurrentTime();
intervals++;
overallTime += (elapsedTimeStop-elapsedTimeStart);
}
/**
* Gets the number of intervals that have been completed.
* @return the number of completed intervals
*/
public long getIntervals() {
return intervals;
}
/**
* Return the last time interval in millis
* @return elapsedTimeStop - elapsedTimeStart
*/
public long getIntervalInMillis() {
return elapsedTimeStop - elapsedTimeStart;
}
/**
*
* @return sum of all interval times in milliseconds
*/
public long getTotalTime() {
return overallTime;
}
public double getMean() {
return getTotalTime()/((double)intervals);
}
long getCurrentTime() {
return TimeUnit.NANOSECONDS.toMillis(System.nanoTime());
}
} | false | timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_util_ElapsedTimer.java |
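A minimal usage sketch for ElapsedTimer as defined above: time a few intervals, then report the count, last interval, total, and mean (milliseconds throughout).

public class ElapsedTimerDemo {
    public static void main(String[] args) throws InterruptedException {
        ElapsedTimer timer = new ElapsedTimer();
        for (int i = 0; i < 3; i++) {
            timer.startInterval();
            Thread.sleep(10);                 // the work being measured
            timer.stopInterval();
        }
        System.out.println("intervals: " + timer.getIntervals());        // 3
        System.out.println("last (ms): " + timer.getIntervalInMillis());
        System.out.println("total (ms): " + timer.getTotalTime());
        System.out.println("mean (ms): " + timer.getMean());
    }
}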
84 | public class CanvasActivator implements BundleActivator {
//private static final Logger logger = LoggerFactory.getLogger(ExampleActivator.class);
@Override
public void start(BundleContext context) {
//logger.info("starting bundle {0}", context.getBundle().getSymbolicName());
ServiceReference sr = context.getServiceReference(ComponentRegistry.class.getName());
Object o = context.getService(sr);
context.ungetService(sr);
assert o != null;
(new ComponentRegistryAccess()).setRegistry((ComponentRegistry)o);
sr = context.getServiceReference(PolicyManager.class.getName());
o = context.getService(sr);
context.ungetService(sr);
assert o != null;
(new PolicyManagerAccess()).setPolciyManager((PolicyManager)o);
sr = context.getServiceReference(MenuManager.class.getName());
o = context.getService(sr);
context.ungetService(sr);
assert o != null;
(new MenuManagerAccess()).setMenuManager((MenuManager)o);
}
@Override
public void stop(BundleContext context) {
(new ComponentRegistryAccess()).releaseRegistry(ComponentRegistryAccess.getComponentRegistry());
(new PolicyManagerAccess()).releasePolicyManager();
// release the menu manager acquired in start()
(new MenuManagerAccess()).releaseMenuManager();
}
} | false | canvas_src_main_java_gov_nasa_arc_mct_canvas_CanvasActivator.java |
85 | public class CanvasActivatorTest {
public static long BUNDLE_ID = 12345;
@Mock
private BundleContext bc;
@Mock
private ComponentRegistry mockComponentRegistry;
@Mock
private PolicyManager mockPolicyManager;
@Mock
private MenuManager mockMenuManager;
@Mock
private ServiceReference mockCR;
@Mock
private ServiceReference mockPM;
@Mock
private ServiceReference mockMM;
@BeforeMethod
public void init() throws IOException {
MockitoAnnotations.initMocks(this);
Mockito.when(bc.getServiceReference(ComponentRegistry.class.getName())).thenReturn(mockCR);
Mockito.when(bc.getServiceReference(PolicyManager.class.getName())).thenReturn(mockPM);
Mockito.when(bc.getServiceReference(MenuManager.class.getName())).thenReturn(mockMM);
Mockito.when(bc.getService(mockCR)).thenReturn(mockComponentRegistry);
Mockito.when(bc.getService(mockPM)).thenReturn(mockPolicyManager);
Mockito.when(bc.getService(mockMM)).thenReturn(mockMenuManager);
}
@Test
public void testStartStop() throws Exception {
CanvasActivator activator = new CanvasActivator();
activator.start(bc);
Assert.assertSame(ComponentRegistryAccess.getComponentRegistry(), mockComponentRegistry);
Assert.assertSame(PolicyManagerAccess.getPolicyManager(), mockPolicyManager);
activator.stop(bc);
Assert.assertNull(ComponentRegistryAccess.getComponentRegistry());
Assert.assertNull(PolicyManagerAccess.getPolicyManager());
}
} | false | canvas_src_test_java_gov_nasa_arc_mct_canvas_CanvasActivatorTest.java |
86 | public class ComponentRegistryAccess {
private static AtomicReference<ComponentRegistry> registry =
new AtomicReference<ComponentRegistry>();
// this is not a traditional singleton as this class is created by the OSGi declarative services mechanism.
/**
* Returns the component registry instance. This will not return null as the cardinality of
* the component specified through the OSGi components services is 1.
* @return a component registry service instance
*/
public static ComponentRegistry getComponentRegistry() {
return registry.get();
}
/**
* set the active instance of the <code>ComponentRegistry</code>. This method is invoked by
* OSGi (see the OSGI-INF/component.xml file for additional details).
* @param componentRegistry available in MCT
*/
public void setRegistry(ComponentRegistry componentRegistry) {
registry.set(componentRegistry);
}
/**
* release the active instance of the <code>ComponentRegistry</code>. This method is invoked by
* OSGi (see the OSGI-INF/component.xml file for additional details).
* @param componentRegistry to be released
*/
public void releaseRegistry(ComponentRegistry componentRegistry) {
registry.set(null);
}
} | false | canvas_src_main_java_gov_nasa_arc_mct_canvas_ComponentRegistryAccess.java |
87 | public class ComponentRegistryAccessTest {
@Mock
private ComponentRegistry mockComponentRegistry;
@BeforeMethod
public void setup() {
MockitoAnnotations.initMocks(this);
}
@Test
public void componentRegistryAccessTest() {
(new ComponentRegistryAccess()).setRegistry(mockComponentRegistry);
Assert.assertSame(ComponentRegistryAccess.getComponentRegistry(), mockComponentRegistry);
(new ComponentRegistryAccess()).releaseRegistry(mockComponentRegistry);
Assert.assertNull(ComponentRegistryAccess.getComponentRegistry());
}
} | false | canvas_src_test_java_gov_nasa_arc_mct_canvas_ComponentRegistryAccessTest.java |
88 | public final class MenuManagerAccess {
private static AtomicReference<MenuManager> manager = new AtomicReference<MenuManager>();
public void setMenuManager(MenuManager menuManager) {
manager.set(menuManager);
}
public void releaseMenuManager() {
manager.set(null);
}
public static MenuManager getMenuManager() {
return manager.get();
}
} | false | canvas_src_main_java_gov_nasa_arc_mct_canvas_MenuManagerAccess.java |
89 | public final class PolicyManagerAccess {
private static AtomicReference<PolicyManager> manager = new AtomicReference<PolicyManager>();
public void setPolciyManager(PolicyManager policyManager) {
manager.set(policyManager);
}
public void releasePolicyManager() {
manager.set(null);
}
public static PolicyManager getPolicyManager() {
return manager.get();
}
} | false | canvas_src_main_java_gov_nasa_arc_mct_canvas_PolicyManagerAccess.java |
90 | public class PolicyManagerAccessTest {
@Mock
private PolicyManager mockPolicManager;
@BeforeMethod
public void setup() {
MockitoAnnotations.initMocks(this);
}
@Test
public void componentRegistryAccessTest() {
(new PolicyManagerAccess()).setPolciyManager(mockPolicManager);
Assert.assertSame(PolicyManagerAccess.getPolicyManager(), mockPolicManager);
(new PolicyManagerAccess()).releasePolicyManager();
Assert.assertNull(PolicyManagerAccess.getPolicyManager());
}
} | false | canvas_src_test_java_gov_nasa_arc_mct_canvas_PolicyManagerAccessTest.java |
91 | public class ControlAreaFormattingConstants {
// these next items determine the grid size drawn on the DesignCanvas...
public static final int NO_GRID_SIZE = 0;
public static final int FINE_GRID_SIZE = 3;
public static final int SMALL_GRID_SIZE = 9;
public static final int MED_GRID_SIZE = 36;
public static final int LARGE_GRID_SIZE = 72;
public static final int MAJOR_GRID_LINE = 72;
public static final int MAX_GRID_SIZE_IN_PIXELS = 1024;
public static final Color MINOR_GRID_LINE_COLOR = new Color(204,204,204);
public static final Color MAJOR_GRID_LINE_COLOR = new Color(153,153,153);
// Items for use in drawing borders... (decimal RGB throughout; a leading zero
// would make Java read the literal as octal, e.g. 032 == 26)
public static final Color BorderColors[] = { new Color(0, 0, 0), // black
new Color(0, 0, 255), // blue
new Color(0, 128, 0), // Green
new Color(32, 179, 170), // light sea green
new Color(152, 251, 152), // Pale Green
new Color(255, 140, 0), // Dark Orange
new Color(255, 0, 255), // Magenta
new Color(255, 69, 0), // Orange Red
new Color(255, 215, 0), // Gold
new Color(47, 79, 79), // Dark Slate Gray
new Color(128, 128, 128), // Gray
new Color(100, 149, 237), // Corn Flower blue
new Color(0, 49, 42), // Brown
new Color(0, 176, 176), // Aquamarine
new Color(102, 51, 255) // Intermediate Violet blue
};
public static enum BorderStyle { SINGLE, DOUBLE, DASHED, DOTS, MIXED;
public static BorderStyle getBorderStyle(int i) {
switch (i) {
case 0: return SINGLE;
case 1: return DOUBLE;
case 2: return DASHED;
case 3: return DOTS;
case 4: return MIXED;
default: return SINGLE;
}
}
}
public static final int NUMBER_BORDER_STYLES = BorderStyle.values().length;
public final static String PANEL_BORDER_PROPERTY = "PANEL BORDER PROPERTY";
public final static String PANEL_BORDER_STYLE_PROPERTY = "PANEL BORDER STYLE PROPERTY";
public static enum PANEL_ZORDER { FRONT, BACK }
public static final String PANEL_ORDER = "PANEL_ORDER";
public static final int UNDERLINE_OFF = -1;
public static final Map<TextAttribute, Object> underlineMap = new Hashtable<TextAttribute, Object>();
/**
* @author dcberrio
* Enumerated standard JVM font families
*/
public enum JVMFontFamily {
Dialog ("Dialog"),
DialogInput ("Input"),
Monospaced ("Monospaced"),
SansSerif ("Sans Serif"),
Serif ("Serif");
private final String displayName;
JVMFontFamily(String displayName) {
this.displayName = displayName;
}
/** Get pretty name for controls
* @return displayName
*/
public String getDisplayName() {
return displayName;
}
}
} | false | canvas_src_main_java_gov_nasa_arc_mct_canvas_formatting_ControlAreaFormattingConstants.java |
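The grid constants above pair a spacing (FINE through LARGE) with two line colors, promoting every MAJOR_GRID_LINE (72) pixels to the darker shade. The painter below is a hypothetical sketch of that usage; the actual DesignCanvas painting code is not in this listing, so the method name and signature are assumptions.

import java.awt.Graphics;

public class GridPainterSketch {
    // hypothetical helper: draw vertical grid lines across a canvas of the given size
    public static void paintVerticalGridLines(Graphics g, int gridSize, int width, int height) {
        if (gridSize == ControlAreaFormattingConstants.NO_GRID_SIZE) {
            return;
        }
        int limit = Math.min(width, ControlAreaFormattingConstants.MAX_GRID_SIZE_IN_PIXELS);
        for (int x = 0; x < limit; x += gridSize) {
            boolean major = (x % ControlAreaFormattingConstants.MAJOR_GRID_LINE) == 0;
            g.setColor(major ? ControlAreaFormattingConstants.MAJOR_GRID_LINE_COLOR
                             : ControlAreaFormattingConstants.MINOR_GRID_LINE_COLOR);
            g.drawLine(x, 0, x, height);
        }
    }
}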
92 | public static enum BorderStyle { SINGLE, DOUBLE, DASHED, DOTS, MIXED;
public static BorderStyle getBorderStyle(int i) {
switch (i) {
case 0: return SINGLE;
case 1: return DOUBLE;
case 2: return DASHED;
case 3: return DOTS;
case 4: return MIXED;
default: return SINGLE;
}
}
} | false | canvas_src_main_java_gov_nasa_arc_mct_canvas_formatting_ControlAreaFormattingConstants.java |
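Because the case labels track the declaration order SINGLE through MIXED, the switch is equivalent to an ordinal lookup; a sketch with the same SINGLE fallback for out-of-range indices:

public static BorderStyle getBorderStyle(int i) {
    BorderStyle[] styles = BorderStyle.values();
    // values() is in declaration order, so styles[i] matches the switch above
    return (i >= 0 && i < styles.length) ? styles[i] : BorderStyle.SINGLE;
}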
93 | public enum JVMFontFamily {
Dialog ("Dialog"),
DialogInput ("Input"),
Monospaced ("Monospaced"),
SansSerif ("Sans Serif"),
Serif ("Serif");
private final String displayName;
JVMFontFamily(String displayName) {
this.displayName = displayName;
}
/** Get pretty name for controls
* @return displayName
*/
public String getDisplayName() {
return displayName;
}
} | false | canvas_src_main_java_gov_nasa_arc_mct_canvas_formatting_ControlAreaFormattingConstants.java |
94 | public static enum PANEL_ZORDER { FRONT, BACK } | false | canvas_src_main_java_gov_nasa_arc_mct_canvas_formatting_ControlAreaFormattingConstants.java |
95 | public class CanvasLayoutManager implements LayoutManager2 {
private final static Logger LOGGER = LoggerFactory.getLogger(CanvasLayoutManager.class);
private static enum LAYOUT_ARRANGEMENT {
free, tile, mix
}
public final static LAYOUT_ARRANGEMENT TILE = LAYOUT_ARRANGEMENT.tile;
public final static LAYOUT_ARRANGEMENT MIX = LAYOUT_ARRANGEMENT.mix;
private LinkedHashMap<Component, Rectangle> newlyAddedComponents = new LinkedHashMap<Component, Rectangle>();
private LAYOUT_ARRANGEMENT componentLayout;
private Point nextLocation = new Point(0, 0);
private Dimension minDimension = new Dimension(0, 0);
private Dimension preferredDimension = new Dimension(0, 0);
private boolean sizeUnknown = true;
private int gridSize = ControlAreaFormattingConstants.NO_GRID_SIZE;
private boolean snapToGrid = false;
private int snapGridSize = 1;
public CanvasLayoutManager() {
componentLayout = LAYOUT_ARRANGEMENT.free;
}
public CanvasLayoutManager(LAYOUT_ARRANGEMENT layout) {
componentLayout = layout;
}
@Override
public void addLayoutComponent(Component comp, Object constraints) {
assert constraints == null || constraints instanceof Rectangle;
Rectangle bound = (Rectangle) constraints;
newlyAddedComponents.put(comp, bound);
}
@Override
public float getLayoutAlignmentX(Container target) {
return 0;
}
@Override
public float getLayoutAlignmentY(Container target) {
return 0;
}
@Override
@SuppressWarnings("fallthrough")
public void invalidateLayout(Container target) {
try {
Set<Component> components = newlyAddedComponents.keySet();
LAYOUT_ARRANGEMENT layout = componentLayout;
layout = ((componentLayout == LAYOUT_ARRANGEMENT.mix) && (newlyAddedComponents.size() == 1)) ? LAYOUT_ARRANGEMENT.free
: componentLayout;
switch (layout) {
case mix:
if (!newlyAddedComponents.isEmpty()) {
Entry<Component, Rectangle> firstEntry = newlyAddedComponents.entrySet()
.iterator().next();
Rectangle r = firstEntry.getValue();
if (r != null) {
nextLocation = r.getLocation();
}
}
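// fall through: after seeding the tile cursor from the first requested location, mix lays out like tile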
case tile:
Container parent = target.getParent();
Dimension parentDimension = parent.getBounds().getSize();
int largestHeight = 0;
for (Component comp : components) {
Rectangle r = newlyAddedComponents.get(comp);
if (r != null) {
if (nextLocation.x + r.width >= parentDimension.width) {
nextLocation.x = 0;
nextLocation.y += largestHeight;
largestHeight = 0;
}
largestHeight = Math.max(largestHeight, r.height);
nextLocation = marshalLocation(nextLocation);
comp.setBounds(nextLocation.x, nextLocation.y, r.width, r.height);
nextLocation.x += r.width;
} else {
r = marshalLocation(comp.getBounds());
nextLocation.x = r.x + r.width;
}
}
break;
case free:
default:
for (Component comp : components) {
Rectangle r = marshalLocation(newlyAddedComponents.get(comp));
if (r != null) {
comp.setBounds(r);
}
}
break;
}
} catch (Exception e) {
LOGGER.error(e.getMessage(), e);
} finally {
newlyAddedComponents.clear();
}
}
private Point marshalLocation(Point origPoint) {
if (origPoint == null) { return null; }
Point marshallPoint = new Point();
marshallPoint.x = origPoint.x - (origPoint.x % snapGridSize);
marshallPoint.y = origPoint.y - (origPoint.y % snapGridSize);
if (marshallPoint.x < origPoint.x) {
marshallPoint.x += snapGridSize;
}
if (marshallPoint.y < origPoint.y) {
marshallPoint.y += snapGridSize;
}
return marshallPoint;
}
// Unlike the Point overload, this truncates toward the origin and mutates the caller's rectangle in place
public Rectangle marshalLocation(Rectangle origBound) {
if (origBound == null) { return null; }
origBound.x = origBound.x - (origBound.x % snapGridSize);
origBound.y = origBound.y - (origBound.y % snapGridSize);
return origBound;
}
@Override
public Dimension maximumLayoutSize(Container target) {
if (sizeUnknown) {
setSizes(target);
}
int width = target.getParent().getBounds().width, height = target.getParent().getBounds().height;
for (Component component : target.getComponents()) {
Point location = component.getLocation();
int rightX = location.x + component.getWidth();
int bottomY = location.y + component.getHeight();
if (rightX > width)
width = rightX;
if (bottomY > height)
height = bottomY;
}
Dimension dim = new Dimension(width, height);
// Always add the container's insets
Insets insets = target.getInsets();
dim.width += insets.left + insets.right;
dim.height += insets.top + insets.bottom;
return dim;
}
@Override
public void addLayoutComponent(String name, Component comp) {
// no-op: the string-constraint variant is unused; components are added with Rectangle constraints
}
@Override
public void layoutContainer(Container parent) {
try {
switch (componentLayout) {
case tile:
Component[] components = parent.getComponents();
Dimension parentDimension = parent.getBounds().getSize();
int largestHeight = 0;
Point nextLocation = new Point(0, 0);
for (Component comp : components) {
Rectangle r = comp.getBounds();
if (nextLocation.x + r.width >= parentDimension.width) {
nextLocation.x = 0;
nextLocation.y += largestHeight;
largestHeight = 0;
}
largestHeight = Math.max(largestHeight, r.height);
nextLocation = marshalLocation(nextLocation);
comp.setBounds(nextLocation.x, nextLocation.y, r.width, r.height);
nextLocation.x += r.width;
}
break;
case mix:
case free:
default:
break;
}
} catch (Exception e) {
LOGGER.error(e.getMessage(), e);
} finally {
newlyAddedComponents.clear();
}
}
@Override
public Dimension minimumLayoutSize(Container parent) {
if (sizeUnknown) {
setSizes(parent);
}
Dimension dim = new Dimension(0, 0);
// Always add the container's insets
Insets insets = parent.getInsets();
dim.width = minDimension.width + insets.left + insets.right;
dim.height = minDimension.height + insets.top + insets.bottom;
return dim;
}
@Override
public Dimension preferredLayoutSize(Container parent) {
if (sizeUnknown) {
setSizes(parent);
}
Dimension dim = new Dimension(0, 0);
// Always add the container's insets
Insets insets = parent.getInsets();
dim.width = preferredDimension.width + insets.left + insets.right;
dim.height = preferredDimension.height + insets.top + insets.bottom;
return dim;
}
@Override
public void removeLayoutComponent(Component comp) {
// no-op: nothing to clean up when a component is removed
}
private void setSizes(Container parent) {
preferredDimension = parent.getSize();
minDimension = parent.getSize();
sizeUnknown = false;
}
public int getGridSize() {
return gridSize;
}
public void setGridSize(int gridSize) {
this.gridSize = gridSize;
enableSnap(snapToGrid);
}
public boolean isSnapEnable() {
return this.snapToGrid;
}
public void enableSnap(boolean snapToGrid) {
this.snapToGrid = snapToGrid;
if (snapToGrid) {
if (gridSize != ControlAreaFormattingConstants.NO_GRID_SIZE) {
snapGridSize = gridSize;
return;
}
}
snapGridSize = 1;
}
public void switchLayout(LAYOUT_ARRANGEMENT layout) {
this.componentLayout = layout;
}
} | false | canvas_src_main_java_gov_nasa_arc_mct_canvas_layout_CanvasLayoutManager.java |
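A usage sketch for the layout manager above, mirroring the tests in the next row; the Swing scaffolding (frame, window sizes) is illustrative only and not taken from the source:

import java.awt.Rectangle;
import javax.swing.JFrame;
import javax.swing.JPanel;

public class CanvasLayoutSketch {
    public static void main(String[] args) {
        JFrame frame = new JFrame("Canvas layout sketch");
        JPanel canvas = new JPanel();
        // Tile newly added panels left to right, wrapping at the parent's width
        CanvasLayoutManager layout = new CanvasLayoutManager(CanvasLayoutManager.TILE);
        layout.setGridSize(10);  // snap positions up to the next 10-pixel grid line
        layout.enableSnap(true);
        canvas.setLayout(layout);
        // The Rectangle constraint carries each child's requested bounds
        canvas.add(new JPanel(), new Rectangle(0, 0, 30, 40));
        canvas.add(new JPanel(), new Rectangle(0, 0, 40, 50));
        frame.setContentPane(canvas);
        frame.setSize(200, 200);
        frame.setVisible(true);
    }
}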
96 | private static enum LAYOUT_ARRANGEMENT {
free, tile, mix
} | false | canvas_src_main_java_gov_nasa_arc_mct_canvas_layout_CanvasLayoutManager.java |
97 | public class CanvasLayoutManagerTest {
private JPanel panel;
private JPanel parentPanel;
@BeforeMethod
public void setup() {
panel = new JPanel();
setupParent(panel);
}
@Test
public void testDefaultLayout() {
CanvasLayoutManager layoutMgr = new CanvasLayoutManager();
panel.setLayout(layoutMgr);
JPanel childPanel1 = new JPanel();
JPanel childPanel2 = new JPanel();
JPanel childPanel3 = new JPanel();
panel.add(childPanel1, new Rectangle(10, 20, 30, 40));
panel.add(childPanel2, new Rectangle(20, 30, 40, 50));
panel.add(childPanel3, new Rectangle(30, 40, 50, 60));
layoutMgr.invalidateLayout(panel);
Component[] childs = panel.getComponents();
Assert.assertEquals(childs.length, 3);
Assert.assertEquals(childs[0].getBounds(), new Rectangle(10, 20, 30, 40));
Assert.assertEquals(childs[1].getBounds(), new Rectangle(20, 30, 40, 50));
Assert.assertEquals(childs[2].getBounds(), new Rectangle(30, 40, 50, 60));
}
@Test
public void testTileLayout() {
CanvasLayoutManager layoutMgr = new CanvasLayoutManager(CanvasLayoutManager.TILE);
panel.setLayout(layoutMgr);
panel.setSize(100, 100);
JPanel childPanel1 = new JPanel();
JPanel childPanel2 = new JPanel();
JPanel childPanel3 = new JPanel();
panel.add(childPanel1, new Rectangle(10, 20, 30, 40));
panel.add(childPanel2, new Rectangle(20, 30, 40, 50));
panel.add(childPanel3, new Rectangle(30, 40, 50, 60));
layoutMgr.invalidateLayout(panel);
Component[] childs = panel.getComponents();
Assert.assertEquals(childs.length, 3);
Assert.assertEquals(childs[0].getBounds().getLocation(), new Point(0, 0));
Assert.assertEquals(childs[1].getBounds().getLocation(), new Point(30, 0));
Assert.assertEquals(childs[2].getBounds().getLocation(), new Point(0, 50));
}
@Test
public void snapToGrid() {
CanvasLayoutManager layoutMgr = new CanvasLayoutManager(CanvasLayoutManager.MIX);
layoutMgr.setGridSize(7);
layoutMgr.enableSnap(true);
panel.setLayout(layoutMgr);
panel.setSize(100, 100);
JPanel childPanel1 = new JPanel();
JPanel childPanel2 = new JPanel();
JPanel childPanel3 = new JPanel();
panel.add(childPanel1, new Rectangle(0, 0, 30, 40));
panel.add(childPanel2, new Rectangle(20, 30, 40, 50));
panel.add(childPanel3, new Rectangle(30, 40, 50, 60));
layoutMgr.invalidateLayout(panel);
Component[] childs = panel.getComponents();
Assert.assertEquals(childs.length, 3);
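// With a 7-pixel grid: tiling advances x to 30 after the first panel, and marshalLocation
// snaps it up to the next multiple of 7 (35); wrapping to the second row puts y at 50,
// which snaps up to 56.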
Assert.assertEquals(childs[0].getBounds().getLocation(), new Point(0, 0));
Assert.assertEquals(childs[1].getBounds().getLocation(), new Point(35, 0));
Assert.assertEquals(childs[2].getBounds().getLocation(), new Point(0, 56));
}
@Test
public void testMixLayout() {
CanvasLayoutManager layoutMgr = new CanvasLayoutManager(CanvasLayoutManager.MIX);
panel.setLayout(layoutMgr);
panel.setSize(100, 100);
JPanel childPanel1 = new JPanel();
panel.add(childPanel1, new Rectangle(10, 20, 30, 40));
layoutMgr.invalidateLayout(panel);
JPanel childPanel2 = new JPanel();
panel.add(childPanel2, new Rectangle(20, 30, 40, 50));
layoutMgr.invalidateLayout(panel);
JPanel childPanel3 = new JPanel();
panel.add(childPanel3, new Rectangle(30, 40, 50, 60));
layoutMgr.invalidateLayout(panel);
Component[] childs = panel.getComponents();
Assert.assertEquals(childs.length, 3);
Assert.assertEquals(childs[0].getBounds(), new Rectangle(10, 20, 30, 40));
Assert.assertEquals(childs[1].getBounds(), new Rectangle(20, 30, 40, 50));
Assert.assertEquals(childs[2].getBounds(), new Rectangle(30, 40, 50, 60));
panel = new JPanel();
setupParent(panel);
panel.setLayout(layoutMgr);
panel.setSize(100, 100);
childPanel1 = new JPanel();
childPanel2 = new JPanel();
childPanel3 = new JPanel();
panel.add(childPanel1, new Rectangle(0, 0, 30, 40));
panel.add(childPanel2, new Rectangle(20, 30, 40, 50));
panel.add(childPanel3, new Rectangle(30, 40, 50, 60));
layoutMgr.invalidateLayout(panel);
childs = panel.getComponents();
Assert.assertEquals(childs.length, 3);
Assert.assertEquals(childs[0].getBounds().getLocation(), new Point(0, 0));
Assert.assertEquals(childs[1].getBounds().getLocation(), new Point(30, 0));
Assert.assertEquals(childs[2].getBounds().getLocation(), new Point(0, 50));
}
private void setupParent(JPanel panel) {
parentPanel = new JPanel();
parentPanel.add(panel);
parentPanel.setSize(100, 100);
}
} | false | canvas_src_test_java_gov_nasa_arc_mct_canvas_layout_CanvasLayoutManagerTest.java |
98 | public enum CanvasViewStrategy {
CANVAS_OWNED() {
public ExtendedProperties getExistingProperties(MCTViewManifestationInfo info, ViewInfo desiredView) {
ExtendedProperties ep = null;
for (ExtendedProperties p : info.getOwnedProperties()) {
String viewType = p.getProperty(OWNED_TYPE_PROPERTY_NAME, String.class);
if (desiredView.getType().equals(viewType)) {
ep = p;
break;
}
}
return ep;
}
private void addAllExtendedProperties(MCTViewManifestationInfo manifestInfo, Set<ViewInfo> infos, ComponentInitializer ci) {
for (ViewInfo info:infos) {
// if manifest info contains the view then add the persisted properties to the component
ExtendedProperties savedProperties = getExistingProperties(manifestInfo, info);
if (savedProperties != null) {
ci.setViewRoleProperty(info.getType(), savedProperties);
} else {
ExtendedProperties propertiesFromComponent = ci.getViewRoleProperties(info.getType());
if (propertiesFromComponent == null) {
propertiesFromComponent = new ExtendedProperties();
}
assert propertiesFromComponent != null : "properties should not be null";
propertiesFromComponent.addProperty(OWNED_TYPE_PROPERTY_NAME, info.getType());
manifestInfo.getOwnedProperties().add(propertiesFromComponent);
}
}
}
@Override
public View createViewFromManifestInfo(ViewInfo info, AbstractComponent comp, AbstractComponent canvas, MCTViewManifestationInfo canvasContent) {
AbstractComponent clonedComponent = PlatformAccess.getPlatform().getPersistenceProvider().getComponent(comp.getComponentId());
ComponentInitializer ci = clonedComponent.getCapability(ComponentInitializer.class);
addAllExtendedProperties(canvasContent, comp.getViewInfos(ViewType.EMBEDDED), ci);
ci.setWorkUnitDelegate(canvas);
return info.createView(clonedComponent);
}
};
public abstract View createViewFromManifestInfo(ViewInfo info, AbstractComponent component, AbstractComponent canvas, MCTViewManifestationInfo canvasContent);
public abstract ExtendedProperties getExistingProperties(MCTViewManifestationInfo info, ViewInfo desiredView);
public static final String OWNED_TYPE_PROPERTY_NAME = "gov.nasa.arc.mct.canvas.view.Canvas.OwnedPropertiesType";
} | false | canvas_src_main_java_gov_nasa_arc_mct_canvas_panel_CanvasViewStrategy.java |
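A hypothetical caller for the strategy above; restoreOwnedView is an illustrative name, and the MCT platform types (ViewInfo, AbstractComponent, ExtendedProperties, View, MCTViewManifestationInfo) are assumed to be on the classpath exactly as the enum itself references them:

// Illustrative helper, not part of the source
static View restoreOwnedView(ViewInfo desired, AbstractComponent component,
        AbstractComponent canvas, MCTViewManifestationInfo canvasContent) {
    // If the canvas already owns persisted properties for this view type they are
    // reused; otherwise createViewFromManifestInfo seeds them from the component
    ExtendedProperties saved =
            CanvasViewStrategy.CANVAS_OWNED.getExistingProperties(canvasContent, desired);
    if (saved != null) {
        System.out.println("Reusing owned properties for " + desired.getType());
    }
    return CanvasViewStrategy.CANVAS_OWNED.createViewFromManifestInfo(
            desired, component, canvas, canvasContent);
}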
99 | CANVAS_OWNED() {
public ExtendedProperties getExistingProperties(MCTViewManifestationInfo info, ViewInfo desiredView) {
ExtendedProperties ep = null;
for (ExtendedProperties p : info.getOwnedProperties()) {
String viewType = p.getProperty(OWNED_TYPE_PROPERTY_NAME, String.class);
if (desiredView.getType().equals(viewType)) {
ep = p;
break;
}
}
return ep;
}
private void addAllExtendedProperties(MCTViewManifestationInfo manifestInfo, Set<ViewInfo> infos, ComponentInitializer ci) {
for (ViewInfo info:infos) {
// if manifest info contains the view then add the persisted properties to the component
ExtendedProperties savedProperties = getExistingProperties(manifestInfo, info);
if (savedProperties != null) {
ci.setViewRoleProperty(info.getType(), savedProperties);
} else {
ExtendedProperties propertiesFromComponent = ci.getViewRoleProperties(info.getType());
if (propertiesFromComponent == null) {
propertiesFromComponent = new ExtendedProperties();
}
assert propertiesFromComponent != null : "properties should not be null";
propertiesFromComponent.addProperty(OWNED_TYPE_PROPERTY_NAME, info.getType());
manifestInfo.getOwnedProperties().add(propertiesFromComponent);
}
}
}
@Override
public View createViewFromManifestInfo(ViewInfo info, AbstractComponent comp, AbstractComponent canvas, MCTViewManifestationInfo canvasContent) {
AbstractComponent clonedComponent = PlatformAccess.getPlatform().getPersistenceProvider().getComponent(comp.getComponentId());
ComponentInitializer ci = clonedComponent.getCapability(ComponentInitializer.class);
addAllExtendedProperties(canvasContent, comp.getViewInfos(ViewType.EMBEDDED), ci);
ci.setWorkUnitDelegate(canvas);
return info.createView(clonedComponent);
}
}; | false | canvas_src_main_java_gov_nasa_arc_mct_canvas_panel_CanvasViewStrategy.java |