id (int64, 22 – 34.9k) | comment_id (int64, 0 – 328) | comment (string, 2 – 2.55k chars) | code (string, 31 – 107k chars) | classification (string, 6 classes) | isFinished (bool, 1 class) | code_context_2 (string, 21 – 27.3k chars) | code_context_10 (string, 29 – 27.3k chars) | code_context_20 (string, 29 – 27.3k chars)
---|---|---|---|---|---|---|---|---|
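Read as a table, each row pairs a source comment with its enclosing method, a classification label (e.g. NONSATD for comments that are not self-admitted technical debt), a completion flag, and three context windows of increasing size. As a minimal sketch of the schema, assuming field types from the header above, a Java record mirroring one row might look like this (the record is illustrative, not part of any dataset tooling):

```java
/** One row of the comment-classification dataset (illustrative, field types assumed). */
public record CommentRow(
    long id,               // example: 25731
    int commentId,         // position of the comment within the method
    String comment,        // raw comment text
    String code,           // enclosing method source
    String classification, // one of 6 classes, e.g. "NONSATD"
    boolean isFinished,    // annotation finished flag (always true in the rows below)
    String codeContext2,   // narrow context window around the comment
    String codeContext10,  // medium context window
    String codeContext20   // wide context window
) {}
```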
1,139 | 2 | // Transform baseline time series | private void transformTimeSeries(String metricName, AnomalyDetectionContext anomalyDetectionContext) {
// Transform the observed (current) time series
if (anomalyDetectionContext.getTransformedCurrent(metricName) == null) {
anomalyDetectionContext.setTransformedCurrent(metricName, anomalyDetectionContext.getCurrent(metricName));
}
List<TransformationFunction> currentTimeSeriesTransformationChain =
getCurrentTimeSeriesTransformationChain();
if (CollectionUtils.isNotEmpty(currentTimeSeriesTransformationChain)) {
for (TransformationFunction tf : currentTimeSeriesTransformationChain) {
anomalyDetectionContext
.setTransformedCurrent(metricName, tf.transform(anomalyDetectionContext.getTransformedCurrent(metricName),
anomalyDetectionContext));
}
}
// Transform baseline time series
if (anomalyDetectionContext.getTransformedBaselines(metricName) == null) {
anomalyDetectionContext.setTransformedBaselines(metricName, anomalyDetectionContext.getBaselines(metricName));
}
List<TransformationFunction> baselineTimeSeriesTransformationChain =
getBaselineTimeSeriesTransformationChain();
if (CollectionUtils.isNotEmpty(anomalyDetectionContext.getTransformedBaselines(metricName))
&& CollectionUtils.isNotEmpty(baselineTimeSeriesTransformationChain)) {
for (TransformationFunction tf : baselineTimeSeriesTransformationChain) {
List<TimeSeries> transformedBaselines = new ArrayList<>();
for (TimeSeries ts : anomalyDetectionContext.getTransformedBaselines(metricName)) {
TimeSeries transformedTS = tf.transform(ts, anomalyDetectionContext);
transformedBaselines.add(transformedTS);
}
anomalyDetectionContext.setTransformedBaselines(metricName, transformedBaselines);
}
}
} | NONSATD | true | }
}
// Transform baseline time series
if (anomalyDetectionContext.getTransformedBaselines(metricName) == null) {
anomalyDetectionContext.setTransformedBaselines(metricName, anomalyDetectionContext.getBaselines(metricName)); | }
List<TransformationFunction> currentTimeSeriesTransformationChain =
getCurrentTimeSeriesTransformationChain();
if (CollectionUtils.isNotEmpty(currentTimeSeriesTransformationChain)) {
for (TransformationFunction tf : currentTimeSeriesTransformationChain) {
anomalyDetectionContext
.setTransformedCurrent(metricName, tf.transform(anomalyDetectionContext.getTransformedCurrent(metricName),
anomalyDetectionContext));
}
}
// Transform baseline time series
if (anomalyDetectionContext.getTransformedBaselines(metricName) == null) {
anomalyDetectionContext.setTransformedBaselines(metricName, anomalyDetectionContext.getBaselines(metricName));
}
List<TransformationFunction> baselineTimeSeriesTransformationChain =
getBaselineTimeSeriesTransformationChain();
if (CollectionUtils.isNotEmpty(anomalyDetectionContext.getTransformedBaselines(metricName))
&& CollectionUtils.isNotEmpty(baselineTimeSeriesTransformationChain)) {
for (TransformationFunction tf : baselineTimeSeriesTransformationChain) {
List<TimeSeries> transformedBaselines = new ArrayList<>();
for (TimeSeries ts : anomalyDetectionContext.getTransformedBaselines(metricName)) { | private void transformTimeSeries(String metricName, AnomalyDetectionContext anomalyDetectionContext) {
// Transform the observed (current) time series
if (anomalyDetectionContext.getTransformedCurrent(metricName) == null) {
anomalyDetectionContext.setTransformedCurrent(metricName, anomalyDetectionContext.getCurrent(metricName));
}
List<TransformationFunction> currentTimeSeriesTransformationChain =
getCurrentTimeSeriesTransformationChain();
if (CollectionUtils.isNotEmpty(currentTimeSeriesTransformationChain)) {
for (TransformationFunction tf : currentTimeSeriesTransformationChain) {
anomalyDetectionContext
.setTransformedCurrent(metricName, tf.transform(anomalyDetectionContext.getTransformedCurrent(metricName),
anomalyDetectionContext));
}
}
// Transform baseline time series
if (anomalyDetectionContext.getTransformedBaselines(metricName) == null) {
anomalyDetectionContext.setTransformedBaselines(metricName, anomalyDetectionContext.getBaselines(metricName));
}
List<TransformationFunction> baselineTimeSeriesTransformationChain =
getBaselineTimeSeriesTransformationChain();
if (CollectionUtils.isNotEmpty(anomalyDetectionContext.getTransformedBaselines(metricName))
&& CollectionUtils.isNotEmpty(baselineTimeSeriesTransformationChain)) {
for (TransformationFunction tf : baselineTimeSeriesTransformationChain) {
List<TimeSeries> transformedBaselines = new ArrayList<>();
for (TimeSeries ts : anomalyDetectionContext.getTransformedBaselines(metricName)) {
TimeSeries transformedTS = tf.transform(ts, anomalyDetectionContext);
transformedBaselines.add(transformedTS);
}
anomalyDetectionContext.setTransformedBaselines(metricName, transformedBaselines);
}
}
} |
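The row above applies a chain of TransformationFunction objects first to the current series and then to each baseline series. Below is a self-contained sketch of that chain-of-transformations pattern, using java.util.function.UnaryOperator as a stand-in for the project's TransformationFunction interface; all names here are illustrative, not the original API.

```java
import java.util.List;
import java.util.function.UnaryOperator;

/** Minimal sketch of the chain-of-transformations pattern (illustrative names). */
final class TransformationChainDemo {

    /** Stand-in for a time series: a bare array of values. */
    record TimeSeries(double[] values) {}

    /** Feed the series through each transformation in order. */
    static TimeSeries applyChain(TimeSeries input, List<UnaryOperator<TimeSeries>> chain) {
        TimeSeries current = input;
        for (UnaryOperator<TimeSeries> tf : chain) {
            current = tf.apply(current); // each step sees the previous step's output
        }
        return current;
    }

    public static void main(String[] args) {
        List<UnaryOperator<TimeSeries>> chain = List.of(
            s -> scale(s, 10.0), // e.g. a unit conversion
            s -> shift(s, 1.0)   // e.g. an offset correction
        );
        TimeSeries out = applyChain(new TimeSeries(new double[] {1, 2, 3}), chain);
        System.out.println(java.util.Arrays.toString(out.values())); // [11.0, 21.0, 31.0]
    }

    static TimeSeries scale(TimeSeries s, double f) {
        double[] v = s.values().clone();
        for (int i = 0; i < v.length; i++) v[i] *= f;
        return new TimeSeries(v);
    }

    static TimeSeries shift(TimeSeries s, double d) {
        double[] v = s.values().clone();
        for (int i = 0; i < v.length; i++) v[i] += d;
        return new TimeSeries(v);
    }
}
```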
25,731 | 0 | /*. . . . . . . . . . . . . . . . . . . . . . . . . . . . . .*/
/** Emit code for the symbol constant class, optionally including non terms,
* if they have been requested.
* @param out stream to produce output on.
* @param emit_non_terms do we emit constants for non terminals?
* @param sym_interface should we emit an interface, rather than a class?
*/ | public static void symbols(PrintWriter out,
boolean emit_non_terms, boolean sym_interface)
{
terminal term;
non_terminal nt;
String class_or_interface = (sym_interface) ? "interface" : "class";
long start_time = System.currentTimeMillis();
/* top of file */
out.println();
out.println("//----------------------------------------------------");
out.println("// The following code was generated by " + version.title_str);
out.println("//----------------------------------------------------");
out.println();
emit_package(out);
/* class header */
out.println("/** CUP generated " + class_or_interface +
" containing symbol constants. */");
out.println("public " + class_or_interface + " " +
symbol_const_class_name + " {");
out.println(" /* terminals */");
/* walk over the terminals *//* later might sort these */
for (Enumeration e = terminal.all(); e.hasMoreElements();)
{
term = (terminal) e.nextElement();
/* output a constant decl for the terminal */
out.println(" public static final int " + term.name() + " = " +
term.index() + ";");
}
/* Emit names of terminals */
out.println(" public static final String[] terminalNames = new String[] {");
for (int i = 0; i < terminal.number(); i++) {
out.print(" \"");
out.print(terminal.find(i).name());
out.print("\"");
if (i < terminal.number() - 1) {
out.print(",");
}
out.println();
}
out.println(" };");
/* do the non terminals if they want them (parser doesn't need them) */
if (emit_non_terms)
{
out.println();
out.println(" /* non terminals */");
/* walk over the non terminals *//* later might sort these */
for (Enumeration e = non_terminal.all(); e.hasMoreElements();)
{
nt = (non_terminal) e.nextElement();
// ****
// TUM Comment: here we could add a typesafe enumeration
// ****
/* output a constant decl for the terminal */
out.println(" static final int " + nt.name() + " = " +
nt.index() + ";");
}
}
/* end of class */
out.println("}");
out.println();
symbols_time = System.currentTimeMillis() - start_time;
} | NONSATD | true | public static void symbols(PrintWriter out,
boolean emit_non_terms, boolean sym_interface)
{
terminal term;
non_terminal nt;
String class_or_interface = (sym_interface) ? "interface" : "class";
long start_time = System.currentTimeMillis();
/* top of file */
out.println();
out.println("//----------------------------------------------------");
out.println("// The following code was generated by " + version.title_str);
out.println("//----------------------------------------------------");
out.println();
emit_package(out);
/* class header */
out.println("/** CUP generated " + class_or_interface +
" containing symbol constants. */");
out.println("public " + class_or_interface + " " +
symbol_const_class_name + " {");
out.println(" /* terminals */");
/* walk over the terminals *//* later might sort these */
for (Enumeration e = terminal.all(); e.hasMoreElements();)
{
term = (terminal) e.nextElement();
/* output a constant decl for the terminal */
out.println(" public static final int " + term.name() + " = " +
term.index() + ";");
}
/* Emit names of terminals */
out.println(" public static final String[] terminalNames = new String[] {");
for (int i = 0; i < terminal.number(); i++) {
out.print(" \"");
out.print(terminal.find(i).name());
out.print("\"");
if (i < terminal.number() - 1) {
out.print(",");
}
out.println();
}
out.println(" };");
/* do the non terminals if they want them (parser doesn't need them) */
if (emit_non_terms)
{
out.println();
out.println(" /* non terminals */");
/* walk over the non terminals *//* later might sort these */
for (Enumeration e = non_terminal.all(); e.hasMoreElements();)
{
nt = (non_terminal) e.nextElement();
// ****
// TUM Comment: here we could add a typesafe enumeration
// ****
/* output a constant decl for the terminal */
out.println(" static final int " + nt.name() + " = " +
nt.index() + ";");
}
}
/* end of class */
out.println("}");
out.println();
symbols_time = System.currentTimeMillis() - start_time;
} | public static void symbols(PrintWriter out,
boolean emit_non_terms, boolean sym_interface)
{
terminal term;
non_terminal nt;
String class_or_interface = (sym_interface) ? "interface" : "class";
long start_time = System.currentTimeMillis();
/* top of file */
out.println();
out.println("//----------------------------------------------------");
out.println("// The following code was generated by " + version.title_str);
out.println("//----------------------------------------------------");
out.println();
emit_package(out);
/* class header */
out.println("/** CUP generated " + class_or_interface +
" containing symbol constants. */");
out.println("public " + class_or_interface + " " +
symbol_const_class_name + " {");
out.println(" /* terminals */");
/* walk over the terminals *//* later might sort these */
for (Enumeration e = terminal.all(); e.hasMoreElements();)
{
term = (terminal) e.nextElement();
/* output a constant decl for the terminal */
out.println(" public static final int " + term.name() + " = " +
term.index() + ";");
}
/* Emit names of terminals */
out.println(" public static final String[] terminalNames = new String[] {");
for (int i = 0; i < terminal.number(); i++) {
out.print(" \"");
out.print(terminal.find(i).name());
out.print("\"");
if (i < terminal.number() - 1) {
out.print(",");
}
out.println();
}
out.println(" };");
/* do the non terminals if they want them (parser doesn't need them) */
if (emit_non_terms)
{
out.println();
out.println(" /* non terminals */");
/* walk over the non terminals *//* later might sort these */
for (Enumeration e = non_terminal.all(); e.hasMoreElements();)
{
nt = (non_terminal) e.nextElement();
// ****
// TUM Comment: here we could add a typesafe enumeration
// ****
/* output a constant decl for the terminal */
out.println(" static final int " + nt.name() + " = " +
nt.index() + ";");
}
}
/* end of class */
out.println("}");
out.println();
symbols_time = System.currentTimeMillis() - start_time;
} | public static void symbols(PrintWriter out,
boolean emit_non_terms, boolean sym_interface)
{
terminal term;
non_terminal nt;
String class_or_interface = (sym_interface) ? "interface" : "class";
long start_time = System.currentTimeMillis();
/* top of file */
out.println();
out.println("//----------------------------------------------------");
out.println("// The following code was generated by " + version.title_str);
out.println("//----------------------------------------------------");
out.println();
emit_package(out);
/* class header */
out.println("/** CUP generated " + class_or_interface +
" containing symbol constants. */");
out.println("public " + class_or_interface + " " +
symbol_const_class_name + " {");
out.println(" /* terminals */");
/* walk over the terminals *//* later might sort these */
for (Enumeration e = terminal.all(); e.hasMoreElements();)
{
term = (terminal) e.nextElement();
/* output a constant decl for the terminal */
out.println(" public static final int " + term.name() + " = " +
term.index() + ";");
}
/* Emit names of terminals */
out.println(" public static final String[] terminalNames = new String[] {");
for (int i = 0; i < terminal.number(); i++) {
out.print(" \"");
out.print(terminal.find(i).name());
out.print("\"");
if (i < terminal.number() - 1) {
out.print(",");
}
out.println();
}
out.println(" };");
/* do the non terminals if they want them (parser doesn't need them) */
if (emit_non_terms)
{
out.println();
out.println(" /* non terminals */");
/* walk over the non terminals *//* later might sort these */
for (Enumeration e = non_terminal.all(); e.hasMoreElements();)
{
nt = (non_terminal) e.nextElement();
// ****
// TUM Comment: here we could add a typesafe enumeration
// ****
/* output a constant decl for the terminal */
out.println(" static final int " + nt.name() + " = " +
nt.index() + ";");
}
}
/* end of class */
out.println("}");
out.println();
symbols_time = System.currentTimeMillis() - start_time;
} |
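Tracing the out.println calls in the row above, the emitter produces a file of roughly the following shape; the class name, terminal names, and indices below are invented for illustration.

```java
//----------------------------------------------------
// The following code was generated by CUP (version string from version.title_str)
//----------------------------------------------------

/** CUP generated class containing symbol constants. */
public class sym {
  /* terminals */
  public static final int EOF = 0;
  public static final int PLUS = 1;
  public static final int NUMBER = 2;
  public static final String[] terminalNames = new String[] {
  "EOF",
  "PLUS",
  "NUMBER"
  };
}
```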
25,731 | 1 | /* top of file */ | public static void symbols(PrintWriter out,
boolean emit_non_terms, boolean sym_interface)
{
terminal term;
non_terminal nt;
String class_or_interface = (sym_interface) ? "interface" : "class";
long start_time = System.currentTimeMillis();
/* top of file */
out.println();
out.println("//----------------------------------------------------");
out.println("// The following code was generated by " + version.title_str);
out.println("//----------------------------------------------------");
out.println();
emit_package(out);
/* class header */
out.println("/** CUP generated " + class_or_interface +
" containing symbol constants. */");
out.println("public " + class_or_interface + " " +
symbol_const_class_name + " {");
out.println(" /* terminals */");
/* walk over the terminals *//* later might sort these */
for (Enumeration e = terminal.all(); e.hasMoreElements();)
{
term = (terminal) e.nextElement();
/* output a constant decl for the terminal */
out.println(" public static final int " + term.name() + " = " +
term.index() + ";");
}
/* Emit names of terminals */
out.println(" public static final String[] terminalNames = new String[] {");
for (int i = 0; i < terminal.number(); i++) {
out.print(" \"");
out.print(terminal.find(i).name());
out.print("\"");
if (i < terminal.number() - 1) {
out.print(",");
}
out.println();
}
out.println(" };");
/* do the non terminals if they want them (parser doesn't need them) */
if (emit_non_terms)
{
out.println();
out.println(" /* non terminals */");
/* walk over the non terminals *//* later might sort these */
for (Enumeration e = non_terminal.all(); e.hasMoreElements();)
{
nt = (non_terminal) e.nextElement();
// ****
// TUM Comment: here we could add a typesafe enumeration
// ****
/* output a constant decl for the terminal */
out.println(" static final int " + nt.name() + " = " +
nt.index() + ";");
}
}
/* end of class */
out.println("}");
out.println();
symbols_time = System.currentTimeMillis() - start_time;
} | NONSATD | true | String class_or_interface = (sym_interface) ? "interface" : "class";
long start_time = System.currentTimeMillis();
/* top of file */
out.println();
out.println("//----------------------------------------------------"); | public static void symbols(PrintWriter out,
boolean emit_non_terms, boolean sym_interface)
{
terminal term;
non_terminal nt;
String class_or_interface = (sym_interface) ? "interface" : "class";
long start_time = System.currentTimeMillis();
/* top of file */
out.println();
out.println("//----------------------------------------------------");
out.println("// The following code was generated by " + version.title_str);
out.println("//----------------------------------------------------");
out.println();
emit_package(out);
/* class header */
out.println("/** CUP generated " + class_or_interface +
" containing symbol constants. */");
out.println("public " + class_or_interface + " " + | public static void symbols(PrintWriter out,
boolean emit_non_terms, boolean sym_interface)
{
terminal term;
non_terminal nt;
String class_or_interface = (sym_interface) ? "interface" : "class";
long start_time = System.currentTimeMillis();
/* top of file */
out.println();
out.println("//----------------------------------------------------");
out.println("// The following code was generated by " + version.title_str);
out.println("//----------------------------------------------------");
out.println();
emit_package(out);
/* class header */
out.println("/** CUP generated " + class_or_interface +
" containing symbol constants. */");
out.println("public " + class_or_interface + " " +
symbol_const_class_name + " {");
out.println(" /* terminals */");
/* walk over the terminals *//* later might sort these */
for (Enumeration e = terminal.all(); e.hasMoreElements();)
{
term = (terminal) e.nextElement();
/* output a constant decl for the terminal */
out.println(" public static final int " + term.name() + " = " +
term.index() + ";");
} |
25,731 | 2 | /* class header */ | public static void symbols(PrintWriter out,
boolean emit_non_terms, boolean sym_interface)
{
terminal term;
non_terminal nt;
String class_or_interface = (sym_interface) ? "interface" : "class";
long start_time = System.currentTimeMillis();
/* top of file */
out.println();
out.println("//----------------------------------------------------");
out.println("// The following code was generated by " + version.title_str);
out.println("//----------------------------------------------------");
out.println();
emit_package(out);
/* class header */
out.println("/** CUP generated " + class_or_interface +
" containing symbol constants. */");
out.println("public " + class_or_interface + " " +
symbol_const_class_name + " {");
out.println(" /* terminals */");
/* walk over the terminals *//* later might sort these */
for (Enumeration e = terminal.all(); e.hasMoreElements();)
{
term = (terminal) e.nextElement();
/* output a constant decl for the terminal */
out.println(" public static final int " + term.name() + " = " +
term.index() + ";");
}
/* Emit names of terminals */
out.println(" public static final String[] terminalNames = new String[] {");
for (int i = 0; i < terminal.number(); i++) {
out.print(" \"");
out.print(terminal.find(i).name());
out.print("\"");
if (i < terminal.number() - 1) {
out.print(",");
}
out.println();
}
out.println(" };");
/* do the non terminals if they want them (parser doesn't need them) */
if (emit_non_terms)
{
out.println();
out.println(" /* non terminals */");
/* walk over the non terminals *//* later might sort these */
for (Enumeration e = non_terminal.all(); e.hasMoreElements();)
{
nt = (non_terminal) e.nextElement();
// ****
// TUM Comment: here we could add a typesafe enumeration
// ****
/* output a constant decl for the terminal */
out.println(" static final int " + nt.name() + " = " +
nt.index() + ";");
}
}
/* end of class */
out.println("}");
out.println();
symbols_time = System.currentTimeMillis() - start_time;
} | NONSATD | true | out.println();
emit_package(out);
/* class header */
out.println("/** CUP generated " + class_or_interface +
" containing symbol constants. */"); | non_terminal nt;
String class_or_interface = (sym_interface) ? "interface" : "class";
long start_time = System.currentTimeMillis();
/* top of file */
out.println();
out.println("//----------------------------------------------------");
out.println("// The following code was generated by " + version.title_str);
out.println("//----------------------------------------------------");
out.println();
emit_package(out);
/* class header */
out.println("/** CUP generated " + class_or_interface +
" containing symbol constants. */");
out.println("public " + class_or_interface + " " +
symbol_const_class_name + " {");
out.println(" /* terminals */");
/* walk over the terminals *//* later might sort these */
for (Enumeration e = terminal.all(); e.hasMoreElements();)
{
term = (terminal) e.nextElement();
/* output a constant decl for the terminal */ | public static void symbols(PrintWriter out,
boolean emit_non_terms, boolean sym_interface)
{
terminal term;
non_terminal nt;
String class_or_interface = (sym_interface) ? "interface" : "class";
long start_time = System.currentTimeMillis();
/* top of file */
out.println();
out.println("//----------------------------------------------------");
out.println("// The following code was generated by " + version.title_str);
out.println("//----------------------------------------------------");
out.println();
emit_package(out);
/* class header */
out.println("/** CUP generated " + class_or_interface +
" containing symbol constants. */");
out.println("public " + class_or_interface + " " +
symbol_const_class_name + " {");
out.println(" /* terminals */");
/* walk over the terminals *//* later might sort these */
for (Enumeration e = terminal.all(); e.hasMoreElements();)
{
term = (terminal) e.nextElement();
/* output a constant decl for the terminal */
out.println(" public static final int " + term.name() + " = " +
term.index() + ";");
}
/* Emit names of terminals */
out.println(" public static final String[] terminalNames = new String[] {");
for (int i = 0; i < terminal.number(); i++) {
out.print(" \"");
out.print(terminal.find(i).name());
out.print("\"");
if (i < terminal.number() - 1) { |
25,731 | 3 | /* walk over the terminals */
/* later might sort these */ | public static void symbols(PrintWriter out,
boolean emit_non_terms, boolean sym_interface)
{
terminal term;
non_terminal nt;
String class_or_interface = (sym_interface) ? "interface" : "class";
long start_time = System.currentTimeMillis();
/* top of file */
out.println();
out.println("//----------------------------------------------------");
out.println("// The following code was generated by " + version.title_str);
out.println("//----------------------------------------------------");
out.println();
emit_package(out);
/* class header */
out.println("/** CUP generated " + class_or_interface +
" containing symbol constants. */");
out.println("public " + class_or_interface + " " +
symbol_const_class_name + " {");
out.println(" /* terminals */");
/* walk over the terminals *//* later might sort these */
for (Enumeration e = terminal.all(); e.hasMoreElements();)
{
term = (terminal) e.nextElement();
/* output a constant decl for the terminal */
out.println(" public static final int " + term.name() + " = " +
term.index() + ";");
}
/* Emit names of terminals */
out.println(" public static final String[] terminalNames = new String[] {");
for (int i = 0; i < terminal.number(); i++) {
out.print(" \"");
out.print(terminal.find(i).name());
out.print("\"");
if (i < terminal.number() - 1) {
out.print(",");
}
out.println();
}
out.println(" };");
/* do the non terminals if they want them (parser doesn't need them) */
if (emit_non_terms)
{
out.println();
out.println(" /* non terminals */");
/* walk over the non terminals *//* later might sort these */
for (Enumeration e = non_terminal.all(); e.hasMoreElements();)
{
nt = (non_terminal) e.nextElement();
// ****
// TUM Comment: here we could add a typesafe enumeration
// ****
/* output a constant decl for the terminal */
out.println(" static final int " + nt.name() + " = " +
nt.index() + ";");
}
}
/* end of class */
out.println("}");
out.println();
symbols_time = System.currentTimeMillis() - start_time;
} | NONSATD | true | symbol_const_class_name + " {");
out.println(" /* terminals */");
/* walk over the terminals *//* later might sort these */
for (Enumeration e = terminal.all(); e.hasMoreElements();)
{ | out.println("// The following code was generated by " + version.title_str);
out.println("//----------------------------------------------------");
out.println();
emit_package(out);
/* class header */
out.println("/** CUP generated " + class_or_interface +
" containing symbol constants. */");
out.println("public " + class_or_interface + " " +
symbol_const_class_name + " {");
out.println(" /* terminals */");
/* walk over the terminals *//* later might sort these */
for (Enumeration e = terminal.all(); e.hasMoreElements();)
{
term = (terminal) e.nextElement();
/* output a constant decl for the terminal */
out.println(" public static final int " + term.name() + " = " +
term.index() + ";");
}
/* Emit names of terminals */
out.println(" public static final String[] terminalNames = new String[] {");
for (int i = 0; i < terminal.number(); i++) { | public static void symbols(PrintWriter out,
boolean emit_non_terms, boolean sym_interface)
{
terminal term;
non_terminal nt;
String class_or_interface = (sym_interface) ? "interface" : "class";
long start_time = System.currentTimeMillis();
/* top of file */
out.println();
out.println("//----------------------------------------------------");
out.println("// The following code was generated by " + version.title_str);
out.println("//----------------------------------------------------");
out.println();
emit_package(out);
/* class header */
out.println("/** CUP generated " + class_or_interface +
" containing symbol constants. */");
out.println("public " + class_or_interface + " " +
symbol_const_class_name + " {");
out.println(" /* terminals */");
/* walk over the terminals *//* later might sort these */
for (Enumeration e = terminal.all(); e.hasMoreElements();)
{
term = (terminal) e.nextElement();
/* output a constant decl for the terminal */
out.println(" public static final int " + term.name() + " = " +
term.index() + ";");
}
/* Emit names of terminals */
out.println(" public static final String[] terminalNames = new String[] {");
for (int i = 0; i < terminal.number(); i++) {
out.print(" \"");
out.print(terminal.find(i).name());
out.print("\"");
if (i < terminal.number() - 1) {
out.print(",");
}
out.println();
}
out.println(" };");
/* do the non terminals if they want them (parser doesn't need them) */ |
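The recurring "/* later might sort these */" note in these rows suggests emitting the constants in sorted order rather than enumeration order. A minimal self-contained sketch of that change, with a stand-in Terminal record in place of CUP's terminal class:

```java
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

/** Sketch: emit terminal constants in name order instead of declaration order. */
final class SortedEmitDemo {

    /** Stand-in for CUP's terminal class. */
    record Terminal(String name, int index) {}

    static void emitSorted(PrintWriter out, List<Terminal> terminals) {
        List<Terminal> sorted = new ArrayList<>(terminals);
        sorted.sort(Comparator.comparing(Terminal::name)); // the "later might sort these" idea
        for (Terminal t : sorted) {
            out.println("  public static final int " + t.name() + " = " + t.index() + ";");
        }
    }
}
```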
25,731 | 4 | /* output a constant decl for the terminal */ | public static void symbols(PrintWriter out,
boolean emit_non_terms, boolean sym_interface)
{
terminal term;
non_terminal nt;
String class_or_interface = (sym_interface) ? "interface" : "class";
long start_time = System.currentTimeMillis();
/* top of file */
out.println();
out.println("//----------------------------------------------------");
out.println("// The following code was generated by " + version.title_str);
out.println("//----------------------------------------------------");
out.println();
emit_package(out);
/* class header */
out.println("/** CUP generated " + class_or_interface +
" containing symbol constants. */");
out.println("public " + class_or_interface + " " +
symbol_const_class_name + " {");
out.println(" /* terminals */");
/* walk over the terminals *//* later might sort these */
for (Enumeration e = terminal.all(); e.hasMoreElements();)
{
term = (terminal) e.nextElement();
/* output a constant decl for the terminal */
out.println(" public static final int " + term.name() + " = " +
term.index() + ";");
}
/* Emit names of terminals */
out.println(" public static final String[] terminalNames = new String[] {");
for (int i = 0; i < terminal.number(); i++) {
out.print(" \"");
out.print(terminal.find(i).name());
out.print("\"");
if (i < terminal.number() - 1) {
out.print(",");
}
out.println();
}
out.println(" };");
/* do the non terminals if they want them (parser doesn't need them) */
if (emit_non_terms)
{
out.println();
out.println(" /* non terminals */");
/* walk over the non terminals *//* later might sort these */
for (Enumeration e = non_terminal.all(); e.hasMoreElements();)
{
nt = (non_terminal) e.nextElement();
// ****
// TUM Comment: here we could add a typesafe enumeration
// ****
/* output a constant decl for the terminal */
out.println(" static final int " + nt.name() + " = " +
nt.index() + ";");
}
}
/* end of class */
out.println("}");
out.println();
symbols_time = System.currentTimeMillis() - start_time;
} | NONSATD | true | {
term = (terminal) e.nextElement();
/* output a constant decl for the terminal */
out.println(" public static final int " + term.name() + " = " +
term.index() + ";"); | /* class header */
out.println("/** CUP generated " + class_or_interface +
" containing symbol constants. */");
out.println("public " + class_or_interface + " " +
symbol_const_class_name + " {");
out.println(" /* terminals */");
/* walk over the terminals *//* later might sort these */
for (Enumeration e = terminal.all(); e.hasMoreElements();)
{
term = (terminal) e.nextElement();
/* output a constant decl for the terminal */
out.println(" public static final int " + term.name() + " = " +
term.index() + ";");
}
/* Emit names of terminals */
out.println(" public static final String[] terminalNames = new String[] {");
for (int i = 0; i < terminal.number(); i++) {
out.print(" \"");
out.print(terminal.find(i).name());
out.print("\"");
if (i < terminal.number() - 1) { | non_terminal nt;
String class_or_interface = (sym_interface) ? "interface" : "class";
long start_time = System.currentTimeMillis();
/* top of file */
out.println();
out.println("//----------------------------------------------------");
out.println("// The following code was generated by " + version.title_str);
out.println("//----------------------------------------------------");
out.println();
emit_package(out);
/* class header */
out.println("/** CUP generated " + class_or_interface +
" containing symbol constants. */");
out.println("public " + class_or_interface + " " +
symbol_const_class_name + " {");
out.println(" /* terminals */");
/* walk over the terminals *//* later might sort these */
for (Enumeration e = terminal.all(); e.hasMoreElements();)
{
term = (terminal) e.nextElement();
/* output a constant decl for the terminal */
out.println(" public static final int " + term.name() + " = " +
term.index() + ";");
}
/* Emit names of terminals */
out.println(" public static final String[] terminalNames = new String[] {");
for (int i = 0; i < terminal.number(); i++) {
out.print(" \"");
out.print(terminal.find(i).name());
out.print("\"");
if (i < terminal.number() - 1) {
out.print(",");
}
out.println();
}
out.println(" };");
/* do the non terminals if they want them (parser doesn't need them) */
if (emit_non_terms)
{
out.println();
out.println(" /* non terminals */"); |
25,731 | 5 | /* Emit names of terminals */ | public static void symbols(PrintWriter out,
boolean emit_non_terms, boolean sym_interface)
{
terminal term;
non_terminal nt;
String class_or_interface = (sym_interface) ? "interface" : "class";
long start_time = System.currentTimeMillis();
/* top of file */
out.println();
out.println("//----------------------------------------------------");
out.println("// The following code was generated by " + version.title_str);
out.println("//----------------------------------------------------");
out.println();
emit_package(out);
/* class header */
out.println("/** CUP generated " + class_or_interface +
" containing symbol constants. */");
out.println("public " + class_or_interface + " " +
symbol_const_class_name + " {");
out.println(" /* terminals */");
/* walk over the terminals *//* later might sort these */
for (Enumeration e = terminal.all(); e.hasMoreElements();)
{
term = (terminal) e.nextElement();
/* output a constant decl for the terminal */
out.println(" public static final int " + term.name() + " = " +
term.index() + ";");
}
/* Emit names of terminals */
out.println(" public static final String[] terminalNames = new String[] {");
for (int i = 0; i < terminal.number(); i++) {
out.print(" \"");
out.print(terminal.find(i).name());
out.print("\"");
if (i < terminal.number() - 1) {
out.print(",");
}
out.println();
}
out.println(" };");
/* do the non terminals if they want them (parser doesn't need them) */
if (emit_non_terms)
{
out.println();
out.println(" /* non terminals */");
/* walk over the non terminals *//* later might sort these */
for (Enumeration e = non_terminal.all(); e.hasMoreElements();)
{
nt = (non_terminal) e.nextElement();
// ****
// TUM Comment: here we could add a typesafe enumeration
// ****
/* output a constant decl for the terminal */
out.println(" static final int " + nt.name() + " = " +
nt.index() + ";");
}
}
/* end of class */
out.println("}");
out.println();
symbols_time = System.currentTimeMillis() - start_time;
} | NONSATD | true | term.index() + ";");
}
/* Emit names of terminals */
out.println(" public static final String[] terminalNames = new String[] {");
for (int i = 0; i < terminal.number(); i++) { | symbol_const_class_name + " {");
out.println(" /* terminals */");
/* walk over the terminals *//* later might sort these */
for (Enumeration e = terminal.all(); e.hasMoreElements();)
{
term = (terminal) e.nextElement();
/* output a constant decl for the terminal */
out.println(" public static final int " + term.name() + " = " +
term.index() + ";");
}
/* Emit names of terminals */
out.println(" public static final String[] terminalNames = new String[] {");
for (int i = 0; i < terminal.number(); i++) {
out.print(" \"");
out.print(terminal.find(i).name());
out.print("\"");
if (i < terminal.number() - 1) {
out.print(",");
}
out.println();
} | out.println();
out.println("//----------------------------------------------------");
out.println("// The following code was generated by " + version.title_str);
out.println("//----------------------------------------------------");
out.println();
emit_package(out);
/* class header */
out.println("/** CUP generated " + class_or_interface +
" containing symbol constants. */");
out.println("public " + class_or_interface + " " +
symbol_const_class_name + " {");
out.println(" /* terminals */");
/* walk over the terminals *//* later might sort these */
for (Enumeration e = terminal.all(); e.hasMoreElements();)
{
term = (terminal) e.nextElement();
/* output a constant decl for the terminal */
out.println(" public static final int " + term.name() + " = " +
term.index() + ";");
}
/* Emit names of terminals */
out.println(" public static final String[] terminalNames = new String[] {");
for (int i = 0; i < terminal.number(); i++) {
out.print(" \"");
out.print(terminal.find(i).name());
out.print("\"");
if (i < terminal.number() - 1) {
out.print(",");
}
out.println();
}
out.println(" };");
/* do the non terminals if they want them (parser doesn't need them) */
if (emit_non_terms)
{
out.println();
out.println(" /* non terminals */");
/* walk over the non terminals *//* later might sort these */
for (Enumeration e = non_terminal.all(); e.hasMoreElements();)
{
nt = (non_terminal) e.nextElement(); |
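The terminalNames loop in these rows prints a comma after every entry except the last. As a side note, the same block can be rendered with a single join; a sketch with stand-in types, not the CUP source:

```java
import java.util.List;
import java.util.stream.Collectors;

/** Sketch: render the terminalNames block with a join instead of a manual last-comma check. */
final class TerminalNamesDemo {
    static String render(List<String> names) {
        return names.stream()
            .map(n -> "  \"" + n + "\"")
            .collect(Collectors.joining(",\n",
                "  public static final String[] terminalNames = new String[] {\n",
                "\n  };"));
    }

    public static void main(String[] args) {
        System.out.println(render(List.of("EOF", "PLUS", "NUMBER")));
    }
}
```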
25,731 | 6 | /* do the non terminals if they want them (parser doesn't need them) */ | public static void symbols(PrintWriter out,
boolean emit_non_terms, boolean sym_interface)
{
terminal term;
non_terminal nt;
String class_or_interface = (sym_interface) ? "interface" : "class";
long start_time = System.currentTimeMillis();
/* top of file */
out.println();
out.println("//----------------------------------------------------");
out.println("// The following code was generated by " + version.title_str);
out.println("//----------------------------------------------------");
out.println();
emit_package(out);
/* class header */
out.println("/** CUP generated " + class_or_interface +
" containing symbol constants. */");
out.println("public " + class_or_interface + " " +
symbol_const_class_name + " {");
out.println(" /* terminals */");
/* walk over the terminals *//* later might sort these */
for (Enumeration e = terminal.all(); e.hasMoreElements();)
{
term = (terminal) e.nextElement();
/* output a constant decl for the terminal */
out.println(" public static final int " + term.name() + " = " +
term.index() + ";");
}
/* Emit names of terminals */
out.println(" public static final String[] terminalNames = new String[] {");
for (int i = 0; i < terminal.number(); i++) {
out.print(" \"");
out.print(terminal.find(i).name());
out.print("\"");
if (i < terminal.number() - 1) {
out.print(",");
}
out.println();
}
out.println(" };");
/* do the non terminals if they want them (parser doesn't need them) */
if (emit_non_terms)
{
out.println();
out.println(" /* non terminals */");
/* walk over the non terminals *//* later might sort these */
for (Enumeration e = non_terminal.all(); e.hasMoreElements();)
{
nt = (non_terminal) e.nextElement();
// ****
// TUM Comment: here we could add a typesafe enumeration
// ****
/* output a constant decl for the terminal */
out.println(" static final int " + nt.name() + " = " +
nt.index() + ";");
}
}
/* end of class */
out.println("}");
out.println();
symbols_time = System.currentTimeMillis() - start_time;
} | NONSATD | true | }
out.println(" };");
/* do the non terminals if they want them (parser doesn't need them) */
if (emit_non_terms)
{ | for (int i = 0; i < terminal.number(); i++) {
out.print(" \"");
out.print(terminal.find(i).name());
out.print("\"");
if (i < terminal.number() - 1) {
out.print(",");
}
out.println();
}
out.println(" };");
/* do the non terminals if they want them (parser doesn't need them) */
if (emit_non_terms)
{
out.println();
out.println(" /* non terminals */");
/* walk over the non terminals *//* later might sort these */
for (Enumeration e = non_terminal.all(); e.hasMoreElements();)
{
nt = (non_terminal) e.nextElement();
// ****
// TUM Comment: here we could add a typesafe enumeration | /* walk over the terminals *//* later might sort these */
for (Enumeration e = terminal.all(); e.hasMoreElements();)
{
term = (terminal) e.nextElement();
/* output a constant decl for the terminal */
out.println(" public static final int " + term.name() + " = " +
term.index() + ";");
}
/* Emit names of terminals */
out.println(" public static final String[] terminalNames = new String[] {");
for (int i = 0; i < terminal.number(); i++) {
out.print(" \"");
out.print(terminal.find(i).name());
out.print("\"");
if (i < terminal.number() - 1) {
out.print(",");
}
out.println();
}
out.println(" };");
/* do the non terminals if they want them (parser doesn't need them) */
if (emit_non_terms)
{
out.println();
out.println(" /* non terminals */");
/* walk over the non terminals *//* later might sort these */
for (Enumeration e = non_terminal.all(); e.hasMoreElements();)
{
nt = (non_terminal) e.nextElement();
// ****
// TUM Comment: here we could add a typesafe enumeration
// ****
/* output a constant decl for the terminal */
out.println(" static final int " + nt.name() + " = " +
nt.index() + ";");
}
}
/* end of class */
out.println("}");
out.println();
symbols_time = System.currentTimeMillis() - start_time; |
25,731 | 7 | /* walk over the non terminals */
/* later might sort these */ | public static void symbols(PrintWriter out,
boolean emit_non_terms, boolean sym_interface)
{
terminal term;
non_terminal nt;
String class_or_interface = (sym_interface) ? "interface" : "class";
long start_time = System.currentTimeMillis();
/* top of file */
out.println();
out.println("//----------------------------------------------------");
out.println("// The following code was generated by " + version.title_str);
out.println("//----------------------------------------------------");
out.println();
emit_package(out);
/* class header */
out.println("/** CUP generated " + class_or_interface +
" containing symbol constants. */");
out.println("public " + class_or_interface + " " +
symbol_const_class_name + " {");
out.println(" /* terminals */");
/* walk over the terminals *//* later might sort these */
for (Enumeration e = terminal.all(); e.hasMoreElements();)
{
term = (terminal) e.nextElement();
/* output a constant decl for the terminal */
out.println(" public static final int " + term.name() + " = " +
term.index() + ";");
}
/* Emit names of terminals */
out.println(" public static final String[] terminalNames = new String[] {");
for (int i = 0; i < terminal.number(); i++) {
out.print(" \"");
out.print(terminal.find(i).name());
out.print("\"");
if (i < terminal.number() - 1) {
out.print(",");
}
out.println();
}
out.println(" };");
/* do the non terminals if they want them (parser doesn't need them) */
if (emit_non_terms)
{
out.println();
out.println(" /* non terminals */");
/* walk over the non terminals *//* later might sort these */
for (Enumeration e = non_terminal.all(); e.hasMoreElements();)
{
nt = (non_terminal) e.nextElement();
// ****
// TUM Comment: here we could add a typesafe enumeration
// ****
/* output a constant decl for the terminal */
out.println(" static final int " + nt.name() + " = " +
nt.index() + ";");
}
}
/* end of class */
out.println("}");
out.println();
symbols_time = System.currentTimeMillis() - start_time;
} | NONSATD | true | out.println();
out.println(" /* non terminals */");
/* walk over the non terminals *//* later might sort these */
for (Enumeration e = non_terminal.all(); e.hasMoreElements();)
{ | out.print(",");
}
out.println();
}
out.println(" };");
/* do the non terminals if they want them (parser doesn't need them) */
if (emit_non_terms)
{
out.println();
out.println(" /* non terminals */");
/* walk over the non terminals *//* later might sort these */
for (Enumeration e = non_terminal.all(); e.hasMoreElements();)
{
nt = (non_terminal) e.nextElement();
// ****
// TUM Comment: here we could add a typesafe enumeration
// ****
/* output a constant decl for the terminal */
out.println(" static final int " + nt.name() + " = " +
nt.index() + ";");
} | out.println(" public static final int " + term.name() + " = " +
term.index() + ";");
}
/* Emit names of terminals */
out.println(" public static final String[] terminalNames = new String[] {");
for (int i = 0; i < terminal.number(); i++) {
out.print(" \"");
out.print(terminal.find(i).name());
out.print("\"");
if (i < terminal.number() - 1) {
out.print(",");
}
out.println();
}
out.println(" };");
/* do the non terminals if they want them (parser doesn't need them) */
if (emit_non_terms)
{
out.println();
out.println(" /* non terminals */");
/* walk over the non terminals *//* later might sort these */
for (Enumeration e = non_terminal.all(); e.hasMoreElements();)
{
nt = (non_terminal) e.nextElement();
// ****
// TUM Comment: here we could add a typesafe enumeration
// ****
/* output a constant decl for the terminal */
out.println(" static final int " + nt.name() + " = " +
nt.index() + ";");
}
}
/* end of class */
out.println("}");
out.println();
symbols_time = System.currentTimeMillis() - start_time;
} |
25,731 | 8 | // ****
// TUM Comment: here we could add a typesafe enumeration
// ****
/* output a constant decl for the terminal */ | public static void symbols(PrintWriter out,
boolean emit_non_terms, boolean sym_interface)
{
terminal term;
non_terminal nt;
String class_or_interface = (sym_interface) ? "interface" : "class";
long start_time = System.currentTimeMillis();
/* top of file */
out.println();
out.println("//----------------------------------------------------");
out.println("// The following code was generated by " + version.title_str);
out.println("//----------------------------------------------------");
out.println();
emit_package(out);
/* class header */
out.println("/** CUP generated " + class_or_interface +
" containing symbol constants. */");
out.println("public " + class_or_interface + " " +
symbol_const_class_name + " {");
out.println(" /* terminals */");
/* walk over the terminals *//* later might sort these */
for (Enumeration e = terminal.all(); e.hasMoreElements();)
{
term = (terminal) e.nextElement();
/* output a constant decl for the terminal */
out.println(" public static final int " + term.name() + " = " +
term.index() + ";");
}
/* Emit names of terminals */
out.println(" public static final String[] terminalNames = new String[] {");
for (int i = 0; i < terminal.number(); i++) {
out.print(" \"");
out.print(terminal.find(i).name());
out.print("\"");
if (i < terminal.number() - 1) {
out.print(",");
}
out.println();
}
out.println(" };");
/* do the non terminals if they want them (parser doesn't need them) */
if (emit_non_terms)
{
out.println();
out.println(" /* non terminals */");
/* walk over the non terminals *//* later might sort these */
for (Enumeration e = non_terminal.all(); e.hasMoreElements();)
{
nt = (non_terminal) e.nextElement();
// ****
// TUM Comment: here we could add a typesafe enumeration
// ****
/* output a constant decl for the terminal */
out.println(" static final int " + nt.name() + " = " +
nt.index() + ";");
}
}
/* end of class */
out.println("}");
out.println();
symbols_time = System.currentTimeMillis() - start_time;
} | NONSATD | true | {
nt = (non_terminal) e.nextElement();
// ****
// TUM Comment: here we could add a typesafe enumeration
// ****
/* output a constant decl for the terminal */
out.println(" static final int " + nt.name() + " = " +
nt.index() + ";"); | out.println(" };");
/* do the non terminals if they want them (parser doesn't need them) */
if (emit_non_terms)
{
out.println();
out.println(" /* non terminals */");
/* walk over the non terminals *//* later might sort these */
for (Enumeration e = non_terminal.all(); e.hasMoreElements();)
{
nt = (non_terminal) e.nextElement();
// ****
// TUM Comment: here we could add a typesafe enumeration
// ****
/* output a constant decl for the terminal */
out.println(" static final int " + nt.name() + " = " +
nt.index() + ";");
}
}
/* end of class */
out.println("}");
out.println();
symbols_time = System.currentTimeMillis() - start_time;
} | out.println(" public static final String[] terminalNames = new String[] {");
for (int i = 0; i < terminal.number(); i++) {
out.print(" \"");
out.print(terminal.find(i).name());
out.print("\"");
if (i < terminal.number() - 1) {
out.print(",");
}
out.println();
}
out.println(" };");
/* do the non terminals if they want them (parser doesn't need them) */
if (emit_non_terms)
{
out.println();
out.println(" /* non terminals */");
/* walk over the non terminals *//* later might sort these */
for (Enumeration e = non_terminal.all(); e.hasMoreElements();)
{
nt = (non_terminal) e.nextElement();
// ****
// TUM Comment: here we could add a typesafe enumeration
// ****
/* output a constant decl for the terminal */
out.println(" static final int " + nt.name() + " = " +
nt.index() + ";");
}
}
/* end of class */
out.println("}");
out.println();
symbols_time = System.currentTimeMillis() - start_time;
} |
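The TUM comment in this row explicitly floats adding a typesafe enumeration for the non terminals. One shape the generated code could take, sketched here with invented non-terminal names and indices:

```java
/** Sketch of a typesafe enumeration replacing the bare int non-terminal constants. */
public enum NonTerminal {
    expr(0), term(1), factor(2);

    private final int index;

    NonTerminal(int index) { this.index = index; }

    /** The integer index the bare constant would have carried. */
    public int index() { return index; }
}
```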
25,731 | 9 | /* end of class */ | public static void symbols(PrintWriter out,
boolean emit_non_terms, boolean sym_interface)
{
terminal term;
non_terminal nt;
String class_or_interface = (sym_interface) ? "interface" : "class";
long start_time = System.currentTimeMillis();
/* top of file */
out.println();
out.println("//----------------------------------------------------");
out.println("// The following code was generated by " + version.title_str);
out.println("//----------------------------------------------------");
out.println();
emit_package(out);
/* class header */
out.println("/** CUP generated " + class_or_interface +
" containing symbol constants. */");
out.println("public " + class_or_interface + " " +
symbol_const_class_name + " {");
out.println(" /* terminals */");
/* walk over the terminals *//* later might sort these */
for (Enumeration e = terminal.all(); e.hasMoreElements();)
{
term = (terminal) e.nextElement();
/* output a constant decl for the terminal */
out.println(" public static final int " + term.name() + " = " +
term.index() + ";");
}
/* Emit names of terminals */
out.println(" public static final String[] terminalNames = new String[] {");
for (int i = 0; i < terminal.number(); i++) {
out.print(" \"");
out.print(terminal.find(i).name());
out.print("\"");
if (i < terminal.number() - 1) {
out.print(",");
}
out.println();
}
out.println(" };");
/* do the non terminals if they want them (parser doesn't need them) */
if (emit_non_terms)
{
out.println();
out.println(" /* non terminals */");
/* walk over the non terminals *//* later might sort these */
for (Enumeration e = non_terminal.all(); e.hasMoreElements();)
{
nt = (non_terminal) e.nextElement();
// ****
// TUM Comment: here we could add a typesafe enumeration
// ****
/* output a constant decl for the terminal */
out.println(" static final int " + nt.name() + " = " +
nt.index() + ";");
}
}
/* end of class */
out.println("}");
out.println();
symbols_time = System.currentTimeMillis() - start_time;
} | NONSATD | true | }
}
/* end of class */
out.println("}");
out.println(); | {
nt = (non_terminal) e.nextElement();
// ****
// TUM Comment: here we could add a typesafe enumeration
// ****
/* output a constant decl for the terminal */
out.println(" static final int " + nt.name() + " = " +
nt.index() + ";");
}
}
/* end of class */
out.println("}");
out.println();
symbols_time = System.currentTimeMillis() - start_time;
} | out.println();
}
out.println(" };");
/* do the non terminals if they want them (parser doesn't need them) */
if (emit_non_terms)
{
out.println();
out.println(" /* non terminals */");
/* walk over the non terminals *//* later might sort these */
for (Enumeration e = non_terminal.all(); e.hasMoreElements();)
{
nt = (non_terminal) e.nextElement();
// ****
// TUM Comment: here we could add a typesafe enumeration
// ****
/* output a constant decl for the terminal */
out.println(" static final int " + nt.name() + " = " +
nt.index() + ";");
}
}
/* end of class */
out.println("}");
out.println();
symbols_time = System.currentTimeMillis() - start_time;
} |
17,543 | 0 | /**
* INTERNAL:
* Return the value of the object if it already is registered, otherwise null.
*/ | public Object checkIfAlreadyRegistered(Object object, ClassDescriptor descriptor) {
// Don't register read-only classes
if (isClassReadOnly(object.getClass(), descriptor)) {
return null;
}
// Check if the working copy is again being registered in which case we return the same working copy
Object registeredObject = getCloneMapping().get(object);
if (registeredObject != null) {
return object;
}
// Check if object exists in my new objects if it is in the new objects cache then it means domain object is being
// re-registered and we should return the same working clone. This check holds only for the new registered objects
// PERF: Avoid initialization of new objects if none.
if (hasNewObjects()) {
registeredObject = getNewObjectsOriginalToClone().get(object);
if (registeredObject != null) {
return registeredObject;
}
}
if (this.isNestedUnitOfWork) {
// bug # 3228185
//may be a new object from a parent Unit Of Work, let's check our new object in parent list to see
//if it has already been registered locally
if (hasNewObjectsInParentOriginalToClone()) {
registeredObject = getNewObjectsInParentOriginalToClone().get(object);
}
if (registeredObject != null) {
return registeredObject;
}
}
return null;
} | NONSATD | true | public Object checkIfAlreadyRegistered(Object object, ClassDescriptor descriptor) {
// Don't register read-only classes
if (isClassReadOnly(object.getClass(), descriptor)) {
return null;
}
// Check if the working copy is again being registered in which case we return the same working copy
Object registeredObject = getCloneMapping().get(object);
if (registeredObject != null) {
return object;
}
// Check if object exists in my new objects if it is in the new objects cache then it means domain object is being
// re-registered and we should return the same working clone. This check holds only for the new registered objects
// PERF: Avoid initialization of new objects if none.
if (hasNewObjects()) {
registeredObject = getNewObjectsOriginalToClone().get(object);
if (registeredObject != null) {
return registeredObject;
}
}
if (this.isNestedUnitOfWork) {
// bug # 3228185
//may be a new object from a parent Unit Of Work, let's check our new object in parent list to see
//if it has already been registered locally
if (hasNewObjectsInParentOriginalToClone()) {
registeredObject = getNewObjectsInParentOriginalToClone().get(object);
}
if (registeredObject != null) {
return registeredObject;
}
}
return null;
} | public Object checkIfAlreadyRegistered(Object object, ClassDescriptor descriptor) {
// Don't register read-only classes
if (isClassReadOnly(object.getClass(), descriptor)) {
return null;
}
// Check if the working copy is again being registered in which case we return the same working copy
Object registeredObject = getCloneMapping().get(object);
if (registeredObject != null) {
return object;
}
// Check if object exists in my new objects if it is in the new objects cache then it means domain object is being
// re-registered and we should return the same working clone. This check holds only for the new registered objects
// PERF: Avoid initialization of new objects if none.
if (hasNewObjects()) {
registeredObject = getNewObjectsOriginalToClone().get(object);
if (registeredObject != null) {
return registeredObject;
}
}
if (this.isNestedUnitOfWork) {
// bug # 3228185
//may be a new object from a parent Unit Of Work, let's check our new object in parent list to see
//if it has already been registered locally
if (hasNewObjectsInParentOriginalToClone()) {
registeredObject = getNewObjectsInParentOriginalToClone().get(object);
}
if (registeredObject != null) {
return registeredObject;
}
}
return null;
} | public Object checkIfAlreadyRegistered(Object object, ClassDescriptor descriptor) {
// Don't register read-only classes
if (isClassReadOnly(object.getClass(), descriptor)) {
return null;
}
// Check if the working copy is again being registered in which case we return the same working copy
Object registeredObject = getCloneMapping().get(object);
if (registeredObject != null) {
return object;
}
// Check if object exists in my new objects if it is in the new objects cache then it means domain object is being
// re-registered and we should return the same working clone. This check holds only for the new registered objects
// PERF: Avoid initialization of new objects if none.
if (hasNewObjects()) {
registeredObject = getNewObjectsOriginalToClone().get(object);
if (registeredObject != null) {
return registeredObject;
}
}
if (this.isNestedUnitOfWork) {
// bug # 3228185
//may be a new object from a parent Unit Of Work, let's check our new object in parent list to see
//if it has already been registered locally
if (hasNewObjectsInParentOriginalToClone()) {
registeredObject = getNewObjectsInParentOriginalToClone().get(object);
}
if (registeredObject != null) {
return registeredObject;
}
}
return null;
} |
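checkIfAlreadyRegistered in the row above follows a fixed lookup order: the clone mapping, then this unit of work's new objects, then, for a nested unit of work, the parent's new objects. A condensed, self-contained sketch of that cascade using IdentityHashMap in place of EclipseLink's internal maps (names assumed, not the EclipseLink API):

```java
import java.util.IdentityHashMap;
import java.util.Map;

/** Condensed sketch of the registration-lookup cascade shown above. */
final class RegistrationLookupDemo {
    private final Map<Object, Object> cloneMapping = new IdentityHashMap<>();
    private final Map<Object, Object> newObjectsOriginalToClone = new IdentityHashMap<>();
    private final Map<Object, Object> parentNewObjectsOriginalToClone = new IdentityHashMap<>();
    private final boolean isNestedUnitOfWork;

    RegistrationLookupDemo(boolean nested) { this.isNestedUnitOfWork = nested; }

    /** Returns the working copy for an already-registered object, or null. */
    Object checkIfAlreadyRegistered(Object object) {
        // 1. A working copy registered again is returned as-is.
        if (cloneMapping.containsKey(object)) {
            return object;
        }
        // 2. A re-registered new object maps back to its existing working clone.
        Object clone = newObjectsOriginalToClone.get(object);
        if (clone != null) {
            return clone;
        }
        // 3. In a nested unit of work, the object may be new in the parent.
        if (isNestedUnitOfWork) {
            return parentNewObjectsOriginalToClone.get(object);
        }
        return null;
    }
}
```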
17,543 | 1 | // Don't register read-only classes | public Object checkIfAlreadyRegistered(Object object, ClassDescriptor descriptor) {
// Don't register read-only classes
if (isClassReadOnly(object.getClass(), descriptor)) {
return null;
}
// Check if the working copy is again being registered in which case we return the same working copy
Object registeredObject = getCloneMapping().get(object);
if (registeredObject != null) {
return object;
}
// Check if object exists in my new objects if it is in the new objects cache then it means domain object is being
// re-registered and we should return the same working clone. This check holds only for the new registered objects
// PERF: Avoid initialization of new objects if none.
if (hasNewObjects()) {
registeredObject = getNewObjectsOriginalToClone().get(object);
if (registeredObject != null) {
return registeredObject;
}
}
if (this.isNestedUnitOfWork) {
// bug # 3228185
//may be a new object from a parent Unit Of Work, let's check our new object in parent list to see
//if it has already been registered locally
if (hasNewObjectsInParentOriginalToClone()) {
registeredObject = getNewObjectsInParentOriginalToClone().get(object);
}
if (registeredObject != null) {
return registeredObject;
}
}
return null;
} | NONSATD | true | public Object checkIfAlreadyRegistered(Object object, ClassDescriptor descriptor) {
// Don't register read-only classes
if (isClassReadOnly(object.getClass(), descriptor)) {
return null; | public Object checkIfAlreadyRegistered(Object object, ClassDescriptor descriptor) {
// Don't register read-only classes
if (isClassReadOnly(object.getClass(), descriptor)) {
return null;
}
// Check if the working copy is again being registered in which case we return the same working copy
Object registeredObject = getCloneMapping().get(object);
if (registeredObject != null) {
return object;
}
// Check if object exists in my new objects if it is in the new objects cache then it means domain object is being
// re-registered and we should return the same working clone. This check holds only for the new registered objects | public Object checkIfAlreadyRegistered(Object object, ClassDescriptor descriptor) {
// Don't register read-only classes
if (isClassReadOnly(object.getClass(), descriptor)) {
return null;
}
// Check if the working copy is again being registered in which case we return the same working copy
Object registeredObject = getCloneMapping().get(object);
if (registeredObject != null) {
return object;
}
// Check if object exists in my new objects if it is in the new objects cache then it means domain object is being
// re-registered and we should return the same working clone. This check holds only for the new registered objects
// PERF: Avoid initialization of new objects if none.
if (hasNewObjects()) {
registeredObject = getNewObjectsOriginalToClone().get(object);
if (registeredObject != null) {
return registeredObject;
}
}
if (this.isNestedUnitOfWork) {
// bug # 3228185
//may be a new object from a parent Unit Of Work, let's check our new object in parent list to see |
17,543 | 2 | // Check if the working copy is again being registered in which case we return the same working copy | public Object checkIfAlreadyRegistered(Object object, ClassDescriptor descriptor) {
// Don't register read-only classes
if (isClassReadOnly(object.getClass(), descriptor)) {
return null;
}
// Check if the working copy is again being registered in which case we return the same working copy
Object registeredObject = getCloneMapping().get(object);
if (registeredObject != null) {
return object;
}
// Check if object exists in my new objects if it is in the new objects cache then it means domain object is being
// re-registered and we should return the same working clone. This check holds only for the new registered objects
// PERF: Avoid initialization of new objects if none.
if (hasNewObjects()) {
registeredObject = getNewObjectsOriginalToClone().get(object);
if (registeredObject != null) {
return registeredObject;
}
}
if (this.isNestedUnitOfWork) {
// bug # 3228185
//may be a new object from a parent Unit Of Work, let's check our new object in parent list to see
//if it has already been registered locally
if (hasNewObjectsInParentOriginalToClone()) {
registeredObject = getNewObjectsInParentOriginalToClone().get(object);
}
if (registeredObject != null) {
return registeredObject;
}
}
return null;
} | NONSATD | true | return null;
}
// Check if the working copy is again being registered in which case we return the same working copy
Object registeredObject = getCloneMapping().get(object);
if (registeredObject != null) { | public Object checkIfAlreadyRegistered(Object object, ClassDescriptor descriptor) {
// Don't register read-only classes
if (isClassReadOnly(object.getClass(), descriptor)) {
return null;
}
// Check if the working copy is again being registered in which case we return the same working copy
Object registeredObject = getCloneMapping().get(object);
if (registeredObject != null) {
return object;
}
// Check if object exists in my new objects if it is in the new objects cache then it means domain object is being
// re-registered and we should return the same working clone. This check holds only for the new registered objects
// PERF: Avoid initialization of new objects if none.
if (hasNewObjects()) {
registeredObject = getNewObjectsOriginalToClone().get(object);
if (registeredObject != null) { | public Object checkIfAlreadyRegistered(Object object, ClassDescriptor descriptor) {
// Don't register read-only classes
if (isClassReadOnly(object.getClass(), descriptor)) {
return null;
}
// Check if the working copy is again being registered in which case we return the same working copy
Object registeredObject = getCloneMapping().get(object);
if (registeredObject != null) {
return object;
}
// Check if object exists in my new objects if it is in the new objects cache then it means domain object is being
// re-registered and we should return the same working clone. This check holds only for the new registered objects
// PERF: Avoid initialization of new objects if none.
if (hasNewObjects()) {
registeredObject = getNewObjectsOriginalToClone().get(object);
if (registeredObject != null) {
return registeredObject;
}
}
if (this.isNestedUnitOfWork) {
// bug # 3228185
//may be a new object from a parent Unit Of Work, let's check our new object in parent list to see
//if it has already been registered locally
if (hasNewObjectsInParentOriginalToClone()) {
registeredObject = getNewObjectsInParentOriginalToClone().get(object);
} |
17,543 | 3 | // Check if object exists in my new objects if it is in the new objects cache then it means domain object is being
// re-registered and we should return the same working clone. This check holds only for the new registered objects
// PERF: Avoid initialization of new objects if none. | public Object checkIfAlreadyRegistered(Object object, ClassDescriptor descriptor) {
// Don't register read-only classes
if (isClassReadOnly(object.getClass(), descriptor)) {
return null;
}
// Check if the working copy is again being registered in which case we return the same working copy
Object registeredObject = getCloneMapping().get(object);
if (registeredObject != null) {
return object;
}
// Check if object exists in my new objects if it is in the new objects cache then it means domain object is being
// re-registered and we should return the same working clone. This check holds only for the new registered objects
// PERF: Avoid initialization of new objects if none.
if (hasNewObjects()) {
registeredObject = getNewObjectsOriginalToClone().get(object);
if (registeredObject != null) {
return registeredObject;
}
}
if (this.isNestedUnitOfWork) {
// bug # 3228185
//may be a new object from a parent Unit Of Work, let's check our new object in parent list to see
//if it has already been registered locally
if (hasNewObjectsInParentOriginalToClone()) {
registeredObject = getNewObjectsInParentOriginalToClone().get(object);
}
if (registeredObject != null) {
return registeredObject;
}
}
return null;
} | NONSATD | true | return object;
}
// Check if object exists in my new objects if it is in the new objects cache then it means domain object is being
// re-registered and we should return the same working clone. This check holds only for the new registered objects
// PERF: Avoid initialization of new objects if none.
if (hasNewObjects()) {
registeredObject = getNewObjectsOriginalToClone().get(object); | public Object checkIfAlreadyRegistered(Object object, ClassDescriptor descriptor) {
// Don't register read-only classes
if (isClassReadOnly(object.getClass(), descriptor)) {
return null;
}
// Check if the working copy is again being registered in which case we return the same working copy
Object registeredObject = getCloneMapping().get(object);
if (registeredObject != null) {
return object;
}
// Check if object exists in my new objects if it is in the new objects cache then it means domain object is being
// re-registered and we should return the same working clone. This check holds only for the new registered objects
// PERF: Avoid initialization of new objects if none.
if (hasNewObjects()) {
registeredObject = getNewObjectsOriginalToClone().get(object);
if (registeredObject != null) {
return registeredObject;
}
}
if (this.isNestedUnitOfWork) {
// bug # 3228185
//may be a new object from a parent Unit Of Work, let's check our new object in parent list to see
//if it has already been registered locally | public Object checkIfAlreadyRegistered(Object object, ClassDescriptor descriptor) {
// Don't register read-only classes
if (isClassReadOnly(object.getClass(), descriptor)) {
return null;
}
// Check if the working copy is again being registered in which case we return the same working copy
Object registeredObject = getCloneMapping().get(object);
if (registeredObject != null) {
return object;
}
// Check if object exists in my new objects if it is in the new objects cache then it means domain object is being
// re-registered and we should return the same working clone. This check holds only for the new registered objects
// PERF: Avoid initialization of new objects if none.
if (hasNewObjects()) {
registeredObject = getNewObjectsOriginalToClone().get(object);
if (registeredObject != null) {
return registeredObject;
}
}
if (this.isNestedUnitOfWork) {
// bug # 3228185
//may be a new object from a parent Unit Of Work, let's check our new object in parent list to see
//if it has already been registered locally
if (hasNewObjectsInParentOriginalToClone()) {
registeredObject = getNewObjectsInParentOriginalToClone().get(object);
}
if (registeredObject != null) {
return registeredObject;
}
}
return null;
} |
17,543 | 4 | // bug # 3228185
//may be a new object from a parent Unit Of Work, let's check our new object in parent list to see
//if it has already been registered locally | public Object checkIfAlreadyRegistered(Object object, ClassDescriptor descriptor) {
// Don't register read-only classes
if (isClassReadOnly(object.getClass(), descriptor)) {
return null;
}
// Check if the working copy is again being registered in which case we return the same working copy
Object registeredObject = getCloneMapping().get(object);
if (registeredObject != null) {
return object;
}
// Check if object exists in my new objects if it is in the new objects cache then it means domain object is being
// re-registered and we should return the same working clone. This check holds only for the new registered objects
// PERF: Avoid initialization of new objects if none.
if (hasNewObjects()) {
registeredObject = getNewObjectsOriginalToClone().get(object);
if (registeredObject != null) {
return registeredObject;
}
}
if (this.isNestedUnitOfWork) {
// bug # 3228185
//may be a new object from a parent Unit Of Work, let's check our new object in parent list to see
//if it has already been registered locally
if (hasNewObjectsInParentOriginalToClone()) {
registeredObject = getNewObjectsInParentOriginalToClone().get(object);
}
if (registeredObject != null) {
return registeredObject;
}
}
return null;
} | DEFECT | true | }
if (this.isNestedUnitOfWork) {
// bug # 3228185
//may be a new object from a parent Unit Of Work, let's check our new object in parent list to see
//if it has already been registered locally
if (hasNewObjectsInParentOriginalToClone()) {
registeredObject = getNewObjectsInParentOriginalToClone().get(object); | // Check if object exists in my new objects if it is in the new objects cache then it means domain object is being
// re-registered and we should return the same working clone. This check holds only for the new registered objects
// PERF: Avoid initialization of new objects if none.
if (hasNewObjects()) {
registeredObject = getNewObjectsOriginalToClone().get(object);
if (registeredObject != null) {
return registeredObject;
}
}
if (this.isNestedUnitOfWork) {
// bug # 3228185
//may be a new object from a parent Unit Of Work, let's check our new object in parent list to see
//if it has already been registered locally
if (hasNewObjectsInParentOriginalToClone()) {
registeredObject = getNewObjectsInParentOriginalToClone().get(object);
}
if (registeredObject != null) {
return registeredObject;
}
}
return null;
} | public Object checkIfAlreadyRegistered(Object object, ClassDescriptor descriptor) {
// Don't register read-only classes
if (isClassReadOnly(object.getClass(), descriptor)) {
return null;
}
// Check if the working copy is again being registered in which case we return the same working copy
Object registeredObject = getCloneMapping().get(object);
if (registeredObject != null) {
return object;
}
// Check if object exists in my new objects if it is in the new objects cache then it means domain object is being
// re-registered and we should return the same working clone. This check holds only for the new registered objects
// PERF: Avoid initialization of new objects if none.
if (hasNewObjects()) {
registeredObject = getNewObjectsOriginalToClone().get(object);
if (registeredObject != null) {
return registeredObject;
}
}
if (this.isNestedUnitOfWork) {
// bug # 3228185
//may be a new object from a parent Unit Of Work, let's check our new object in parent list to see
//if it has already been registered locally
if (hasNewObjectsInParentOriginalToClone()) {
registeredObject = getNewObjectsInParentOriginalToClone().get(object);
}
if (registeredObject != null) {
return registeredObject;
}
}
return null;
} |
25,738 | 0 | // TODO: may want to switch to using optimistic locking in the future for better concurrency
// that's why this code is here... need to retry in a loop closely around/in versionAdd | boolean getUpdatedDocument(AddUpdateCommand cmd, long versionOnUpdate) throws IOException {
if (!AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)) return false;
Set<String> inPlaceUpdatedFields = AtomicUpdateDocumentMerger.computeInPlaceUpdatableFields(cmd);
if (inPlaceUpdatedFields.size() > 0) { // non-empty means this is suitable for in-place updates
if (docMerger.doInPlaceUpdateMerge(cmd, inPlaceUpdatedFields)) {
return true;
} else {
// in-place update failed, so fall through and re-try the same with a full atomic update
}
}
// full (non-inplace) atomic update
SolrInputDocument sdoc = cmd.getSolrInputDocument();
BytesRef idBytes = cmd.getIndexedId();
String idString = cmd.getPrintableId();
SolrInputDocument oldRootDocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(), idBytes, RealTimeGetComponent.Resolution.ROOT_WITH_CHILDREN);
if (oldRootDocWithChildren == null) {
if (versionOnUpdate > 0) {
// could just let the optimistic locking throw the error
throw new SolrException(ErrorCode.CONFLICT, "Document not found for update. id=" + idString);
} else if (req.getParams().get(ShardParams._ROUTE_) != null) {
// the specified document could not be found in this shard
// and was explicitly routed using _route_
throw new SolrException(ErrorCode.BAD_REQUEST,
"Could not find document id=" + idString +
", perhaps the wrong \"_route_\" param was supplied");
}
} else {
oldRootDocWithChildren.remove(CommonParams.VERSION_FIELD);
}
SolrInputDocument mergedDoc;
if(idField == null || oldRootDocWithChildren == null) {
// create a new doc by default if an old one wasn't found
mergedDoc = docMerger.merge(sdoc, new SolrInputDocument());
} else {
// Safety check: don't allow an update to an existing doc that has children, unless we actually support this.
if (req.getSchema().isUsableForChildDocs() // however, next line we see it doesn't support child docs
&& req.getSchema().supportsPartialUpdatesOfChildDocs() == false
&& req.getSearcher().count(new TermQuery(new Term(IndexSchema.ROOT_FIELD_NAME, idBytes))) > 1) {
throw new SolrException(ErrorCode.BAD_REQUEST, "This schema does not support partial updates to nested docs. See ref guide.");
}
String oldRootDocRootFieldVal = (String) oldRootDocWithChildren.getFieldValue(IndexSchema.ROOT_FIELD_NAME);
if(req.getSchema().savesChildDocRelations() && oldRootDocRootFieldVal != null &&
!idString.equals(oldRootDocRootFieldVal)) {
// this is an update where the updated doc is not the root document
SolrInputDocument sdocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(),
idBytes, RealTimeGetComponent.Resolution.DOC_WITH_CHILDREN);
mergedDoc = docMerger.mergeChildDoc(sdoc, oldRootDocWithChildren, sdocWithChildren);
} else {
mergedDoc = docMerger.merge(sdoc, oldRootDocWithChildren);
}
}
cmd.solrDoc = mergedDoc;
return true;
} | DESIGN | true | boolean getUpdatedDocument(AddUpdateCommand cmd, long versionOnUpdate) throws IOException {
if (!AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)) return false;
Set<String> inPlaceUpdatedFields = AtomicUpdateDocumentMerger.computeInPlaceUpdatableFields(cmd);
if (inPlaceUpdatedFields.size() > 0) { // non-empty means this is suitable for in-place updates
if (docMerger.doInPlaceUpdateMerge(cmd, inPlaceUpdatedFields)) {
return true;
} else {
// in-place update failed, so fall through and re-try the same with a full atomic update
}
}
// full (non-inplace) atomic update
SolrInputDocument sdoc = cmd.getSolrInputDocument();
BytesRef idBytes = cmd.getIndexedId();
String idString = cmd.getPrintableId();
SolrInputDocument oldRootDocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(), idBytes, RealTimeGetComponent.Resolution.ROOT_WITH_CHILDREN);
if (oldRootDocWithChildren == null) {
if (versionOnUpdate > 0) {
// could just let the optimistic locking throw the error
throw new SolrException(ErrorCode.CONFLICT, "Document not found for update. id=" + idString);
} else if (req.getParams().get(ShardParams._ROUTE_) != null) {
// the specified document could not be found in this shard
// and was explicitly routed using _route_
throw new SolrException(ErrorCode.BAD_REQUEST,
"Could not find document id=" + idString +
", perhaps the wrong \"_route_\" param was supplied");
}
} else {
oldRootDocWithChildren.remove(CommonParams.VERSION_FIELD);
}
SolrInputDocument mergedDoc;
if(idField == null || oldRootDocWithChildren == null) {
// create a new doc by default if an old one wasn't found
mergedDoc = docMerger.merge(sdoc, new SolrInputDocument());
} else {
// Safety check: don't allow an update to an existing doc that has children, unless we actually support this.
if (req.getSchema().isUsableForChildDocs() // however, next line we see it doesn't support child docs
&& req.getSchema().supportsPartialUpdatesOfChildDocs() == false
&& req.getSearcher().count(new TermQuery(new Term(IndexSchema.ROOT_FIELD_NAME, idBytes))) > 1) {
throw new SolrException(ErrorCode.BAD_REQUEST, "This schema does not support partial updates to nested docs. See ref guide.");
}
String oldRootDocRootFieldVal = (String) oldRootDocWithChildren.getFieldValue(IndexSchema.ROOT_FIELD_NAME);
if(req.getSchema().savesChildDocRelations() && oldRootDocRootFieldVal != null &&
!idString.equals(oldRootDocRootFieldVal)) {
// this is an update where the updated doc is not the root document
SolrInputDocument sdocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(),
idBytes, RealTimeGetComponent.Resolution.DOC_WITH_CHILDREN);
mergedDoc = docMerger.mergeChildDoc(sdoc, oldRootDocWithChildren, sdocWithChildren);
} else {
mergedDoc = docMerger.merge(sdoc, oldRootDocWithChildren);
}
}
cmd.solrDoc = mergedDoc;
return true;
} | boolean getUpdatedDocument(AddUpdateCommand cmd, long versionOnUpdate) throws IOException {
if (!AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)) return false;
Set<String> inPlaceUpdatedFields = AtomicUpdateDocumentMerger.computeInPlaceUpdatableFields(cmd);
if (inPlaceUpdatedFields.size() > 0) { // non-empty means this is suitable for in-place updates
if (docMerger.doInPlaceUpdateMerge(cmd, inPlaceUpdatedFields)) {
return true;
} else {
// in-place update failed, so fall through and re-try the same with a full atomic update
}
}
// full (non-inplace) atomic update
SolrInputDocument sdoc = cmd.getSolrInputDocument();
BytesRef idBytes = cmd.getIndexedId();
String idString = cmd.getPrintableId();
SolrInputDocument oldRootDocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(), idBytes, RealTimeGetComponent.Resolution.ROOT_WITH_CHILDREN);
if (oldRootDocWithChildren == null) {
if (versionOnUpdate > 0) {
// could just let the optimistic locking throw the error
throw new SolrException(ErrorCode.CONFLICT, "Document not found for update. id=" + idString);
} else if (req.getParams().get(ShardParams._ROUTE_) != null) {
// the specified document could not be found in this shard
// and was explicitly routed using _route_
throw new SolrException(ErrorCode.BAD_REQUEST,
"Could not find document id=" + idString +
", perhaps the wrong \"_route_\" param was supplied");
}
} else {
oldRootDocWithChildren.remove(CommonParams.VERSION_FIELD);
}
SolrInputDocument mergedDoc;
if(idField == null || oldRootDocWithChildren == null) {
// create a new doc by default if an old one wasn't found
mergedDoc = docMerger.merge(sdoc, new SolrInputDocument());
} else {
// Safety check: don't allow an update to an existing doc that has children, unless we actually support this.
if (req.getSchema().isUsableForChildDocs() // however, next line we see it doesn't support child docs
&& req.getSchema().supportsPartialUpdatesOfChildDocs() == false
&& req.getSearcher().count(new TermQuery(new Term(IndexSchema.ROOT_FIELD_NAME, idBytes))) > 1) {
throw new SolrException(ErrorCode.BAD_REQUEST, "This schema does not support partial updates to nested docs. See ref guide.");
}
String oldRootDocRootFieldVal = (String) oldRootDocWithChildren.getFieldValue(IndexSchema.ROOT_FIELD_NAME);
if(req.getSchema().savesChildDocRelations() && oldRootDocRootFieldVal != null &&
!idString.equals(oldRootDocRootFieldVal)) {
// this is an update where the updated doc is not the root document
SolrInputDocument sdocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(),
idBytes, RealTimeGetComponent.Resolution.DOC_WITH_CHILDREN);
mergedDoc = docMerger.mergeChildDoc(sdoc, oldRootDocWithChildren, sdocWithChildren);
} else {
mergedDoc = docMerger.merge(sdoc, oldRootDocWithChildren);
}
}
cmd.solrDoc = mergedDoc;
return true;
} | boolean getUpdatedDocument(AddUpdateCommand cmd, long versionOnUpdate) throws IOException {
if (!AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)) return false;
Set<String> inPlaceUpdatedFields = AtomicUpdateDocumentMerger.computeInPlaceUpdatableFields(cmd);
if (inPlaceUpdatedFields.size() > 0) { // non-empty means this is suitable for in-place updates
if (docMerger.doInPlaceUpdateMerge(cmd, inPlaceUpdatedFields)) {
return true;
} else {
// in-place update failed, so fall through and re-try the same with a full atomic update
}
}
// full (non-inplace) atomic update
SolrInputDocument sdoc = cmd.getSolrInputDocument();
BytesRef idBytes = cmd.getIndexedId();
String idString = cmd.getPrintableId();
SolrInputDocument oldRootDocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(), idBytes, RealTimeGetComponent.Resolution.ROOT_WITH_CHILDREN);
if (oldRootDocWithChildren == null) {
if (versionOnUpdate > 0) {
// could just let the optimistic locking throw the error
throw new SolrException(ErrorCode.CONFLICT, "Document not found for update. id=" + idString);
} else if (req.getParams().get(ShardParams._ROUTE_) != null) {
// the specified document could not be found in this shard
// and was explicitly routed using _route_
throw new SolrException(ErrorCode.BAD_REQUEST,
"Could not find document id=" + idString +
", perhaps the wrong \"_route_\" param was supplied");
}
} else {
oldRootDocWithChildren.remove(CommonParams.VERSION_FIELD);
}
SolrInputDocument mergedDoc;
if(idField == null || oldRootDocWithChildren == null) {
// create a new doc by default if an old one wasn't found
mergedDoc = docMerger.merge(sdoc, new SolrInputDocument());
} else {
// Safety check: don't allow an update to an existing doc that has children, unless we actually support this.
if (req.getSchema().isUsableForChildDocs() // however, next line we see it doesn't support child docs
&& req.getSchema().supportsPartialUpdatesOfChildDocs() == false
&& req.getSearcher().count(new TermQuery(new Term(IndexSchema.ROOT_FIELD_NAME, idBytes))) > 1) {
throw new SolrException(ErrorCode.BAD_REQUEST, "This schema does not support partial updates to nested docs. See ref guide.");
}
String oldRootDocRootFieldVal = (String) oldRootDocWithChildren.getFieldValue(IndexSchema.ROOT_FIELD_NAME);
if(req.getSchema().savesChildDocRelations() && oldRootDocRootFieldVal != null &&
!idString.equals(oldRootDocRootFieldVal)) {
// this is an update where the updated doc is not the root document
SolrInputDocument sdocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(),
idBytes, RealTimeGetComponent.Resolution.DOC_WITH_CHILDREN);
mergedDoc = docMerger.mergeChildDoc(sdoc, oldRootDocWithChildren, sdocWithChildren);
} else {
mergedDoc = docMerger.merge(sdoc, oldRootDocWithChildren);
}
}
cmd.solrDoc = mergedDoc;
return true;
} |
25,738 | 1 | // non-empty means this is suitable for in-place updates | boolean getUpdatedDocument(AddUpdateCommand cmd, long versionOnUpdate) throws IOException {
if (!AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)) return false;
Set<String> inPlaceUpdatedFields = AtomicUpdateDocumentMerger.computeInPlaceUpdatableFields(cmd);
if (inPlaceUpdatedFields.size() > 0) { // non-empty means this is suitable for in-place updates
if (docMerger.doInPlaceUpdateMerge(cmd, inPlaceUpdatedFields)) {
return true;
} else {
// in-place update failed, so fall through and re-try the same with a full atomic update
}
}
// full (non-inplace) atomic update
SolrInputDocument sdoc = cmd.getSolrInputDocument();
BytesRef idBytes = cmd.getIndexedId();
String idString = cmd.getPrintableId();
SolrInputDocument oldRootDocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(), idBytes, RealTimeGetComponent.Resolution.ROOT_WITH_CHILDREN);
if (oldRootDocWithChildren == null) {
if (versionOnUpdate > 0) {
// could just let the optimistic locking throw the error
throw new SolrException(ErrorCode.CONFLICT, "Document not found for update. id=" + idString);
} else if (req.getParams().get(ShardParams._ROUTE_) != null) {
// the specified document could not be found in this shard
// and was explicitly routed using _route_
throw new SolrException(ErrorCode.BAD_REQUEST,
"Could not find document id=" + idString +
", perhaps the wrong \"_route_\" param was supplied");
}
} else {
oldRootDocWithChildren.remove(CommonParams.VERSION_FIELD);
}
SolrInputDocument mergedDoc;
if(idField == null || oldRootDocWithChildren == null) {
// create a new doc by default if an old one wasn't found
mergedDoc = docMerger.merge(sdoc, new SolrInputDocument());
} else {
// Safety check: don't allow an update to an existing doc that has children, unless we actually support this.
if (req.getSchema().isUsableForChildDocs() // however, next line we see it doesn't support child docs
&& req.getSchema().supportsPartialUpdatesOfChildDocs() == false
&& req.getSearcher().count(new TermQuery(new Term(IndexSchema.ROOT_FIELD_NAME, idBytes))) > 1) {
throw new SolrException(ErrorCode.BAD_REQUEST, "This schema does not support partial updates to nested docs. See ref guide.");
}
String oldRootDocRootFieldVal = (String) oldRootDocWithChildren.getFieldValue(IndexSchema.ROOT_FIELD_NAME);
if(req.getSchema().savesChildDocRelations() && oldRootDocRootFieldVal != null &&
!idString.equals(oldRootDocRootFieldVal)) {
// this is an update where the updated doc is not the root document
SolrInputDocument sdocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(),
idBytes, RealTimeGetComponent.Resolution.DOC_WITH_CHILDREN);
mergedDoc = docMerger.mergeChildDoc(sdoc, oldRootDocWithChildren, sdocWithChildren);
} else {
mergedDoc = docMerger.merge(sdoc, oldRootDocWithChildren);
}
}
cmd.solrDoc = mergedDoc;
return true;
} | NONSATD | true | if (!AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)) return false;
Set<String> inPlaceUpdatedFields = AtomicUpdateDocumentMerger.computeInPlaceUpdatableFields(cmd);
if (inPlaceUpdatedFields.size() > 0) { // non-empty means this is suitable for in-place updates
if (docMerger.doInPlaceUpdateMerge(cmd, inPlaceUpdatedFields)) {
return true; | boolean getUpdatedDocument(AddUpdateCommand cmd, long versionOnUpdate) throws IOException {
if (!AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)) return false;
Set<String> inPlaceUpdatedFields = AtomicUpdateDocumentMerger.computeInPlaceUpdatableFields(cmd);
if (inPlaceUpdatedFields.size() > 0) { // non-empty means this is suitable for in-place updates
if (docMerger.doInPlaceUpdateMerge(cmd, inPlaceUpdatedFields)) {
return true;
} else {
// in-place update failed, so fall through and re-try the same with a full atomic update
}
}
// full (non-inplace) atomic update
SolrInputDocument sdoc = cmd.getSolrInputDocument();
BytesRef idBytes = cmd.getIndexedId();
String idString = cmd.getPrintableId(); | boolean getUpdatedDocument(AddUpdateCommand cmd, long versionOnUpdate) throws IOException {
if (!AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)) return false;
Set<String> inPlaceUpdatedFields = AtomicUpdateDocumentMerger.computeInPlaceUpdatableFields(cmd);
if (inPlaceUpdatedFields.size() > 0) { // non-empty means this is suitable for in-place updates
if (docMerger.doInPlaceUpdateMerge(cmd, inPlaceUpdatedFields)) {
return true;
} else {
// in-place update failed, so fall through and re-try the same with a full atomic update
}
}
// full (non-inplace) atomic update
SolrInputDocument sdoc = cmd.getSolrInputDocument();
BytesRef idBytes = cmd.getIndexedId();
String idString = cmd.getPrintableId();
SolrInputDocument oldRootDocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(), idBytes, RealTimeGetComponent.Resolution.ROOT_WITH_CHILDREN);
if (oldRootDocWithChildren == null) {
if (versionOnUpdate > 0) {
// could just let the optimistic locking throw the error
throw new SolrException(ErrorCode.CONFLICT, "Document not found for update. id=" + idString);
} else if (req.getParams().get(ShardParams._ROUTE_) != null) {
// the specified document could not be found in this shard
// and was explicitly routed using _route_
throw new SolrException(ErrorCode.BAD_REQUEST,
"Could not find document id=" + idString + |
25,738 | 2 | // in-place update failed, so fall through and re-try the same with a full atomic update | boolean getUpdatedDocument(AddUpdateCommand cmd, long versionOnUpdate) throws IOException {
if (!AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)) return false;
Set<String> inPlaceUpdatedFields = AtomicUpdateDocumentMerger.computeInPlaceUpdatableFields(cmd);
if (inPlaceUpdatedFields.size() > 0) { // non-empty means this is suitable for in-place updates
if (docMerger.doInPlaceUpdateMerge(cmd, inPlaceUpdatedFields)) {
return true;
} else {
// in-place update failed, so fall through and re-try the same with a full atomic update
}
}
// full (non-inplace) atomic update
SolrInputDocument sdoc = cmd.getSolrInputDocument();
BytesRef idBytes = cmd.getIndexedId();
String idString = cmd.getPrintableId();
SolrInputDocument oldRootDocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(), idBytes, RealTimeGetComponent.Resolution.ROOT_WITH_CHILDREN);
if (oldRootDocWithChildren == null) {
if (versionOnUpdate > 0) {
// could just let the optimistic locking throw the error
throw new SolrException(ErrorCode.CONFLICT, "Document not found for update. id=" + idString);
} else if (req.getParams().get(ShardParams._ROUTE_) != null) {
// the specified document could not be found in this shard
// and was explicitly routed using _route_
throw new SolrException(ErrorCode.BAD_REQUEST,
"Could not find document id=" + idString +
", perhaps the wrong \"_route_\" param was supplied");
}
} else {
oldRootDocWithChildren.remove(CommonParams.VERSION_FIELD);
}
SolrInputDocument mergedDoc;
if(idField == null || oldRootDocWithChildren == null) {
// create a new doc by default if an old one wasn't found
mergedDoc = docMerger.merge(sdoc, new SolrInputDocument());
} else {
// Safety check: don't allow an update to an existing doc that has children, unless we actually support this.
if (req.getSchema().isUsableForChildDocs() // however, next line we see it doesn't support child docs
&& req.getSchema().supportsPartialUpdatesOfChildDocs() == false
&& req.getSearcher().count(new TermQuery(new Term(IndexSchema.ROOT_FIELD_NAME, idBytes))) > 1) {
throw new SolrException(ErrorCode.BAD_REQUEST, "This schema does not support partial updates to nested docs. See ref guide.");
}
String oldRootDocRootFieldVal = (String) oldRootDocWithChildren.getFieldValue(IndexSchema.ROOT_FIELD_NAME);
if(req.getSchema().savesChildDocRelations() && oldRootDocRootFieldVal != null &&
!idString.equals(oldRootDocRootFieldVal)) {
// this is an update where the updated doc is not the root document
SolrInputDocument sdocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(),
idBytes, RealTimeGetComponent.Resolution.DOC_WITH_CHILDREN);
mergedDoc = docMerger.mergeChildDoc(sdoc, oldRootDocWithChildren, sdocWithChildren);
} else {
mergedDoc = docMerger.merge(sdoc, oldRootDocWithChildren);
}
}
cmd.solrDoc = mergedDoc;
return true;
} | NONSATD | true | return true;
} else {
// in-place update failed, so fall through and re-try the same with a full atomic update
}
} | boolean getUpdatedDocument(AddUpdateCommand cmd, long versionOnUpdate) throws IOException {
if (!AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)) return false;
Set<String> inPlaceUpdatedFields = AtomicUpdateDocumentMerger.computeInPlaceUpdatableFields(cmd);
if (inPlaceUpdatedFields.size() > 0) { // non-empty means this is suitable for in-place updates
if (docMerger.doInPlaceUpdateMerge(cmd, inPlaceUpdatedFields)) {
return true;
} else {
// in-place update failed, so fall through and re-try the same with a full atomic update
}
}
// full (non-inplace) atomic update
SolrInputDocument sdoc = cmd.getSolrInputDocument();
BytesRef idBytes = cmd.getIndexedId();
String idString = cmd.getPrintableId();
SolrInputDocument oldRootDocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(), idBytes, RealTimeGetComponent.Resolution.ROOT_WITH_CHILDREN);
if (oldRootDocWithChildren == null) {
if (versionOnUpdate > 0) {
// could just let the optimistic locking throw the error | boolean getUpdatedDocument(AddUpdateCommand cmd, long versionOnUpdate) throws IOException {
if (!AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)) return false;
Set<String> inPlaceUpdatedFields = AtomicUpdateDocumentMerger.computeInPlaceUpdatableFields(cmd);
if (inPlaceUpdatedFields.size() > 0) { // non-empty means this is suitable for in-place updates
if (docMerger.doInPlaceUpdateMerge(cmd, inPlaceUpdatedFields)) {
return true;
} else {
// in-place update failed, so fall through and re-try the same with a full atomic update
}
}
// full (non-inplace) atomic update
SolrInputDocument sdoc = cmd.getSolrInputDocument();
BytesRef idBytes = cmd.getIndexedId();
String idString = cmd.getPrintableId();
SolrInputDocument oldRootDocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(), idBytes, RealTimeGetComponent.Resolution.ROOT_WITH_CHILDREN);
if (oldRootDocWithChildren == null) {
if (versionOnUpdate > 0) {
// could just let the optimistic locking throw the error
throw new SolrException(ErrorCode.CONFLICT, "Document not found for update. id=" + idString);
} else if (req.getParams().get(ShardParams._ROUTE_) != null) {
// the specified document could not be found in this shard
// and was explicitly routed using _route_
throw new SolrException(ErrorCode.BAD_REQUEST,
"Could not find document id=" + idString +
", perhaps the wrong \"_route_\" param was supplied");
}
} else {
oldRootDocWithChildren.remove(CommonParams.VERSION_FIELD); |
25,738 | 3 | // full (non-inplace) atomic update | boolean getUpdatedDocument(AddUpdateCommand cmd, long versionOnUpdate) throws IOException {
if (!AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)) return false;
Set<String> inPlaceUpdatedFields = AtomicUpdateDocumentMerger.computeInPlaceUpdatableFields(cmd);
if (inPlaceUpdatedFields.size() > 0) { // non-empty means this is suitable for in-place updates
if (docMerger.doInPlaceUpdateMerge(cmd, inPlaceUpdatedFields)) {
return true;
} else {
// in-place update failed, so fall through and re-try the same with a full atomic update
}
}
// full (non-inplace) atomic update
SolrInputDocument sdoc = cmd.getSolrInputDocument();
BytesRef idBytes = cmd.getIndexedId();
String idString = cmd.getPrintableId();
SolrInputDocument oldRootDocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(), idBytes, RealTimeGetComponent.Resolution.ROOT_WITH_CHILDREN);
if (oldRootDocWithChildren == null) {
if (versionOnUpdate > 0) {
// could just let the optimistic locking throw the error
throw new SolrException(ErrorCode.CONFLICT, "Document not found for update. id=" + idString);
} else if (req.getParams().get(ShardParams._ROUTE_) != null) {
// the specified document could not be found in this shard
// and was explicitly routed using _route_
throw new SolrException(ErrorCode.BAD_REQUEST,
"Could not find document id=" + idString +
", perhaps the wrong \"_route_\" param was supplied");
}
} else {
oldRootDocWithChildren.remove(CommonParams.VERSION_FIELD);
}
SolrInputDocument mergedDoc;
if(idField == null || oldRootDocWithChildren == null) {
// create a new doc by default if an old one wasn't found
mergedDoc = docMerger.merge(sdoc, new SolrInputDocument());
} else {
// Safety check: don't allow an update to an existing doc that has children, unless we actually support this.
if (req.getSchema().isUsableForChildDocs() // however, next line we see it doesn't support child docs
&& req.getSchema().supportsPartialUpdatesOfChildDocs() == false
&& req.getSearcher().count(new TermQuery(new Term(IndexSchema.ROOT_FIELD_NAME, idBytes))) > 1) {
throw new SolrException(ErrorCode.BAD_REQUEST, "This schema does not support partial updates to nested docs. See ref guide.");
}
String oldRootDocRootFieldVal = (String) oldRootDocWithChildren.getFieldValue(IndexSchema.ROOT_FIELD_NAME);
if(req.getSchema().savesChildDocRelations() && oldRootDocRootFieldVal != null &&
!idString.equals(oldRootDocRootFieldVal)) {
// this is an update where the updated doc is not the root document
SolrInputDocument sdocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(),
idBytes, RealTimeGetComponent.Resolution.DOC_WITH_CHILDREN);
mergedDoc = docMerger.mergeChildDoc(sdoc, oldRootDocWithChildren, sdocWithChildren);
} else {
mergedDoc = docMerger.merge(sdoc, oldRootDocWithChildren);
}
}
cmd.solrDoc = mergedDoc;
return true;
} | NONSATD | true | }
}
// full (non-inplace) atomic update
SolrInputDocument sdoc = cmd.getSolrInputDocument();
BytesRef idBytes = cmd.getIndexedId(); | boolean getUpdatedDocument(AddUpdateCommand cmd, long versionOnUpdate) throws IOException {
if (!AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)) return false;
Set<String> inPlaceUpdatedFields = AtomicUpdateDocumentMerger.computeInPlaceUpdatableFields(cmd);
if (inPlaceUpdatedFields.size() > 0) { // non-empty means this is suitable for in-place updates
if (docMerger.doInPlaceUpdateMerge(cmd, inPlaceUpdatedFields)) {
return true;
} else {
// in-place update failed, so fall through and re-try the same with a full atomic update
}
}
// full (non-inplace) atomic update
SolrInputDocument sdoc = cmd.getSolrInputDocument();
BytesRef idBytes = cmd.getIndexedId();
String idString = cmd.getPrintableId();
SolrInputDocument oldRootDocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(), idBytes, RealTimeGetComponent.Resolution.ROOT_WITH_CHILDREN);
if (oldRootDocWithChildren == null) {
if (versionOnUpdate > 0) {
// could just let the optimistic locking throw the error
throw new SolrException(ErrorCode.CONFLICT, "Document not found for update. id=" + idString);
} else if (req.getParams().get(ShardParams._ROUTE_) != null) {
// the specified document could not be found in this shard | boolean getUpdatedDocument(AddUpdateCommand cmd, long versionOnUpdate) throws IOException {
if (!AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)) return false;
Set<String> inPlaceUpdatedFields = AtomicUpdateDocumentMerger.computeInPlaceUpdatableFields(cmd);
if (inPlaceUpdatedFields.size() > 0) { // non-empty means this is suitable for in-place updates
if (docMerger.doInPlaceUpdateMerge(cmd, inPlaceUpdatedFields)) {
return true;
} else {
// in-place update failed, so fall through and re-try the same with a full atomic update
}
}
// full (non-inplace) atomic update
SolrInputDocument sdoc = cmd.getSolrInputDocument();
BytesRef idBytes = cmd.getIndexedId();
String idString = cmd.getPrintableId();
SolrInputDocument oldRootDocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(), idBytes, RealTimeGetComponent.Resolution.ROOT_WITH_CHILDREN);
if (oldRootDocWithChildren == null) {
if (versionOnUpdate > 0) {
// could just let the optimistic locking throw the error
throw new SolrException(ErrorCode.CONFLICT, "Document not found for update. id=" + idString);
} else if (req.getParams().get(ShardParams._ROUTE_) != null) {
// the specified document could not be found in this shard
// and was explicitly routed using _route_
throw new SolrException(ErrorCode.BAD_REQUEST,
"Could not find document id=" + idString +
", perhaps the wrong \"_route_\" param was supplied");
}
} else {
oldRootDocWithChildren.remove(CommonParams.VERSION_FIELD);
}
SolrInputDocument mergedDoc;
if(idField == null || oldRootDocWithChildren == null) { |
25,738 | 4 | // could just let the optimistic locking throw the error | boolean getUpdatedDocument(AddUpdateCommand cmd, long versionOnUpdate) throws IOException {
if (!AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)) return false;
Set<String> inPlaceUpdatedFields = AtomicUpdateDocumentMerger.computeInPlaceUpdatableFields(cmd);
if (inPlaceUpdatedFields.size() > 0) { // non-empty means this is suitable for in-place updates
if (docMerger.doInPlaceUpdateMerge(cmd, inPlaceUpdatedFields)) {
return true;
} else {
// in-place update failed, so fall through and re-try the same with a full atomic update
}
}
// full (non-inplace) atomic update
SolrInputDocument sdoc = cmd.getSolrInputDocument();
BytesRef idBytes = cmd.getIndexedId();
String idString = cmd.getPrintableId();
SolrInputDocument oldRootDocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(), idBytes, RealTimeGetComponent.Resolution.ROOT_WITH_CHILDREN);
if (oldRootDocWithChildren == null) {
if (versionOnUpdate > 0) {
// could just let the optimistic locking throw the error
throw new SolrException(ErrorCode.CONFLICT, "Document not found for update. id=" + idString);
} else if (req.getParams().get(ShardParams._ROUTE_) != null) {
// the specified document could not be found in this shard
// and was explicitly routed using _route_
throw new SolrException(ErrorCode.BAD_REQUEST,
"Could not find document id=" + idString +
", perhaps the wrong \"_route_\" param was supplied");
}
} else {
oldRootDocWithChildren.remove(CommonParams.VERSION_FIELD);
}
SolrInputDocument mergedDoc;
if(idField == null || oldRootDocWithChildren == null) {
// create a new doc by default if an old one wasn't found
mergedDoc = docMerger.merge(sdoc, new SolrInputDocument());
} else {
// Safety check: don't allow an update to an existing doc that has children, unless we actually support this.
if (req.getSchema().isUsableForChildDocs() // however, next line we see it doesn't support child docs
&& req.getSchema().supportsPartialUpdatesOfChildDocs() == false
&& req.getSearcher().count(new TermQuery(new Term(IndexSchema.ROOT_FIELD_NAME, idBytes))) > 1) {
throw new SolrException(ErrorCode.BAD_REQUEST, "This schema does not support partial updates to nested docs. See ref guide.");
}
String oldRootDocRootFieldVal = (String) oldRootDocWithChildren.getFieldValue(IndexSchema.ROOT_FIELD_NAME);
if(req.getSchema().savesChildDocRelations() && oldRootDocRootFieldVal != null &&
!idString.equals(oldRootDocRootFieldVal)) {
// this is an update where the updated doc is not the root document
SolrInputDocument sdocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(),
idBytes, RealTimeGetComponent.Resolution.DOC_WITH_CHILDREN);
mergedDoc = docMerger.mergeChildDoc(sdoc, oldRootDocWithChildren, sdocWithChildren);
} else {
mergedDoc = docMerger.merge(sdoc, oldRootDocWithChildren);
}
}
cmd.solrDoc = mergedDoc;
return true;
} | NONSATD | true | if (oldRootDocWithChildren == null) {
if (versionOnUpdate > 0) {
// could just let the optimistic locking throw the error
throw new SolrException(ErrorCode.CONFLICT, "Document not found for update. id=" + idString);
} else if (req.getParams().get(ShardParams._ROUTE_) != null) { | // in-place update failed, so fall through and re-try the same with a full atomic update
}
}
// full (non-inplace) atomic update
SolrInputDocument sdoc = cmd.getSolrInputDocument();
BytesRef idBytes = cmd.getIndexedId();
String idString = cmd.getPrintableId();
SolrInputDocument oldRootDocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(), idBytes, RealTimeGetComponent.Resolution.ROOT_WITH_CHILDREN);
if (oldRootDocWithChildren == null) {
if (versionOnUpdate > 0) {
// could just let the optimistic locking throw the error
throw new SolrException(ErrorCode.CONFLICT, "Document not found for update. id=" + idString);
} else if (req.getParams().get(ShardParams._ROUTE_) != null) {
// the specified document could not be found in this shard
// and was explicitly routed using _route_
throw new SolrException(ErrorCode.BAD_REQUEST,
"Could not find document id=" + idString +
", perhaps the wrong \"_route_\" param was supplied");
}
} else {
oldRootDocWithChildren.remove(CommonParams.VERSION_FIELD); | boolean getUpdatedDocument(AddUpdateCommand cmd, long versionOnUpdate) throws IOException {
if (!AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)) return false;
Set<String> inPlaceUpdatedFields = AtomicUpdateDocumentMerger.computeInPlaceUpdatableFields(cmd);
if (inPlaceUpdatedFields.size() > 0) { // non-empty means this is suitable for in-place updates
if (docMerger.doInPlaceUpdateMerge(cmd, inPlaceUpdatedFields)) {
return true;
} else {
// in-place update failed, so fall through and re-try the same with a full atomic update
}
}
// full (non-inplace) atomic update
SolrInputDocument sdoc = cmd.getSolrInputDocument();
BytesRef idBytes = cmd.getIndexedId();
String idString = cmd.getPrintableId();
SolrInputDocument oldRootDocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(), idBytes, RealTimeGetComponent.Resolution.ROOT_WITH_CHILDREN);
if (oldRootDocWithChildren == null) {
if (versionOnUpdate > 0) {
// could just let the optimistic locking throw the error
throw new SolrException(ErrorCode.CONFLICT, "Document not found for update. id=" + idString);
} else if (req.getParams().get(ShardParams._ROUTE_) != null) {
// the specified document could not be found in this shard
// and was explicitly routed using _route_
throw new SolrException(ErrorCode.BAD_REQUEST,
"Could not find document id=" + idString +
", perhaps the wrong \"_route_\" param was supplied");
}
} else {
oldRootDocWithChildren.remove(CommonParams.VERSION_FIELD);
}
SolrInputDocument mergedDoc;
if(idField == null || oldRootDocWithChildren == null) {
// create a new doc by default if an old one wasn't found
mergedDoc = docMerger.merge(sdoc, new SolrInputDocument());
} else {
// Safety check: don't allow an update to an existing doc that has children, unless we actually support this.
if (req.getSchema().isUsableForChildDocs() // however, next line we see it doesn't support child docs
&& req.getSchema().supportsPartialUpdatesOfChildDocs() == false
&& req.getSearcher().count(new TermQuery(new Term(IndexSchema.ROOT_FIELD_NAME, idBytes))) > 1) { |
25,738 | 5 | // the specified document could not be found in this shard
// and was explicitly routed using _route_ | boolean getUpdatedDocument(AddUpdateCommand cmd, long versionOnUpdate) throws IOException {
if (!AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)) return false;
Set<String> inPlaceUpdatedFields = AtomicUpdateDocumentMerger.computeInPlaceUpdatableFields(cmd);
if (inPlaceUpdatedFields.size() > 0) { // non-empty means this is suitable for in-place updates
if (docMerger.doInPlaceUpdateMerge(cmd, inPlaceUpdatedFields)) {
return true;
} else {
// in-place update failed, so fall through and re-try the same with a full atomic update
}
}
// full (non-inplace) atomic update
SolrInputDocument sdoc = cmd.getSolrInputDocument();
BytesRef idBytes = cmd.getIndexedId();
String idString = cmd.getPrintableId();
SolrInputDocument oldRootDocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(), idBytes, RealTimeGetComponent.Resolution.ROOT_WITH_CHILDREN);
if (oldRootDocWithChildren == null) {
if (versionOnUpdate > 0) {
// could just let the optimistic locking throw the error
throw new SolrException(ErrorCode.CONFLICT, "Document not found for update. id=" + idString);
} else if (req.getParams().get(ShardParams._ROUTE_) != null) {
// the specified document could not be found in this shard
// and was explicitly routed using _route_
throw new SolrException(ErrorCode.BAD_REQUEST,
"Could not find document id=" + idString +
", perhaps the wrong \"_route_\" param was supplied");
}
} else {
oldRootDocWithChildren.remove(CommonParams.VERSION_FIELD);
}
SolrInputDocument mergedDoc;
if(idField == null || oldRootDocWithChildren == null) {
// create a new doc by default if an old one wasn't found
mergedDoc = docMerger.merge(sdoc, new SolrInputDocument());
} else {
// Safety check: don't allow an update to an existing doc that has children, unless we actually support this.
if (req.getSchema().isUsableForChildDocs() // however, next line we see it doesn't support child docs
&& req.getSchema().supportsPartialUpdatesOfChildDocs() == false
&& req.getSearcher().count(new TermQuery(new Term(IndexSchema.ROOT_FIELD_NAME, idBytes))) > 1) {
throw new SolrException(ErrorCode.BAD_REQUEST, "This schema does not support partial updates to nested docs. See ref guide.");
}
String oldRootDocRootFieldVal = (String) oldRootDocWithChildren.getFieldValue(IndexSchema.ROOT_FIELD_NAME);
if(req.getSchema().savesChildDocRelations() && oldRootDocRootFieldVal != null &&
!idString.equals(oldRootDocRootFieldVal)) {
// this is an update where the updated doc is not the root document
SolrInputDocument sdocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(),
idBytes, RealTimeGetComponent.Resolution.DOC_WITH_CHILDREN);
mergedDoc = docMerger.mergeChildDoc(sdoc, oldRootDocWithChildren, sdocWithChildren);
} else {
mergedDoc = docMerger.merge(sdoc, oldRootDocWithChildren);
}
}
cmd.solrDoc = mergedDoc;
return true;
} | NONSATD | true | throw new SolrException(ErrorCode.CONFLICT, "Document not found for update. id=" + idString);
} else if (req.getParams().get(ShardParams._ROUTE_) != null) {
// the specified document could not be found in this shard
// and was explicitly routed using _route_
throw new SolrException(ErrorCode.BAD_REQUEST,
"Could not find document id=" + idString + | // full (non-inplace) atomic update
SolrInputDocument sdoc = cmd.getSolrInputDocument();
BytesRef idBytes = cmd.getIndexedId();
String idString = cmd.getPrintableId();
SolrInputDocument oldRootDocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(), idBytes, RealTimeGetComponent.Resolution.ROOT_WITH_CHILDREN);
if (oldRootDocWithChildren == null) {
if (versionOnUpdate > 0) {
// could just let the optimistic locking throw the error
throw new SolrException(ErrorCode.CONFLICT, "Document not found for update. id=" + idString);
} else if (req.getParams().get(ShardParams._ROUTE_) != null) {
// the specified document could not be found in this shard
// and was explicitly routed using _route_
throw new SolrException(ErrorCode.BAD_REQUEST,
"Could not find document id=" + idString +
", perhaps the wrong \"_route_\" param was supplied");
}
} else {
oldRootDocWithChildren.remove(CommonParams.VERSION_FIELD);
}
SolrInputDocument mergedDoc;
if(idField == null || oldRootDocWithChildren == null) {
// create a new doc by default if an old one wasn't found | boolean getUpdatedDocument(AddUpdateCommand cmd, long versionOnUpdate) throws IOException {
if (!AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)) return false;
Set<String> inPlaceUpdatedFields = AtomicUpdateDocumentMerger.computeInPlaceUpdatableFields(cmd);
if (inPlaceUpdatedFields.size() > 0) { // non-empty means this is suitable for in-place updates
if (docMerger.doInPlaceUpdateMerge(cmd, inPlaceUpdatedFields)) {
return true;
} else {
// in-place update failed, so fall through and re-try the same with a full atomic update
}
}
// full (non-inplace) atomic update
SolrInputDocument sdoc = cmd.getSolrInputDocument();
BytesRef idBytes = cmd.getIndexedId();
String idString = cmd.getPrintableId();
SolrInputDocument oldRootDocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(), idBytes, RealTimeGetComponent.Resolution.ROOT_WITH_CHILDREN);
if (oldRootDocWithChildren == null) {
if (versionOnUpdate > 0) {
// could just let the optimistic locking throw the error
throw new SolrException(ErrorCode.CONFLICT, "Document not found for update. id=" + idString);
} else if (req.getParams().get(ShardParams._ROUTE_) != null) {
// the specified document could not be found in this shard
// and was explicitly routed using _route_
throw new SolrException(ErrorCode.BAD_REQUEST,
"Could not find document id=" + idString +
", perhaps the wrong \"_route_\" param was supplied");
}
} else {
oldRootDocWithChildren.remove(CommonParams.VERSION_FIELD);
}
SolrInputDocument mergedDoc;
if(idField == null || oldRootDocWithChildren == null) {
// create a new doc by default if an old one wasn't found
mergedDoc = docMerger.merge(sdoc, new SolrInputDocument());
} else {
// Safety check: don't allow an update to an existing doc that has children, unless we actually support this.
if (req.getSchema().isUsableForChildDocs() // however, next line we see it doesn't support child docs
&& req.getSchema().supportsPartialUpdatesOfChildDocs() == false
&& req.getSearcher().count(new TermQuery(new Term(IndexSchema.ROOT_FIELD_NAME, idBytes))) > 1) {
throw new SolrException(ErrorCode.BAD_REQUEST, "This schema does not support partial updates to nested docs. See ref guide.");
}
String oldRootDocRootFieldVal = (String) oldRootDocWithChildren.getFieldValue(IndexSchema.ROOT_FIELD_NAME);
if(req.getSchema().savesChildDocRelations() && oldRootDocRootFieldVal != null && |
25,738 | 6 | // create a new doc by default if an old one wasn't found | boolean getUpdatedDocument(AddUpdateCommand cmd, long versionOnUpdate) throws IOException {
if (!AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)) return false;
Set<String> inPlaceUpdatedFields = AtomicUpdateDocumentMerger.computeInPlaceUpdatableFields(cmd);
if (inPlaceUpdatedFields.size() > 0) { // non-empty means this is suitable for in-place updates
if (docMerger.doInPlaceUpdateMerge(cmd, inPlaceUpdatedFields)) {
return true;
} else {
// in-place update failed, so fall through and re-try the same with a full atomic update
}
}
// full (non-inplace) atomic update
SolrInputDocument sdoc = cmd.getSolrInputDocument();
BytesRef idBytes = cmd.getIndexedId();
String idString = cmd.getPrintableId();
SolrInputDocument oldRootDocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(), idBytes, RealTimeGetComponent.Resolution.ROOT_WITH_CHILDREN);
if (oldRootDocWithChildren == null) {
if (versionOnUpdate > 0) {
// could just let the optimistic locking throw the error
throw new SolrException(ErrorCode.CONFLICT, "Document not found for update. id=" + idString);
} else if (req.getParams().get(ShardParams._ROUTE_) != null) {
// the specified document could not be found in this shard
// and was explicitly routed using _route_
throw new SolrException(ErrorCode.BAD_REQUEST,
"Could not find document id=" + idString +
", perhaps the wrong \"_route_\" param was supplied");
}
} else {
oldRootDocWithChildren.remove(CommonParams.VERSION_FIELD);
}
SolrInputDocument mergedDoc;
if(idField == null || oldRootDocWithChildren == null) {
// create a new doc by default if an old one wasn't found
mergedDoc = docMerger.merge(sdoc, new SolrInputDocument());
} else {
// Safety check: don't allow an update to an existing doc that has children, unless we actually support this.
if (req.getSchema().isUsableForChildDocs() // however, next line we see it doesn't support child docs
&& req.getSchema().supportsPartialUpdatesOfChildDocs() == false
&& req.getSearcher().count(new TermQuery(new Term(IndexSchema.ROOT_FIELD_NAME, idBytes))) > 1) {
throw new SolrException(ErrorCode.BAD_REQUEST, "This schema does not support partial updates to nested docs. See ref guide.");
}
String oldRootDocRootFieldVal = (String) oldRootDocWithChildren.getFieldValue(IndexSchema.ROOT_FIELD_NAME);
if(req.getSchema().savesChildDocRelations() && oldRootDocRootFieldVal != null &&
!idString.equals(oldRootDocRootFieldVal)) {
// this is an update where the updated doc is not the root document
SolrInputDocument sdocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(),
idBytes, RealTimeGetComponent.Resolution.DOC_WITH_CHILDREN);
mergedDoc = docMerger.mergeChildDoc(sdoc, oldRootDocWithChildren, sdocWithChildren);
} else {
mergedDoc = docMerger.merge(sdoc, oldRootDocWithChildren);
}
}
cmd.solrDoc = mergedDoc;
return true;
} | NONSATD | true | SolrInputDocument mergedDoc;
if(idField == null || oldRootDocWithChildren == null) {
// create a new doc by default if an old one wasn't found
mergedDoc = docMerger.merge(sdoc, new SolrInputDocument());
} else { | // and was explicitly routed using _route_
throw new SolrException(ErrorCode.BAD_REQUEST,
"Could not find document id=" + idString +
", perhaps the wrong \"_route_\" param was supplied");
}
} else {
oldRootDocWithChildren.remove(CommonParams.VERSION_FIELD);
}
SolrInputDocument mergedDoc;
if(idField == null || oldRootDocWithChildren == null) {
// create a new doc by default if an old one wasn't found
mergedDoc = docMerger.merge(sdoc, new SolrInputDocument());
} else {
// Safety check: don't allow an update to an existing doc that has children, unless we actually support this.
if (req.getSchema().isUsableForChildDocs() // however, next line we see it doesn't support child docs
&& req.getSchema().supportsPartialUpdatesOfChildDocs() == false
&& req.getSearcher().count(new TermQuery(new Term(IndexSchema.ROOT_FIELD_NAME, idBytes))) > 1) {
throw new SolrException(ErrorCode.BAD_REQUEST, "This schema does not support partial updates to nested docs. See ref guide.");
}
String oldRootDocRootFieldVal = (String) oldRootDocWithChildren.getFieldValue(IndexSchema.ROOT_FIELD_NAME);
if(req.getSchema().savesChildDocRelations() && oldRootDocRootFieldVal != null && | SolrInputDocument sdoc = cmd.getSolrInputDocument();
BytesRef idBytes = cmd.getIndexedId();
String idString = cmd.getPrintableId();
SolrInputDocument oldRootDocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(), idBytes, RealTimeGetComponent.Resolution.ROOT_WITH_CHILDREN);
if (oldRootDocWithChildren == null) {
if (versionOnUpdate > 0) {
// could just let the optimistic locking throw the error
throw new SolrException(ErrorCode.CONFLICT, "Document not found for update. id=" + idString);
} else if (req.getParams().get(ShardParams._ROUTE_) != null) {
// the specified document could not be found in this shard
// and was explicitly routed using _route_
throw new SolrException(ErrorCode.BAD_REQUEST,
"Could not find document id=" + idString +
", perhaps the wrong \"_route_\" param was supplied");
}
} else {
oldRootDocWithChildren.remove(CommonParams.VERSION_FIELD);
}
SolrInputDocument mergedDoc;
if(idField == null || oldRootDocWithChildren == null) {
// create a new doc by default if an old one wasn't found
mergedDoc = docMerger.merge(sdoc, new SolrInputDocument());
} else {
// Safety check: don't allow an update to an existing doc that has children, unless we actually support this.
if (req.getSchema().isUsableForChildDocs() // however, next line we see it doesn't support child docs
&& req.getSchema().supportsPartialUpdatesOfChildDocs() == false
&& req.getSearcher().count(new TermQuery(new Term(IndexSchema.ROOT_FIELD_NAME, idBytes))) > 1) {
throw new SolrException(ErrorCode.BAD_REQUEST, "This schema does not support partial updates to nested docs. See ref guide.");
}
String oldRootDocRootFieldVal = (String) oldRootDocWithChildren.getFieldValue(IndexSchema.ROOT_FIELD_NAME);
if(req.getSchema().savesChildDocRelations() && oldRootDocRootFieldVal != null &&
!idString.equals(oldRootDocRootFieldVal)) {
// this is an update where the updated doc is not the root document
SolrInputDocument sdocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(),
idBytes, RealTimeGetComponent.Resolution.DOC_WITH_CHILDREN);
mergedDoc = docMerger.mergeChildDoc(sdoc, oldRootDocWithChildren, sdocWithChildren);
} else {
mergedDoc = docMerger.merge(sdoc, oldRootDocWithChildren);
}
}
cmd.solrDoc = mergedDoc; |
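Note on the record above: the "create a new doc by default" comment documents upsert semantics. When no stored document matches the id (and optimistic locking was not requested), the partial update is merged into an empty document, silently creating a new one. Below is a minimal sketch of that pattern, assuming a plain map-based document model rather than Solr's actual SolrInputDocument/AtomicUpdateDocumentMerger API:

    import java.util.HashMap;
    import java.util.Map;

    class UpsertSketch {
        // Fields from the partial update overwrite fields of the stored doc.
        static Map<String, Object> merge(Map<String, Object> update, Map<String, Object> stored) {
            Map<String, Object> merged = new HashMap<>(stored);
            merged.putAll(update);
            return merged;
        }

        static Map<String, Object> upsert(Map<String, Object> update, Map<String, Object> stored) {
            // Create a new doc by default if an old one wasn't found.
            return merge(update, stored != null ? stored : new HashMap<>());
        }
    }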
25,738 | 7 | // Safety check: don't allow an update to an existing doc that has children, unless we actually support this. | boolean getUpdatedDocument(AddUpdateCommand cmd, long versionOnUpdate) throws IOException {
if (!AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)) return false;
Set<String> inPlaceUpdatedFields = AtomicUpdateDocumentMerger.computeInPlaceUpdatableFields(cmd);
if (inPlaceUpdatedFields.size() > 0) { // non-empty means this is suitable for in-place updates
if (docMerger.doInPlaceUpdateMerge(cmd, inPlaceUpdatedFields)) {
return true;
} else {
// in-place update failed, so fall through and re-try the same with a full atomic update
}
}
// full (non-inplace) atomic update
SolrInputDocument sdoc = cmd.getSolrInputDocument();
BytesRef idBytes = cmd.getIndexedId();
String idString = cmd.getPrintableId();
SolrInputDocument oldRootDocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(), idBytes, RealTimeGetComponent.Resolution.ROOT_WITH_CHILDREN);
if (oldRootDocWithChildren == null) {
if (versionOnUpdate > 0) {
// could just let the optimistic locking throw the error
throw new SolrException(ErrorCode.CONFLICT, "Document not found for update. id=" + idString);
} else if (req.getParams().get(ShardParams._ROUTE_) != null) {
// the specified document could not be found in this shard
// and was explicitly routed using _route_
throw new SolrException(ErrorCode.BAD_REQUEST,
"Could not find document id=" + idString +
", perhaps the wrong \"_route_\" param was supplied");
}
} else {
oldRootDocWithChildren.remove(CommonParams.VERSION_FIELD);
}
SolrInputDocument mergedDoc;
if(idField == null || oldRootDocWithChildren == null) {
// create a new doc by default if an old one wasn't found
mergedDoc = docMerger.merge(sdoc, new SolrInputDocument());
} else {
// Safety check: don't allow an update to an existing doc that has children, unless we actually support this.
if (req.getSchema().isUsableForChildDocs() // however, next line we see it doesn't support child docs
&& req.getSchema().supportsPartialUpdatesOfChildDocs() == false
&& req.getSearcher().count(new TermQuery(new Term(IndexSchema.ROOT_FIELD_NAME, idBytes))) > 1) {
throw new SolrException(ErrorCode.BAD_REQUEST, "This schema does not support partial updates to nested docs. See ref guide.");
}
String oldRootDocRootFieldVal = (String) oldRootDocWithChildren.getFieldValue(IndexSchema.ROOT_FIELD_NAME);
if(req.getSchema().savesChildDocRelations() && oldRootDocRootFieldVal != null &&
!idString.equals(oldRootDocRootFieldVal)) {
// this is an update where the updated doc is not the root document
SolrInputDocument sdocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(),
idBytes, RealTimeGetComponent.Resolution.DOC_WITH_CHILDREN);
mergedDoc = docMerger.mergeChildDoc(sdoc, oldRootDocWithChildren, sdocWithChildren);
} else {
mergedDoc = docMerger.merge(sdoc, oldRootDocWithChildren);
}
}
cmd.solrDoc = mergedDoc;
return true;
} | NONSATD | true | mergedDoc = docMerger.merge(sdoc, new SolrInputDocument());
} else {
// Safety check: don't allow an update to an existing doc that has children, unless we actually support this.
if (req.getSchema().isUsableForChildDocs() // however, next line we see it doesn't support child docs
&& req.getSchema().supportsPartialUpdatesOfChildDocs() == false | ", perhaps the wrong \"_route_\" param was supplied");
}
} else {
oldRootDocWithChildren.remove(CommonParams.VERSION_FIELD);
}
SolrInputDocument mergedDoc;
if(idField == null || oldRootDocWithChildren == null) {
// create a new doc by default if an old one wasn't found
mergedDoc = docMerger.merge(sdoc, new SolrInputDocument());
} else {
// Safety check: don't allow an update to an existing doc that has children, unless we actually support this.
if (req.getSchema().isUsableForChildDocs() // however, next line we see it doesn't support child docs
&& req.getSchema().supportsPartialUpdatesOfChildDocs() == false
&& req.getSearcher().count(new TermQuery(new Term(IndexSchema.ROOT_FIELD_NAME, idBytes))) > 1) {
throw new SolrException(ErrorCode.BAD_REQUEST, "This schema does not support partial updates to nested docs. See ref guide.");
}
String oldRootDocRootFieldVal = (String) oldRootDocWithChildren.getFieldValue(IndexSchema.ROOT_FIELD_NAME);
if(req.getSchema().savesChildDocRelations() && oldRootDocRootFieldVal != null &&
!idString.equals(oldRootDocRootFieldVal)) {
// this is an update where the updated doc is not the root document
SolrInputDocument sdocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(), | SolrInputDocument oldRootDocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(), idBytes, RealTimeGetComponent.Resolution.ROOT_WITH_CHILDREN);
if (oldRootDocWithChildren == null) {
if (versionOnUpdate > 0) {
// could just let the optimistic locking throw the error
throw new SolrException(ErrorCode.CONFLICT, "Document not found for update. id=" + idString);
} else if (req.getParams().get(ShardParams._ROUTE_) != null) {
// the specified document could not be found in this shard
// and was explicitly routed using _route_
throw new SolrException(ErrorCode.BAD_REQUEST,
"Could not find document id=" + idString +
", perhaps the wrong \"_route_\" param was supplied");
}
} else {
oldRootDocWithChildren.remove(CommonParams.VERSION_FIELD);
}
SolrInputDocument mergedDoc;
if(idField == null || oldRootDocWithChildren == null) {
// create a new doc by default if an old one wasn't found
mergedDoc = docMerger.merge(sdoc, new SolrInputDocument());
} else {
// Safety check: don't allow an update to an existing doc that has children, unless we actually support this.
if (req.getSchema().isUsableForChildDocs() // however, next line we see it doesn't support child docs
&& req.getSchema().supportsPartialUpdatesOfChildDocs() == false
&& req.getSearcher().count(new TermQuery(new Term(IndexSchema.ROOT_FIELD_NAME, idBytes))) > 1) {
throw new SolrException(ErrorCode.BAD_REQUEST, "This schema does not support partial updates to nested docs. See ref guide.");
}
String oldRootDocRootFieldVal = (String) oldRootDocWithChildren.getFieldValue(IndexSchema.ROOT_FIELD_NAME);
if(req.getSchema().savesChildDocRelations() && oldRootDocRootFieldVal != null &&
!idString.equals(oldRootDocRootFieldVal)) {
// this is an update where the updated doc is not the root document
SolrInputDocument sdocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(),
idBytes, RealTimeGetComponent.Resolution.DOC_WITH_CHILDREN);
mergedDoc = docMerger.mergeChildDoc(sdoc, oldRootDocWithChildren, sdocWithChildren);
} else {
mergedDoc = docMerger.merge(sdoc, oldRootDocWithChildren);
}
}
cmd.solrDoc = mergedDoc;
return true;
} |
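Note on the record above: the "Safety check" comment marks a precondition guard. If the schema can hold child documents but cannot re-associate them after a partial update, and the stored root actually has more than one document under it, the update is rejected up front instead of silently corrupting the nested structure. A generic sketch of the guard-clause shape follows; the parameter names are illustrative, not Solr's schema API:

    class NestedUpdateGuard {
        static void checkPartialUpdateAllowed(boolean schemaHoldsChildDocs,
                                              boolean supportsPartialChildUpdates,
                                              int docsUnderRoot) {
            // Fail fast: a partial update of a parent with children would drop
            // or orphan the nested docs unless the schema can re-link them.
            if (schemaHoldsChildDocs && !supportsPartialChildUpdates && docsUnderRoot > 1) {
                throw new IllegalStateException(
                        "Schema does not support partial updates to nested docs");
            }
        }
    }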
25,738 | 8 | // however, next line we see it doesn't support child docs | boolean getUpdatedDocument(AddUpdateCommand cmd, long versionOnUpdate) throws IOException {
if (!AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)) return false;
Set<String> inPlaceUpdatedFields = AtomicUpdateDocumentMerger.computeInPlaceUpdatableFields(cmd);
if (inPlaceUpdatedFields.size() > 0) { // non-empty means this is suitable for in-place updates
if (docMerger.doInPlaceUpdateMerge(cmd, inPlaceUpdatedFields)) {
return true;
} else {
// in-place update failed, so fall through and re-try the same with a full atomic update
}
}
// full (non-inplace) atomic update
SolrInputDocument sdoc = cmd.getSolrInputDocument();
BytesRef idBytes = cmd.getIndexedId();
String idString = cmd.getPrintableId();
SolrInputDocument oldRootDocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(), idBytes, RealTimeGetComponent.Resolution.ROOT_WITH_CHILDREN);
if (oldRootDocWithChildren == null) {
if (versionOnUpdate > 0) {
// could just let the optimistic locking throw the error
throw new SolrException(ErrorCode.CONFLICT, "Document not found for update. id=" + idString);
} else if (req.getParams().get(ShardParams._ROUTE_) != null) {
// the specified document could not be found in this shard
// and was explicitly routed using _route_
throw new SolrException(ErrorCode.BAD_REQUEST,
"Could not find document id=" + idString +
", perhaps the wrong \"_route_\" param was supplied");
}
} else {
oldRootDocWithChildren.remove(CommonParams.VERSION_FIELD);
}
SolrInputDocument mergedDoc;
if(idField == null || oldRootDocWithChildren == null) {
// create a new doc by default if an old one wasn't found
mergedDoc = docMerger.merge(sdoc, new SolrInputDocument());
} else {
// Safety check: don't allow an update to an existing doc that has children, unless we actually support this.
if (req.getSchema().isUsableForChildDocs() // however, next line we see it doesn't support child docs
&& req.getSchema().supportsPartialUpdatesOfChildDocs() == false
&& req.getSearcher().count(new TermQuery(new Term(IndexSchema.ROOT_FIELD_NAME, idBytes))) > 1) {
throw new SolrException(ErrorCode.BAD_REQUEST, "This schema does not support partial updates to nested docs. See ref guide.");
}
String oldRootDocRootFieldVal = (String) oldRootDocWithChildren.getFieldValue(IndexSchema.ROOT_FIELD_NAME);
if(req.getSchema().savesChildDocRelations() && oldRootDocRootFieldVal != null &&
!idString.equals(oldRootDocRootFieldVal)) {
// this is an update where the updated doc is not the root document
SolrInputDocument sdocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(),
idBytes, RealTimeGetComponent.Resolution.DOC_WITH_CHILDREN);
mergedDoc = docMerger.mergeChildDoc(sdoc, oldRootDocWithChildren, sdocWithChildren);
} else {
mergedDoc = docMerger.merge(sdoc, oldRootDocWithChildren);
}
}
cmd.solrDoc = mergedDoc;
return true;
} | NONSATD | true | } else {
// Safety check: don't allow an update to an existing doc that has children, unless we actually support this.
if (req.getSchema().isUsableForChildDocs() // however, next line we see it doesn't support child docs
&& req.getSchema().supportsPartialUpdatesOfChildDocs() == false
&& req.getSearcher().count(new TermQuery(new Term(IndexSchema.ROOT_FIELD_NAME, idBytes))) > 1) { | }
} else {
oldRootDocWithChildren.remove(CommonParams.VERSION_FIELD);
}
SolrInputDocument mergedDoc;
if(idField == null || oldRootDocWithChildren == null) {
// create a new doc by default if an old one wasn't found
mergedDoc = docMerger.merge(sdoc, new SolrInputDocument());
} else {
// Safety check: don't allow an update to an existing doc that has children, unless we actually support this.
if (req.getSchema().isUsableForChildDocs() // however, next line we see it doesn't support child docs
&& req.getSchema().supportsPartialUpdatesOfChildDocs() == false
&& req.getSearcher().count(new TermQuery(new Term(IndexSchema.ROOT_FIELD_NAME, idBytes))) > 1) {
throw new SolrException(ErrorCode.BAD_REQUEST, "This schema does not support partial updates to nested docs. See ref guide.");
}
String oldRootDocRootFieldVal = (String) oldRootDocWithChildren.getFieldValue(IndexSchema.ROOT_FIELD_NAME);
if(req.getSchema().savesChildDocRelations() && oldRootDocRootFieldVal != null &&
!idString.equals(oldRootDocRootFieldVal)) {
// this is an update where the updated doc is not the root document
SolrInputDocument sdocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(),
idBytes, RealTimeGetComponent.Resolution.DOC_WITH_CHILDREN); | if (oldRootDocWithChildren == null) {
if (versionOnUpdate > 0) {
// could just let the optimistic locking throw the error
throw new SolrException(ErrorCode.CONFLICT, "Document not found for update. id=" + idString);
} else if (req.getParams().get(ShardParams._ROUTE_) != null) {
// the specified document could not be found in this shard
// and was explicitly routed using _route_
throw new SolrException(ErrorCode.BAD_REQUEST,
"Could not find document id=" + idString +
", perhaps the wrong \"_route_\" param was supplied");
}
} else {
oldRootDocWithChildren.remove(CommonParams.VERSION_FIELD);
}
SolrInputDocument mergedDoc;
if(idField == null || oldRootDocWithChildren == null) {
// create a new doc by default if an old one wasn't found
mergedDoc = docMerger.merge(sdoc, new SolrInputDocument());
} else {
// Safety check: don't allow an update to an existing doc that has children, unless we actually support this.
if (req.getSchema().isUsableForChildDocs() // however, next line we see it doesn't support child docs
&& req.getSchema().supportsPartialUpdatesOfChildDocs() == false
&& req.getSearcher().count(new TermQuery(new Term(IndexSchema.ROOT_FIELD_NAME, idBytes))) > 1) {
throw new SolrException(ErrorCode.BAD_REQUEST, "This schema does not support partial updates to nested docs. See ref guide.");
}
String oldRootDocRootFieldVal = (String) oldRootDocWithChildren.getFieldValue(IndexSchema.ROOT_FIELD_NAME);
if(req.getSchema().savesChildDocRelations() && oldRootDocRootFieldVal != null &&
!idString.equals(oldRootDocRootFieldVal)) {
// this is an update where the updated doc is not the root document
SolrInputDocument sdocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(),
idBytes, RealTimeGetComponent.Resolution.DOC_WITH_CHILDREN);
mergedDoc = docMerger.mergeChildDoc(sdoc, oldRootDocWithChildren, sdocWithChildren);
} else {
mergedDoc = docMerger.merge(sdoc, oldRootDocWithChildren);
}
}
cmd.solrDoc = mergedDoc;
return true;
} |
25,738 | 9 | // this is an update where the updated doc is not the root document | boolean getUpdatedDocument(AddUpdateCommand cmd, long versionOnUpdate) throws IOException {
if (!AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)) return false;
Set<String> inPlaceUpdatedFields = AtomicUpdateDocumentMerger.computeInPlaceUpdatableFields(cmd);
if (inPlaceUpdatedFields.size() > 0) { // non-empty means this is suitable for in-place updates
if (docMerger.doInPlaceUpdateMerge(cmd, inPlaceUpdatedFields)) {
return true;
} else {
// in-place update failed, so fall through and re-try the same with a full atomic update
}
}
// full (non-inplace) atomic update
SolrInputDocument sdoc = cmd.getSolrInputDocument();
BytesRef idBytes = cmd.getIndexedId();
String idString = cmd.getPrintableId();
SolrInputDocument oldRootDocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(), idBytes, RealTimeGetComponent.Resolution.ROOT_WITH_CHILDREN);
if (oldRootDocWithChildren == null) {
if (versionOnUpdate > 0) {
// could just let the optimistic locking throw the error
throw new SolrException(ErrorCode.CONFLICT, "Document not found for update. id=" + idString);
} else if (req.getParams().get(ShardParams._ROUTE_) != null) {
// the specified document could not be found in this shard
// and was explicitly routed using _route_
throw new SolrException(ErrorCode.BAD_REQUEST,
"Could not find document id=" + idString +
", perhaps the wrong \"_route_\" param was supplied");
}
} else {
oldRootDocWithChildren.remove(CommonParams.VERSION_FIELD);
}
SolrInputDocument mergedDoc;
if(idField == null || oldRootDocWithChildren == null) {
// create a new doc by default if an old one wasn't found
mergedDoc = docMerger.merge(sdoc, new SolrInputDocument());
} else {
// Safety check: don't allow an update to an existing doc that has children, unless we actually support this.
if (req.getSchema().isUsableForChildDocs() // however, next line we see it doesn't support child docs
&& req.getSchema().supportsPartialUpdatesOfChildDocs() == false
&& req.getSearcher().count(new TermQuery(new Term(IndexSchema.ROOT_FIELD_NAME, idBytes))) > 1) {
throw new SolrException(ErrorCode.BAD_REQUEST, "This schema does not support partial updates to nested docs. See ref guide.");
}
String oldRootDocRootFieldVal = (String) oldRootDocWithChildren.getFieldValue(IndexSchema.ROOT_FIELD_NAME);
if(req.getSchema().savesChildDocRelations() && oldRootDocRootFieldVal != null &&
!idString.equals(oldRootDocRootFieldVal)) {
// this is an update where the updated doc is not the root document
SolrInputDocument sdocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(),
idBytes, RealTimeGetComponent.Resolution.DOC_WITH_CHILDREN);
mergedDoc = docMerger.mergeChildDoc(sdoc, oldRootDocWithChildren, sdocWithChildren);
} else {
mergedDoc = docMerger.merge(sdoc, oldRootDocWithChildren);
}
}
cmd.solrDoc = mergedDoc;
return true;
} | NONSATD | true | if(req.getSchema().savesChildDocRelations() && oldRootDocRootFieldVal != null &&
!idString.equals(oldRootDocRootFieldVal)) {
// this is an update where the updated doc is not the root document
SolrInputDocument sdocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(),
idBytes, RealTimeGetComponent.Resolution.DOC_WITH_CHILDREN); | } else {
// Safety check: don't allow an update to an existing doc that has children, unless we actually support this.
if (req.getSchema().isUsableForChildDocs() // however, next line we see it doesn't support child docs
&& req.getSchema().supportsPartialUpdatesOfChildDocs() == false
&& req.getSearcher().count(new TermQuery(new Term(IndexSchema.ROOT_FIELD_NAME, idBytes))) > 1) {
throw new SolrException(ErrorCode.BAD_REQUEST, "This schema does not support partial updates to nested docs. See ref guide.");
}
String oldRootDocRootFieldVal = (String) oldRootDocWithChildren.getFieldValue(IndexSchema.ROOT_FIELD_NAME);
if(req.getSchema().savesChildDocRelations() && oldRootDocRootFieldVal != null &&
!idString.equals(oldRootDocRootFieldVal)) {
// this is an update where the updated doc is not the root document
SolrInputDocument sdocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(),
idBytes, RealTimeGetComponent.Resolution.DOC_WITH_CHILDREN);
mergedDoc = docMerger.mergeChildDoc(sdoc, oldRootDocWithChildren, sdocWithChildren);
} else {
mergedDoc = docMerger.merge(sdoc, oldRootDocWithChildren);
}
}
cmd.solrDoc = mergedDoc;
return true;
} | "Could not find document id=" + idString +
", perhaps the wrong \"_route_\" param was supplied");
}
} else {
oldRootDocWithChildren.remove(CommonParams.VERSION_FIELD);
}
SolrInputDocument mergedDoc;
if(idField == null || oldRootDocWithChildren == null) {
// create a new doc by default if an old one wasn't found
mergedDoc = docMerger.merge(sdoc, new SolrInputDocument());
} else {
// Safety check: don't allow an update to an existing doc that has children, unless we actually support this.
if (req.getSchema().isUsableForChildDocs() // however, next line we see it doesn't support child docs
&& req.getSchema().supportsPartialUpdatesOfChildDocs() == false
&& req.getSearcher().count(new TermQuery(new Term(IndexSchema.ROOT_FIELD_NAME, idBytes))) > 1) {
throw new SolrException(ErrorCode.BAD_REQUEST, "This schema does not support partial updates to nested docs. See ref guide.");
}
String oldRootDocRootFieldVal = (String) oldRootDocWithChildren.getFieldValue(IndexSchema.ROOT_FIELD_NAME);
if(req.getSchema().savesChildDocRelations() && oldRootDocRootFieldVal != null &&
!idString.equals(oldRootDocRootFieldVal)) {
// this is an update where the updated doc is not the root document
SolrInputDocument sdocWithChildren = RealTimeGetComponent.getInputDocument(cmd.getReq().getCore(),
idBytes, RealTimeGetComponent.Resolution.DOC_WITH_CHILDREN);
mergedDoc = docMerger.mergeChildDoc(sdoc, oldRootDocWithChildren, sdocWithChildren);
} else {
mergedDoc = docMerger.merge(sdoc, oldRootDocWithChildren);
}
}
cmd.solrDoc = mergedDoc;
return true;
} |
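Note on the record above: the comment distinguishes updating the root document from updating a descendant. When the stored _root_ field differs from the updated document's own id, the merger must fetch both the whole root tree and the child's subtree so the change can be spliced in place. A compact sketch of just the routing test, with hypothetical names:

    class ChildUpdateRouting {
        static boolean isChildUpdate(String updatedId, String storedRootId) {
            // A non-null root id that differs from the doc's own id means the
            // doc being updated is nested below some other root document.
            return storedRootId != null && !updatedId.equals(storedRootId);
        }
    }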
17,549 | 0 | /**
* INTERNAL:
* This method will perform a delete operation on the provided objects pre-determining
* the objects that will be deleted by a commit of the UnitOfWork including privately
* owned objects. It does not execute a query for the deletion of these objects as the
* normal deleteObject operation does. Mainly implemented to provide EJB 3.0 deleteObject
* support.
*/ | public void performRemove(Object toBeDeleted, Map visitedObjects) {
if (toBeDeleted == null) {
return;
}
ClassDescriptor descriptor = getDescriptor(toBeDeleted);
if ((descriptor == null) || descriptor.isDescriptorTypeAggregate()) {
throw new IllegalArgumentException(ExceptionLocalization.buildMessage("not_an_entity", new Object[] { toBeDeleted }));
}
logDebugMessage(toBeDeleted, "deleting_object");
//bug 4568370+4599010; fix EntityManager.remove() to handle new objects
if (getDeletedObjects().containsKey(toBeDeleted)){
return;
}
visitedObjects.put(toBeDeleted,toBeDeleted);
Object registeredObject = checkIfAlreadyRegistered(toBeDeleted, descriptor);
if (registeredObject == null) {
Object primaryKey = descriptor.getObjectBuilder().extractPrimaryKeyFromObject(toBeDeleted, this);
DoesExistQuery existQuery = descriptor.getQueryManager().getDoesExistQuery();
existQuery = (DoesExistQuery)existQuery.clone();
existQuery.setObject(toBeDeleted);
existQuery.setPrimaryKey(primaryKey);
existQuery.setDescriptor(descriptor);
existQuery.setIsExecutionClone(true);
existQuery.setCheckCacheFirst(true);
if (((Boolean)executeQuery(existQuery)).booleanValue()){
throw new IllegalArgumentException(ExceptionLocalization.buildMessage("cannot_remove_detatched_entity", new Object[]{toBeDeleted}));
}//else, it is a new or previously deleted object that should be ignored (and delete should cascade)
} else {
//fire events only if this is a managed object
if (descriptor.getEventManager().hasAnyEventListeners()) {
org.eclipse.persistence.descriptors.DescriptorEvent event = new org.eclipse.persistence.descriptors.DescriptorEvent(toBeDeleted);
event.setEventCode(DescriptorEventManager.PreRemoveEvent);
event.setSession(this);
descriptor.getEventManager().executeEvent(event);
}
if (hasNewObjects() && getNewObjectsCloneToOriginal().containsKey(registeredObject)){
unregisterObject(registeredObject, DescriptorIterator.NoCascading);
} else {
getDeletedObjects().put(toBeDeleted, toBeDeleted);
}
}
descriptor.getObjectBuilder().cascadePerformRemove(toBeDeleted, this, visitedObjects);
} | NONSATD | true | public void performRemove(Object toBeDeleted, Map visitedObjects) {
if (toBeDeleted == null) {
return;
}
ClassDescriptor descriptor = getDescriptor(toBeDeleted);
if ((descriptor == null) || descriptor.isDescriptorTypeAggregate()) {
throw new IllegalArgumentException(ExceptionLocalization.buildMessage("not_an_entity", new Object[] { toBeDeleted }));
}
logDebugMessage(toBeDeleted, "deleting_object");
//bug 4568370+4599010; fix EntityManager.remove() to handle new objects
if (getDeletedObjects().containsKey(toBeDeleted)){
return;
}
visitedObjects.put(toBeDeleted,toBeDeleted);
Object registeredObject = checkIfAlreadyRegistered(toBeDeleted, descriptor);
if (registeredObject == null) {
Object primaryKey = descriptor.getObjectBuilder().extractPrimaryKeyFromObject(toBeDeleted, this);
DoesExistQuery existQuery = descriptor.getQueryManager().getDoesExistQuery();
existQuery = (DoesExistQuery)existQuery.clone();
existQuery.setObject(toBeDeleted);
existQuery.setPrimaryKey(primaryKey);
existQuery.setDescriptor(descriptor);
existQuery.setIsExecutionClone(true);
existQuery.setCheckCacheFirst(true);
if (((Boolean)executeQuery(existQuery)).booleanValue()){
throw new IllegalArgumentException(ExceptionLocalization.buildMessage("cannot_remove_detatched_entity", new Object[]{toBeDeleted}));
}//else, it is a new or previously deleted object that should be ignored (and delete should cascade)
} else {
//fire events only if this is a managed object
if (descriptor.getEventManager().hasAnyEventListeners()) {
org.eclipse.persistence.descriptors.DescriptorEvent event = new org.eclipse.persistence.descriptors.DescriptorEvent(toBeDeleted);
event.setEventCode(DescriptorEventManager.PreRemoveEvent);
event.setSession(this);
descriptor.getEventManager().executeEvent(event);
}
if (hasNewObjects() && getNewObjectsCloneToOriginal().containsKey(registeredObject)){
unregisterObject(registeredObject, DescriptorIterator.NoCascading);
} else {
getDeletedObjects().put(toBeDeleted, toBeDeleted);
}
}
descriptor.getObjectBuilder().cascadePerformRemove(toBeDeleted, this, visitedObjects);
} | public void performRemove(Object toBeDeleted, Map visitedObjects) {
if (toBeDeleted == null) {
return;
}
ClassDescriptor descriptor = getDescriptor(toBeDeleted);
if ((descriptor == null) || descriptor.isDescriptorTypeAggregate()) {
throw new IllegalArgumentException(ExceptionLocalization.buildMessage("not_an_entity", new Object[] { toBeDeleted }));
}
logDebugMessage(toBeDeleted, "deleting_object");
//bug 4568370+4599010; fix EntityManager.remove() to handle new objects
if (getDeletedObjects().containsKey(toBeDeleted)){
return;
}
visitedObjects.put(toBeDeleted,toBeDeleted);
Object registeredObject = checkIfAlreadyRegistered(toBeDeleted, descriptor);
if (registeredObject == null) {
Object primaryKey = descriptor.getObjectBuilder().extractPrimaryKeyFromObject(toBeDeleted, this);
DoesExistQuery existQuery = descriptor.getQueryManager().getDoesExistQuery();
existQuery = (DoesExistQuery)existQuery.clone();
existQuery.setObject(toBeDeleted);
existQuery.setPrimaryKey(primaryKey);
existQuery.setDescriptor(descriptor);
existQuery.setIsExecutionClone(true);
existQuery.setCheckCacheFirst(true);
if (((Boolean)executeQuery(existQuery)).booleanValue()){
throw new IllegalArgumentException(ExceptionLocalization.buildMessage("cannot_remove_detatched_entity", new Object[]{toBeDeleted}));
}//else, it is a new or previously deleted object that should be ignored (and delete should cascade)
} else {
//fire events only if this is a managed object
if (descriptor.getEventManager().hasAnyEventListeners()) {
org.eclipse.persistence.descriptors.DescriptorEvent event = new org.eclipse.persistence.descriptors.DescriptorEvent(toBeDeleted);
event.setEventCode(DescriptorEventManager.PreRemoveEvent);
event.setSession(this);
descriptor.getEventManager().executeEvent(event);
}
if (hasNewObjects() && getNewObjectsCloneToOriginal().containsKey(registeredObject)){
unregisterObject(registeredObject, DescriptorIterator.NoCascading);
} else {
getDeletedObjects().put(toBeDeleted, toBeDeleted);
}
}
descriptor.getObjectBuilder().cascadePerformRemove(toBeDeleted, this, visitedObjects);
} | public void performRemove(Object toBeDeleted, Map visitedObjects) {
if (toBeDeleted == null) {
return;
}
ClassDescriptor descriptor = getDescriptor(toBeDeleted);
if ((descriptor == null) || descriptor.isDescriptorTypeAggregate()) {
throw new IllegalArgumentException(ExceptionLocalization.buildMessage("not_an_entity", new Object[] { toBeDeleted }));
}
logDebugMessage(toBeDeleted, "deleting_object");
//bug 4568370+4599010; fix EntityManager.remove() to handle new objects
if (getDeletedObjects().containsKey(toBeDeleted)){
return;
}
visitedObjects.put(toBeDeleted,toBeDeleted);
Object registeredObject = checkIfAlreadyRegistered(toBeDeleted, descriptor);
if (registeredObject == null) {
Object primaryKey = descriptor.getObjectBuilder().extractPrimaryKeyFromObject(toBeDeleted, this);
DoesExistQuery existQuery = descriptor.getQueryManager().getDoesExistQuery();
existQuery = (DoesExistQuery)existQuery.clone();
existQuery.setObject(toBeDeleted);
existQuery.setPrimaryKey(primaryKey);
existQuery.setDescriptor(descriptor);
existQuery.setIsExecutionClone(true);
existQuery.setCheckCacheFirst(true);
if (((Boolean)executeQuery(existQuery)).booleanValue()){
throw new IllegalArgumentException(ExceptionLocalization.buildMessage("cannot_remove_detatched_entity", new Object[]{toBeDeleted}));
}//else, it is a new or previously deleted object that should be ignored (and delete should cascade)
} else {
//fire events only if this is a managed object
if (descriptor.getEventManager().hasAnyEventListeners()) {
org.eclipse.persistence.descriptors.DescriptorEvent event = new org.eclipse.persistence.descriptors.DescriptorEvent(toBeDeleted);
event.setEventCode(DescriptorEventManager.PreRemoveEvent);
event.setSession(this);
descriptor.getEventManager().executeEvent(event);
}
if (hasNewObjects() && getNewObjectsCloneToOriginal().containsKey(registeredObject)){
unregisterObject(registeredObject, DescriptorIterator.NoCascading);
} else {
getDeletedObjects().put(toBeDeleted, toBeDeleted);
}
}
descriptor.getObjectBuilder().cascadePerformRemove(toBeDeleted, this, visitedObjects);
} |
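Note on the record above: performRemove threads a visitedObjects map through the cascade so that cyclic object graphs (a parent referencing a child that points back at the parent) terminate. Below is a self-contained sketch of cycle-safe cascading deletion; the Entity type is an illustrative stand-in, not EclipseLink's descriptor machinery:

    import java.util.ArrayList;
    import java.util.IdentityHashMap;
    import java.util.List;
    import java.util.Map;

    class CascadeRemoveSketch {
        interface Entity { List<Entity> privatelyOwned(); }

        static void performRemove(Entity e, Map<Entity, Entity> visited, List<Entity> deleted) {
            if (e == null || visited.containsKey(e)) {
                return; // already scheduled: this check is what stops cycles
            }
            visited.put(e, e);
            deleted.add(e);
            for (Entity child : e.privatelyOwned()) {
                performRemove(child, visited, deleted); // cascade, like cascadePerformRemove
            }
        }
    }

An identity-based map matters here: entities with overridden equals/hashCode must still be tracked per instance, e.g. performRemove(root, new IdentityHashMap<>(), new ArrayList<>()).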
17,549 | 1 | //bug 4568370+4599010; fix EntityManager.remove() to handle new objects | public void performRemove(Object toBeDeleted, Map visitedObjects) {
if (toBeDeleted == null) {
return;
}
ClassDescriptor descriptor = getDescriptor(toBeDeleted);
if ((descriptor == null) || descriptor.isDescriptorTypeAggregate()) {
throw new IllegalArgumentException(ExceptionLocalization.buildMessage("not_an_entity", new Object[] { toBeDeleted }));
}
logDebugMessage(toBeDeleted, "deleting_object");
//bug 4568370+4599010; fix EntityManager.remove() to handle new objects
if (getDeletedObjects().containsKey(toBeDeleted)){
return;
}
visitedObjects.put(toBeDeleted,toBeDeleted);
Object registeredObject = checkIfAlreadyRegistered(toBeDeleted, descriptor);
if (registeredObject == null) {
Object primaryKey = descriptor.getObjectBuilder().extractPrimaryKeyFromObject(toBeDeleted, this);
DoesExistQuery existQuery = descriptor.getQueryManager().getDoesExistQuery();
existQuery = (DoesExistQuery)existQuery.clone();
existQuery.setObject(toBeDeleted);
existQuery.setPrimaryKey(primaryKey);
existQuery.setDescriptor(descriptor);
existQuery.setIsExecutionClone(true);
existQuery.setCheckCacheFirst(true);
if (((Boolean)executeQuery(existQuery)).booleanValue()){
throw new IllegalArgumentException(ExceptionLocalization.buildMessage("cannot_remove_detatched_entity", new Object[]{toBeDeleted}));
}//else, it is a new or previously deleted object that should be ignored (and delete should cascade)
} else {
//fire events only if this is a managed object
if (descriptor.getEventManager().hasAnyEventListeners()) {
org.eclipse.persistence.descriptors.DescriptorEvent event = new org.eclipse.persistence.descriptors.DescriptorEvent(toBeDeleted);
event.setEventCode(DescriptorEventManager.PreRemoveEvent);
event.setSession(this);
descriptor.getEventManager().executeEvent(event);
}
if (hasNewObjects() && getNewObjectsCloneToOriginal().containsKey(registeredObject)){
unregisterObject(registeredObject, DescriptorIterator.NoCascading);
} else {
getDeletedObjects().put(toBeDeleted, toBeDeleted);
}
}
descriptor.getObjectBuilder().cascadePerformRemove(toBeDeleted, this, visitedObjects);
} | DEFECT | true | }
logDebugMessage(toBeDeleted, "deleting_object");
//bug 4568370+4599010; fix EntityManager.remove() to handle new objects
if (getDeletedObjects().containsKey(toBeDeleted)){
return; | public void performRemove(Object toBeDeleted, Map visitedObjects) {
if (toBeDeleted == null) {
return;
}
ClassDescriptor descriptor = getDescriptor(toBeDeleted);
if ((descriptor == null) || descriptor.isDescriptorTypeAggregate()) {
throw new IllegalArgumentException(ExceptionLocalization.buildMessage("not_an_entity", new Object[] { toBeDeleted }));
}
logDebugMessage(toBeDeleted, "deleting_object");
//bug 4568370+4599010; fix EntityManager.remove() to handle new objects
if (getDeletedObjects().containsKey(toBeDeleted)){
return;
}
visitedObjects.put(toBeDeleted,toBeDeleted);
Object registeredObject = checkIfAlreadyRegistered(toBeDeleted, descriptor);
if (registeredObject == null) {
Object primaryKey = descriptor.getObjectBuilder().extractPrimaryKeyFromObject(toBeDeleted, this);
DoesExistQuery existQuery = descriptor.getQueryManager().getDoesExistQuery();
existQuery = (DoesExistQuery)existQuery.clone();
existQuery.setObject(toBeDeleted); | public void performRemove(Object toBeDeleted, Map visitedObjects) {
if (toBeDeleted == null) {
return;
}
ClassDescriptor descriptor = getDescriptor(toBeDeleted);
if ((descriptor == null) || descriptor.isDescriptorTypeAggregate()) {
throw new IllegalArgumentException(ExceptionLocalization.buildMessage("not_an_entity", new Object[] { toBeDeleted }));
}
logDebugMessage(toBeDeleted, "deleting_object");
//bug 4568370+4599010; fix EntityManager.remove() to handle new objects
if (getDeletedObjects().containsKey(toBeDeleted)){
return;
}
visitedObjects.put(toBeDeleted,toBeDeleted);
Object registeredObject = checkIfAlreadyRegistered(toBeDeleted, descriptor);
if (registeredObject == null) {
Object primaryKey = descriptor.getObjectBuilder().extractPrimaryKeyFromObject(toBeDeleted, this);
DoesExistQuery existQuery = descriptor.getQueryManager().getDoesExistQuery();
existQuery = (DoesExistQuery)existQuery.clone();
existQuery.setObject(toBeDeleted);
existQuery.setPrimaryKey(primaryKey);
existQuery.setDescriptor(descriptor);
existQuery.setIsExecutionClone(true);
existQuery.setCheckCacheFirst(true);
if (((Boolean)executeQuery(existQuery)).booleanValue()){
throw new IllegalArgumentException(ExceptionLocalization.buildMessage("cannot_remove_detatched_entity", new Object[]{toBeDeleted}));
}//else, it is a new or previously deleted object that should be ignored (and delete should cascade)
} else {
//fire events only if this is a managed object
if (descriptor.getEventManager().hasAnyEventListeners()) { |
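Note on the record above: the "bug 4568370+4599010" comment is classified here as DEFECT-type SATD because it records a concrete fix, making remove() tolerate objects that are new or already scheduled for deletion; that is exactly what the early containsKey return implements. A tiny sketch of the idempotency guard in isolation:

    import java.util.IdentityHashMap;
    import java.util.Map;

    class IdempotentRemove {
        private final Map<Object, Object> deletedObjects = new IdentityHashMap<>();

        void remove(Object entity) {
            if (entity == null || deletedObjects.containsKey(entity)) {
                return; // a second remove() of the same instance is a no-op
            }
            deletedObjects.put(entity, entity);
        }
    }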
17,549 | 2 | //else, it is a new or previously deleted object that should be ignored (and delete should cascade) | public void performRemove(Object toBeDeleted, Map visitedObjects) {
if (toBeDeleted == null) {
return;
}
ClassDescriptor descriptor = getDescriptor(toBeDeleted);
if ((descriptor == null) || descriptor.isDescriptorTypeAggregate()) {
throw new IllegalArgumentException(ExceptionLocalization.buildMessage("not_an_entity", new Object[] { toBeDeleted }));
}
logDebugMessage(toBeDeleted, "deleting_object");
//bug 4568370+4599010; fix EntityManager.remove() to handle new objects
if (getDeletedObjects().containsKey(toBeDeleted)){
return;
}
visitedObjects.put(toBeDeleted,toBeDeleted);
Object registeredObject = checkIfAlreadyRegistered(toBeDeleted, descriptor);
if (registeredObject == null) {
Object primaryKey = descriptor.getObjectBuilder().extractPrimaryKeyFromObject(toBeDeleted, this);
DoesExistQuery existQuery = descriptor.getQueryManager().getDoesExistQuery();
existQuery = (DoesExistQuery)existQuery.clone();
existQuery.setObject(toBeDeleted);
existQuery.setPrimaryKey(primaryKey);
existQuery.setDescriptor(descriptor);
existQuery.setIsExecutionClone(true);
existQuery.setCheckCacheFirst(true);
if (((Boolean)executeQuery(existQuery)).booleanValue()){
throw new IllegalArgumentException(ExceptionLocalization.buildMessage("cannot_remove_detatched_entity", new Object[]{toBeDeleted}));
}//else, it is a new or previously deleted object that should be ignored (and delete should cascade)
} else {
//fire events only if this is a managed object
if (descriptor.getEventManager().hasAnyEventListeners()) {
org.eclipse.persistence.descriptors.DescriptorEvent event = new org.eclipse.persistence.descriptors.DescriptorEvent(toBeDeleted);
event.setEventCode(DescriptorEventManager.PreRemoveEvent);
event.setSession(this);
descriptor.getEventManager().executeEvent(event);
}
if (hasNewObjects() && getNewObjectsCloneToOriginal().containsKey(registeredObject)){
unregisterObject(registeredObject, DescriptorIterator.NoCascading);
} else {
getDeletedObjects().put(toBeDeleted, toBeDeleted);
}
}
descriptor.getObjectBuilder().cascadePerformRemove(toBeDeleted, this, visitedObjects);
} | NONSATD | true | if (((Boolean)executeQuery(existQuery)).booleanValue()){
throw new IllegalArgumentException(ExceptionLocalization.buildMessage("cannot_remove_detatched_entity", new Object[]{toBeDeleted}));
}//else, it is a new or previously deleted object that should be ignored (and delete should cascade)
} else {
//fire events only if this is a managed object | Object primaryKey = descriptor.getObjectBuilder().extractPrimaryKeyFromObject(toBeDeleted, this);
DoesExistQuery existQuery = descriptor.getQueryManager().getDoesExistQuery();
existQuery = (DoesExistQuery)existQuery.clone();
existQuery.setObject(toBeDeleted);
existQuery.setPrimaryKey(primaryKey);
existQuery.setDescriptor(descriptor);
existQuery.setIsExecutionClone(true);
existQuery.setCheckCacheFirst(true);
if (((Boolean)executeQuery(existQuery)).booleanValue()){
throw new IllegalArgumentException(ExceptionLocalization.buildMessage("cannot_remove_detatched_entity", new Object[]{toBeDeleted}));
}//else, it is a new or previously deleted object that should be ignored (and delete should cascade)
} else {
//fire events only if this is a managed object
if (descriptor.getEventManager().hasAnyEventListeners()) {
org.eclipse.persistence.descriptors.DescriptorEvent event = new org.eclipse.persistence.descriptors.DescriptorEvent(toBeDeleted);
event.setEventCode(DescriptorEventManager.PreRemoveEvent);
event.setSession(this);
descriptor.getEventManager().executeEvent(event);
}
if (hasNewObjects() && getNewObjectsCloneToOriginal().containsKey(registeredObject)){
unregisterObject(registeredObject, DescriptorIterator.NoCascading); | throw new IllegalArgumentException(ExceptionLocalization.buildMessage("not_an_entity", new Object[] { toBeDeleted }));
}
logDebugMessage(toBeDeleted, "deleting_object");
//bug 4568370+4599010; fix EntityManager.remove() to handle new objects
if (getDeletedObjects().containsKey(toBeDeleted)){
return;
}
visitedObjects.put(toBeDeleted,toBeDeleted);
Object registeredObject = checkIfAlreadyRegistered(toBeDeleted, descriptor);
if (registeredObject == null) {
Object primaryKey = descriptor.getObjectBuilder().extractPrimaryKeyFromObject(toBeDeleted, this);
DoesExistQuery existQuery = descriptor.getQueryManager().getDoesExistQuery();
existQuery = (DoesExistQuery)existQuery.clone();
existQuery.setObject(toBeDeleted);
existQuery.setPrimaryKey(primaryKey);
existQuery.setDescriptor(descriptor);
existQuery.setIsExecutionClone(true);
existQuery.setCheckCacheFirst(true);
if (((Boolean)executeQuery(existQuery)).booleanValue()){
throw new IllegalArgumentException(ExceptionLocalization.buildMessage("cannot_remove_detatched_entity", new Object[]{toBeDeleted}));
}//else, it is a new or previously deleted object that should be ignored (and delete should cascade)
} else {
//fire events only if this is a managed object
if (descriptor.getEventManager().hasAnyEventListeners()) {
org.eclipse.persistence.descriptors.DescriptorEvent event = new org.eclipse.persistence.descriptors.DescriptorEvent(toBeDeleted);
event.setEventCode(DescriptorEventManager.PreRemoveEvent);
event.setSession(this);
descriptor.getEventManager().executeEvent(event);
}
if (hasNewObjects() && getNewObjectsCloneToOriginal().containsKey(registeredObject)){
unregisterObject(registeredObject, DescriptorIterator.NoCascading);
} else {
getDeletedObjects().put(toBeDeleted, toBeDeleted);
}
}
descriptor.getObjectBuilder().cascadePerformRemove(toBeDeleted, this, visitedObjects);
} |
17,549 | 3 | //fire events only if this is a managed object | public void performRemove(Object toBeDeleted, Map visitedObjects) {
if (toBeDeleted == null) {
return;
}
ClassDescriptor descriptor = getDescriptor(toBeDeleted);
if ((descriptor == null) || descriptor.isDescriptorTypeAggregate()) {
throw new IllegalArgumentException(ExceptionLocalization.buildMessage("not_an_entity", new Object[] { toBeDeleted }));
}
logDebugMessage(toBeDeleted, "deleting_object");
//bug 4568370+4599010; fix EntityManager.remove() to handle new objects
if (getDeletedObjects().containsKey(toBeDeleted)){
return;
}
visitedObjects.put(toBeDeleted,toBeDeleted);
Object registeredObject = checkIfAlreadyRegistered(toBeDeleted, descriptor);
if (registeredObject == null) {
Object primaryKey = descriptor.getObjectBuilder().extractPrimaryKeyFromObject(toBeDeleted, this);
DoesExistQuery existQuery = descriptor.getQueryManager().getDoesExistQuery();
existQuery = (DoesExistQuery)existQuery.clone();
existQuery.setObject(toBeDeleted);
existQuery.setPrimaryKey(primaryKey);
existQuery.setDescriptor(descriptor);
existQuery.setIsExecutionClone(true);
existQuery.setCheckCacheFirst(true);
if (((Boolean)executeQuery(existQuery)).booleanValue()){
throw new IllegalArgumentException(ExceptionLocalization.buildMessage("cannot_remove_detatched_entity", new Object[]{toBeDeleted}));
}//else, it is a new or previously deleted object that should be ignored (and delete should cascade)
} else {
//fire events only if this is a managed object
if (descriptor.getEventManager().hasAnyEventListeners()) {
org.eclipse.persistence.descriptors.DescriptorEvent event = new org.eclipse.persistence.descriptors.DescriptorEvent(toBeDeleted);
event.setEventCode(DescriptorEventManager.PreRemoveEvent);
event.setSession(this);
descriptor.getEventManager().executeEvent(event);
}
if (hasNewObjects() && getNewObjectsCloneToOriginal().containsKey(registeredObject)){
unregisterObject(registeredObject, DescriptorIterator.NoCascading);
} else {
getDeletedObjects().put(toBeDeleted, toBeDeleted);
}
}
descriptor.getObjectBuilder().cascadePerformRemove(toBeDeleted, this, visitedObjects);
} | NONSATD | true | }//else, it is a new or previously deleted object that should be ignored (and delete should cascade)
} else {
//fire events only if this is a managed object
if (descriptor.getEventManager().hasAnyEventListeners()) {
org.eclipse.persistence.descriptors.DescriptorEvent event = new org.eclipse.persistence.descriptors.DescriptorEvent(toBeDeleted); | existQuery = (DoesExistQuery)existQuery.clone();
existQuery.setObject(toBeDeleted);
existQuery.setPrimaryKey(primaryKey);
existQuery.setDescriptor(descriptor);
existQuery.setIsExecutionClone(true);
existQuery.setCheckCacheFirst(true);
if (((Boolean)executeQuery(existQuery)).booleanValue()){
throw new IllegalArgumentException(ExceptionLocalization.buildMessage("cannot_remove_detatched_entity", new Object[]{toBeDeleted}));
}//else, it is a new or previously deleted object that should be ignored (and delete should cascade)
} else {
//fire events only if this is a managed object
if (descriptor.getEventManager().hasAnyEventListeners()) {
org.eclipse.persistence.descriptors.DescriptorEvent event = new org.eclipse.persistence.descriptors.DescriptorEvent(toBeDeleted);
event.setEventCode(DescriptorEventManager.PreRemoveEvent);
event.setSession(this);
descriptor.getEventManager().executeEvent(event);
}
if (hasNewObjects() && getNewObjectsCloneToOriginal().containsKey(registeredObject)){
unregisterObject(registeredObject, DescriptorIterator.NoCascading);
} else {
getDeletedObjects().put(toBeDeleted, toBeDeleted); | logDebugMessage(toBeDeleted, "deleting_object");
//bug 4568370+4599010; fix EntityManager.remove() to handle new objects
if (getDeletedObjects().containsKey(toBeDeleted)){
return;
}
visitedObjects.put(toBeDeleted,toBeDeleted);
Object registeredObject = checkIfAlreadyRegistered(toBeDeleted, descriptor);
if (registeredObject == null) {
Object primaryKey = descriptor.getObjectBuilder().extractPrimaryKeyFromObject(toBeDeleted, this);
DoesExistQuery existQuery = descriptor.getQueryManager().getDoesExistQuery();
existQuery = (DoesExistQuery)existQuery.clone();
existQuery.setObject(toBeDeleted);
existQuery.setPrimaryKey(primaryKey);
existQuery.setDescriptor(descriptor);
existQuery.setIsExecutionClone(true);
existQuery.setCheckCacheFirst(true);
if (((Boolean)executeQuery(existQuery)).booleanValue()){
throw new IllegalArgumentException(ExceptionLocalization.buildMessage("cannot_remove_detatched_entity", new Object[]{toBeDeleted}));
}//else, it is a new or previously deleted object that should be ignored (and delete should cascade)
} else {
//fire events only if this is a managed object
if (descriptor.getEventManager().hasAnyEventListeners()) {
org.eclipse.persistence.descriptors.DescriptorEvent event = new org.eclipse.persistence.descriptors.DescriptorEvent(toBeDeleted);
event.setEventCode(DescriptorEventManager.PreRemoveEvent);
event.setSession(this);
descriptor.getEventManager().executeEvent(event);
}
if (hasNewObjects() && getNewObjectsCloneToOriginal().containsKey(registeredObject)){
unregisterObject(registeredObject, DescriptorIterator.NoCascading);
} else {
getDeletedObjects().put(toBeDeleted, toBeDeleted);
}
}
descriptor.getObjectBuilder().cascadePerformRemove(toBeDeleted, this, visitedObjects);
} |
17,557 | 0 | /**
* This method is used internally to clone a map that holds the persistenceContexts. A weak map is returned if ReferenceMode is weak.
*
*/ | protected Map cloneMap(Map map){
// bug 270413. This method is needed to avoid the class cast exception when the reference mode is weak.
if (this.referenceMode != null && this.referenceMode != ReferenceMode.HARD) return (IdentityWeakHashMap)((IdentityWeakHashMap)map).clone();
return (IdentityHashMap)((IdentityHashMap)map).clone();
} | NONSATD | true | protected Map cloneMap(Map map){
// bug 270413. This method is needed to avoid the class cast exception when the reference mode is weak.
if (this.referenceMode != null && this.referenceMode != ReferenceMode.HARD) return (IdentityWeakHashMap)((IdentityWeakHashMap)map).clone();
return (IdentityHashMap)((IdentityHashMap)map).clone();
} | protected Map cloneMap(Map map){
// bug 270413. This method is needed to avoid the class cast exception when the reference mode is weak.
if (this.referenceMode != null && this.referenceMode != ReferenceMode.HARD) return (IdentityWeakHashMap)((IdentityWeakHashMap)map).clone();
return (IdentityHashMap)((IdentityHashMap)map).clone();
} | protected Map cloneMap(Map map){
// bug 270413. This method is needed to avoid the class cast exception when the reference mode is weak.
if (this.referenceMode != null && this.referenceMode != ReferenceMode.HARD) return (IdentityWeakHashMap)((IdentityWeakHashMap)map).clone();
return (IdentityHashMap)((IdentityHashMap)map).clone();
} |
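Note on the record above: per the bug-270413 comment, cloning through a single hard-coded cast broke once referenceMode could be weak, because the backing map is then an IdentityWeakHashMap rather than an IdentityHashMap. A general way to sidestep the cast altogether is to copy-construct the matching concrete type; this sketch uses plain JDK map types for illustration, not EclipseLink's identity maps:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.WeakHashMap;

    class CloneByRuntimeType {
        static <K, V> Map<K, V> cloneMap(Map<K, V> map, boolean weakReferences) {
            // Copy-construct the right concrete class: no downcast that can
            // throw ClassCastException when the reference mode changes.
            if (weakReferences) {
                return new WeakHashMap<>(map);
            }
            return new HashMap<>(map);
        }
    }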
17,557 | 1 | // bug 270413. This method is needed to avoid the class cast exception when the reference mode is weak. | protected Map cloneMap(Map map){
// bug 270413. This method is needed to avoid the class cast exception when the reference mode is weak.
if (this.referenceMode != null && this.referenceMode != ReferenceMode.HARD) return (IdentityWeakHashMap)((IdentityWeakHashMap)map).clone();
return (IdentityHashMap)((IdentityHashMap)map).clone();
} | DEFECT | true | protected Map cloneMap(Map map){
// bug 270413. This method is needed to avoid the class cast exception when the reference mode is weak.
if (this.referenceMode != null && this.referenceMode != ReferenceMode.HARD) return (IdentityWeakHashMap)((IdentityWeakHashMap)map).clone();
return (IdentityHashMap)((IdentityHashMap)map).clone(); | protected Map cloneMap(Map map){
// bug 270413. This method is needed to avoid the class cast exception when the reference mode is weak.
if (this.referenceMode != null && this.referenceMode != ReferenceMode.HARD) return (IdentityWeakHashMap)((IdentityWeakHashMap)map).clone();
return (IdentityHashMap)((IdentityHashMap)map).clone();
} | protected Map cloneMap(Map map){
// bug 270413. This method is needed to avoid the class cast exception when the reference mode is weak.
if (this.referenceMode != null && this.referenceMode != ReferenceMode.HARD) return (IdentityWeakHashMap)((IdentityWeakHashMap)map).clone();
return (IdentityHashMap)((IdentityHashMap)map).clone();
} |
33,945 | 0 | /** Returns true if string s is letters only.
*
* NOTE: This should handle i18n version to support European characters, etc.
* since it now uses Character.isLetter()
*/ | public static boolean isAlphabetic(String s) {
if (isEmpty(s)) return defaultEmptyOK;
// Search through string's characters one by one
// until we find a non-alphabetic character.
// When we do, return false; if we don't, return true.
for (int i = 0; i < s.length(); i++) {
// Check that current character is letter.
char c = s.charAt(i);
if (!isLetter(c))
return false;
}
// All characters are letters.
return true;
} | DESIGN | true | public static boolean isAlphabetic(String s) {
if (isEmpty(s)) return defaultEmptyOK;
// Search through string's characters one by one
// until we find a non-alphabetic character.
// When we do, return false; if we don't, return true.
for (int i = 0; i < s.length(); i++) {
// Check that current character is letter.
char c = s.charAt(i);
if (!isLetter(c))
return false;
}
// All characters are letters.
return true;
} | public static boolean isAlphabetic(String s) {
if (isEmpty(s)) return defaultEmptyOK;
// Search through string's characters one by one
// until we find a non-alphabetic character.
// When we do, return false; if we don't, return true.
for (int i = 0; i < s.length(); i++) {
// Check that current character is letter.
char c = s.charAt(i);
if (!isLetter(c))
return false;
}
// All characters are letters.
return true;
} | public static boolean isAlphabetic(String s) {
if (isEmpty(s)) return defaultEmptyOK;
// Search through string's characters one by one
// until we find a non-alphabetic character.
// When we do, return false; if we don't, return true.
for (int i = 0; i < s.length(); i++) {
// Check that current character is letter.
char c = s.charAt(i);
if (!isLetter(c))
return false;
}
// All characters are letters.
return true;
} |
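Note on the record above: the NOTE in the javadoc is DESIGN-type SATD. "Letters only" must mean Unicode letters, not just ASCII a-z, and Character.isLetter already provides that. A quick demonstration that the check accepts accented European characters and rejects digits and punctuation:

    class IsLetterDemo {
        public static void main(String[] args) {
            System.out.println(Character.isLetter('é')); // true: Unicode letter
            System.out.println(Character.isLetter('ß')); // true
            System.out.println(Character.isLetter('7')); // false: digit
            System.out.println(Character.isLetter('-')); // false: punctuation
        }
    }

For characters outside the Basic Multilingual Plane, the int overload Character.isLetter(int codePoint) is the correct variant.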
33,945 | 1 | // Search through string's characters one by one
// until we find a non-alphabetic character.
// When we do, return false; if we don't, return true. | public static boolean isAlphabetic(String s) {
if (isEmpty(s)) return defaultEmptyOK;
// Search through string's characters one by one
// until we find a non-alphabetic character.
// When we do, return false; if we don't, return true.
for (int i = 0; i < s.length(); i++) {
// Check that current character is letter.
char c = s.charAt(i);
if (!isLetter(c))
return false;
}
// All characters are letters.
return true;
} | NONSATD | true | public static boolean isAlphabetic(String s) {
if (isEmpty(s)) return defaultEmptyOK;
// Search through string's characters one by one
// until we find a non-alphabetic character.
// When we do, return false; if we don't, return true.
for (int i = 0; i < s.length(); i++) {
// Check that current character is letter. | public static boolean isAlphabetic(String s) {
if (isEmpty(s)) return defaultEmptyOK;
// Search through string's characters one by one
// until we find a non-alphabetic character.
// When we do, return false; if we don't, return true.
for (int i = 0; i < s.length(); i++) {
// Check that current character is letter.
char c = s.charAt(i);
if (!isLetter(c))
return false;
}
// All characters are letters.
return true;
} | public static boolean isAlphabetic(String s) {
if (isEmpty(s)) return defaultEmptyOK;
// Search through string's characters one by one
// until we find a non-alphabetic character.
// When we do, return false; if we don't, return true.
for (int i = 0; i < s.length(); i++) {
// Check that current character is letter.
char c = s.charAt(i);
if (!isLetter(c))
return false;
}
// All characters are letters.
return true;
} |
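Note on the record above: the loop comments describe a scan-until-counterexample search. On current Java the same logic collapses into a single allMatch over the string's code points, which is also correct for supplementary characters. A sketch follows; the empty-string policy mirrors the original's defaultEmptyOK flag, assumed true here:

    class IsAlphabeticStreams {
        static boolean isAlphabetic(String s) {
            if (s == null || s.isEmpty()) return true; // assumed defaultEmptyOK
            return s.codePoints().allMatch(Character::isLetter);
        }
    }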
33,945 | 2 | // Check that current character is letter. | public static boolean isAlphabetic(String s) {
if (isEmpty(s)) return defaultEmptyOK;
// Search through string's characters one by one
// until we find a non-alphabetic character.
// When we do, return false; if we don't, return true.
for (int i = 0; i < s.length(); i++) {
// Check that current character is letter.
char c = s.charAt(i);
if (!isLetter(c))
return false;
}
// All characters are letters.
return true;
} | NONSATD | true | // When we do, return false; if we don't, return true.
for (int i = 0; i < s.length(); i++) {
// Check that current character is letter.
char c = s.charAt(i);
if (!isLetter(c)) | public static boolean isAlphabetic(String s) {
if (isEmpty(s)) return defaultEmptyOK;
// Search through string's characters one by one
// until we find a non-alphabetic character.
// When we do, return false; if we don't, return true.
for (int i = 0; i < s.length(); i++) {
// Check that current character is letter.
char c = s.charAt(i);
if (!isLetter(c))
return false;
}
// All characters are letters.
return true;
} | public static boolean isAlphabetic(String s) {
if (isEmpty(s)) return defaultEmptyOK;
// Search through string's characters one by one
// until we find a non-alphabetic character.
// When we do, return false; if we don't, return true.
for (int i = 0; i < s.length(); i++) {
// Check that current character is letter.
char c = s.charAt(i);
if (!isLetter(c))
return false;
}
// All characters are letters.
return true;
} |
33,945 | 3 | // All characters are letters. | public static boolean isAlphabetic(String s) {
if (isEmpty(s)) return defaultEmptyOK;
// Search through string's characters one by one
// until we find a non-alphabetic character.
// When we do, return false; if we don't, return true.
for (int i = 0; i < s.length(); i++) {
// Check that current character is letter.
char c = s.charAt(i);
if (!isLetter(c))
return false;
}
// All characters are letters.
return true;
} | NONSATD | true | return false;
}
// All characters are letters.
return true;
} | if (isEmpty(s)) return defaultEmptyOK;
// Search through string's characters one by one
// until we find a non-alphabetic character.
// When we do, return false; if we don't, return true.
for (int i = 0; i < s.length(); i++) {
// Check that current character is letter.
char c = s.charAt(i);
if (!isLetter(c))
return false;
}
// All characters are letters.
return true;
} | public static boolean isAlphabetic(String s) {
if (isEmpty(s)) return defaultEmptyOK;
// Search through string's characters one by one
// until we find a non-alphabetic character.
// When we do, return false; if we don't, return true.
for (int i = 0; i < s.length(); i++) {
// Check that current character is letter.
char c = s.charAt(i);
if (!isLetter(c))
return false;
}
// All characters are letters.
return true;
} |
9,374 | 0 | // TODO Your code.. | @Override
public void close() {
logger.info("CLOSE ZookeeperRiver");
// TODO Your code..
} | IMPLEMENTATION | true | public void close() {
logger.info("CLOSE ZookeeperRiver");
// TODO Your code..
} | @Override
public void close() {
logger.info("CLOSE ZookeeperRiver");
// TODO Your code..
} | @Override
public void close() {
logger.info("CLOSE ZookeeperRiver");
// TODO Your code..
} |
9,375 | 0 | // TODO Your code.. | @Override
public void run() {
logger.info("START ZookeeperRiverLogic: " + client.toString());
// TODO Your code..
} | IMPLEMENTATION | true | public void run() {
logger.info("START ZookeeperRiverLogic: " + client.toString());
// TODO Your code..
} | @Override
public void run() {
logger.info("START ZookeeperRiverLogic: " + client.toString());
// TODO Your code..
} | @Override
public void run() {
logger.info("START ZookeeperRiverLogic: " + client.toString());
// TODO Your code..
} |
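Both ZookeeperRiver rows are labeled IMPLEMENTATION because the method bodies are stubs. A hedged sketch of one way the pair could be completed, assuming the river owns an org.apache.zookeeper.ZooKeeper client and a volatile running flag (both fields are inventions for illustration; the row only shows client.toString() and a logger):

import org.apache.zookeeper.ZooKeeper;

final class ZookeeperRiverLogicSketch implements Runnable {
    private final ZooKeeper client;      // assumed field, inferred from client.toString()
    private volatile boolean running = true;

    ZookeeperRiverLogicSketch(ZooKeeper client) { this.client = client; }

    void close() {
        running = false;                 // stop the run() loop
        try {
            client.close();              // end the ZooKeeper session
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }

    @Override
    public void run() {
        while (running) {
            // Watch or poll znodes and feed documents to the index here;
            // the dataset row records only that this body was still a TODO.
            try {
                Thread.sleep(1_000L);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                return;
            }
        }
    }
}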
33,952 | 0 | /**
* Check if the replica count per ts matches the expected, returns true if it does within
* timeoutMs, false otherwise.
 * TODO(Rahul): follow a similar style for this type of function; will do the same with the
 * affinitized leaders tests.
* @param timeoutMs number of milliseconds before timing out.
* @param table the table to wait for load balancing.
* @param replicaMapExpected the expected map between cluster uuid and live, read replica count.
* @return true if the read only replica count for the table matches the expected within the
* expected time frame, false otherwise.
*/ | public boolean waitForExpectedReplicaMap(final long timeoutMs, YBTable table,
Map<String, List<List<Integer>>> replicaMapExpected) {
Condition replicaMapCondition = new ReplicaMapCondition(table, replicaMapExpected, timeoutMs);
return waitForCondition(replicaMapCondition, timeoutMs);
} | DESIGN | true | public boolean waitForExpectedReplicaMap(final long timeoutMs, YBTable table,
Map<String, List<List<Integer>>> replicaMapExpected) {
Condition replicaMapCondition = new ReplicaMapCondition(table, replicaMapExpected, timeoutMs);
return waitForCondition(replicaMapCondition, timeoutMs);
} | public boolean waitForExpectedReplicaMap(final long timeoutMs, YBTable table,
Map<String, List<List<Integer>>> replicaMapExpected) {
Condition replicaMapCondition = new ReplicaMapCondition(table, replicaMapExpected, timeoutMs);
return waitForCondition(replicaMapCondition, timeoutMs);
} | public boolean waitForExpectedReplicaMap(final long timeoutMs, YBTable table,
Map<String, List<List<Integer>>> replicaMapExpected) {
Condition replicaMapCondition = new ReplicaMapCondition(table, replicaMapExpected, timeoutMs);
return waitForCondition(replicaMapCondition, timeoutMs);
} |
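waitForExpectedReplicaMap delegates everything to a Condition plus waitForCondition pair that the row does not include. A generic sketch of that polling contract, assuming a probe that may throw and a fixed delay between retries (the real test utility's signatures may differ):

interface Condition {
    boolean get() throws Exception;
}

final class WaitUtil {
    static boolean waitForCondition(Condition condition, long timeoutMs) {
        long deadline = System.currentTimeMillis() + timeoutMs;
        while (System.currentTimeMillis() < deadline) {
            try {
                if (condition.get()) {
                    return true;
                }
            } catch (Exception e) {
                // Treat a failed probe as "not yet"; retry until the deadline.
            }
            try {
                Thread.sleep(250);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                return false;
            }
        }
        return false;
    }
}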
9,378 | 0 | /**
* Invokes {@link SourceTransferHandler#transferData(PushSourceStream)} on
* {@link #transferHandler} for each of <tt>pkts</tt> in order to
* consecutively push them out of/make them available outside this
* <tt>PushSourceStream</tt>.
*
* @param pkts the set of <tt>RawPacket</tt>s to push out of this
* <tt>PushSourceStream</tt>
*/ | private void transferData(RawPacket[] pkts)
{
for (int i = 0; i < pkts.length; i++)
{
RawPacket pkt = pkts[i];
pkts[i] = null;
if (pkt != null)
{
if (pkt.isInvalid())
{
/*
* Return pkt to the pool because it is invalid and,
* consequently, will not be made available to reading.
*/
poolRawPacket(pkt);
}
else
{
RawPacket oldPkt;
synchronized (pktSyncRoot)
{
oldPkt = this.pkt;
this.pkt = pkt;
}
if (oldPkt != null)
{
/*
* Return oldPkt to the pool because it was made
* available to reading and it was not read.
*/
poolRawPacket(oldPkt);
}
if (transferHandler != null && !closed)
{
try
{
transferHandler.transferData(this);
}
catch (Throwable t)
{
// XXX We cannot allow transferHandler to kill us.
if (t instanceof InterruptedException)
{
Thread.currentThread().interrupt();
}
else if (t instanceof ThreadDeath)
{
throw (ThreadDeath) t;
}
else
{
logger.warn(
"An RTP packet may have not been fully"
+ " handled.",
t);
}
}
}
}
}
}
} | NONSATD | true | private void transferData(RawPacket[] pkts)
{
for (int i = 0; i < pkts.length; i++)
{
RawPacket pkt = pkts[i];
pkts[i] = null;
if (pkt != null)
{
if (pkt.isInvalid())
{
/*
* Return pkt to the pool because it is invalid and,
* consequently, will not be made available to reading.
*/
poolRawPacket(pkt);
}
else
{
RawPacket oldPkt;
synchronized (pktSyncRoot)
{
oldPkt = this.pkt;
this.pkt = pkt;
}
if (oldPkt != null)
{
/*
* Return oldPkt to the pool because it was made
* available to reading and it was not read.
*/
poolRawPacket(oldPkt);
}
if (transferHandler != null && !closed)
{
try
{
transferHandler.transferData(this);
}
catch (Throwable t)
{
// XXX We cannot allow transferHandler to kill us.
if (t instanceof InterruptedException)
{
Thread.currentThread().interrupt();
}
else if (t instanceof ThreadDeath)
{
throw (ThreadDeath) t;
}
else
{
logger.warn(
"An RTP packet may have not been fully"
+ " handled.",
t);
}
}
}
}
}
}
} | private void transferData(RawPacket[] pkts)
{
for (int i = 0; i < pkts.length; i++)
{
RawPacket pkt = pkts[i];
pkts[i] = null;
if (pkt != null)
{
if (pkt.isInvalid())
{
/*
* Return pkt to the pool because it is invalid and,
* consequently, will not be made available to reading.
*/
poolRawPacket(pkt);
}
else
{
RawPacket oldPkt;
synchronized (pktSyncRoot)
{
oldPkt = this.pkt;
this.pkt = pkt;
}
if (oldPkt != null)
{
/*
* Return oldPkt to the pool because it was made
* available to reading and it was not read.
*/
poolRawPacket(oldPkt);
}
if (transferHandler != null && !closed)
{
try
{
transferHandler.transferData(this);
}
catch (Throwable t)
{
// XXX We cannot allow transferHandler to kill us.
if (t instanceof InterruptedException)
{
Thread.currentThread().interrupt();
}
else if (t instanceof ThreadDeath)
{
throw (ThreadDeath) t;
}
else
{
logger.warn(
"An RTP packet may have not been fully"
+ " handled.",
t);
}
}
}
}
}
}
} | private void transferData(RawPacket[] pkts)
{
for (int i = 0; i < pkts.length; i++)
{
RawPacket pkt = pkts[i];
pkts[i] = null;
if (pkt != null)
{
if (pkt.isInvalid())
{
/*
* Return pkt to the pool because it is invalid and,
* consequently, will not be made available to reading.
*/
poolRawPacket(pkt);
}
else
{
RawPacket oldPkt;
synchronized (pktSyncRoot)
{
oldPkt = this.pkt;
this.pkt = pkt;
}
if (oldPkt != null)
{
/*
* Return oldPkt to the pool because it was made
* available to reading and it was not read.
*/
poolRawPacket(oldPkt);
}
if (transferHandler != null && !closed)
{
try
{
transferHandler.transferData(this);
}
catch (Throwable t)
{
// XXX We cannot allow transferHandler to kill us.
if (t instanceof InterruptedException)
{
Thread.currentThread().interrupt();
}
else if (t instanceof ThreadDeath)
{
throw (ThreadDeath) t;
}
else
{
logger.warn(
"An RTP packet may have not been fully"
+ " handled.",
t);
}
}
}
}
}
}
} |
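The catch block inside transferData is a reusable idiom: survive arbitrary callback failures, but re-assert thread interruption and never swallow ThreadDeath. Factored out as a sketch (the Callback type is invented here; the original calls transferHandler.transferData directly):

final class CallbackGuard {
    interface Callback {
        void transfer() throws Exception;
    }

    static void invokeSafely(Callback callback) {
        try {
            callback.transfer();
        } catch (Throwable t) {
            if (t instanceof InterruptedException) {
                Thread.currentThread().interrupt();   // preserve interrupt status
            } else if (t instanceof ThreadDeath) {
                throw (ThreadDeath) t;                // ThreadDeath must propagate
            } else {
                System.err.println("callback failed, continuing: " + t);
            }
        }
    }
}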
9,378 | 1 | /*
* Return pkt to the pool because it is invalid and,
* consequently, will not be made available to reading.
*/ | private void transferData(RawPacket[] pkts)
{
for (int i = 0; i < pkts.length; i++)
{
RawPacket pkt = pkts[i];
pkts[i] = null;
if (pkt != null)
{
if (pkt.isInvalid())
{
/*
* Return pkt to the pool because it is invalid and,
* consequently, will not be made available to reading.
*/
poolRawPacket(pkt);
}
else
{
RawPacket oldPkt;
synchronized (pktSyncRoot)
{
oldPkt = this.pkt;
this.pkt = pkt;
}
if (oldPkt != null)
{
/*
* Return oldPkt to the pool because it was made
* available to reading and it was not read.
*/
poolRawPacket(oldPkt);
}
if (transferHandler != null && !closed)
{
try
{
transferHandler.transferData(this);
}
catch (Throwable t)
{
// XXX We cannot allow transferHandler to kill us.
if (t instanceof InterruptedException)
{
Thread.currentThread().interrupt();
}
else if (t instanceof ThreadDeath)
{
throw (ThreadDeath) t;
}
else
{
logger.warn(
"An RTP packet may have not been fully"
+ " handled.",
t);
}
}
}
}
}
}
} | NONSATD | true | if (pkt.isInvalid())
{
/*
* Return pkt to the pool because it is invalid and,
* consequently, will not be made available to reading.
*/
poolRawPacket(pkt);
} | private void transferData(RawPacket[] pkts)
{
for (int i = 0; i < pkts.length; i++)
{
RawPacket pkt = pkts[i];
pkts[i] = null;
if (pkt != null)
{
if (pkt.isInvalid())
{
/*
* Return pkt to the pool because it is invalid and,
* consequently, will not be made available to reading.
*/
poolRawPacket(pkt);
}
else
{
RawPacket oldPkt;
synchronized (pktSyncRoot)
{
oldPkt = this.pkt;
this.pkt = pkt;
} | private void transferData(RawPacket[] pkts)
{
for (int i = 0; i < pkts.length; i++)
{
RawPacket pkt = pkts[i];
pkts[i] = null;
if (pkt != null)
{
if (pkt.isInvalid())
{
/*
* Return pkt to the pool because it is invalid and,
* consequently, will not be made available to reading.
*/
poolRawPacket(pkt);
}
else
{
RawPacket oldPkt;
synchronized (pktSyncRoot)
{
oldPkt = this.pkt;
this.pkt = pkt;
}
if (oldPkt != null)
{
/*
* Return oldPkt to the pool because it was made
* available to reading and it was not read.
*/
poolRawPacket(oldPkt);
}
if (transferHandler != null && !closed)
{ |
9,378 | 2 | /*
* Return oldPkt to the pool because it was made
* available to reading and it was not read.
*/ | private void transferData(RawPacket[] pkts)
{
for (int i = 0; i < pkts.length; i++)
{
RawPacket pkt = pkts[i];
pkts[i] = null;
if (pkt != null)
{
if (pkt.isInvalid())
{
/*
* Return pkt to the pool because it is invalid and,
* consequently, will not be made available to reading.
*/
poolRawPacket(pkt);
}
else
{
RawPacket oldPkt;
synchronized (pktSyncRoot)
{
oldPkt = this.pkt;
this.pkt = pkt;
}
if (oldPkt != null)
{
/*
* Return oldPkt to the pool because it was made
* available to reading and it was not read.
*/
poolRawPacket(oldPkt);
}
if (transferHandler != null && !closed)
{
try
{
transferHandler.transferData(this);
}
catch (Throwable t)
{
// XXX We cannot allow transferHandler to kill us.
if (t instanceof InterruptedException)
{
Thread.currentThread().interrupt();
}
else if (t instanceof ThreadDeath)
{
throw (ThreadDeath) t;
}
else
{
logger.warn(
"An RTP packet may have not been fully"
+ " handled.",
t);
}
}
}
}
}
}
} | NONSATD | true | if (pkt.isInvalid())
{
/*
* Return pkt to the pool because it is invalid and,
* consequently, will not be made available to reading.
*/
poolRawPacket(pkt);
} | private void transferData(RawPacket[] pkts)
{
for (int i = 0; i < pkts.length; i++)
{
RawPacket pkt = pkts[i];
pkts[i] = null;
if (pkt != null)
{
if (pkt.isInvalid())
{
/*
* Return pkt to the pool because it is invalid and,
* consequently, will not be made available to reading.
*/
poolRawPacket(pkt);
}
else
{
RawPacket oldPkt;
synchronized (pktSyncRoot)
{
oldPkt = this.pkt;
this.pkt = pkt;
} | private void transferData(RawPacket[] pkts)
{
for (int i = 0; i < pkts.length; i++)
{
RawPacket pkt = pkts[i];
pkts[i] = null;
if (pkt != null)
{
if (pkt.isInvalid())
{
/*
* Return pkt to the pool because it is invalid and,
* consequently, will not be made available to reading.
*/
poolRawPacket(pkt);
}
else
{
RawPacket oldPkt;
synchronized (pktSyncRoot)
{
oldPkt = this.pkt;
this.pkt = pkt;
}
if (oldPkt != null)
{
/*
* Return oldPkt to the pool because it was made
* available to reading and it was not read.
*/
poolRawPacket(oldPkt);
}
if (transferHandler != null && !closed)
{ |
9,378 | 3 | // XXX We cannot allow transferHandler to kill us. | private void transferData(RawPacket[] pkts)
{
for (int i = 0; i < pkts.length; i++)
{
RawPacket pkt = pkts[i];
pkts[i] = null;
if (pkt != null)
{
if (pkt.isInvalid())
{
/*
* Return pkt to the pool because it is invalid and,
* consequently, will not be made available to reading.
*/
poolRawPacket(pkt);
}
else
{
RawPacket oldPkt;
synchronized (pktSyncRoot)
{
oldPkt = this.pkt;
this.pkt = pkt;
}
if (oldPkt != null)
{
/*
* Return oldPkt to the pool because it was made
* available to reading and it was not read.
*/
poolRawPacket(oldPkt);
}
if (transferHandler != null && !closed)
{
try
{
transferHandler.transferData(this);
}
catch (Throwable t)
{
// XXX We cannot allow transferHandler to kill us.
if (t instanceof InterruptedException)
{
Thread.currentThread().interrupt();
}
else if (t instanceof ThreadDeath)
{
throw (ThreadDeath) t;
}
else
{
logger.warn(
"An RTP packet may have not been fully"
+ " handled.",
t);
}
}
}
}
}
}
} | DESIGN | true | catch (Throwable t)
{
// XXX We cannot allow transferHandler to kill us.
if (t instanceof InterruptedException)
{ | poolRawPacket(oldPkt);
}
if (transferHandler != null && !closed)
{
try
{
transferHandler.transferData(this);
}
catch (Throwable t)
{
// XXX We cannot allow transferHandler to kill us.
if (t instanceof InterruptedException)
{
Thread.currentThread().interrupt();
}
else if (t instanceof ThreadDeath)
{
throw (ThreadDeath) t;
}
else
{ | {
oldPkt = this.pkt;
this.pkt = pkt;
}
if (oldPkt != null)
{
/*
* Return oldPkt to the pool because it was made
* available to reading and it was not read.
*/
poolRawPacket(oldPkt);
}
if (transferHandler != null && !closed)
{
try
{
transferHandler.transferData(this);
}
catch (Throwable t)
{
// XXX We cannot allow transferHandler to kill us.
if (t instanceof InterruptedException)
{
Thread.currentThread().interrupt();
}
else if (t instanceof ThreadDeath)
{
throw (ThreadDeath) t;
}
else
{
logger.warn(
"An RTP packet may have not been fully"
+ " handled.",
t);
}
}
}
}
}
} |
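Rows 9,378|1 through 9,378|3 all orbit the same structure: a single-slot handoff in which the newest packet replaces the current one under a lock, and whichever packet loses (invalid, or overwritten before being read) returns to a free pool. A condensed generic sketch of that shape, with invented names:

import java.util.ArrayDeque;

final class PacketSlot<T> {
    private final Object lock = new Object();
    private final ArrayDeque<T> pool = new ArrayDeque<>();
    private T current;

    void offer(T packet, boolean invalid) {
        if (invalid) {
            recycle(packet);        // invalid packets never become readable
            return;
        }
        T displaced;
        synchronized (lock) {
            displaced = current;    // newest packet wins the slot
            current = packet;
        }
        if (displaced != null) {
            recycle(displaced);     // made available to reading but never read
        }
    }

    T take() {
        synchronized (lock) {
            T packet = current;
            current = null;
            return packet;
        }
    }

    void recycle(T packet) {
        synchronized (pool) {
            pool.push(packet);
        }
    }
}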
25,762 | 0 | // 1. create connect context | @Test
public void testCreateDbAndTable() throws Exception {
// 1. create connect context
ConnectContext ctx = UtFrameUtils.createDefaultCtx();
// 2. create database db1
String createDbStmtStr = "create database db1;";
CreateDbStmt createDbStmt = (CreateDbStmt) UtFrameUtils.parseAndAnalyzeStmt(createDbStmtStr, ctx);
Catalog.getCurrentCatalog().createDb(createDbStmt);
System.out.println(Catalog.getCurrentCatalog().getDbNames());
// 3. create table tbl1
String createTblStmtStr = "create table db1.tbl1(k1 int) distributed by hash(k1) buckets 3 properties('replication_num' = '3'," +
"'colocate_with' = 'g1');";
CreateTableStmt createTableStmt = (CreateTableStmt) UtFrameUtils.parseAndAnalyzeStmt(createTblStmtStr, ctx);
Catalog.getCurrentCatalog().createTable(createTableStmt);
// must set replicas' path hash, or the tablet scheduler won't work
updateReplicaPathHash();
// 4. get and test the created db and table
Database db = Catalog.getCurrentCatalog().getDbNullable("default_cluster:db1");
Assert.assertNotNull(db);
OlapTable tbl = (OlapTable) db.getTableNullable("tbl1");
tbl.readLock();
try {
Assert.assertNotNull(tbl);
System.out.println(tbl.getName());
Assert.assertEquals("Doris", tbl.getEngine());
Assert.assertEquals(1, tbl.getBaseSchema().size());
} finally {
tbl.readUnlock();
}
// 5. process a schema change job
String alterStmtStr = "alter table db1.tbl1 add column k2 int default '1'";
AlterTableStmt alterTableStmt = (AlterTableStmt) UtFrameUtils.parseAndAnalyzeStmt(alterStmtStr, ctx);
Catalog.getCurrentCatalog().getAlterInstance().processAlterTable(alterTableStmt);
// 6. check alter job
Map<Long, AlterJobV2> alterJobs = Catalog.getCurrentCatalog().getSchemaChangeHandler().getAlterJobsV2();
Assert.assertEquals(1, alterJobs.size());
for (AlterJobV2 alterJobV2 : alterJobs.values()) {
while (!alterJobV2.getJobState().isFinalState()) {
System.out.println("alter job " + alterJobV2.getJobId() + " is running. state: " + alterJobV2.getJobState());
Thread.sleep(1000);
}
System.out.println("alter job " + alterJobV2.getJobId() + " is done. state: " + alterJobV2.getJobState());
Assert.assertEquals(AlterJobV2.JobState.FINISHED, alterJobV2.getJobState());
}
OlapTable tbl1 = (OlapTable) db.getTableNullable("tbl1");
tbl1.readLock();
try {
Assert.assertEquals(2, tbl1.getBaseSchema().size());
String baseIndexName = tbl1.getIndexNameById(tbl.getBaseIndexId());
Assert.assertEquals(baseIndexName, tbl1.getName());
MaterializedIndexMeta indexMeta = tbl1.getIndexMetaByIndexId(tbl1.getBaseIndexId());
Assert.assertNotNull(indexMeta);
} finally {
tbl1.readUnlock();
}
// 7. query
// TODO: we cannot process a real query for now, so it has to be an explain query
String queryStr = "explain select * from db1.tbl1";
String a = UtFrameUtils.getSQLPlanOrErrorMsg(ctx, queryStr);
System.out.println(a);
StmtExecutor stmtExecutor = new StmtExecutor(ctx, queryStr);
stmtExecutor.execute();
Planner planner = stmtExecutor.planner();
List<PlanFragment> fragments = planner.getFragments();
Assert.assertEquals(2, fragments.size());
PlanFragment fragment = fragments.get(1);
Assert.assertTrue(fragment.getPlanRoot() instanceof OlapScanNode);
Assert.assertEquals(0, fragment.getChildren().size());
// test show backends;
BackendsProcDir dir = new BackendsProcDir(Catalog.getCurrentSystemInfo());
ProcResult result = dir.fetchResult();
Assert.assertEquals(BackendsProcDir.TITLE_NAMES.size(), result.getColumnNames().size());
Assert.assertEquals("{\"location\" : \"default\"}", result.getRows().get(0).get(19));
Assert.assertEquals("{\"lastSuccessReportTabletsTime\":\"N/A\",\"lastStreamLoadTime\":-1}",
result.getRows().get(0).get(BackendsProcDir.TITLE_NAMES.size() - 1));
} | NONSATD | true | @Test
public void testCreateDbAndTable() throws Exception {
// 1. create connect context
ConnectContext ctx = UtFrameUtils.createDefaultCtx();
// 2. create database db1 | @Test
public void testCreateDbAndTable() throws Exception {
// 1. create connect context
ConnectContext ctx = UtFrameUtils.createDefaultCtx();
// 2. create database db1
String createDbStmtStr = "create database db1;";
CreateDbStmt createDbStmt = (CreateDbStmt) UtFrameUtils.parseAndAnalyzeStmt(createDbStmtStr, ctx);
Catalog.getCurrentCatalog().createDb(createDbStmt);
System.out.println(Catalog.getCurrentCatalog().getDbNames());
// 3. create table tbl1
String createTblStmtStr = "create table db1.tbl1(k1 int) distributed by hash(k1) buckets 3 properties('replication_num' = '3'," +
"'colocate_with' = 'g1');";
CreateTableStmt createTableStmt = (CreateTableStmt) UtFrameUtils.parseAndAnalyzeStmt(createTblStmtStr, ctx); | @Test
public void testCreateDbAndTable() throws Exception {
// 1. create connect context
ConnectContext ctx = UtFrameUtils.createDefaultCtx();
// 2. create database db1
String createDbStmtStr = "create database db1;";
CreateDbStmt createDbStmt = (CreateDbStmt) UtFrameUtils.parseAndAnalyzeStmt(createDbStmtStr, ctx);
Catalog.getCurrentCatalog().createDb(createDbStmt);
System.out.println(Catalog.getCurrentCatalog().getDbNames());
// 3. create table tbl1
String createTblStmtStr = "create table db1.tbl1(k1 int) distributed by hash(k1) buckets 3 properties('replication_num' = '3'," +
"'colocate_with' = 'g1');";
CreateTableStmt createTableStmt = (CreateTableStmt) UtFrameUtils.parseAndAnalyzeStmt(createTblStmtStr, ctx);
Catalog.getCurrentCatalog().createTable(createTableStmt);
// must set replicas' path hash, or the tablet scheduler won't work
updateReplicaPathHash();
// 4. get and test the created db and table
Database db = Catalog.getCurrentCatalog().getDbNullable("default_cluster:db1");
Assert.assertNotNull(db);
OlapTable tbl = (OlapTable) db.getTableNullable("tbl1");
tbl.readLock();
try {
Assert.assertNotNull(tbl); |
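One fragility in the test above: the step-6 loop waits for the alter job with no upper bound, so a stuck job hangs the suite. A bounded variant of the same wait, expressed against java.util.function so it stays independent of the Doris classes (the original checks AlterJobV2.getJobState().isFinalState() directly):

import java.util.function.BooleanSupplier;

final class BoundedWait {
    /** Polls until done reports true; returns false if timeoutMs elapses first. */
    static boolean await(BooleanSupplier done, long timeoutMs, long pollMs)
            throws InterruptedException {
        long deadline = System.currentTimeMillis() + timeoutMs;
        while (!done.getAsBoolean()) {
            if (System.currentTimeMillis() >= deadline) {
                return false; // let the caller fail the test instead of hanging
            }
            Thread.sleep(pollMs);
        }
        return true;
    }
}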
25,762 | 1 | // 2. create database db1 | @Test
public void testCreateDbAndTable() throws Exception {
// 1. create connect context
ConnectContext ctx = UtFrameUtils.createDefaultCtx();
// 2. create database db1
String createDbStmtStr = "create database db1;";
CreateDbStmt createDbStmt = (CreateDbStmt) UtFrameUtils.parseAndAnalyzeStmt(createDbStmtStr, ctx);
Catalog.getCurrentCatalog().createDb(createDbStmt);
System.out.println(Catalog.getCurrentCatalog().getDbNames());
// 3. create table tbl1
String createTblStmtStr = "create table db1.tbl1(k1 int) distributed by hash(k1) buckets 3 properties('replication_num' = '3'," +
"'colocate_with' = 'g1');";
CreateTableStmt createTableStmt = (CreateTableStmt) UtFrameUtils.parseAndAnalyzeStmt(createTblStmtStr, ctx);
Catalog.getCurrentCatalog().createTable(createTableStmt);
// must set replicas' path hash, or the tablet scheduler won't work
updateReplicaPathHash();
// 4. get and test the created db and table
Database db = Catalog.getCurrentCatalog().getDbNullable("default_cluster:db1");
Assert.assertNotNull(db);
OlapTable tbl = (OlapTable) db.getTableNullable("tbl1");
tbl.readLock();
try {
Assert.assertNotNull(tbl);
System.out.println(tbl.getName());
Assert.assertEquals("Doris", tbl.getEngine());
Assert.assertEquals(1, tbl.getBaseSchema().size());
} finally {
tbl.readUnlock();
}
// 5. process a schema change job
String alterStmtStr = "alter table db1.tbl1 add column k2 int default '1'";
AlterTableStmt alterTableStmt = (AlterTableStmt) UtFrameUtils.parseAndAnalyzeStmt(alterStmtStr, ctx);
Catalog.getCurrentCatalog().getAlterInstance().processAlterTable(alterTableStmt);
// 6. check alter job
Map<Long, AlterJobV2> alterJobs = Catalog.getCurrentCatalog().getSchemaChangeHandler().getAlterJobsV2();
Assert.assertEquals(1, alterJobs.size());
for (AlterJobV2 alterJobV2 : alterJobs.values()) {
while (!alterJobV2.getJobState().isFinalState()) {
System.out.println("alter job " + alterJobV2.getJobId() + " is running. state: " + alterJobV2.getJobState());
Thread.sleep(1000);
}
System.out.println("alter job " + alterJobV2.getJobId() + " is done. state: " + alterJobV2.getJobState());
Assert.assertEquals(AlterJobV2.JobState.FINISHED, alterJobV2.getJobState());
}
OlapTable tbl1 = (OlapTable) db.getTableNullable("tbl1");
tbl1.readLock();
try {
Assert.assertEquals(2, tbl1.getBaseSchema().size());
String baseIndexName = tbl1.getIndexNameById(tbl.getBaseIndexId());
Assert.assertEquals(baseIndexName, tbl1.getName());
MaterializedIndexMeta indexMeta = tbl1.getIndexMetaByIndexId(tbl1.getBaseIndexId());
Assert.assertNotNull(indexMeta);
} finally {
tbl1.readUnlock();
}
// 7. query
// TODO: we cannot process a real query for now, so it has to be an explain query
String queryStr = "explain select * from db1.tbl1";
String a = UtFrameUtils.getSQLPlanOrErrorMsg(ctx, queryStr);
System.out.println(a);
StmtExecutor stmtExecutor = new StmtExecutor(ctx, queryStr);
stmtExecutor.execute();
Planner planner = stmtExecutor.planner();
List<PlanFragment> fragments = planner.getFragments();
Assert.assertEquals(2, fragments.size());
PlanFragment fragment = fragments.get(1);
Assert.assertTrue(fragment.getPlanRoot() instanceof OlapScanNode);
Assert.assertEquals(0, fragment.getChildren().size());
// test show backends;
BackendsProcDir dir = new BackendsProcDir(Catalog.getCurrentSystemInfo());
ProcResult result = dir.fetchResult();
Assert.assertEquals(BackendsProcDir.TITLE_NAMES.size(), result.getColumnNames().size());
Assert.assertEquals("{\"location\" : \"default\"}", result.getRows().get(0).get(19));
Assert.assertEquals("{\"lastSuccessReportTabletsTime\":\"N/A\",\"lastStreamLoadTime\":-1}",
result.getRows().get(0).get(BackendsProcDir.TITLE_NAMES.size() - 1));
} | NONSATD | true | // 1. create connect context
ConnectContext ctx = UtFrameUtils.createDefaultCtx();
// 2. create database db1
String createDbStmtStr = "create database db1;";
CreateDbStmt createDbStmt = (CreateDbStmt) UtFrameUtils.parseAndAnalyzeStmt(createDbStmtStr, ctx); | @Test
public void testCreateDbAndTable() throws Exception {
// 1. create connect context
ConnectContext ctx = UtFrameUtils.createDefaultCtx();
// 2. create database db1
String createDbStmtStr = "create database db1;";
CreateDbStmt createDbStmt = (CreateDbStmt) UtFrameUtils.parseAndAnalyzeStmt(createDbStmtStr, ctx);
Catalog.getCurrentCatalog().createDb(createDbStmt);
System.out.println(Catalog.getCurrentCatalog().getDbNames());
// 3. create table tbl1
String createTblStmtStr = "create table db1.tbl1(k1 int) distributed by hash(k1) buckets 3 properties('replication_num' = '3'," +
"'colocate_with' = 'g1');";
CreateTableStmt createTableStmt = (CreateTableStmt) UtFrameUtils.parseAndAnalyzeStmt(createTblStmtStr, ctx);
Catalog.getCurrentCatalog().createTable(createTableStmt);
// must set replicas' path hash, or the tablet scheduler won't work | @Test
public void testCreateDbAndTable() throws Exception {
// 1. create connect context
ConnectContext ctx = UtFrameUtils.createDefaultCtx();
// 2. create database db1
String createDbStmtStr = "create database db1;";
CreateDbStmt createDbStmt = (CreateDbStmt) UtFrameUtils.parseAndAnalyzeStmt(createDbStmtStr, ctx);
Catalog.getCurrentCatalog().createDb(createDbStmt);
System.out.println(Catalog.getCurrentCatalog().getDbNames());
// 3. create table tbl1
String createTblStmtStr = "create table db1.tbl1(k1 int) distributed by hash(k1) buckets 3 properties('replication_num' = '3'," +
"'colocate_with' = 'g1');";
CreateTableStmt createTableStmt = (CreateTableStmt) UtFrameUtils.parseAndAnalyzeStmt(createTblStmtStr, ctx);
Catalog.getCurrentCatalog().createTable(createTableStmt);
// must set replicas' path hash, or the tablet scheduler won't work
updateReplicaPathHash();
// 4. get and test the created db and table
Database db = Catalog.getCurrentCatalog().getDbNullable("default_cluster:db1");
Assert.assertNotNull(db);
OlapTable tbl = (OlapTable) db.getTableNullable("tbl1");
tbl.readLock();
try {
Assert.assertNotNull(tbl);
System.out.println(tbl.getName());
Assert.assertEquals("Doris", tbl.getEngine()); |
25,762 | 2 | // 3. create table tbl1 | @Test
public void testCreateDbAndTable() throws Exception {
// 1. create connect context
ConnectContext ctx = UtFrameUtils.createDefaultCtx();
// 2. create database db1
String createDbStmtStr = "create database db1;";
CreateDbStmt createDbStmt = (CreateDbStmt) UtFrameUtils.parseAndAnalyzeStmt(createDbStmtStr, ctx);
Catalog.getCurrentCatalog().createDb(createDbStmt);
System.out.println(Catalog.getCurrentCatalog().getDbNames());
// 3. create table tbl1
String createTblStmtStr = "create table db1.tbl1(k1 int) distributed by hash(k1) buckets 3 properties('replication_num' = '3'," +
"'colocate_with' = 'g1');";
CreateTableStmt createTableStmt = (CreateTableStmt) UtFrameUtils.parseAndAnalyzeStmt(createTblStmtStr, ctx);
Catalog.getCurrentCatalog().createTable(createTableStmt);
// must set replicas' path hash, or the tablet scheduler won't work
updateReplicaPathHash();
// 4. get and test the created db and table
Database db = Catalog.getCurrentCatalog().getDbNullable("default_cluster:db1");
Assert.assertNotNull(db);
OlapTable tbl = (OlapTable) db.getTableNullable("tbl1");
tbl.readLock();
try {
Assert.assertNotNull(tbl);
System.out.println(tbl.getName());
Assert.assertEquals("Doris", tbl.getEngine());
Assert.assertEquals(1, tbl.getBaseSchema().size());
} finally {
tbl.readUnlock();
}
// 5. process a schema change job
String alterStmtStr = "alter table db1.tbl1 add column k2 int default '1'";
AlterTableStmt alterTableStmt = (AlterTableStmt) UtFrameUtils.parseAndAnalyzeStmt(alterStmtStr, ctx);
Catalog.getCurrentCatalog().getAlterInstance().processAlterTable(alterTableStmt);
// 6. check alter job
Map<Long, AlterJobV2> alterJobs = Catalog.getCurrentCatalog().getSchemaChangeHandler().getAlterJobsV2();
Assert.assertEquals(1, alterJobs.size());
for (AlterJobV2 alterJobV2 : alterJobs.values()) {
while (!alterJobV2.getJobState().isFinalState()) {
System.out.println("alter job " + alterJobV2.getJobId() + " is running. state: " + alterJobV2.getJobState());
Thread.sleep(1000);
}
System.out.println("alter job " + alterJobV2.getJobId() + " is done. state: " + alterJobV2.getJobState());
Assert.assertEquals(AlterJobV2.JobState.FINISHED, alterJobV2.getJobState());
}
OlapTable tbl1 = (OlapTable) db.getTableNullable("tbl1");
tbl1.readLock();
try {
Assert.assertEquals(2, tbl1.getBaseSchema().size());
String baseIndexName = tbl1.getIndexNameById(tbl.getBaseIndexId());
Assert.assertEquals(baseIndexName, tbl1.getName());
MaterializedIndexMeta indexMeta = tbl1.getIndexMetaByIndexId(tbl1.getBaseIndexId());
Assert.assertNotNull(indexMeta);
} finally {
tbl1.readUnlock();
}
// 7. query
// TODO: we cannot process a real query for now, so it has to be an explain query
String queryStr = "explain select * from db1.tbl1";
String a = UtFrameUtils.getSQLPlanOrErrorMsg(ctx, queryStr);
System.out.println(a);
StmtExecutor stmtExecutor = new StmtExecutor(ctx, queryStr);
stmtExecutor.execute();
Planner planner = stmtExecutor.planner();
List<PlanFragment> fragments = planner.getFragments();
Assert.assertEquals(2, fragments.size());
PlanFragment fragment = fragments.get(1);
Assert.assertTrue(fragment.getPlanRoot() instanceof OlapScanNode);
Assert.assertEquals(0, fragment.getChildren().size());
// test show backends;
BackendsProcDir dir = new BackendsProcDir(Catalog.getCurrentSystemInfo());
ProcResult result = dir.fetchResult();
Assert.assertEquals(BackendsProcDir.TITLE_NAMES.size(), result.getColumnNames().size());
Assert.assertEquals("{\"location\" : \"default\"}", result.getRows().get(0).get(19));
Assert.assertEquals("{\"lastSuccessReportTabletsTime\":\"N/A\",\"lastStreamLoadTime\":-1}",
result.getRows().get(0).get(BackendsProcDir.TITLE_NAMES.size() - 1));
} | NONSATD | true | Catalog.getCurrentCatalog().createDb(createDbStmt);
System.out.println(Catalog.getCurrentCatalog().getDbNames());
// 3. create table tbl1
String createTblStmtStr = "create table db1.tbl1(k1 int) distributed by hash(k1) buckets 3 properties('replication_num' = '3'," +
"'colocate_with' = 'g1');"; | @Test
public void testCreateDbAndTable() throws Exception {
// 1. create connect context
ConnectContext ctx = UtFrameUtils.createDefaultCtx();
// 2. create database db1
String createDbStmtStr = "create database db1;";
CreateDbStmt createDbStmt = (CreateDbStmt) UtFrameUtils.parseAndAnalyzeStmt(createDbStmtStr, ctx);
Catalog.getCurrentCatalog().createDb(createDbStmt);
System.out.println(Catalog.getCurrentCatalog().getDbNames());
// 3. create table tbl1
String createTblStmtStr = "create table db1.tbl1(k1 int) distributed by hash(k1) buckets 3 properties('replication_num' = '3'," +
"'colocate_with' = 'g1');";
CreateTableStmt createTableStmt = (CreateTableStmt) UtFrameUtils.parseAndAnalyzeStmt(createTblStmtStr, ctx);
Catalog.getCurrentCatalog().createTable(createTableStmt);
// must set replicas' path hash, or the tablet scheduler won't work
updateReplicaPathHash();
// 4. get and test the created db and table
Database db = Catalog.getCurrentCatalog().getDbNullable("default_cluster:db1");
Assert.assertNotNull(db);
OlapTable tbl = (OlapTable) db.getTableNullable("tbl1"); | @Test
public void testCreateDbAndTable() throws Exception {
// 1. create connect context
ConnectContext ctx = UtFrameUtils.createDefaultCtx();
// 2. create database db1
String createDbStmtStr = "create database db1;";
CreateDbStmt createDbStmt = (CreateDbStmt) UtFrameUtils.parseAndAnalyzeStmt(createDbStmtStr, ctx);
Catalog.getCurrentCatalog().createDb(createDbStmt);
System.out.println(Catalog.getCurrentCatalog().getDbNames());
// 3. create table tbl1
String createTblStmtStr = "create table db1.tbl1(k1 int) distributed by hash(k1) buckets 3 properties('replication_num' = '3'," +
"'colocate_with' = 'g1');";
CreateTableStmt createTableStmt = (CreateTableStmt) UtFrameUtils.parseAndAnalyzeStmt(createTblStmtStr, ctx);
Catalog.getCurrentCatalog().createTable(createTableStmt);
// must set replicas' path hash, or the tablet scheduler won't work
updateReplicaPathHash();
// 4. get and test the created db and table
Database db = Catalog.getCurrentCatalog().getDbNullable("default_cluster:db1");
Assert.assertNotNull(db);
OlapTable tbl = (OlapTable) db.getTableNullable("tbl1");
tbl.readLock();
try {
Assert.assertNotNull(tbl);
System.out.println(tbl.getName());
Assert.assertEquals("Doris", tbl.getEngine());
Assert.assertEquals(1, tbl.getBaseSchema().size());
} finally {
tbl.readUnlock();
}
// 5. process a schema change job |
25,762 | 3 | // must set replicas' path hash, or the tablet scheduler won't work | @Test
public void testCreateDbAndTable() throws Exception {
// 1. create connect context
ConnectContext ctx = UtFrameUtils.createDefaultCtx();
// 2. create database db1
String createDbStmtStr = "create database db1;";
CreateDbStmt createDbStmt = (CreateDbStmt) UtFrameUtils.parseAndAnalyzeStmt(createDbStmtStr, ctx);
Catalog.getCurrentCatalog().createDb(createDbStmt);
System.out.println(Catalog.getCurrentCatalog().getDbNames());
// 3. create table tbl1
String createTblStmtStr = "create table db1.tbl1(k1 int) distributed by hash(k1) buckets 3 properties('replication_num' = '3'," +
"'colocate_with' = 'g1');";
CreateTableStmt createTableStmt = (CreateTableStmt) UtFrameUtils.parseAndAnalyzeStmt(createTblStmtStr, ctx);
Catalog.getCurrentCatalog().createTable(createTableStmt);
// must set replicas' path hash, or the tablet scheduler won't work
updateReplicaPathHash();
// 4. get and test the created db and table
Database db = Catalog.getCurrentCatalog().getDbNullable("default_cluster:db1");
Assert.assertNotNull(db);
OlapTable tbl = (OlapTable) db.getTableNullable("tbl1");
tbl.readLock();
try {
Assert.assertNotNull(tbl);
System.out.println(tbl.getName());
Assert.assertEquals("Doris", tbl.getEngine());
Assert.assertEquals(1, tbl.getBaseSchema().size());
} finally {
tbl.readUnlock();
}
// 5. process a schema change job
String alterStmtStr = "alter table db1.tbl1 add column k2 int default '1'";
AlterTableStmt alterTableStmt = (AlterTableStmt) UtFrameUtils.parseAndAnalyzeStmt(alterStmtStr, ctx);
Catalog.getCurrentCatalog().getAlterInstance().processAlterTable(alterTableStmt);
// 6. check alter job
Map<Long, AlterJobV2> alterJobs = Catalog.getCurrentCatalog().getSchemaChangeHandler().getAlterJobsV2();
Assert.assertEquals(1, alterJobs.size());
for (AlterJobV2 alterJobV2 : alterJobs.values()) {
while (!alterJobV2.getJobState().isFinalState()) {
System.out.println("alter job " + alterJobV2.getJobId() + " is running. state: " + alterJobV2.getJobState());
Thread.sleep(1000);
}
System.out.println("alter job " + alterJobV2.getJobId() + " is done. state: " + alterJobV2.getJobState());
Assert.assertEquals(AlterJobV2.JobState.FINISHED, alterJobV2.getJobState());
}
OlapTable tbl1 = (OlapTable) db.getTableNullable("tbl1");
tbl1.readLock();
try {
Assert.assertEquals(2, tbl1.getBaseSchema().size());
String baseIndexName = tbl1.getIndexNameById(tbl.getBaseIndexId());
Assert.assertEquals(baseIndexName, tbl1.getName());
MaterializedIndexMeta indexMeta = tbl1.getIndexMetaByIndexId(tbl1.getBaseIndexId());
Assert.assertNotNull(indexMeta);
} finally {
tbl1.readUnlock();
}
// 7. query
// TODO: we cannot process a real query for now, so it has to be an explain query
String queryStr = "explain select * from db1.tbl1";
String a = UtFrameUtils.getSQLPlanOrErrorMsg(ctx, queryStr);
System.out.println(a);
StmtExecutor stmtExecutor = new StmtExecutor(ctx, queryStr);
stmtExecutor.execute();
Planner planner = stmtExecutor.planner();
List<PlanFragment> fragments = planner.getFragments();
Assert.assertEquals(2, fragments.size());
PlanFragment fragment = fragments.get(1);
Assert.assertTrue(fragment.getPlanRoot() instanceof OlapScanNode);
Assert.assertEquals(0, fragment.getChildren().size());
// test show backends;
BackendsProcDir dir = new BackendsProcDir(Catalog.getCurrentSystemInfo());
ProcResult result = dir.fetchResult();
Assert.assertEquals(BackendsProcDir.TITLE_NAMES.size(), result.getColumnNames().size());
Assert.assertEquals("{\"location\" : \"default\"}", result.getRows().get(0).get(19));
Assert.assertEquals("{\"lastSuccessReportTabletsTime\":\"N/A\",\"lastStreamLoadTime\":-1}",
result.getRows().get(0).get(BackendsProcDir.TITLE_NAMES.size() - 1));
} | NONSATD | true | CreateTableStmt createTableStmt = (CreateTableStmt) UtFrameUtils.parseAndAnalyzeStmt(createTblStmtStr, ctx);
Catalog.getCurrentCatalog().createTable(createTableStmt);
// must set replicas' path hash, or the tablet scheduler won't work
updateReplicaPathHash();
// 4. get and test the created db and table | // 2. create database db1
String createDbStmtStr = "create database db1;";
CreateDbStmt createDbStmt = (CreateDbStmt) UtFrameUtils.parseAndAnalyzeStmt(createDbStmtStr, ctx);
Catalog.getCurrentCatalog().createDb(createDbStmt);
System.out.println(Catalog.getCurrentCatalog().getDbNames());
// 3. create table tbl1
String createTblStmtStr = "create table db1.tbl1(k1 int) distributed by hash(k1) buckets 3 properties('replication_num' = '3'," +
"'colocate_with' = 'g1');";
CreateTableStmt createTableStmt = (CreateTableStmt) UtFrameUtils.parseAndAnalyzeStmt(createTblStmtStr, ctx);
Catalog.getCurrentCatalog().createTable(createTableStmt);
// must set replicas' path hash, or the tablet scheduler won't work
updateReplicaPathHash();
// 4. get and test the created db and table
Database db = Catalog.getCurrentCatalog().getDbNullable("default_cluster:db1");
Assert.assertNotNull(db);
OlapTable tbl = (OlapTable) db.getTableNullable("tbl1");
tbl.readLock();
try {
Assert.assertNotNull(tbl);
System.out.println(tbl.getName());
Assert.assertEquals("Doris", tbl.getEngine()); | @Test
public void testCreateDbAndTable() throws Exception {
// 1. create connect context
ConnectContext ctx = UtFrameUtils.createDefaultCtx();
// 2. create database db1
String createDbStmtStr = "create database db1;";
CreateDbStmt createDbStmt = (CreateDbStmt) UtFrameUtils.parseAndAnalyzeStmt(createDbStmtStr, ctx);
Catalog.getCurrentCatalog().createDb(createDbStmt);
System.out.println(Catalog.getCurrentCatalog().getDbNames());
// 3. create table tbl1
String createTblStmtStr = "create table db1.tbl1(k1 int) distributed by hash(k1) buckets 3 properties('replication_num' = '3'," +
"'colocate_with' = 'g1');";
CreateTableStmt createTableStmt = (CreateTableStmt) UtFrameUtils.parseAndAnalyzeStmt(createTblStmtStr, ctx);
Catalog.getCurrentCatalog().createTable(createTableStmt);
// must set replicas' path hash, or the tablet scheduler won't work
updateReplicaPathHash();
// 4. get and test the created db and table
Database db = Catalog.getCurrentCatalog().getDbNullable("default_cluster:db1");
Assert.assertNotNull(db);
OlapTable tbl = (OlapTable) db.getTableNullable("tbl1");
tbl.readLock();
try {
Assert.assertNotNull(tbl);
System.out.println(tbl.getName());
Assert.assertEquals("Doris", tbl.getEngine());
Assert.assertEquals(1, tbl.getBaseSchema().size());
} finally {
tbl.readUnlock();
}
// 5. process a schema change job
String alterStmtStr = "alter table db1.tbl1 add column k2 int default '1'";
AlterTableStmt alterTableStmt = (AlterTableStmt) UtFrameUtils.parseAndAnalyzeStmt(alterStmtStr, ctx);
Catalog.getCurrentCatalog().getAlterInstance().processAlterTable(alterTableStmt);
// 6. check alter job
Map<Long, AlterJobV2> alterJobs = Catalog.getCurrentCatalog().getSchemaChangeHandler().getAlterJobsV2(); |
25,762 | 4 | // 4. get and test the created db and table | @Test
public void testCreateDbAndTable() throws Exception {
// 1. create connect context
ConnectContext ctx = UtFrameUtils.createDefaultCtx();
// 2. create database db1
String createDbStmtStr = "create database db1;";
CreateDbStmt createDbStmt = (CreateDbStmt) UtFrameUtils.parseAndAnalyzeStmt(createDbStmtStr, ctx);
Catalog.getCurrentCatalog().createDb(createDbStmt);
System.out.println(Catalog.getCurrentCatalog().getDbNames());
// 3. create table tbl1
String createTblStmtStr = "create table db1.tbl1(k1 int) distributed by hash(k1) buckets 3 properties('replication_num' = '3'," +
"'colocate_with' = 'g1');";
CreateTableStmt createTableStmt = (CreateTableStmt) UtFrameUtils.parseAndAnalyzeStmt(createTblStmtStr, ctx);
Catalog.getCurrentCatalog().createTable(createTableStmt);
// must set replicas' path hash, or the tablet scheduler won't work
updateReplicaPathHash();
// 4. get and test the created db and table
Database db = Catalog.getCurrentCatalog().getDbNullable("default_cluster:db1");
Assert.assertNotNull(db);
OlapTable tbl = (OlapTable) db.getTableNullable("tbl1");
tbl.readLock();
try {
Assert.assertNotNull(tbl);
System.out.println(tbl.getName());
Assert.assertEquals("Doris", tbl.getEngine());
Assert.assertEquals(1, tbl.getBaseSchema().size());
} finally {
tbl.readUnlock();
}
// 5. process a schema change job
String alterStmtStr = "alter table db1.tbl1 add column k2 int default '1'";
AlterTableStmt alterTableStmt = (AlterTableStmt) UtFrameUtils.parseAndAnalyzeStmt(alterStmtStr, ctx);
Catalog.getCurrentCatalog().getAlterInstance().processAlterTable(alterTableStmt);
// 6. check alter job
Map<Long, AlterJobV2> alterJobs = Catalog.getCurrentCatalog().getSchemaChangeHandler().getAlterJobsV2();
Assert.assertEquals(1, alterJobs.size());
for (AlterJobV2 alterJobV2 : alterJobs.values()) {
while (!alterJobV2.getJobState().isFinalState()) {
System.out.println("alter job " + alterJobV2.getJobId() + " is running. state: " + alterJobV2.getJobState());
Thread.sleep(1000);
}
System.out.println("alter job " + alterJobV2.getJobId() + " is done. state: " + alterJobV2.getJobState());
Assert.assertEquals(AlterJobV2.JobState.FINISHED, alterJobV2.getJobState());
}
OlapTable tbl1 = (OlapTable) db.getTableNullable("tbl1");
tbl1.readLock();
try {
Assert.assertEquals(2, tbl1.getBaseSchema().size());
String baseIndexName = tbl1.getIndexNameById(tbl.getBaseIndexId());
Assert.assertEquals(baseIndexName, tbl1.getName());
MaterializedIndexMeta indexMeta = tbl1.getIndexMetaByIndexId(tbl1.getBaseIndexId());
Assert.assertNotNull(indexMeta);
} finally {
tbl1.readUnlock();
}
// 7. query
// TODO: we cannot process a real query for now, so it has to be an explain query
String queryStr = "explain select * from db1.tbl1";
String a = UtFrameUtils.getSQLPlanOrErrorMsg(ctx, queryStr);
System.out.println(a);
StmtExecutor stmtExecutor = new StmtExecutor(ctx, queryStr);
stmtExecutor.execute();
Planner planner = stmtExecutor.planner();
List<PlanFragment> fragments = planner.getFragments();
Assert.assertEquals(2, fragments.size());
PlanFragment fragment = fragments.get(1);
Assert.assertTrue(fragment.getPlanRoot() instanceof OlapScanNode);
Assert.assertEquals(0, fragment.getChildren().size());
// test show backends;
BackendsProcDir dir = new BackendsProcDir(Catalog.getCurrentSystemInfo());
ProcResult result = dir.fetchResult();
Assert.assertEquals(BackendsProcDir.TITLE_NAMES.size(), result.getColumnNames().size());
Assert.assertEquals("{\"location\" : \"default\"}", result.getRows().get(0).get(19));
Assert.assertEquals("{\"lastSuccessReportTabletsTime\":\"N/A\",\"lastStreamLoadTime\":-1}",
result.getRows().get(0).get(BackendsProcDir.TITLE_NAMES.size() - 1));
} | NONSATD | true | // must set replicas' path hash, or the tablet scheduler won't work
updateReplicaPathHash();
// 4. get and test the created db and table
Database db = Catalog.getCurrentCatalog().getDbNullable("default_cluster:db1");
Assert.assertNotNull(db); | CreateDbStmt createDbStmt = (CreateDbStmt) UtFrameUtils.parseAndAnalyzeStmt(createDbStmtStr, ctx);
Catalog.getCurrentCatalog().createDb(createDbStmt);
System.out.println(Catalog.getCurrentCatalog().getDbNames());
// 3. create table tbl1
String createTblStmtStr = "create table db1.tbl1(k1 int) distributed by hash(k1) buckets 3 properties('replication_num' = '3'," +
"'colocate_with' = 'g1');";
CreateTableStmt createTableStmt = (CreateTableStmt) UtFrameUtils.parseAndAnalyzeStmt(createTblStmtStr, ctx);
Catalog.getCurrentCatalog().createTable(createTableStmt);
// must set replicas' path hash, or the tablet scheduler won't work
updateReplicaPathHash();
// 4. get and test the created db and table
Database db = Catalog.getCurrentCatalog().getDbNullable("default_cluster:db1");
Assert.assertNotNull(db);
OlapTable tbl = (OlapTable) db.getTableNullable("tbl1");
tbl.readLock();
try {
Assert.assertNotNull(tbl);
System.out.println(tbl.getName());
Assert.assertEquals("Doris", tbl.getEngine());
Assert.assertEquals(1, tbl.getBaseSchema().size());
} finally { | @Test
public void testCreateDbAndTable() throws Exception {
// 1. create connect context
ConnectContext ctx = UtFrameUtils.createDefaultCtx();
// 2. create database db1
String createDbStmtStr = "create database db1;";
CreateDbStmt createDbStmt = (CreateDbStmt) UtFrameUtils.parseAndAnalyzeStmt(createDbStmtStr, ctx);
Catalog.getCurrentCatalog().createDb(createDbStmt);
System.out.println(Catalog.getCurrentCatalog().getDbNames());
// 3. create table tbl1
String createTblStmtStr = "create table db1.tbl1(k1 int) distributed by hash(k1) buckets 3 properties('replication_num' = '3'," +
"'colocate_with' = 'g1');";
CreateTableStmt createTableStmt = (CreateTableStmt) UtFrameUtils.parseAndAnalyzeStmt(createTblStmtStr, ctx);
Catalog.getCurrentCatalog().createTable(createTableStmt);
// must set replicas' path hash, or the tablet scheduler won't work
updateReplicaPathHash();
// 4. get and test the created db and table
Database db = Catalog.getCurrentCatalog().getDbNullable("default_cluster:db1");
Assert.assertNotNull(db);
OlapTable tbl = (OlapTable) db.getTableNullable("tbl1");
tbl.readLock();
try {
Assert.assertNotNull(tbl);
System.out.println(tbl.getName());
Assert.assertEquals("Doris", tbl.getEngine());
Assert.assertEquals(1, tbl.getBaseSchema().size());
} finally {
tbl.readUnlock();
}
// 5. process a schema change job
String alterStmtStr = "alter table db1.tbl1 add column k2 int default '1'";
AlterTableStmt alterTableStmt = (AlterTableStmt) UtFrameUtils.parseAndAnalyzeStmt(alterStmtStr, ctx);
Catalog.getCurrentCatalog().getAlterInstance().processAlterTable(alterTableStmt);
// 6. check alter job
Map<Long, AlterJobV2> alterJobs = Catalog.getCurrentCatalog().getSchemaChangeHandler().getAlterJobsV2();
Assert.assertEquals(1, alterJobs.size());
for (AlterJobV2 alterJobV2 : alterJobs.values()) { |
25,762 | 5 | // 5. process a schema change job | @Test
public void testCreateDbAndTable() throws Exception {
// 1. create connect context
ConnectContext ctx = UtFrameUtils.createDefaultCtx();
// 2. create database db1
String createDbStmtStr = "create database db1;";
CreateDbStmt createDbStmt = (CreateDbStmt) UtFrameUtils.parseAndAnalyzeStmt(createDbStmtStr, ctx);
Catalog.getCurrentCatalog().createDb(createDbStmt);
System.out.println(Catalog.getCurrentCatalog().getDbNames());
// 3. create table tbl1
String createTblStmtStr = "create table db1.tbl1(k1 int) distributed by hash(k1) buckets 3 properties('replication_num' = '3'," +
"'colocate_with' = 'g1');";
CreateTableStmt createTableStmt = (CreateTableStmt) UtFrameUtils.parseAndAnalyzeStmt(createTblStmtStr, ctx);
Catalog.getCurrentCatalog().createTable(createTableStmt);
// must set replicas' path hash, or the tablet scheduler won't work
updateReplicaPathHash();
// 4. get and test the created db and table
Database db = Catalog.getCurrentCatalog().getDbNullable("default_cluster:db1");
Assert.assertNotNull(db);
OlapTable tbl = (OlapTable) db.getTableNullable("tbl1");
tbl.readLock();
try {
Assert.assertNotNull(tbl);
System.out.println(tbl.getName());
Assert.assertEquals("Doris", tbl.getEngine());
Assert.assertEquals(1, tbl.getBaseSchema().size());
} finally {
tbl.readUnlock();
}
// 5. process a schema change job
String alterStmtStr = "alter table db1.tbl1 add column k2 int default '1'";
AlterTableStmt alterTableStmt = (AlterTableStmt) UtFrameUtils.parseAndAnalyzeStmt(alterStmtStr, ctx);
Catalog.getCurrentCatalog().getAlterInstance().processAlterTable(alterTableStmt);
// 6. check alter job
Map<Long, AlterJobV2> alterJobs = Catalog.getCurrentCatalog().getSchemaChangeHandler().getAlterJobsV2();
Assert.assertEquals(1, alterJobs.size());
for (AlterJobV2 alterJobV2 : alterJobs.values()) {
while (!alterJobV2.getJobState().isFinalState()) {
System.out.println("alter job " + alterJobV2.getJobId() + " is running. state: " + alterJobV2.getJobState());
Thread.sleep(1000);
}
System.out.println("alter job " + alterJobV2.getJobId() + " is done. state: " + alterJobV2.getJobState());
Assert.assertEquals(AlterJobV2.JobState.FINISHED, alterJobV2.getJobState());
}
OlapTable tbl1 = (OlapTable) db.getTableNullable("tbl1");
tbl1.readLock();
try {
Assert.assertEquals(2, tbl1.getBaseSchema().size());
String baseIndexName = tbl1.getIndexNameById(tbl.getBaseIndexId());
Assert.assertEquals(baseIndexName, tbl1.getName());
MaterializedIndexMeta indexMeta = tbl1.getIndexMetaByIndexId(tbl1.getBaseIndexId());
Assert.assertNotNull(indexMeta);
} finally {
tbl1.readUnlock();
}
// 7. query
// TODO: we cannot process a real query for now, so it has to be an explain query
String queryStr = "explain select * from db1.tbl1";
String a = UtFrameUtils.getSQLPlanOrErrorMsg(ctx, queryStr);
System.out.println(a);
StmtExecutor stmtExecutor = new StmtExecutor(ctx, queryStr);
stmtExecutor.execute();
Planner planner = stmtExecutor.planner();
List<PlanFragment> fragments = planner.getFragments();
Assert.assertEquals(2, fragments.size());
PlanFragment fragment = fragments.get(1);
Assert.assertTrue(fragment.getPlanRoot() instanceof OlapScanNode);
Assert.assertEquals(0, fragment.getChildren().size());
// test show backends;
BackendsProcDir dir = new BackendsProcDir(Catalog.getCurrentSystemInfo());
ProcResult result = dir.fetchResult();
Assert.assertEquals(BackendsProcDir.TITLE_NAMES.size(), result.getColumnNames().size());
Assert.assertEquals("{\"location\" : \"default\"}", result.getRows().get(0).get(19));
Assert.assertEquals("{\"lastSuccessReportTabletsTime\":\"N/A\",\"lastStreamLoadTime\":-1}",
result.getRows().get(0).get(BackendsProcDir.TITLE_NAMES.size() - 1));
} | NONSATD | true | tbl.readUnlock();
}
// 5. process a schema change job
String alterStmtStr = "alter table db1.tbl1 add column k2 int default '1'";
AlterTableStmt alterTableStmt = (AlterTableStmt) UtFrameUtils.parseAndAnalyzeStmt(alterStmtStr, ctx); | OlapTable tbl = (OlapTable) db.getTableNullable("tbl1");
tbl.readLock();
try {
Assert.assertNotNull(tbl);
System.out.println(tbl.getName());
Assert.assertEquals("Doris", tbl.getEngine());
Assert.assertEquals(1, tbl.getBaseSchema().size());
} finally {
tbl.readUnlock();
}
// 5. process a schema change job
String alterStmtStr = "alter table db1.tbl1 add column k2 int default '1'";
AlterTableStmt alterTableStmt = (AlterTableStmt) UtFrameUtils.parseAndAnalyzeStmt(alterStmtStr, ctx);
Catalog.getCurrentCatalog().getAlterInstance().processAlterTable(alterTableStmt);
// 6. check alter job
Map<Long, AlterJobV2> alterJobs = Catalog.getCurrentCatalog().getSchemaChangeHandler().getAlterJobsV2();
Assert.assertEquals(1, alterJobs.size());
for (AlterJobV2 alterJobV2 : alterJobs.values()) {
while (!alterJobV2.getJobState().isFinalState()) {
System.out.println("alter job " + alterJobV2.getJobId() + " is running. state: " + alterJobV2.getJobState());
Thread.sleep(1000); | // 3. create table tbl1
String createTblStmtStr = "create table db1.tbl1(k1 int) distributed by hash(k1) buckets 3 properties('replication_num' = '3'," +
"'colocate_with' = 'g1');";
CreateTableStmt createTableStmt = (CreateTableStmt) UtFrameUtils.parseAndAnalyzeStmt(createTblStmtStr, ctx);
Catalog.getCurrentCatalog().createTable(createTableStmt);
// must set replicas' path hash, or the tablet scheduler won't work
updateReplicaPathHash();
// 4. get and test the created db and table
Database db = Catalog.getCurrentCatalog().getDbNullable("default_cluster:db1");
Assert.assertNotNull(db);
OlapTable tbl = (OlapTable) db.getTableNullable("tbl1");
tbl.readLock();
try {
Assert.assertNotNull(tbl);
System.out.println(tbl.getName());
Assert.assertEquals("Doris", tbl.getEngine());
Assert.assertEquals(1, tbl.getBaseSchema().size());
} finally {
tbl.readUnlock();
}
// 5. process a schema change job
String alterStmtStr = "alter table db1.tbl1 add column k2 int default '1'";
AlterTableStmt alterTableStmt = (AlterTableStmt) UtFrameUtils.parseAndAnalyzeStmt(alterStmtStr, ctx);
Catalog.getCurrentCatalog().getAlterInstance().processAlterTable(alterTableStmt);
// 6. check alter job
Map<Long, AlterJobV2> alterJobs = Catalog.getCurrentCatalog().getSchemaChangeHandler().getAlterJobsV2();
Assert.assertEquals(1, alterJobs.size());
for (AlterJobV2 alterJobV2 : alterJobs.values()) {
while (!alterJobV2.getJobState().isFinalState()) {
System.out.println("alter job " + alterJobV2.getJobId() + " is running. state: " + alterJobV2.getJobState());
Thread.sleep(1000);
}
System.out.println("alter job " + alterJobV2.getJobId() + " is done. state: " + alterJobV2.getJobState());
Assert.assertEquals(AlterJobV2.JobState.FINISHED, alterJobV2.getJobState());
}
OlapTable tbl1 = (OlapTable) db.getTableNullable("tbl1");
tbl1.readLock();
try {
Assert.assertEquals(2, tbl1.getBaseSchema().size());
String baseIndexName = tbl1.getIndexNameById(tbl.getBaseIndexId());
Assert.assertEquals(baseIndexName, tbl1.getName()); |
25,762 | 6 | // 6. check alter job | @Test
public void testCreateDbAndTable() throws Exception {
// 1. create connect context
ConnectContext ctx = UtFrameUtils.createDefaultCtx();
// 2. create database db1
String createDbStmtStr = "create database db1;";
CreateDbStmt createDbStmt = (CreateDbStmt) UtFrameUtils.parseAndAnalyzeStmt(createDbStmtStr, ctx);
Catalog.getCurrentCatalog().createDb(createDbStmt);
System.out.println(Catalog.getCurrentCatalog().getDbNames());
// 3. create table tbl1
String createTblStmtStr = "create table db1.tbl1(k1 int) distributed by hash(k1) buckets 3 properties('replication_num' = '3'," +
"'colocate_with' = 'g1');";
CreateTableStmt createTableStmt = (CreateTableStmt) UtFrameUtils.parseAndAnalyzeStmt(createTblStmtStr, ctx);
Catalog.getCurrentCatalog().createTable(createTableStmt);
// must set replicas' path hash, or the tablet scheduler won't work
updateReplicaPathHash();
// 4. get and test the created db and table
Database db = Catalog.getCurrentCatalog().getDbNullable("default_cluster:db1");
Assert.assertNotNull(db);
OlapTable tbl = (OlapTable) db.getTableNullable("tbl1");
tbl.readLock();
try {
Assert.assertNotNull(tbl);
System.out.println(tbl.getName());
Assert.assertEquals("Doris", tbl.getEngine());
Assert.assertEquals(1, tbl.getBaseSchema().size());
} finally {
tbl.readUnlock();
}
// 5. process a schema change job
String alterStmtStr = "alter table db1.tbl1 add column k2 int default '1'";
AlterTableStmt alterTableStmt = (AlterTableStmt) UtFrameUtils.parseAndAnalyzeStmt(alterStmtStr, ctx);
Catalog.getCurrentCatalog().getAlterInstance().processAlterTable(alterTableStmt);
// 6. check alter job
Map<Long, AlterJobV2> alterJobs = Catalog.getCurrentCatalog().getSchemaChangeHandler().getAlterJobsV2();
Assert.assertEquals(1, alterJobs.size());
for (AlterJobV2 alterJobV2 : alterJobs.values()) {
while (!alterJobV2.getJobState().isFinalState()) {
System.out.println("alter job " + alterJobV2.getJobId() + " is running. state: " + alterJobV2.getJobState());
Thread.sleep(1000);
}
System.out.println("alter job " + alterJobV2.getJobId() + " is done. state: " + alterJobV2.getJobState());
Assert.assertEquals(AlterJobV2.JobState.FINISHED, alterJobV2.getJobState());
}
OlapTable tbl1 = (OlapTable) db.getTableNullable("tbl1");
tbl1.readLock();
try {
Assert.assertEquals(2, tbl1.getBaseSchema().size());
String baseIndexName = tbl1.getIndexNameById(tbl.getBaseIndexId());
Assert.assertEquals(baseIndexName, tbl1.getName());
MaterializedIndexMeta indexMeta = tbl1.getIndexMetaByIndexId(tbl1.getBaseIndexId());
Assert.assertNotNull(indexMeta);
} finally {
tbl1.readUnlock();
}
// 7. query
// TODO: we cannot process a real query for now, so it has to be an explain query
String queryStr = "explain select * from db1.tbl1";
String a = UtFrameUtils.getSQLPlanOrErrorMsg(ctx, queryStr);
System.out.println(a);
StmtExecutor stmtExecutor = new StmtExecutor(ctx, queryStr);
stmtExecutor.execute();
Planner planner = stmtExecutor.planner();
List<PlanFragment> fragments = planner.getFragments();
Assert.assertEquals(2, fragments.size());
PlanFragment fragment = fragments.get(1);
Assert.assertTrue(fragment.getPlanRoot() instanceof OlapScanNode);
Assert.assertEquals(0, fragment.getChildren().size());
// test show backends;
BackendsProcDir dir = new BackendsProcDir(Catalog.getCurrentSystemInfo());
ProcResult result = dir.fetchResult();
Assert.assertEquals(BackendsProcDir.TITLE_NAMES.size(), result.getColumnNames().size());
Assert.assertEquals("{\"location\" : \"default\"}", result.getRows().get(0).get(19));
Assert.assertEquals("{\"lastSuccessReportTabletsTime\":\"N/A\",\"lastStreamLoadTime\":-1}",
result.getRows().get(0).get(BackendsProcDir.TITLE_NAMES.size() - 1));
} | NONSATD | true | AlterTableStmt alterTableStmt = (AlterTableStmt) UtFrameUtils.parseAndAnalyzeStmt(alterStmtStr, ctx);
Catalog.getCurrentCatalog().getAlterInstance().processAlterTable(alterTableStmt);
// 6. check alter job
Map<Long, AlterJobV2> alterJobs = Catalog.getCurrentCatalog().getSchemaChangeHandler().getAlterJobsV2();
Assert.assertEquals(1, alterJobs.size()); | System.out.println(tbl.getName());
Assert.assertEquals("Doris", tbl.getEngine());
Assert.assertEquals(1, tbl.getBaseSchema().size());
} finally {
tbl.readUnlock();
}
// 5. process a schema change job
String alterStmtStr = "alter table db1.tbl1 add column k2 int default '1'";
AlterTableStmt alterTableStmt = (AlterTableStmt) UtFrameUtils.parseAndAnalyzeStmt(alterStmtStr, ctx);
Catalog.getCurrentCatalog().getAlterInstance().processAlterTable(alterTableStmt);
// 6. check alter job
Map<Long, AlterJobV2> alterJobs = Catalog.getCurrentCatalog().getSchemaChangeHandler().getAlterJobsV2();
Assert.assertEquals(1, alterJobs.size());
for (AlterJobV2 alterJobV2 : alterJobs.values()) {
while (!alterJobV2.getJobState().isFinalState()) {
System.out.println("alter job " + alterJobV2.getJobId() + " is running. state: " + alterJobV2.getJobState());
Thread.sleep(1000);
}
System.out.println("alter job " + alterJobV2.getJobId() + " is done. state: " + alterJobV2.getJobState());
Assert.assertEquals(AlterJobV2.JobState.FINISHED, alterJobV2.getJobState());
} | Catalog.getCurrentCatalog().createTable(createTableStmt);
// must set replicas' path hash, or the tablet scheduler won't work
updateReplicaPathHash();
// 4. get and test the created db and table
Database db = Catalog.getCurrentCatalog().getDbNullable("default_cluster:db1");
Assert.assertNotNull(db);
OlapTable tbl = (OlapTable) db.getTableNullable("tbl1");
tbl.readLock();
try {
Assert.assertNotNull(tbl);
System.out.println(tbl.getName());
Assert.assertEquals("Doris", tbl.getEngine());
Assert.assertEquals(1, tbl.getBaseSchema().size());
} finally {
tbl.readUnlock();
}
// 5. process a schema change job
String alterStmtStr = "alter table db1.tbl1 add column k2 int default '1'";
AlterTableStmt alterTableStmt = (AlterTableStmt) UtFrameUtils.parseAndAnalyzeStmt(alterStmtStr, ctx);
Catalog.getCurrentCatalog().getAlterInstance().processAlterTable(alterTableStmt);
// 6. check alter job
Map<Long, AlterJobV2> alterJobs = Catalog.getCurrentCatalog().getSchemaChangeHandler().getAlterJobsV2();
Assert.assertEquals(1, alterJobs.size());
for (AlterJobV2 alterJobV2 : alterJobs.values()) {
while (!alterJobV2.getJobState().isFinalState()) {
System.out.println("alter job " + alterJobV2.getJobId() + " is running. state: " + alterJobV2.getJobState());
Thread.sleep(1000);
}
System.out.println("alter job " + alterJobV2.getJobId() + " is done. state: " + alterJobV2.getJobState());
Assert.assertEquals(AlterJobV2.JobState.FINISHED, alterJobV2.getJobState());
}
OlapTable tbl1 = (OlapTable) db.getTableNullable("tbl1");
tbl1.readLock();
try {
Assert.assertEquals(2, tbl1.getBaseSchema().size());
String baseIndexName = tbl1.getIndexNameById(tbl.getBaseIndexId());
Assert.assertEquals(baseIndexName, tbl1.getName());
MaterializedIndexMeta indexMeta = tbl1.getIndexMetaByIndexId(tbl1.getBaseIndexId());
Assert.assertNotNull(indexMeta);
} finally {
tbl1.readUnlock(); |
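
Note: the row above centers on the alter-job check, which spins on getAlterJobsV2() with an unbounded while/sleep loop, so a wedged schema-change job hangs the whole suite. Below is a minimal sketch of the same check with an explicit deadline, written against the AlterJobV2/Assert API visible in the snippet; the helper name waitForAlterJobsDone is hypothetical, not part of the Doris test framework.

// Hypothetical helper: the same polling as above, but bounded by a deadline so a
// stuck job fails the test instead of hanging it. Assumes java.util.Map and the
// AlterJobV2/Assert types from the surrounding snippet.
private static void waitForAlterJobsDone(Map<Long, AlterJobV2> alterJobs, long timeoutMs)
        throws InterruptedException {
    long deadline = System.currentTimeMillis() + timeoutMs;
    for (AlterJobV2 job : alterJobs.values()) {
        while (!job.getJobState().isFinalState()) {
            if (System.currentTimeMillis() > deadline) {
                Assert.fail("alter job " + job.getJobId() + " timed out in state " + job.getJobState());
            }
            Thread.sleep(100);
        }
        Assert.assertEquals(AlterJobV2.JobState.FINISHED, job.getJobState());
    }
}
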
25,762 | 7 | // 7. query
// TODO: we cannot process a real query for now, so it has to be an explain query | @Test
public void testCreateDbAndTable() throws Exception {
// 1. create connect context
ConnectContext ctx = UtFrameUtils.createDefaultCtx();
// 2. create database db1
String createDbStmtStr = "create database db1;";
CreateDbStmt createDbStmt = (CreateDbStmt) UtFrameUtils.parseAndAnalyzeStmt(createDbStmtStr, ctx);
Catalog.getCurrentCatalog().createDb(createDbStmt);
System.out.println(Catalog.getCurrentCatalog().getDbNames());
// 3. create table tbl1
String createTblStmtStr = "create table db1.tbl1(k1 int) distributed by hash(k1) buckets 3 properties('replication_num' = '3'," +
"'colocate_with' = 'g1');";
CreateTableStmt createTableStmt = (CreateTableStmt) UtFrameUtils.parseAndAnalyzeStmt(createTblStmtStr, ctx);
Catalog.getCurrentCatalog().createTable(createTableStmt);
// must set replicas' path hash, or the tablet scheduler won't work
updateReplicaPathHash();
// 4. get and test the created db and table
Database db = Catalog.getCurrentCatalog().getDbNullable("default_cluster:db1");
Assert.assertNotNull(db);
OlapTable tbl = (OlapTable) db.getTableNullable("tbl1");
tbl.readLock();
try {
Assert.assertNotNull(tbl);
System.out.println(tbl.getName());
Assert.assertEquals("Doris", tbl.getEngine());
Assert.assertEquals(1, tbl.getBaseSchema().size());
} finally {
tbl.readUnlock();
}
// 5. process a schema change job
String alterStmtStr = "alter table db1.tbl1 add column k2 int default '1'";
AlterTableStmt alterTableStmt = (AlterTableStmt) UtFrameUtils.parseAndAnalyzeStmt(alterStmtStr, ctx);
Catalog.getCurrentCatalog().getAlterInstance().processAlterTable(alterTableStmt);
// 6. check alter job
Map<Long, AlterJobV2> alterJobs = Catalog.getCurrentCatalog().getSchemaChangeHandler().getAlterJobsV2();
Assert.assertEquals(1, alterJobs.size());
for (AlterJobV2 alterJobV2 : alterJobs.values()) {
while (!alterJobV2.getJobState().isFinalState()) {
System.out.println("alter job " + alterJobV2.getJobId() + " is running. state: " + alterJobV2.getJobState());
Thread.sleep(1000);
}
System.out.println("alter job " + alterJobV2.getJobId() + " is done. state: " + alterJobV2.getJobState());
Assert.assertEquals(AlterJobV2.JobState.FINISHED, alterJobV2.getJobState());
}
OlapTable tbl1 = (OlapTable) db.getTableNullable("tbl1");
tbl1.readLock();
try {
Assert.assertEquals(2, tbl1.getBaseSchema().size());
String baseIndexName = tbl1.getIndexNameById(tbl.getBaseIndexId());
Assert.assertEquals(baseIndexName, tbl1.getName());
MaterializedIndexMeta indexMeta = tbl1.getIndexMetaByIndexId(tbl1.getBaseIndexId());
Assert.assertNotNull(indexMeta);
} finally {
tbl1.readUnlock();
}
// 7. query
// TODO: we cannot process a real query for now, so it has to be an explain query
String queryStr = "explain select * from db1.tbl1";
String a = UtFrameUtils.getSQLPlanOrErrorMsg(ctx, queryStr);
System.out.println(a);
StmtExecutor stmtExecutor = new StmtExecutor(ctx, queryStr);
stmtExecutor.execute();
Planner planner = stmtExecutor.planner();
List<PlanFragment> fragments = planner.getFragments();
Assert.assertEquals(2, fragments.size());
PlanFragment fragment = fragments.get(1);
Assert.assertTrue(fragment.getPlanRoot() instanceof OlapScanNode);
Assert.assertEquals(0, fragment.getChildren().size());
// test show backends;
BackendsProcDir dir = new BackendsProcDir(Catalog.getCurrentSystemInfo());
ProcResult result = dir.fetchResult();
Assert.assertEquals(BackendsProcDir.TITLE_NAMES.size(), result.getColumnNames().size());
Assert.assertEquals("{\"location\" : \"default\"}", result.getRows().get(0).get(19));
Assert.assertEquals("{\"lastSuccessReportTabletsTime\":\"N/A\",\"lastStreamLoadTime\":-1}",
result.getRows().get(0).get(BackendsProcDir.TITLE_NAMES.size() - 1));
} | DESIGN | true | tbl1.readUnlock();
}
// 7. query
// TODO: we cannot process a real query for now, so it has to be an explain query
String queryStr = "explain select * from db1.tbl1";
String a = UtFrameUtils.getSQLPlanOrErrorMsg(ctx, queryStr); | tbl1.readLock();
try {
Assert.assertEquals(2, tbl1.getBaseSchema().size());
String baseIndexName = tbl1.getIndexNameById(tbl.getBaseIndexId());
Assert.assertEquals(baseIndexName, tbl1.getName());
MaterializedIndexMeta indexMeta = tbl1.getIndexMetaByIndexId(tbl1.getBaseIndexId());
Assert.assertNotNull(indexMeta);
} finally {
tbl1.readUnlock();
}
// 7. query
// TODO: we can not process real query for now. So it has to be a explain query
String queryStr = "explain select * from db1.tbl1";
String a = UtFrameUtils.getSQLPlanOrErrorMsg(ctx, queryStr);
System.out.println(a);
StmtExecutor stmtExecutor = new StmtExecutor(ctx, queryStr);
stmtExecutor.execute();
Planner planner = stmtExecutor.planner();
List<PlanFragment> fragments = planner.getFragments();
Assert.assertEquals(2, fragments.size());
PlanFragment fragment = fragments.get(1);
Assert.assertTrue(fragment.getPlanRoot() instanceof OlapScanNode); | Assert.assertEquals(1, alterJobs.size());
for (AlterJobV2 alterJobV2 : alterJobs.values()) {
while (!alterJobV2.getJobState().isFinalState()) {
System.out.println("alter job " + alterJobV2.getJobId() + " is running. state: " + alterJobV2.getJobState());
Thread.sleep(1000);
}
System.out.println("alter job " + alterJobV2.getJobId() + " is done. state: " + alterJobV2.getJobState());
Assert.assertEquals(AlterJobV2.JobState.FINISHED, alterJobV2.getJobState());
}
OlapTable tbl1 = (OlapTable) db.getTableNullable("tbl1");
tbl1.readLock();
try {
Assert.assertEquals(2, tbl1.getBaseSchema().size());
String baseIndexName = tbl1.getIndexNameById(tbl.getBaseIndexId());
Assert.assertEquals(baseIndexName, tbl1.getName());
MaterializedIndexMeta indexMeta = tbl1.getIndexMetaByIndexId(tbl1.getBaseIndexId());
Assert.assertNotNull(indexMeta);
} finally {
tbl1.readUnlock();
}
// 7. query
// TODO: we cannot process a real query for now, so it has to be an explain query
String queryStr = "explain select * from db1.tbl1";
String a = UtFrameUtils.getSQLPlanOrErrorMsg(ctx, queryStr);
System.out.println(a);
StmtExecutor stmtExecutor = new StmtExecutor(ctx, queryStr);
stmtExecutor.execute();
Planner planner = stmtExecutor.planner();
List<PlanFragment> fragments = planner.getFragments();
Assert.assertEquals(2, fragments.size());
PlanFragment fragment = fragments.get(1);
Assert.assertTrue(fragment.getPlanRoot() instanceof OlapScanNode);
Assert.assertEquals(0, fragment.getChildren().size());
// test show backends;
BackendsProcDir dir = new BackendsProcDir(Catalog.getCurrentSystemInfo());
ProcResult result = dir.fetchResult();
Assert.assertEquals(BackendsProcDir.TITLE_NAMES.size(), result.getColumnNames().size());
Assert.assertEquals("{\"location\" : \"default\"}", result.getRows().get(0).get(19));
Assert.assertEquals("{\"lastSuccessReportTabletsTime\":\"N/A\",\"lastStreamLoadTime\":-1}",
result.getRows().get(0).get(BackendsProcDir.TITLE_NAMES.size() - 1));
} |
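
Note: this row's TODO records that the frontend-only harness cannot execute a real query, so the test plans an "explain" statement and asserts on the fragment tree instead. Below is a small sketch of that pattern as a reusable check, using the same StmtExecutor/Planner API the test calls; the helper name and the specific assertions are illustrative only.

// Hypothetical helper: plan an EXPLAIN-only statement and assert the bottom fragment
// scans an OLAP table. No data is read -- only the planner runs, which is exactly why
// a frontend-only harness can afford it.
private static void assertExplainPlansToOlapScan(ConnectContext ctx, String sql) throws Exception {
    StmtExecutor executor = new StmtExecutor(ctx, "explain " + sql);
    executor.execute();
    List<PlanFragment> fragments = executor.planner().getFragments();
    Assert.assertFalse(fragments.isEmpty());
    PlanFragment bottom = fragments.get(fragments.size() - 1);
    Assert.assertTrue(bottom.getPlanRoot() instanceof OlapScanNode);
}
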
25,762 | 8 | // test show backends; | @Test
public void testCreateDbAndTable() throws Exception {
// 1. create connect context
ConnectContext ctx = UtFrameUtils.createDefaultCtx();
// 2. create database db1
String createDbStmtStr = "create database db1;";
CreateDbStmt createDbStmt = (CreateDbStmt) UtFrameUtils.parseAndAnalyzeStmt(createDbStmtStr, ctx);
Catalog.getCurrentCatalog().createDb(createDbStmt);
System.out.println(Catalog.getCurrentCatalog().getDbNames());
// 3. create table tbl1
String createTblStmtStr = "create table db1.tbl1(k1 int) distributed by hash(k1) buckets 3 properties('replication_num' = '3'," +
"'colocate_with' = 'g1');";
CreateTableStmt createTableStmt = (CreateTableStmt) UtFrameUtils.parseAndAnalyzeStmt(createTblStmtStr, ctx);
Catalog.getCurrentCatalog().createTable(createTableStmt);
// must set replicas' path hash, or the tablet scheduler won't work
updateReplicaPathHash();
// 4. get and test the created db and table
Database db = Catalog.getCurrentCatalog().getDbNullable("default_cluster:db1");
Assert.assertNotNull(db);
OlapTable tbl = (OlapTable) db.getTableNullable("tbl1");
tbl.readLock();
try {
Assert.assertNotNull(tbl);
System.out.println(tbl.getName());
Assert.assertEquals("Doris", tbl.getEngine());
Assert.assertEquals(1, tbl.getBaseSchema().size());
} finally {
tbl.readUnlock();
}
// 5. process a schema change job
String alterStmtStr = "alter table db1.tbl1 add column k2 int default '1'";
AlterTableStmt alterTableStmt = (AlterTableStmt) UtFrameUtils.parseAndAnalyzeStmt(alterStmtStr, ctx);
Catalog.getCurrentCatalog().getAlterInstance().processAlterTable(alterTableStmt);
// 6. check alter job
Map<Long, AlterJobV2> alterJobs = Catalog.getCurrentCatalog().getSchemaChangeHandler().getAlterJobsV2();
Assert.assertEquals(1, alterJobs.size());
for (AlterJobV2 alterJobV2 : alterJobs.values()) {
while (!alterJobV2.getJobState().isFinalState()) {
System.out.println("alter job " + alterJobV2.getJobId() + " is running. state: " + alterJobV2.getJobState());
Thread.sleep(1000);
}
System.out.println("alter job " + alterJobV2.getJobId() + " is done. state: " + alterJobV2.getJobState());
Assert.assertEquals(AlterJobV2.JobState.FINISHED, alterJobV2.getJobState());
}
OlapTable tbl1 = (OlapTable) db.getTableNullable("tbl1");
tbl1.readLock();
try {
Assert.assertEquals(2, tbl1.getBaseSchema().size());
String baseIndexName = tbl1.getIndexNameById(tbl.getBaseIndexId());
Assert.assertEquals(baseIndexName, tbl1.getName());
MaterializedIndexMeta indexMeta = tbl1.getIndexMetaByIndexId(tbl1.getBaseIndexId());
Assert.assertNotNull(indexMeta);
} finally {
tbl1.readUnlock();
}
// 7. query
// TODO: we cannot process a real query for now, so it has to be an explain query
String queryStr = "explain select * from db1.tbl1";
String a = UtFrameUtils.getSQLPlanOrErrorMsg(ctx, queryStr);
System.out.println(a);
StmtExecutor stmtExecutor = new StmtExecutor(ctx, queryStr);
stmtExecutor.execute();
Planner planner = stmtExecutor.planner();
List<PlanFragment> fragments = planner.getFragments();
Assert.assertEquals(2, fragments.size());
PlanFragment fragment = fragments.get(1);
Assert.assertTrue(fragment.getPlanRoot() instanceof OlapScanNode);
Assert.assertEquals(0, fragment.getChildren().size());
// test show backends;
BackendsProcDir dir = new BackendsProcDir(Catalog.getCurrentSystemInfo());
ProcResult result = dir.fetchResult();
Assert.assertEquals(BackendsProcDir.TITLE_NAMES.size(), result.getColumnNames().size());
Assert.assertEquals("{\"location\" : \"default\"}", result.getRows().get(0).get(19));
Assert.assertEquals("{\"lastSuccessReportTabletsTime\":\"N/A\",\"lastStreamLoadTime\":-1}",
result.getRows().get(0).get(BackendsProcDir.TITLE_NAMES.size() - 1));
} | NONSATD | true | Assert.assertTrue(fragment.getPlanRoot() instanceof OlapScanNode);
Assert.assertEquals(0, fragment.getChildren().size());
// test show backends;
BackendsProcDir dir = new BackendsProcDir(Catalog.getCurrentSystemInfo());
ProcResult result = dir.fetchResult(); | String a = UtFrameUtils.getSQLPlanOrErrorMsg(ctx, queryStr);
System.out.println(a);
StmtExecutor stmtExecutor = new StmtExecutor(ctx, queryStr);
stmtExecutor.execute();
Planner planner = stmtExecutor.planner();
List<PlanFragment> fragments = planner.getFragments();
Assert.assertEquals(2, fragments.size());
PlanFragment fragment = fragments.get(1);
Assert.assertTrue(fragment.getPlanRoot() instanceof OlapScanNode);
Assert.assertEquals(0, fragment.getChildren().size());
// test show backends;
BackendsProcDir dir = new BackendsProcDir(Catalog.getCurrentSystemInfo());
ProcResult result = dir.fetchResult();
Assert.assertEquals(BackendsProcDir.TITLE_NAMES.size(), result.getColumnNames().size());
Assert.assertEquals("{\"location\" : \"default\"}", result.getRows().get(0).get(19));
Assert.assertEquals("{\"lastSuccessReportTabletsTime\":\"N/A\",\"lastStreamLoadTime\":-1}",
result.getRows().get(0).get(BackendsProcDir.TITLE_NAMES.size() - 1));
} | String baseIndexName = tbl1.getIndexNameById(tbl.getBaseIndexId());
Assert.assertEquals(baseIndexName, tbl1.getName());
MaterializedIndexMeta indexMeta = tbl1.getIndexMetaByIndexId(tbl1.getBaseIndexId());
Assert.assertNotNull(indexMeta);
} finally {
tbl1.readUnlock();
}
// 7. query
// TODO: we cannot process a real query for now, so it has to be an explain query
String queryStr = "explain select * from db1.tbl1";
String a = UtFrameUtils.getSQLPlanOrErrorMsg(ctx, queryStr);
System.out.println(a);
StmtExecutor stmtExecutor = new StmtExecutor(ctx, queryStr);
stmtExecutor.execute();
Planner planner = stmtExecutor.planner();
List<PlanFragment> fragments = planner.getFragments();
Assert.assertEquals(2, fragments.size());
PlanFragment fragment = fragments.get(1);
Assert.assertTrue(fragment.getPlanRoot() instanceof OlapScanNode);
Assert.assertEquals(0, fragment.getChildren().size());
// test show backends;
BackendsProcDir dir = new BackendsProcDir(Catalog.getCurrentSystemInfo());
ProcResult result = dir.fetchResult();
Assert.assertEquals(BackendsProcDir.TITLE_NAMES.size(), result.getColumnNames().size());
Assert.assertEquals("{\"location\" : \"default\"}", result.getRows().get(0).get(19));
Assert.assertEquals("{\"lastSuccessReportTabletsTime\":\"N/A\",\"lastStreamLoadTime\":-1}",
result.getRows().get(0).get(BackendsProcDir.TITLE_NAMES.size() - 1));
} |
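
Note: the "show backends" check above addresses a column by the magic index 19. Below is a hedged variant that looks the column up by its title instead, assuming BackendsProcDir.TITLE_NAMES lists the column titles in row order; the title string "Tag" is a guess and would need to match the real column name.

// Resolve the column index from the title list rather than hard-coding 19, so the
// assertion survives column reordering. "Tag" is an assumed title, not verified here.
int tagIdx = BackendsProcDir.TITLE_NAMES.indexOf("Tag");
Assert.assertTrue(tagIdx >= 0);
Assert.assertEquals("{\"location\" : \"default\"}", result.getRows().get(0).get(tagIdx));
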
25,770 | 0 | // here the input isn't in the canonical form, but we should be forgiving | public void testToInternal() throws Exception {
assertFormatParsed("1995-12-31T23:59:59.999", "1995-12-31T23:59:59.999666Z");
assertFormatParsed("1995-12-31T23:59:59.999", "1995-12-31T23:59:59.999Z");
assertFormatParsed("1995-12-31T23:59:59.99", "1995-12-31T23:59:59.99Z");
assertFormatParsed("1995-12-31T23:59:59.9", "1995-12-31T23:59:59.9Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59Z");
// here the input isn't in the canonical form, but we should be forgiving
assertFormatParsed("1995-12-31T23:59:59.99", "1995-12-31T23:59:59.990Z");
assertFormatParsed("1995-12-31T23:59:59.9", "1995-12-31T23:59:59.900Z");
assertFormatParsed("1995-12-31T23:59:59.9", "1995-12-31T23:59:59.90Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59.000Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59.00Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59.0Z");
// kind of kludgy, but we have other tests for the actual date math
assertFormatParsed(DateFormatUtil.formatDate(p.parseMath("/DAY")), "NOW/DAY");
// as of Solr 1.3
assertFormatParsed("1995-12-31T00:00:00", "1995-12-31T23:59:59Z/DAY");
assertFormatParsed("1995-12-31T00:00:00", "1995-12-31T23:59:59.123Z/DAY");
assertFormatParsed("1995-12-31T00:00:00", "1995-12-31T23:59:59.123999Z/DAY");
} | NONSATD | true | assertFormatParsed("1995-12-31T23:59:59.9", "1995-12-31T23:59:59.9Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59Z");
// here the input isn't in the canonical form, but we should be forgiving
assertFormatParsed("1995-12-31T23:59:59.99", "1995-12-31T23:59:59.990Z");
assertFormatParsed("1995-12-31T23:59:59.9", "1995-12-31T23:59:59.900Z"); | public void testToInternal() throws Exception {
assertFormatParsed("1995-12-31T23:59:59.999", "1995-12-31T23:59:59.999666Z");
assertFormatParsed("1995-12-31T23:59:59.999", "1995-12-31T23:59:59.999Z");
assertFormatParsed("1995-12-31T23:59:59.99", "1995-12-31T23:59:59.99Z");
assertFormatParsed("1995-12-31T23:59:59.9", "1995-12-31T23:59:59.9Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59Z");
// here the input isn't in the canonical form, but we should be forgiving
assertFormatParsed("1995-12-31T23:59:59.99", "1995-12-31T23:59:59.990Z");
assertFormatParsed("1995-12-31T23:59:59.9", "1995-12-31T23:59:59.900Z");
assertFormatParsed("1995-12-31T23:59:59.9", "1995-12-31T23:59:59.90Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59.000Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59.00Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59.0Z");
// kind of kludgy, but we have other tests for the actual date math
assertFormatParsed(DateFormatUtil.formatDate(p.parseMath("/DAY")), "NOW/DAY");
// as of Solr 1.3
assertFormatParsed("1995-12-31T00:00:00", "1995-12-31T23:59:59Z/DAY"); | public void testToInternal() throws Exception {
assertFormatParsed("1995-12-31T23:59:59.999", "1995-12-31T23:59:59.999666Z");
assertFormatParsed("1995-12-31T23:59:59.999", "1995-12-31T23:59:59.999Z");
assertFormatParsed("1995-12-31T23:59:59.99", "1995-12-31T23:59:59.99Z");
assertFormatParsed("1995-12-31T23:59:59.9", "1995-12-31T23:59:59.9Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59Z");
// here the input isn't in the canonical form, but we should be forgiving
assertFormatParsed("1995-12-31T23:59:59.99", "1995-12-31T23:59:59.990Z");
assertFormatParsed("1995-12-31T23:59:59.9", "1995-12-31T23:59:59.900Z");
assertFormatParsed("1995-12-31T23:59:59.9", "1995-12-31T23:59:59.90Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59.000Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59.00Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59.0Z");
// kind of kludgy, but we have other tests for the actual date math
assertFormatParsed(DateFormatUtil.formatDate(p.parseMath("/DAY")), "NOW/DAY");
// as of Solr 1.3
assertFormatParsed("1995-12-31T00:00:00", "1995-12-31T23:59:59Z/DAY");
assertFormatParsed("1995-12-31T00:00:00", "1995-12-31T23:59:59.123Z/DAY");
assertFormatParsed("1995-12-31T00:00:00", "1995-12-31T23:59:59.123999Z/DAY");
} |
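
Note: the assertions above pin down the canonical form these tests expect: trailing zeros are trimmed from the fractional seconds, and a fraction of all zeros drops the dot entirely. Below is a self-contained sketch of that trimming rule in plain Java -- not Solr's actual implementation, just the behavior the assertions describe.

// Strip trailing zeros from the fractional-seconds part of an ISO timestamp (zone
// designator already removed); drop the '.' when the fraction is all zeros.
static String trimFraction(String isoNoZone) {
    int dot = isoNoZone.indexOf('.');
    if (dot < 0) {
        return isoNoZone;
    }
    int end = isoNoZone.length();
    while (end > dot + 1 && isoNoZone.charAt(end - 1) == '0') {
        end--;
    }
    if (end == dot + 1) {
        end = dot; // fraction was all zeros: drop the dot too
    }
    return isoNoZone.substring(0, end);
}
// trimFraction("1995-12-31T23:59:59.990") -> "1995-12-31T23:59:59.99"
// trimFraction("1995-12-31T23:59:59.000") -> "1995-12-31T23:59:59"
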
25,770 | 1 | // kind of kludgy, but we have other tests for the actual date math | public void testToInternal() throws Exception {
assertFormatParsed("1995-12-31T23:59:59.999", "1995-12-31T23:59:59.999666Z");
assertFormatParsed("1995-12-31T23:59:59.999", "1995-12-31T23:59:59.999Z");
assertFormatParsed("1995-12-31T23:59:59.99", "1995-12-31T23:59:59.99Z");
assertFormatParsed("1995-12-31T23:59:59.9", "1995-12-31T23:59:59.9Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59Z");
// here the input isn't in the canonical form, but we should be forgiving
assertFormatParsed("1995-12-31T23:59:59.99", "1995-12-31T23:59:59.990Z");
assertFormatParsed("1995-12-31T23:59:59.9", "1995-12-31T23:59:59.900Z");
assertFormatParsed("1995-12-31T23:59:59.9", "1995-12-31T23:59:59.90Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59.000Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59.00Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59.0Z");
// kind of kludgy, but we have other tests for the actual date math
assertFormatParsed(DateFormatUtil.formatDate(p.parseMath("/DAY")), "NOW/DAY");
// as of Solr 1.3
assertFormatParsed("1995-12-31T00:00:00", "1995-12-31T23:59:59Z/DAY");
assertFormatParsed("1995-12-31T00:00:00", "1995-12-31T23:59:59.123Z/DAY");
assertFormatParsed("1995-12-31T00:00:00", "1995-12-31T23:59:59.123999Z/DAY");
} | TEST | true | assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59.00Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59.0Z");
// kind of kludgy, but we have other tests for the actual date math
assertFormatParsed(DateFormatUtil.formatDate(p.parseMath("/DAY")), "NOW/DAY");
// as of Solr 1.3 | assertFormatParsed("1995-12-31T23:59:59.99", "1995-12-31T23:59:59.99Z");
assertFormatParsed("1995-12-31T23:59:59.9", "1995-12-31T23:59:59.9Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59Z");
// here the input isn't in the canonical form, but we should be forgiving
assertFormatParsed("1995-12-31T23:59:59.99", "1995-12-31T23:59:59.990Z");
assertFormatParsed("1995-12-31T23:59:59.9", "1995-12-31T23:59:59.900Z");
assertFormatParsed("1995-12-31T23:59:59.9", "1995-12-31T23:59:59.90Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59.000Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59.00Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59.0Z");
// kind of kludgy, but we have other tests for the actual date math
assertFormatParsed(DateFormatUtil.formatDate(p.parseMath("/DAY")), "NOW/DAY");
// as of Solr 1.3
assertFormatParsed("1995-12-31T00:00:00", "1995-12-31T23:59:59Z/DAY");
assertFormatParsed("1995-12-31T00:00:00", "1995-12-31T23:59:59.123Z/DAY");
assertFormatParsed("1995-12-31T00:00:00", "1995-12-31T23:59:59.123999Z/DAY");
} | public void testToInternal() throws Exception {
assertFormatParsed("1995-12-31T23:59:59.999", "1995-12-31T23:59:59.999666Z");
assertFormatParsed("1995-12-31T23:59:59.999", "1995-12-31T23:59:59.999Z");
assertFormatParsed("1995-12-31T23:59:59.99", "1995-12-31T23:59:59.99Z");
assertFormatParsed("1995-12-31T23:59:59.9", "1995-12-31T23:59:59.9Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59Z");
// here the input isn't in the canonical form, but we should be forgiving
assertFormatParsed("1995-12-31T23:59:59.99", "1995-12-31T23:59:59.990Z");
assertFormatParsed("1995-12-31T23:59:59.9", "1995-12-31T23:59:59.900Z");
assertFormatParsed("1995-12-31T23:59:59.9", "1995-12-31T23:59:59.90Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59.000Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59.00Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59.0Z");
// kind of kludgy, but we have other tests for the actual date math
assertFormatParsed(DateFormatUtil.formatDate(p.parseMath("/DAY")), "NOW/DAY");
// as of Solr 1.3
assertFormatParsed("1995-12-31T00:00:00", "1995-12-31T23:59:59Z/DAY");
assertFormatParsed("1995-12-31T00:00:00", "1995-12-31T23:59:59.123Z/DAY");
assertFormatParsed("1995-12-31T00:00:00", "1995-12-31T23:59:59.123999Z/DAY");
} |
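
Note: the NOW/DAY and ...Z/DAY cases above exercise date-math rounding. Below, the same rounding expressed against plain java.time rather than Solr's DateMathParser, purely as a point of comparison; this sketches the semantics, not the parser itself.

import java.time.Instant;
import java.time.temporal.ChronoUnit;

// Truncate an instant to the start of its UTC day -- the effect of "/DAY" above.
// Instant.truncatedTo supports DAYS because a day has a fixed duration in UTC.
static Instant roundDownToDay(Instant t) {
    return t.truncatedTo(ChronoUnit.DAYS);
}
// roundDownToDay(Instant.parse("1995-12-31T23:59:59.123Z")) -> 1995-12-31T00:00:00Z
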
25,770 | 2 | // as of Solr 1.3 | public void testToInternal() throws Exception {
assertFormatParsed("1995-12-31T23:59:59.999", "1995-12-31T23:59:59.999666Z");
assertFormatParsed("1995-12-31T23:59:59.999", "1995-12-31T23:59:59.999Z");
assertFormatParsed("1995-12-31T23:59:59.99", "1995-12-31T23:59:59.99Z");
assertFormatParsed("1995-12-31T23:59:59.9", "1995-12-31T23:59:59.9Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59Z");
// here the input isn't in the canonical form, but we should be forgiving
assertFormatParsed("1995-12-31T23:59:59.99", "1995-12-31T23:59:59.990Z");
assertFormatParsed("1995-12-31T23:59:59.9", "1995-12-31T23:59:59.900Z");
assertFormatParsed("1995-12-31T23:59:59.9", "1995-12-31T23:59:59.90Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59.000Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59.00Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59.0Z");
// kind of kludgy, but we have other tests for the actual date math
assertFormatParsed(DateFormatUtil.formatDate(p.parseMath("/DAY")), "NOW/DAY");
// as of Solr 1.3
assertFormatParsed("1995-12-31T00:00:00", "1995-12-31T23:59:59Z/DAY");
assertFormatParsed("1995-12-31T00:00:00", "1995-12-31T23:59:59.123Z/DAY");
assertFormatParsed("1995-12-31T00:00:00", "1995-12-31T23:59:59.123999Z/DAY");
} | NONSATD | true | // kind of kludgy, but we have other tests for the actual date math
assertFormatParsed(DateFormatUtil.formatDate(p.parseMath("/DAY")), "NOW/DAY");
// as of Solr 1.3
assertFormatParsed("1995-12-31T00:00:00", "1995-12-31T23:59:59Z/DAY");
assertFormatParsed("1995-12-31T00:00:00", "1995-12-31T23:59:59.123Z/DAY"); | assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59Z");
// here the input isn't in the canonical form, but we should be forgiving
assertFormatParsed("1995-12-31T23:59:59.99", "1995-12-31T23:59:59.990Z");
assertFormatParsed("1995-12-31T23:59:59.9", "1995-12-31T23:59:59.900Z");
assertFormatParsed("1995-12-31T23:59:59.9", "1995-12-31T23:59:59.90Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59.000Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59.00Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59.0Z");
// kind of kludgy, but we have other tests for the actual date math
assertFormatParsed(DateFormatUtil.formatDate(p.parseMath("/DAY")), "NOW/DAY");
// as of Solr 1.3
assertFormatParsed("1995-12-31T00:00:00", "1995-12-31T23:59:59Z/DAY");
assertFormatParsed("1995-12-31T00:00:00", "1995-12-31T23:59:59.123Z/DAY");
assertFormatParsed("1995-12-31T00:00:00", "1995-12-31T23:59:59.123999Z/DAY");
} | public void testToInternal() throws Exception {
assertFormatParsed("1995-12-31T23:59:59.999", "1995-12-31T23:59:59.999666Z");
assertFormatParsed("1995-12-31T23:59:59.999", "1995-12-31T23:59:59.999Z");
assertFormatParsed("1995-12-31T23:59:59.99", "1995-12-31T23:59:59.99Z");
assertFormatParsed("1995-12-31T23:59:59.9", "1995-12-31T23:59:59.9Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59Z");
// here the input isn't in the canonical form, but we should be forgiving
assertFormatParsed("1995-12-31T23:59:59.99", "1995-12-31T23:59:59.990Z");
assertFormatParsed("1995-12-31T23:59:59.9", "1995-12-31T23:59:59.900Z");
assertFormatParsed("1995-12-31T23:59:59.9", "1995-12-31T23:59:59.90Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59.000Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59.00Z");
assertFormatParsed("1995-12-31T23:59:59", "1995-12-31T23:59:59.0Z");
// kind of kludgy, but we have other tests for the actual date math
assertFormatParsed(DateFormatUtil.formatDate(p.parseMath("/DAY")), "NOW/DAY");
// as of Solr 1.3
assertFormatParsed("1995-12-31T00:00:00", "1995-12-31T23:59:59Z/DAY");
assertFormatParsed("1995-12-31T00:00:00", "1995-12-31T23:59:59.123Z/DAY");
assertFormatParsed("1995-12-31T00:00:00", "1995-12-31T23:59:59.123999Z/DAY");
} |
17,582 | 0 | // Obtain Representations for playback. | @Override
public void onManifest(String contentId, MediaPresentationDescription manifest) {
Handler mainHandler = player.getMainHandler();
LoadControl loadControl = new DefaultLoadControl(new BufferPool(BUFFER_SEGMENT_SIZE));
DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
// Obtain Representations for playback.
int maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
ArrayList<Representation> audioRepresentationsList = new ArrayList<Representation>();
ArrayList<Representation> videoRepresentationsList = new ArrayList<Representation>();
Period period = manifest.periods.get(0);
boolean hasContentProtection = false;
for (int i = 0; i < period.adaptationSets.size(); i++) {
AdaptationSet adaptationSet = period.adaptationSets.get(i);
hasContentProtection |= adaptationSet.hasContentProtection();
int adaptationSetType = adaptationSet.type;
for (int j = 0; j < adaptationSet.representations.size(); j++) {
Representation representation = adaptationSet.representations.get(j);
if (adaptationSetType == AdaptationSet.TYPE_AUDIO) {
audioRepresentationsList.add(representation);
} else if (adaptationSetType == AdaptationSet.TYPE_VIDEO) {
Format format = representation.format;
if (format.width * format.height <= maxDecodableFrameSize) {
videoRepresentationsList.add(representation);
} else {
// The device isn't capable of playing this stream.
}
}
}
}
Representation[] videoRepresentations = new Representation[videoRepresentationsList.size()];
videoRepresentationsList.toArray(videoRepresentations);
// Check drm support if necessary.
DrmSessionManager drmSessionManager = null;
if (hasContentProtection) {
if (Util.SDK_INT < 18) {
callback.onRenderersError(new UnsupportedOperationException(
"Protected content not supported on API level " + Util.SDK_INT));
return;
}
try {
Pair<DrmSessionManager, Boolean> drmSessionManagerData =
V18Compat.getDrmSessionManagerData(player, drmCallback);
drmSessionManager = drmSessionManagerData.first;
if (!drmSessionManagerData.second) {
// HD streams require L1 security.
videoRepresentations = getSdRepresentations(videoRepresentations);
}
} catch (Exception e) {
callback.onRenderersError(e);
return;
}
}
// Build the video renderer.
DataSource videoDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
ChunkSource videoChunkSource;
String mimeType = videoRepresentations[0].format.mimeType;
if (mimeType.equals(MimeTypes.VIDEO_MP4)) {
videoChunkSource = new DashMp4ChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else if (mimeType.equals(MimeTypes.VIDEO_WEBM)) {
// TODO: Figure out how to query supported vpX resolutions. For now, restrict to standard
// definition streams.
videoRepresentations = getSdRepresentations(videoRepresentations);
videoChunkSource = new DashWebmChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else {
throw new IllegalStateException("Unexpected mime type: " + mimeType);
}
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_VIDEO);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource,
drmSessionManager, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
mainHandler, player, 50);
// Build the audio renderer.
final String[] audioTrackNames;
final MultiTrackChunkSource audioChunkSource;
final MediaCodecAudioTrackRenderer audioRenderer;
if (audioRepresentationsList.isEmpty()) {
audioTrackNames = null;
audioChunkSource = null;
audioRenderer = null;
} else {
DataSource audioDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
audioTrackNames = new String[audioRepresentationsList.size()];
ChunkSource[] audioChunkSources = new ChunkSource[audioRepresentationsList.size()];
FormatEvaluator audioEvaluator = new FormatEvaluator.FixedEvaluator();
for (int i = 0; i < audioRepresentationsList.size(); i++) {
Representation representation = audioRepresentationsList.get(i);
Format format = representation.format;
audioTrackNames[i] = format.id + " (" + format.numChannels + "ch, " +
format.audioSamplingRate + "Hz)";
audioChunkSources[i] = new DashMp4ChunkSource(audioDataSource,
audioEvaluator, representation);
}
audioChunkSource = new MultiTrackChunkSource(audioChunkSources);
SampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource, drmSessionManager, true,
mainHandler, player);
}
// Build the debug renderer.
TrackRenderer debugRenderer = debugTextView != null
? new DebugTrackRenderer(debugTextView, videoRenderer, videoSampleSource) : null;
// Invoke the callback.
String[][] trackNames = new String[DemoPlayer.RENDERER_COUNT][];
trackNames[DemoPlayer.TYPE_AUDIO] = audioTrackNames;
MultiTrackChunkSource[] multiTrackChunkSources =
new MultiTrackChunkSource[DemoPlayer.RENDERER_COUNT];
multiTrackChunkSources[DemoPlayer.TYPE_AUDIO] = audioChunkSource;
TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_DEBUG] = debugRenderer;
callback.onRenderers(trackNames, multiTrackChunkSources, renderers);
} | NONSATD | true | LoadControl loadControl = new DefaultLoadControl(new BufferPool(BUFFER_SEGMENT_SIZE));
DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
// Obtain Representations for playback.
int maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
ArrayList<Representation> audioRepresentationsList = new ArrayList<Representation>(); | @Override
public void onManifest(String contentId, MediaPresentationDescription manifest) {
Handler mainHandler = player.getMainHandler();
LoadControl loadControl = new DefaultLoadControl(new BufferPool(BUFFER_SEGMENT_SIZE));
DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
// Obtain Representations for playback.
int maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
ArrayList<Representation> audioRepresentationsList = new ArrayList<Representation>();
ArrayList<Representation> videoRepresentationsList = new ArrayList<Representation>();
Period period = manifest.periods.get(0);
boolean hasContentProtection = false;
for (int i = 0; i < period.adaptationSets.size(); i++) {
AdaptationSet adaptationSet = period.adaptationSets.get(i);
hasContentProtection |= adaptationSet.hasContentProtection();
int adaptationSetType = adaptationSet.type;
for (int j = 0; j < adaptationSet.representations.size(); j++) { | @Override
public void onManifest(String contentId, MediaPresentationDescription manifest) {
Handler mainHandler = player.getMainHandler();
LoadControl loadControl = new DefaultLoadControl(new BufferPool(BUFFER_SEGMENT_SIZE));
DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
// Obtain Representations for playback.
int maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
ArrayList<Representation> audioRepresentationsList = new ArrayList<Representation>();
ArrayList<Representation> videoRepresentationsList = new ArrayList<Representation>();
Period period = manifest.periods.get(0);
boolean hasContentProtection = false;
for (int i = 0; i < period.adaptationSets.size(); i++) {
AdaptationSet adaptationSet = period.adaptationSets.get(i);
hasContentProtection |= adaptationSet.hasContentProtection();
int adaptationSetType = adaptationSet.type;
for (int j = 0; j < adaptationSet.representations.size(); j++) {
Representation representation = adaptationSet.representations.get(j);
if (adaptationSetType == AdaptationSet.TYPE_AUDIO) {
audioRepresentationsList.add(representation);
} else if (adaptationSetType == AdaptationSet.TYPE_VIDEO) {
Format format = representation.format;
if (format.width * format.height <= maxDecodableFrameSize) {
videoRepresentationsList.add(representation);
} else {
// The device isn't capable of playing this stream.
} |
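
Note: the selection loop in this row keeps a video Representation only when its frame size fits the device's H.264 decoder. Below, that predicate pulled out as a named helper purely for illustration -- the Representation/Format types and the maxH264DecodableFrameSize source are the ones in the snippet above.

// True when the device's H.264 decoder can handle this representation's frame size.
static boolean isDecodable(Representation representation, int maxDecodableFrameSize) {
    Format format = representation.format;
    return format.width * format.height <= maxDecodableFrameSize;
}
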
17,582 | 1 | // The device isn't capable of playing this stream. | @Override
public void onManifest(String contentId, MediaPresentationDescription manifest) {
Handler mainHandler = player.getMainHandler();
LoadControl loadControl = new DefaultLoadControl(new BufferPool(BUFFER_SEGMENT_SIZE));
DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
// Obtain Representations for playback.
int maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
ArrayList<Representation> audioRepresentationsList = new ArrayList<Representation>();
ArrayList<Representation> videoRepresentationsList = new ArrayList<Representation>();
Period period = manifest.periods.get(0);
boolean hasContentProtection = false;
for (int i = 0; i < period.adaptationSets.size(); i++) {
AdaptationSet adaptationSet = period.adaptationSets.get(i);
hasContentProtection |= adaptationSet.hasContentProtection();
int adaptationSetType = adaptationSet.type;
for (int j = 0; j < adaptationSet.representations.size(); j++) {
Representation representation = adaptationSet.representations.get(j);
if (adaptationSetType == AdaptationSet.TYPE_AUDIO) {
audioRepresentationsList.add(representation);
} else if (adaptationSetType == AdaptationSet.TYPE_VIDEO) {
Format format = representation.format;
if (format.width * format.height <= maxDecodableFrameSize) {
videoRepresentationsList.add(representation);
} else {
// The device isn't capable of playing this stream.
}
}
}
}
Representation[] videoRepresentations = new Representation[videoRepresentationsList.size()];
videoRepresentationsList.toArray(videoRepresentations);
// Check drm support if necessary.
DrmSessionManager drmSessionManager = null;
if (hasContentProtection) {
if (Util.SDK_INT < 18) {
callback.onRenderersError(new UnsupportedOperationException(
"Protected content not supported on API level " + Util.SDK_INT));
return;
}
try {
Pair<DrmSessionManager, Boolean> drmSessionManagerData =
V18Compat.getDrmSessionManagerData(player, drmCallback);
drmSessionManager = drmSessionManagerData.first;
if (!drmSessionManagerData.second) {
// HD streams require L1 security.
videoRepresentations = getSdRepresentations(videoRepresentations);
}
} catch (Exception e) {
callback.onRenderersError(e);
return;
}
}
// Build the video renderer.
DataSource videoDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
ChunkSource videoChunkSource;
String mimeType = videoRepresentations[0].format.mimeType;
if (mimeType.equals(MimeTypes.VIDEO_MP4)) {
videoChunkSource = new DashMp4ChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else if (mimeType.equals(MimeTypes.VIDEO_WEBM)) {
// TODO: Figure out how to query supported vpX resolutions. For now, restrict to standard
// definition streams.
videoRepresentations = getSdRepresentations(videoRepresentations);
videoChunkSource = new DashWebmChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else {
throw new IllegalStateException("Unexpected mime type: " + mimeType);
}
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_VIDEO);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource,
drmSessionManager, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
mainHandler, player, 50);
// Build the audio renderer.
final String[] audioTrackNames;
final MultiTrackChunkSource audioChunkSource;
final MediaCodecAudioTrackRenderer audioRenderer;
if (audioRepresentationsList.isEmpty()) {
audioTrackNames = null;
audioChunkSource = null;
audioRenderer = null;
} else {
DataSource audioDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
audioTrackNames = new String[audioRepresentationsList.size()];
ChunkSource[] audioChunkSources = new ChunkSource[audioRepresentationsList.size()];
FormatEvaluator audioEvaluator = new FormatEvaluator.FixedEvaluator();
for (int i = 0; i < audioRepresentationsList.size(); i++) {
Representation representation = audioRepresentationsList.get(i);
Format format = representation.format;
audioTrackNames[i] = format.id + " (" + format.numChannels + "ch, " +
format.audioSamplingRate + "Hz)";
audioChunkSources[i] = new DashMp4ChunkSource(audioDataSource,
audioEvaluator, representation);
}
audioChunkSource = new MultiTrackChunkSource(audioChunkSources);
SampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource, drmSessionManager, true,
mainHandler, player);
}
// Build the debug renderer.
TrackRenderer debugRenderer = debugTextView != null
? new DebugTrackRenderer(debugTextView, videoRenderer, videoSampleSource) : null;
// Invoke the callback.
String[][] trackNames = new String[DemoPlayer.RENDERER_COUNT][];
trackNames[DemoPlayer.TYPE_AUDIO] = audioTrackNames;
MultiTrackChunkSource[] multiTrackChunkSources =
new MultiTrackChunkSource[DemoPlayer.RENDERER_COUNT];
multiTrackChunkSources[DemoPlayer.TYPE_AUDIO] = audioChunkSource;
TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_DEBUG] = debugRenderer;
callback.onRenderers(trackNames, multiTrackChunkSources, renderers);
} | NONSATD | true | videoRepresentationsList.add(representation);
} else {
// The device isn't capable of playing this stream.
}
} | int adaptationSetType = adaptationSet.type;
for (int j = 0; j < adaptationSet.representations.size(); j++) {
Representation representation = adaptationSet.representations.get(j);
if (adaptationSetType == AdaptationSet.TYPE_AUDIO) {
audioRepresentationsList.add(representation);
} else if (adaptationSetType == AdaptationSet.TYPE_VIDEO) {
Format format = representation.format;
if (format.width * format.height <= maxDecodableFrameSize) {
videoRepresentationsList.add(representation);
} else {
// The device isn't capable of playing this stream.
}
}
}
}
Representation[] videoRepresentations = new Representation[videoRepresentationsList.size()];
videoRepresentationsList.toArray(videoRepresentations);
// Check drm support if necessary.
DrmSessionManager drmSessionManager = null;
if (hasContentProtection) {
if (Util.SDK_INT < 18) { | DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
// Obtain Representations for playback.
int maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
ArrayList<Representation> audioRepresentationsList = new ArrayList<Representation>();
ArrayList<Representation> videoRepresentationsList = new ArrayList<Representation>();
Period period = manifest.periods.get(0);
boolean hasContentProtection = false;
for (int i = 0; i < period.adaptationSets.size(); i++) {
AdaptationSet adaptationSet = period.adaptationSets.get(i);
hasContentProtection |= adaptationSet.hasContentProtection();
int adaptationSetType = adaptationSet.type;
for (int j = 0; j < adaptationSet.representations.size(); j++) {
Representation representation = adaptationSet.representations.get(j);
if (adaptationSetType == AdaptationSet.TYPE_AUDIO) {
audioRepresentationsList.add(representation);
} else if (adaptationSetType == AdaptationSet.TYPE_VIDEO) {
Format format = representation.format;
if (format.width * format.height <= maxDecodableFrameSize) {
videoRepresentationsList.add(representation);
} else {
// The device isn't capable of playing this stream.
}
}
}
}
Representation[] videoRepresentations = new Representation[videoRepresentationsList.size()];
videoRepresentationsList.toArray(videoRepresentations);
// Check drm support if necessary.
DrmSessionManager drmSessionManager = null;
if (hasContentProtection) {
if (Util.SDK_INT < 18) {
callback.onRenderersError(new UnsupportedOperationException(
"Protected content not supported on API level " + Util.SDK_INT));
return;
}
try {
Pair<DrmSessionManager, Boolean> drmSessionManagerData =
V18Compat.getDrmSessionManagerData(player, drmCallback);
drmSessionManager = drmSessionManagerData.first;
if (!drmSessionManagerData.second) {
// HD streams require L1 security. |
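
Note: the context above truncates right at the L1-security comment, and the body of getSdRepresentations never appears in these rows. Below is a plausible sketch, clearly a guess: keep only sub-720p variants so devices without L1-secured Widevine never receive HD streams. The 1280x720 cutoff and the method body are assumptions, not the demo's real code.

// Hypothetical filter: drop representations above 720p. The "Sketch" suffix marks this
// as a guess at what getSdRepresentations does, not the actual implementation.
private static Representation[] getSdRepresentationsSketch(Representation[] input) {
    ArrayList<Representation> sd = new ArrayList<Representation>(); // java.util.ArrayList
    for (Representation representation : input) {
        Format format = representation.format;
        if (format.width <= 1280 && format.height <= 720) {
            sd.add(representation);
        }
    }
    return sd.toArray(new Representation[sd.size()]);
}
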
17,582 | 2 | // Check drm support if necessary. | @Override
public void onManifest(String contentId, MediaPresentationDescription manifest) {
Handler mainHandler = player.getMainHandler();
LoadControl loadControl = new DefaultLoadControl(new BufferPool(BUFFER_SEGMENT_SIZE));
DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
// Obtain Representations for playback.
int maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
ArrayList<Representation> audioRepresentationsList = new ArrayList<Representation>();
ArrayList<Representation> videoRepresentationsList = new ArrayList<Representation>();
Period period = manifest.periods.get(0);
boolean hasContentProtection = false;
for (int i = 0; i < period.adaptationSets.size(); i++) {
AdaptationSet adaptationSet = period.adaptationSets.get(i);
hasContentProtection |= adaptationSet.hasContentProtection();
int adaptationSetType = adaptationSet.type;
for (int j = 0; j < adaptationSet.representations.size(); j++) {
Representation representation = adaptationSet.representations.get(j);
if (adaptationSetType == AdaptationSet.TYPE_AUDIO) {
audioRepresentationsList.add(representation);
} else if (adaptationSetType == AdaptationSet.TYPE_VIDEO) {
Format format = representation.format;
if (format.width * format.height <= maxDecodableFrameSize) {
videoRepresentationsList.add(representation);
} else {
// The device isn't capable of playing this stream.
}
}
}
}
Representation[] videoRepresentations = new Representation[videoRepresentationsList.size()];
videoRepresentationsList.toArray(videoRepresentations);
// Check drm support if necessary.
DrmSessionManager drmSessionManager = null;
if (hasContentProtection) {
if (Util.SDK_INT < 18) {
callback.onRenderersError(new UnsupportedOperationException(
"Protected content not supported on API level " + Util.SDK_INT));
return;
}
try {
Pair<DrmSessionManager, Boolean> drmSessionManagerData =
V18Compat.getDrmSessionManagerData(player, drmCallback);
drmSessionManager = drmSessionManagerData.first;
if (!drmSessionManagerData.second) {
// HD streams require L1 security.
videoRepresentations = getSdRepresentations(videoRepresentations);
}
} catch (Exception e) {
callback.onRenderersError(e);
return;
}
}
// Build the video renderer.
DataSource videoDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
ChunkSource videoChunkSource;
String mimeType = videoRepresentations[0].format.mimeType;
if (mimeType.equals(MimeTypes.VIDEO_MP4)) {
videoChunkSource = new DashMp4ChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else if (mimeType.equals(MimeTypes.VIDEO_WEBM)) {
// TODO: Figure out how to query supported vpX resolutions. For now, restrict to standard
// definition streams.
videoRepresentations = getSdRepresentations(videoRepresentations);
videoChunkSource = new DashWebmChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else {
throw new IllegalStateException("Unexpected mime type: " + mimeType);
}
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_VIDEO);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource,
drmSessionManager, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
mainHandler, player, 50);
// Build the audio renderer.
final String[] audioTrackNames;
final MultiTrackChunkSource audioChunkSource;
final MediaCodecAudioTrackRenderer audioRenderer;
if (audioRepresentationsList.isEmpty()) {
audioTrackNames = null;
audioChunkSource = null;
audioRenderer = null;
} else {
DataSource audioDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
audioTrackNames = new String[audioRepresentationsList.size()];
ChunkSource[] audioChunkSources = new ChunkSource[audioRepresentationsList.size()];
FormatEvaluator audioEvaluator = new FormatEvaluator.FixedEvaluator();
for (int i = 0; i < audioRepresentationsList.size(); i++) {
Representation representation = audioRepresentationsList.get(i);
Format format = representation.format;
audioTrackNames[i] = format.id + " (" + format.numChannels + "ch, " +
format.audioSamplingRate + "Hz)";
audioChunkSources[i] = new DashMp4ChunkSource(audioDataSource,
audioEvaluator, representation);
}
audioChunkSource = new MultiTrackChunkSource(audioChunkSources);
SampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource, drmSessionManager, true,
mainHandler, player);
}
// Build the debug renderer.
TrackRenderer debugRenderer = debugTextView != null
? new DebugTrackRenderer(debugTextView, videoRenderer, videoSampleSource) : null;
// Invoke the callback.
String[][] trackNames = new String[DemoPlayer.RENDERER_COUNT][];
trackNames[DemoPlayer.TYPE_AUDIO] = audioTrackNames;
MultiTrackChunkSource[] multiTrackChunkSources =
new MultiTrackChunkSource[DemoPlayer.RENDERER_COUNT];
multiTrackChunkSources[DemoPlayer.TYPE_AUDIO] = audioChunkSource;
TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_DEBUG] = debugRenderer;
callback.onRenderers(trackNames, multiTrackChunkSources, renderers);
} | NONSATD | true | Representation[] videoRepresentations = new Representation[videoRepresentationsList.size()];
videoRepresentationsList.toArray(videoRepresentations);
// Check drm support if necessary.
DrmSessionManager drmSessionManager = null;
if (hasContentProtection) { | if (format.width * format.height <= maxDecodableFrameSize) {
videoRepresentationsList.add(representation);
} else {
// The device isn't capable of playing this stream.
}
}
}
}
Representation[] videoRepresentations = new Representation[videoRepresentationsList.size()];
videoRepresentationsList.toArray(videoRepresentations);
// Check drm support if necessary.
DrmSessionManager drmSessionManager = null;
if (hasContentProtection) {
if (Util.SDK_INT < 18) {
callback.onRenderersError(new UnsupportedOperationException(
"Protected content not supported on API level " + Util.SDK_INT));
return;
}
try {
Pair<DrmSessionManager, Boolean> drmSessionManagerData =
V18Compat.getDrmSessionManagerData(player, drmCallback); | for (int i = 0; i < period.adaptationSets.size(); i++) {
AdaptationSet adaptationSet = period.adaptationSets.get(i);
hasContentProtection |= adaptationSet.hasContentProtection();
int adaptationSetType = adaptationSet.type;
for (int j = 0; j < adaptationSet.representations.size(); j++) {
Representation representation = adaptationSet.representations.get(j);
if (adaptationSetType == AdaptationSet.TYPE_AUDIO) {
audioRepresentationsList.add(representation);
} else if (adaptationSetType == AdaptationSet.TYPE_VIDEO) {
Format format = representation.format;
if (format.width * format.height <= maxDecodableFrameSize) {
videoRepresentationsList.add(representation);
} else {
// The device isn't capable of playing this stream.
}
}
}
}
Representation[] videoRepresentations = new Representation[videoRepresentationsList.size()];
videoRepresentationsList.toArray(videoRepresentations);
// Check drm support if necessary.
DrmSessionManager drmSessionManager = null;
if (hasContentProtection) {
if (Util.SDK_INT < 18) {
callback.onRenderersError(new UnsupportedOperationException(
"Protected content not supported on API level " + Util.SDK_INT));
return;
}
try {
Pair<DrmSessionManager, Boolean> drmSessionManagerData =
V18Compat.getDrmSessionManagerData(player, drmCallback);
drmSessionManager = drmSessionManagerData.first;
if (!drmSessionManagerData.second) {
// HD streams require L1 security.
videoRepresentations = getSdRepresentations(videoRepresentations);
}
} catch (Exception e) {
callback.onRenderersError(e);
return;
}
} |
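
Note: the DRM branch above gates protected playback on API level 18, failing through the renderer callback otherwise. Below, the same guard factored into one place as a sketch; the method name is illustrative, and callback/Util are the field and type the snippet itself uses.

// Returns false (after reporting the error) when protected content cannot play here:
// MediaDrm-backed sessions need API 18+.
private boolean checkDrmSupported(boolean hasContentProtection) {
    if (hasContentProtection && Util.SDK_INT < 18) {
        callback.onRenderersError(new UnsupportedOperationException(
            "Protected content not supported on API level " + Util.SDK_INT));
        return false;
    }
    return true;
}
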
17,582 | 3 | // HD streams require L1 security. | @Override
public void onManifest(String contentId, MediaPresentationDescription manifest) {
Handler mainHandler = player.getMainHandler();
LoadControl loadControl = new DefaultLoadControl(new BufferPool(BUFFER_SEGMENT_SIZE));
DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
// Obtain Representations for playback.
int maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
ArrayList<Representation> audioRepresentationsList = new ArrayList<Representation>();
ArrayList<Representation> videoRepresentationsList = new ArrayList<Representation>();
Period period = manifest.periods.get(0);
boolean hasContentProtection = false;
for (int i = 0; i < period.adaptationSets.size(); i++) {
AdaptationSet adaptationSet = period.adaptationSets.get(i);
hasContentProtection |= adaptationSet.hasContentProtection();
int adaptationSetType = adaptationSet.type;
for (int j = 0; j < adaptationSet.representations.size(); j++) {
Representation representation = adaptationSet.representations.get(j);
if (adaptationSetType == AdaptationSet.TYPE_AUDIO) {
audioRepresentationsList.add(representation);
} else if (adaptationSetType == AdaptationSet.TYPE_VIDEO) {
Format format = representation.format;
if (format.width * format.height <= maxDecodableFrameSize) {
videoRepresentationsList.add(representation);
} else {
// The device isn't capable of playing this stream.
}
}
}
}
Representation[] videoRepresentations = new Representation[videoRepresentationsList.size()];
videoRepresentationsList.toArray(videoRepresentations);
// Check drm support if necessary.
DrmSessionManager drmSessionManager = null;
if (hasContentProtection) {
if (Util.SDK_INT < 18) {
callback.onRenderersError(new UnsupportedOperationException(
"Protected content not supported on API level " + Util.SDK_INT));
return;
}
try {
Pair<DrmSessionManager, Boolean> drmSessionManagerData =
V18Compat.getDrmSessionManagerData(player, drmCallback);
drmSessionManager = drmSessionManagerData.first;
if (!drmSessionManagerData.second) {
// HD streams require L1 security.
videoRepresentations = getSdRepresentations(videoRepresentations);
}
} catch (Exception e) {
callback.onRenderersError(e);
return;
}
}
// Build the video renderer.
DataSource videoDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
ChunkSource videoChunkSource;
String mimeType = videoRepresentations[0].format.mimeType;
if (mimeType.equals(MimeTypes.VIDEO_MP4)) {
videoChunkSource = new DashMp4ChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else if (mimeType.equals(MimeTypes.VIDEO_WEBM)) {
// TODO: Figure out how to query supported vpX resolutions. For now, restrict to standard
// definition streams.
videoRepresentations = getSdRepresentations(videoRepresentations);
videoChunkSource = new DashWebmChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else {
throw new IllegalStateException("Unexpected mime type: " + mimeType);
}
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_VIDEO);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource,
drmSessionManager, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
mainHandler, player, 50);
// Build the audio renderer.
final String[] audioTrackNames;
final MultiTrackChunkSource audioChunkSource;
final MediaCodecAudioTrackRenderer audioRenderer;
if (audioRepresentationsList.isEmpty()) {
audioTrackNames = null;
audioChunkSource = null;
audioRenderer = null;
} else {
DataSource audioDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
audioTrackNames = new String[audioRepresentationsList.size()];
ChunkSource[] audioChunkSources = new ChunkSource[audioRepresentationsList.size()];
FormatEvaluator audioEvaluator = new FormatEvaluator.FixedEvaluator();
for (int i = 0; i < audioRepresentationsList.size(); i++) {
Representation representation = audioRepresentationsList.get(i);
Format format = representation.format;
audioTrackNames[i] = format.id + " (" + format.numChannels + "ch, " +
format.audioSamplingRate + "Hz)";
audioChunkSources[i] = new DashMp4ChunkSource(audioDataSource,
audioEvaluator, representation);
}
audioChunkSource = new MultiTrackChunkSource(audioChunkSources);
SampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource, drmSessionManager, true,
mainHandler, player);
}
// Build the debug renderer.
TrackRenderer debugRenderer = debugTextView != null
? new DebugTrackRenderer(debugTextView, videoRenderer, videoSampleSource) : null;
// Invoke the callback.
String[][] trackNames = new String[DemoPlayer.RENDERER_COUNT][];
trackNames[DemoPlayer.TYPE_AUDIO] = audioTrackNames;
MultiTrackChunkSource[] multiTrackChunkSources =
new MultiTrackChunkSource[DemoPlayer.RENDERER_COUNT];
multiTrackChunkSources[DemoPlayer.TYPE_AUDIO] = audioChunkSource;
TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_DEBUG] = debugRenderer;
callback.onRenderers(trackNames, multiTrackChunkSources, renderers);
} | NONSATD | true | drmSessionManager = drmSessionManagerData.first;
if (!drmSessionManagerData.second) {
// HD streams require L1 security.
videoRepresentations = getSdRepresentations(videoRepresentations);
} | if (Util.SDK_INT < 18) {
callback.onRenderersError(new UnsupportedOperationException(
"Protected content not supported on API level " + Util.SDK_INT));
return;
}
try {
Pair<DrmSessionManager, Boolean> drmSessionManagerData =
V18Compat.getDrmSessionManagerData(player, drmCallback);
drmSessionManager = drmSessionManagerData.first;
if (!drmSessionManagerData.second) {
// HD streams require L1 security.
videoRepresentations = getSdRepresentations(videoRepresentations);
}
} catch (Exception e) {
callback.onRenderersError(e);
return;
}
}
// Build the video renderer.
DataSource videoDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
ChunkSource videoChunkSource; | // The device isn't capable of playing this stream.
}
}
}
}
Representation[] videoRepresentations = new Representation[videoRepresentationsList.size()];
videoRepresentationsList.toArray(videoRepresentations);
// Check drm support if necessary.
DrmSessionManager drmSessionManager = null;
if (hasContentProtection) {
if (Util.SDK_INT < 18) {
callback.onRenderersError(new UnsupportedOperationException(
"Protected content not supported on API level " + Util.SDK_INT));
return;
}
try {
Pair<DrmSessionManager, Boolean> drmSessionManagerData =
V18Compat.getDrmSessionManagerData(player, drmCallback);
drmSessionManager = drmSessionManagerData.first;
if (!drmSessionManagerData.second) {
// HD streams require L1 security.
videoRepresentations = getSdRepresentations(videoRepresentations);
}
} catch (Exception e) {
callback.onRenderersError(e);
return;
}
}
// Build the video renderer.
DataSource videoDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
ChunkSource videoChunkSource;
String mimeType = videoRepresentations[0].format.mimeType;
if (mimeType.equals(MimeTypes.VIDEO_MP4)) {
videoChunkSource = new DashMp4ChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else if (mimeType.equals(MimeTypes.VIDEO_WEBM)) {
// TODO: Figure out how to query supported vpX resolutions. For now, restrict to standard
// definition streams.
videoRepresentations = getSdRepresentations(videoRepresentations);
videoChunkSource = new DashWebmChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations); |
17,582 | 4 | // Build the video renderer. | @Override
public void onManifest(String contentId, MediaPresentationDescription manifest) {
Handler mainHandler = player.getMainHandler();
LoadControl loadControl = new DefaultLoadControl(new BufferPool(BUFFER_SEGMENT_SIZE));
DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
// Obtain Representations for playback.
int maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
ArrayList<Representation> audioRepresentationsList = new ArrayList<Representation>();
ArrayList<Representation> videoRepresentationsList = new ArrayList<Representation>();
Period period = manifest.periods.get(0);
boolean hasContentProtection = false;
for (int i = 0; i < period.adaptationSets.size(); i++) {
AdaptationSet adaptationSet = period.adaptationSets.get(i);
hasContentProtection |= adaptationSet.hasContentProtection();
int adaptationSetType = adaptationSet.type;
for (int j = 0; j < adaptationSet.representations.size(); j++) {
Representation representation = adaptationSet.representations.get(j);
if (adaptationSetType == AdaptationSet.TYPE_AUDIO) {
audioRepresentationsList.add(representation);
} else if (adaptationSetType == AdaptationSet.TYPE_VIDEO) {
Format format = representation.format;
if (format.width * format.height <= maxDecodableFrameSize) {
videoRepresentationsList.add(representation);
} else {
// The device isn't capable of playing this stream.
}
}
}
}
Representation[] videoRepresentations = new Representation[videoRepresentationsList.size()];
videoRepresentationsList.toArray(videoRepresentations);
// Check drm support if necessary.
DrmSessionManager drmSessionManager = null;
if (hasContentProtection) {
if (Util.SDK_INT < 18) {
callback.onRenderersError(new UnsupportedOperationException(
"Protected content not supported on API level " + Util.SDK_INT));
return;
}
try {
Pair<DrmSessionManager, Boolean> drmSessionManagerData =
V18Compat.getDrmSessionManagerData(player, drmCallback);
drmSessionManager = drmSessionManagerData.first;
if (!drmSessionManagerData.second) {
// HD streams require L1 security.
videoRepresentations = getSdRepresentations(videoRepresentations);
}
} catch (Exception e) {
callback.onRenderersError(e);
return;
}
}
// Build the video renderer.
DataSource videoDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
ChunkSource videoChunkSource;
String mimeType = videoRepresentations[0].format.mimeType;
if (mimeType.equals(MimeTypes.VIDEO_MP4)) {
videoChunkSource = new DashMp4ChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else if (mimeType.equals(MimeTypes.VIDEO_WEBM)) {
// TODO: Figure out how to query supported vpX resolutions. For now, restrict to standard
// definition streams.
videoRepresentations = getSdRepresentations(videoRepresentations);
videoChunkSource = new DashWebmChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else {
throw new IllegalStateException("Unexpected mime type: " + mimeType);
}
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_VIDEO);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource,
drmSessionManager, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
mainHandler, player, 50);
// Build the audio renderer.
final String[] audioTrackNames;
final MultiTrackChunkSource audioChunkSource;
final MediaCodecAudioTrackRenderer audioRenderer;
if (audioRepresentationsList.isEmpty()) {
audioTrackNames = null;
audioChunkSource = null;
audioRenderer = null;
} else {
DataSource audioDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
audioTrackNames = new String[audioRepresentationsList.size()];
ChunkSource[] audioChunkSources = new ChunkSource[audioRepresentationsList.size()];
FormatEvaluator audioEvaluator = new FormatEvaluator.FixedEvaluator();
for (int i = 0; i < audioRepresentationsList.size(); i++) {
Representation representation = audioRepresentationsList.get(i);
Format format = representation.format;
audioTrackNames[i] = format.id + " (" + format.numChannels + "ch, " +
format.audioSamplingRate + "Hz)";
audioChunkSources[i] = new DashMp4ChunkSource(audioDataSource,
audioEvaluator, representation);
}
audioChunkSource = new MultiTrackChunkSource(audioChunkSources);
SampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource, drmSessionManager, true,
mainHandler, player);
}
// Build the debug renderer.
TrackRenderer debugRenderer = debugTextView != null
? new DebugTrackRenderer(debugTextView, videoRenderer, videoSampleSource) : null;
// Invoke the callback.
String[][] trackNames = new String[DemoPlayer.RENDERER_COUNT][];
trackNames[DemoPlayer.TYPE_AUDIO] = audioTrackNames;
MultiTrackChunkSource[] multiTrackChunkSources =
new MultiTrackChunkSource[DemoPlayer.RENDERER_COUNT];
multiTrackChunkSources[DemoPlayer.TYPE_AUDIO] = audioChunkSource;
TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_DEBUG] = debugRenderer;
callback.onRenderers(trackNames, multiTrackChunkSources, renderers);
} | NONSATD | true | }
}
// Build the video renderer.
DataSource videoDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
ChunkSource videoChunkSource; | drmSessionManager = drmSessionManagerData.first;
if (!drmSessionManagerData.second) {
// HD streams require L1 security.
videoRepresentations = getSdRepresentations(videoRepresentations);
}
} catch (Exception e) {
callback.onRenderersError(e);
return;
}
}
// Build the video renderer.
DataSource videoDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
ChunkSource videoChunkSource;
String mimeType = videoRepresentations[0].format.mimeType;
if (mimeType.equals(MimeTypes.VIDEO_MP4)) {
videoChunkSource = new DashMp4ChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else if (mimeType.equals(MimeTypes.VIDEO_WEBM)) {
// TODO: Figure out how to query supported vpX resolutions. For now, restrict to standard
// definition streams.
videoRepresentations = getSdRepresentations(videoRepresentations); | DrmSessionManager drmSessionManager = null;
if (hasContentProtection) {
if (Util.SDK_INT < 18) {
callback.onRenderersError(new UnsupportedOperationException(
"Protected content not supported on API level " + Util.SDK_INT));
return;
}
try {
Pair<DrmSessionManager, Boolean> drmSessionManagerData =
V18Compat.getDrmSessionManagerData(player, drmCallback);
drmSessionManager = drmSessionManagerData.first;
if (!drmSessionManagerData.second) {
// HD streams require L1 security.
videoRepresentations = getSdRepresentations(videoRepresentations);
}
} catch (Exception e) {
callback.onRenderersError(e);
return;
}
}
// Build the video renderer.
DataSource videoDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
ChunkSource videoChunkSource;
String mimeType = videoRepresentations[0].format.mimeType;
if (mimeType.equals(MimeTypes.VIDEO_MP4)) {
videoChunkSource = new DashMp4ChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else if (mimeType.equals(MimeTypes.VIDEO_WEBM)) {
// TODO: Figure out how to query supported vpX resolutions. For now, restrict to standard
// definition streams.
videoRepresentations = getSdRepresentations(videoRepresentations);
videoChunkSource = new DashWebmChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else {
throw new IllegalStateException("Unexpected mime type: " + mimeType);
}
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_VIDEO);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource,
drmSessionManager, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, |
17,582 | 5 | // TODO: Figure out how to query supported vpX resolutions. For now, restrict to standard
// definition streams. | @Override
public void onManifest(String contentId, MediaPresentationDescription manifest) {
Handler mainHandler = player.getMainHandler();
LoadControl loadControl = new DefaultLoadControl(new BufferPool(BUFFER_SEGMENT_SIZE));
DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
// Obtain Representations for playback.
int maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
ArrayList<Representation> audioRepresentationsList = new ArrayList<Representation>();
ArrayList<Representation> videoRepresentationsList = new ArrayList<Representation>();
Period period = manifest.periods.get(0);
boolean hasContentProtection = false;
for (int i = 0; i < period.adaptationSets.size(); i++) {
AdaptationSet adaptationSet = period.adaptationSets.get(i);
hasContentProtection |= adaptationSet.hasContentProtection();
int adaptationSetType = adaptationSet.type;
for (int j = 0; j < adaptationSet.representations.size(); j++) {
Representation representation = adaptationSet.representations.get(j);
if (adaptationSetType == AdaptationSet.TYPE_AUDIO) {
audioRepresentationsList.add(representation);
} else if (adaptationSetType == AdaptationSet.TYPE_VIDEO) {
Format format = representation.format;
if (format.width * format.height <= maxDecodableFrameSize) {
videoRepresentationsList.add(representation);
} else {
// The device isn't capable of playing this stream.
}
}
}
}
Representation[] videoRepresentations = new Representation[videoRepresentationsList.size()];
videoRepresentationsList.toArray(videoRepresentations);
// Check drm support if necessary.
DrmSessionManager drmSessionManager = null;
if (hasContentProtection) {
if (Util.SDK_INT < 18) {
callback.onRenderersError(new UnsupportedOperationException(
"Protected content not supported on API level " + Util.SDK_INT));
return;
}
try {
Pair<DrmSessionManager, Boolean> drmSessionManagerData =
V18Compat.getDrmSessionManagerData(player, drmCallback);
drmSessionManager = drmSessionManagerData.first;
if (!drmSessionManagerData.second) {
// HD streams require L1 security.
videoRepresentations = getSdRepresentations(videoRepresentations);
}
} catch (Exception e) {
callback.onRenderersError(e);
return;
}
}
// Build the video renderer.
DataSource videoDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
ChunkSource videoChunkSource;
String mimeType = videoRepresentations[0].format.mimeType;
if (mimeType.equals(MimeTypes.VIDEO_MP4)) {
videoChunkSource = new DashMp4ChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else if (mimeType.equals(MimeTypes.VIDEO_WEBM)) {
// TODO: Figure out how to query supported vpX resolutions. For now, restrict to standard
// definition streams.
videoRepresentations = getSdRepresentations(videoRepresentations);
videoChunkSource = new DashWebmChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else {
throw new IllegalStateException("Unexpected mime type: " + mimeType);
}
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_VIDEO);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource,
drmSessionManager, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
mainHandler, player, 50);
// Build the audio renderer.
final String[] audioTrackNames;
final MultiTrackChunkSource audioChunkSource;
final MediaCodecAudioTrackRenderer audioRenderer;
if (audioRepresentationsList.isEmpty()) {
audioTrackNames = null;
audioChunkSource = null;
audioRenderer = null;
} else {
DataSource audioDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
audioTrackNames = new String[audioRepresentationsList.size()];
ChunkSource[] audioChunkSources = new ChunkSource[audioRepresentationsList.size()];
FormatEvaluator audioEvaluator = new FormatEvaluator.FixedEvaluator();
for (int i = 0; i < audioRepresentationsList.size(); i++) {
Representation representation = audioRepresentationsList.get(i);
Format format = representation.format;
audioTrackNames[i] = format.id + " (" + format.numChannels + "ch, " +
format.audioSamplingRate + "Hz)";
audioChunkSources[i] = new DashMp4ChunkSource(audioDataSource,
audioEvaluator, representation);
}
audioChunkSource = new MultiTrackChunkSource(audioChunkSources);
SampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource, drmSessionManager, true,
mainHandler, player);
}
// Build the debug renderer.
TrackRenderer debugRenderer = debugTextView != null
? new DebugTrackRenderer(debugTextView, videoRenderer, videoSampleSource) : null;
// Invoke the callback.
String[][] trackNames = new String[DemoPlayer.RENDERER_COUNT][];
trackNames[DemoPlayer.TYPE_AUDIO] = audioTrackNames;
MultiTrackChunkSource[] multiTrackChunkSources =
new MultiTrackChunkSource[DemoPlayer.RENDERER_COUNT];
multiTrackChunkSources[DemoPlayer.TYPE_AUDIO] = audioChunkSource;
TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_DEBUG] = debugRenderer;
callback.onRenderers(trackNames, multiTrackChunkSources, renderers);
} | DESIGN | true | new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else if (mimeType.equals(MimeTypes.VIDEO_WEBM)) {
// TODO: Figure out how to query supported vpX resolutions. For now, restrict to standard
// definition streams.
videoRepresentations = getSdRepresentations(videoRepresentations);
videoChunkSource = new DashWebmChunkSource(videoDataSource, | }
}
// Build the video renderer.
DataSource videoDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
ChunkSource videoChunkSource;
String mimeType = videoRepresentations[0].format.mimeType;
if (mimeType.equals(MimeTypes.VIDEO_MP4)) {
videoChunkSource = new DashMp4ChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else if (mimeType.equals(MimeTypes.VIDEO_WEBM)) {
// TODO: Figure out how to query supported vpX resolutions. For now, restrict to standard
// definition streams.
videoRepresentations = getSdRepresentations(videoRepresentations);
videoChunkSource = new DashWebmChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else {
throw new IllegalStateException("Unexpected mime type: " + mimeType);
}
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_VIDEO);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource, | Pair<DrmSessionManager, Boolean> drmSessionManagerData =
V18Compat.getDrmSessionManagerData(player, drmCallback);
drmSessionManager = drmSessionManagerData.first;
if (!drmSessionManagerData.second) {
// HD streams require L1 security.
videoRepresentations = getSdRepresentations(videoRepresentations);
}
} catch (Exception e) {
callback.onRenderersError(e);
return;
}
}
// Build the video renderer.
DataSource videoDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
ChunkSource videoChunkSource;
String mimeType = videoRepresentations[0].format.mimeType;
if (mimeType.equals(MimeTypes.VIDEO_MP4)) {
videoChunkSource = new DashMp4ChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else if (mimeType.equals(MimeTypes.VIDEO_WEBM)) {
// TODO: Figure out how to query supported vpX resolutions. For now, restrict to standard
// definition streams.
videoRepresentations = getSdRepresentations(videoRepresentations);
videoChunkSource = new DashWebmChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else {
throw new IllegalStateException("Unexpected mime type: " + mimeType);
}
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_VIDEO);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource,
drmSessionManager, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
mainHandler, player, 50);
// Build the audio renderer.
final String[] audioTrackNames;
final MultiTrackChunkSource audioChunkSource;
final MediaCodecAudioTrackRenderer audioRenderer;
if (audioRepresentationsList.isEmpty()) {
audioTrackNames = null;
audioChunkSource = null;
audioRenderer = null; |
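The getSdRepresentations helper that both the vpX branch and the DRM fallback above rely on is invoked repeatedly but never defined in these snippets. The following is a minimal sketch of what such a filter could look like, using only the format.width/format.height fields already present in the surrounding code; the method body and the 1280x720 cutoff are assumptions, not the demo's actual implementation.

// Hypothetical sketch of the getSdRepresentations helper referenced above;
// the real demo's body and its exact resolution cutoff are not shown in this data.
private static Representation[] getSdRepresentations(Representation[] representations) {
  ArrayList<Representation> sdRepresentations = new ArrayList<Representation>();
  for (Representation representation : representations) {
    Format format = representation.format;
    // Keep only streams at or below a nominal SD/720p bound (assumed cutoff).
    if (format.width <= 1280 && format.height <= 720) {
      sdRepresentations.add(representation);
    }
  }
  return sdRepresentations.toArray(new Representation[sdRepresentations.size()]);
}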
17,582 | 6 | // Build the audio renderer. | @Override
public void onManifest(String contentId, MediaPresentationDescription manifest) {
Handler mainHandler = player.getMainHandler();
LoadControl loadControl = new DefaultLoadControl(new BufferPool(BUFFER_SEGMENT_SIZE));
DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
// Obtain Representations for playback.
int maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
ArrayList<Representation> audioRepresentationsList = new ArrayList<Representation>();
ArrayList<Representation> videoRepresentationsList = new ArrayList<Representation>();
Period period = manifest.periods.get(0);
boolean hasContentProtection = false;
for (int i = 0; i < period.adaptationSets.size(); i++) {
AdaptationSet adaptationSet = period.adaptationSets.get(i);
hasContentProtection |= adaptationSet.hasContentProtection();
int adaptationSetType = adaptationSet.type;
for (int j = 0; j < adaptationSet.representations.size(); j++) {
Representation representation = adaptationSet.representations.get(j);
if (adaptationSetType == AdaptationSet.TYPE_AUDIO) {
audioRepresentationsList.add(representation);
} else if (adaptationSetType == AdaptationSet.TYPE_VIDEO) {
Format format = representation.format;
if (format.width * format.height <= maxDecodableFrameSize) {
videoRepresentationsList.add(representation);
} else {
// The device isn't capable of playing this stream.
}
}
}
}
Representation[] videoRepresentations = new Representation[videoRepresentationsList.size()];
videoRepresentationsList.toArray(videoRepresentations);
// Check drm support if necessary.
DrmSessionManager drmSessionManager = null;
if (hasContentProtection) {
if (Util.SDK_INT < 18) {
callback.onRenderersError(new UnsupportedOperationException(
"Protected content not supported on API level " + Util.SDK_INT));
return;
}
try {
Pair<DrmSessionManager, Boolean> drmSessionManagerData =
V18Compat.getDrmSessionManagerData(player, drmCallback);
drmSessionManager = drmSessionManagerData.first;
if (!drmSessionManagerData.second) {
// HD streams require L1 security.
videoRepresentations = getSdRepresentations(videoRepresentations);
}
} catch (Exception e) {
callback.onRenderersError(e);
return;
}
}
// Build the video renderer.
DataSource videoDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
ChunkSource videoChunkSource;
String mimeType = videoRepresentations[0].format.mimeType;
if (mimeType.equals(MimeTypes.VIDEO_MP4)) {
videoChunkSource = new DashMp4ChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else if (mimeType.equals(MimeTypes.VIDEO_WEBM)) {
// TODO: Figure out how to query supported vpX resolutions. For now, restrict to standard
// definition streams.
videoRepresentations = getSdRepresentations(videoRepresentations);
videoChunkSource = new DashWebmChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else {
throw new IllegalStateException("Unexpected mime type: " + mimeType);
}
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_VIDEO);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource,
drmSessionManager, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
mainHandler, player, 50);
// Build the audio renderer.
final String[] audioTrackNames;
final MultiTrackChunkSource audioChunkSource;
final MediaCodecAudioTrackRenderer audioRenderer;
if (audioRepresentationsList.isEmpty()) {
audioTrackNames = null;
audioChunkSource = null;
audioRenderer = null;
} else {
DataSource audioDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
audioTrackNames = new String[audioRepresentationsList.size()];
ChunkSource[] audioChunkSources = new ChunkSource[audioRepresentationsList.size()];
FormatEvaluator audioEvaluator = new FormatEvaluator.FixedEvaluator();
for (int i = 0; i < audioRepresentationsList.size(); i++) {
Representation representation = audioRepresentationsList.get(i);
Format format = representation.format;
audioTrackNames[i] = format.id + " (" + format.numChannels + "ch, " +
format.audioSamplingRate + "Hz)";
audioChunkSources[i] = new DashMp4ChunkSource(audioDataSource,
audioEvaluator, representation);
}
audioChunkSource = new MultiTrackChunkSource(audioChunkSources);
SampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource, drmSessionManager, true,
mainHandler, player);
}
// Build the debug renderer.
TrackRenderer debugRenderer = debugTextView != null
? new DebugTrackRenderer(debugTextView, videoRenderer, videoSampleSource) : null;
// Invoke the callback.
String[][] trackNames = new String[DemoPlayer.RENDERER_COUNT][];
trackNames[DemoPlayer.TYPE_AUDIO] = audioTrackNames;
MultiTrackChunkSource[] multiTrackChunkSources =
new MultiTrackChunkSource[DemoPlayer.RENDERER_COUNT];
multiTrackChunkSources[DemoPlayer.TYPE_AUDIO] = audioChunkSource;
TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_DEBUG] = debugRenderer;
callback.onRenderers(trackNames, multiTrackChunkSources, renderers);
} | NONSATD | true | drmSessionManager, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
mainHandler, player, 50);
// Build the audio renderer.
final String[] audioTrackNames;
final MultiTrackChunkSource audioChunkSource; | new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else {
throw new IllegalStateException("Unexpected mime type: " + mimeType);
}
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_VIDEO);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource,
drmSessionManager, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
mainHandler, player, 50);
// Build the audio renderer.
final String[] audioTrackNames;
final MultiTrackChunkSource audioChunkSource;
final MediaCodecAudioTrackRenderer audioRenderer;
if (audioRepresentationsList.isEmpty()) {
audioTrackNames = null;
audioChunkSource = null;
audioRenderer = null;
} else {
DataSource audioDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
audioTrackNames = new String[audioRepresentationsList.size()]; | ChunkSource videoChunkSource;
String mimeType = videoRepresentations[0].format.mimeType;
if (mimeType.equals(MimeTypes.VIDEO_MP4)) {
videoChunkSource = new DashMp4ChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else if (mimeType.equals(MimeTypes.VIDEO_WEBM)) {
// TODO: Figure out how to query supported vpX resolutions. For now, restrict to standard
// definition streams.
videoRepresentations = getSdRepresentations(videoRepresentations);
videoChunkSource = new DashWebmChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else {
throw new IllegalStateException("Unexpected mime type: " + mimeType);
}
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_VIDEO);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource,
drmSessionManager, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
mainHandler, player, 50);
// Build the audio renderer.
final String[] audioTrackNames;
final MultiTrackChunkSource audioChunkSource;
final MediaCodecAudioTrackRenderer audioRenderer;
if (audioRepresentationsList.isEmpty()) {
audioTrackNames = null;
audioChunkSource = null;
audioRenderer = null;
} else {
DataSource audioDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
audioTrackNames = new String[audioRepresentationsList.size()];
ChunkSource[] audioChunkSources = new ChunkSource[audioRepresentationsList.size()];
FormatEvaluator audioEvaluator = new FormatEvaluator.FixedEvaluator();
for (int i = 0; i < audioRepresentationsList.size(); i++) {
Representation representation = audioRepresentationsList.get(i);
Format format = representation.format;
audioTrackNames[i] = format.id + " (" + format.numChannels + "ch, " +
format.audioSamplingRate + "Hz)";
audioChunkSources[i] = new DashMp4ChunkSource(audioDataSource,
audioEvaluator, representation);
} |
17,582 | 7 | // Build the debug renderer. | @Override
public void onManifest(String contentId, MediaPresentationDescription manifest) {
Handler mainHandler = player.getMainHandler();
LoadControl loadControl = new DefaultLoadControl(new BufferPool(BUFFER_SEGMENT_SIZE));
DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
// Obtain Representations for playback.
int maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
ArrayList<Representation> audioRepresentationsList = new ArrayList<Representation>();
ArrayList<Representation> videoRepresentationsList = new ArrayList<Representation>();
Period period = manifest.periods.get(0);
boolean hasContentProtection = false;
for (int i = 0; i < period.adaptationSets.size(); i++) {
AdaptationSet adaptationSet = period.adaptationSets.get(i);
hasContentProtection |= adaptationSet.hasContentProtection();
int adaptationSetType = adaptationSet.type;
for (int j = 0; j < adaptationSet.representations.size(); j++) {
Representation representation = adaptationSet.representations.get(j);
if (adaptationSetType == AdaptationSet.TYPE_AUDIO) {
audioRepresentationsList.add(representation);
} else if (adaptationSetType == AdaptationSet.TYPE_VIDEO) {
Format format = representation.format;
if (format.width * format.height <= maxDecodableFrameSize) {
videoRepresentationsList.add(representation);
} else {
// The device isn't capable of playing this stream.
}
}
}
}
Representation[] videoRepresentations = new Representation[videoRepresentationsList.size()];
videoRepresentationsList.toArray(videoRepresentations);
// Check drm support if necessary.
DrmSessionManager drmSessionManager = null;
if (hasContentProtection) {
if (Util.SDK_INT < 18) {
callback.onRenderersError(new UnsupportedOperationException(
"Protected content not supported on API level " + Util.SDK_INT));
return;
}
try {
Pair<DrmSessionManager, Boolean> drmSessionManagerData =
V18Compat.getDrmSessionManagerData(player, drmCallback);
drmSessionManager = drmSessionManagerData.first;
if (!drmSessionManagerData.second) {
// HD streams require L1 security.
videoRepresentations = getSdRepresentations(videoRepresentations);
}
} catch (Exception e) {
callback.onRenderersError(e);
return;
}
}
// Build the video renderer.
DataSource videoDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
ChunkSource videoChunkSource;
String mimeType = videoRepresentations[0].format.mimeType;
if (mimeType.equals(MimeTypes.VIDEO_MP4)) {
videoChunkSource = new DashMp4ChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else if (mimeType.equals(MimeTypes.VIDEO_WEBM)) {
// TODO: Figure out how to query supported vpX resolutions. For now, restrict to standard
// definition streams.
videoRepresentations = getSdRepresentations(videoRepresentations);
videoChunkSource = new DashWebmChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else {
throw new IllegalStateException("Unexpected mime type: " + mimeType);
}
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_VIDEO);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource,
drmSessionManager, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
mainHandler, player, 50);
// Build the audio renderer.
final String[] audioTrackNames;
final MultiTrackChunkSource audioChunkSource;
final MediaCodecAudioTrackRenderer audioRenderer;
if (audioRepresentationsList.isEmpty()) {
audioTrackNames = null;
audioChunkSource = null;
audioRenderer = null;
} else {
DataSource audioDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
audioTrackNames = new String[audioRepresentationsList.size()];
ChunkSource[] audioChunkSources = new ChunkSource[audioRepresentationsList.size()];
FormatEvaluator audioEvaluator = new FormatEvaluator.FixedEvaluator();
for (int i = 0; i < audioRepresentationsList.size(); i++) {
Representation representation = audioRepresentationsList.get(i);
Format format = representation.format;
audioTrackNames[i] = format.id + " (" + format.numChannels + "ch, " +
format.audioSamplingRate + "Hz)";
audioChunkSources[i] = new DashMp4ChunkSource(audioDataSource,
audioEvaluator, representation);
}
audioChunkSource = new MultiTrackChunkSource(audioChunkSources);
SampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource, drmSessionManager, true,
mainHandler, player);
}
// Build the debug renderer.
TrackRenderer debugRenderer = debugTextView != null
? new DebugTrackRenderer(debugTextView, videoRenderer, videoSampleSource) : null;
// Invoke the callback.
String[][] trackNames = new String[DemoPlayer.RENDERER_COUNT][];
trackNames[DemoPlayer.TYPE_AUDIO] = audioTrackNames;
MultiTrackChunkSource[] multiTrackChunkSources =
new MultiTrackChunkSource[DemoPlayer.RENDERER_COUNT];
multiTrackChunkSources[DemoPlayer.TYPE_AUDIO] = audioChunkSource;
TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_DEBUG] = debugRenderer;
callback.onRenderers(trackNames, multiTrackChunkSources, renderers);
} | NONSATD | true | mainHandler, player);
}
// Build the debug renderer.
TrackRenderer debugRenderer = debugTextView != null
? new DebugTrackRenderer(debugTextView, videoRenderer, videoSampleSource) : null; | audioChunkSources[i] = new DashMp4ChunkSource(audioDataSource,
audioEvaluator, representation);
}
audioChunkSource = new MultiTrackChunkSource(audioChunkSources);
SampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource, drmSessionManager, true,
mainHandler, player);
}
// Build the debug renderer.
TrackRenderer debugRenderer = debugTextView != null
? new DebugTrackRenderer(debugTextView, videoRenderer, videoSampleSource) : null;
// Invoke the callback.
String[][] trackNames = new String[DemoPlayer.RENDERER_COUNT][];
trackNames[DemoPlayer.TYPE_AUDIO] = audioTrackNames;
MultiTrackChunkSource[] multiTrackChunkSources =
new MultiTrackChunkSource[DemoPlayer.RENDERER_COUNT];
multiTrackChunkSources[DemoPlayer.TYPE_AUDIO] = audioChunkSource;
TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer; | } else {
DataSource audioDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
audioTrackNames = new String[audioRepresentationsList.size()];
ChunkSource[] audioChunkSources = new ChunkSource[audioRepresentationsList.size()];
FormatEvaluator audioEvaluator = new FormatEvaluator.FixedEvaluator();
for (int i = 0; i < audioRepresentationsList.size(); i++) {
Representation representation = audioRepresentationsList.get(i);
Format format = representation.format;
audioTrackNames[i] = format.id + " (" + format.numChannels + "ch, " +
format.audioSamplingRate + "Hz)";
audioChunkSources[i] = new DashMp4ChunkSource(audioDataSource,
audioEvaluator, representation);
}
audioChunkSource = new MultiTrackChunkSource(audioChunkSources);
SampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource, drmSessionManager, true,
mainHandler, player);
}
// Build the debug renderer.
TrackRenderer debugRenderer = debugTextView != null
? new DebugTrackRenderer(debugTextView, videoRenderer, videoSampleSource) : null;
// Invoke the callback.
String[][] trackNames = new String[DemoPlayer.RENDERER_COUNT][];
trackNames[DemoPlayer.TYPE_AUDIO] = audioTrackNames;
MultiTrackChunkSource[] multiTrackChunkSources =
new MultiTrackChunkSource[DemoPlayer.RENDERER_COUNT];
multiTrackChunkSources[DemoPlayer.TYPE_AUDIO] = audioChunkSource;
TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_DEBUG] = debugRenderer;
callback.onRenderers(trackNames, multiTrackChunkSources, renderers);
} |
17,582 | 8 | // Invoke the callback. | @Override
public void onManifest(String contentId, MediaPresentationDescription manifest) {
Handler mainHandler = player.getMainHandler();
LoadControl loadControl = new DefaultLoadControl(new BufferPool(BUFFER_SEGMENT_SIZE));
DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
// Obtain Representations for playback.
int maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
ArrayList<Representation> audioRepresentationsList = new ArrayList<Representation>();
ArrayList<Representation> videoRepresentationsList = new ArrayList<Representation>();
Period period = manifest.periods.get(0);
boolean hasContentProtection = false;
for (int i = 0; i < period.adaptationSets.size(); i++) {
AdaptationSet adaptationSet = period.adaptationSets.get(i);
hasContentProtection |= adaptationSet.hasContentProtection();
int adaptationSetType = adaptationSet.type;
for (int j = 0; j < adaptationSet.representations.size(); j++) {
Representation representation = adaptationSet.representations.get(j);
if (adaptationSetType == AdaptationSet.TYPE_AUDIO) {
audioRepresentationsList.add(representation);
} else if (adaptationSetType == AdaptationSet.TYPE_VIDEO) {
Format format = representation.format;
if (format.width * format.height <= maxDecodableFrameSize) {
videoRepresentationsList.add(representation);
} else {
// The device isn't capable of playing this stream.
}
}
}
}
Representation[] videoRepresentations = new Representation[videoRepresentationsList.size()];
videoRepresentationsList.toArray(videoRepresentations);
// Check drm support if necessary.
DrmSessionManager drmSessionManager = null;
if (hasContentProtection) {
if (Util.SDK_INT < 18) {
callback.onRenderersError(new UnsupportedOperationException(
"Protected content not supported on API level " + Util.SDK_INT));
return;
}
try {
Pair<DrmSessionManager, Boolean> drmSessionManagerData =
V18Compat.getDrmSessionManagerData(player, drmCallback);
drmSessionManager = drmSessionManagerData.first;
if (!drmSessionManagerData.second) {
// HD streams require L1 security.
videoRepresentations = getSdRepresentations(videoRepresentations);
}
} catch (Exception e) {
callback.onRenderersError(e);
return;
}
}
// Build the video renderer.
DataSource videoDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
ChunkSource videoChunkSource;
String mimeType = videoRepresentations[0].format.mimeType;
if (mimeType.equals(MimeTypes.VIDEO_MP4)) {
videoChunkSource = new DashMp4ChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else if (mimeType.equals(MimeTypes.VIDEO_WEBM)) {
// TODO: Figure out how to query supported vpX resolutions. For now, restrict to standard
// definition streams.
videoRepresentations = getSdRepresentations(videoRepresentations);
videoChunkSource = new DashWebmChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else {
throw new IllegalStateException("Unexpected mime type: " + mimeType);
}
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_VIDEO);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource,
drmSessionManager, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
mainHandler, player, 50);
// Build the audio renderer.
final String[] audioTrackNames;
final MultiTrackChunkSource audioChunkSource;
final MediaCodecAudioTrackRenderer audioRenderer;
if (audioRepresentationsList.isEmpty()) {
audioTrackNames = null;
audioChunkSource = null;
audioRenderer = null;
} else {
DataSource audioDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
audioTrackNames = new String[audioRepresentationsList.size()];
ChunkSource[] audioChunkSources = new ChunkSource[audioRepresentationsList.size()];
FormatEvaluator audioEvaluator = new FormatEvaluator.FixedEvaluator();
for (int i = 0; i < audioRepresentationsList.size(); i++) {
Representation representation = audioRepresentationsList.get(i);
Format format = representation.format;
audioTrackNames[i] = format.id + " (" + format.numChannels + "ch, " +
format.audioSamplingRate + "Hz)";
audioChunkSources[i] = new DashMp4ChunkSource(audioDataSource,
audioEvaluator, representation);
}
audioChunkSource = new MultiTrackChunkSource(audioChunkSources);
SampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource, drmSessionManager, true,
mainHandler, player);
}
// Build the debug renderer.
TrackRenderer debugRenderer = debugTextView != null
? new DebugTrackRenderer(debugTextView, videoRenderer, videoSampleSource) : null;
// Invoke the callback.
String[][] trackNames = new String[DemoPlayer.RENDERER_COUNT][];
trackNames[DemoPlayer.TYPE_AUDIO] = audioTrackNames;
MultiTrackChunkSource[] multiTrackChunkSources =
new MultiTrackChunkSource[DemoPlayer.RENDERER_COUNT];
multiTrackChunkSources[DemoPlayer.TYPE_AUDIO] = audioChunkSource;
TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_DEBUG] = debugRenderer;
callback.onRenderers(trackNames, multiTrackChunkSources, renderers);
} | NONSATD | true | TrackRenderer debugRenderer = debugTextView != null
? new DebugTrackRenderer(debugTextView, videoRenderer, videoSampleSource) : null;
// Invoke the callback.
String[][] trackNames = new String[DemoPlayer.RENDERER_COUNT][];
trackNames[DemoPlayer.TYPE_AUDIO] = audioTrackNames; | audioChunkSource = new MultiTrackChunkSource(audioChunkSources);
SampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource, drmSessionManager, true,
mainHandler, player);
}
// Build the debug renderer.
TrackRenderer debugRenderer = debugTextView != null
? new DebugTrackRenderer(debugTextView, videoRenderer, videoSampleSource) : null;
// Invoke the callback.
String[][] trackNames = new String[DemoPlayer.RENDERER_COUNT][];
trackNames[DemoPlayer.TYPE_AUDIO] = audioTrackNames;
MultiTrackChunkSource[] multiTrackChunkSources =
new MultiTrackChunkSource[DemoPlayer.RENDERER_COUNT];
multiTrackChunkSources[DemoPlayer.TYPE_AUDIO] = audioChunkSource;
TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_DEBUG] = debugRenderer;
callback.onRenderers(trackNames, multiTrackChunkSources, renderers); | ChunkSource[] audioChunkSources = new ChunkSource[audioRepresentationsList.size()];
FormatEvaluator audioEvaluator = new FormatEvaluator.FixedEvaluator();
for (int i = 0; i < audioRepresentationsList.size(); i++) {
Representation representation = audioRepresentationsList.get(i);
Format format = representation.format;
audioTrackNames[i] = format.id + " (" + format.numChannels + "ch, " +
format.audioSamplingRate + "Hz)";
audioChunkSources[i] = new DashMp4ChunkSource(audioDataSource,
audioEvaluator, representation);
}
audioChunkSource = new MultiTrackChunkSource(audioChunkSources);
SampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource, drmSessionManager, true,
mainHandler, player);
}
// Build the debug renderer.
TrackRenderer debugRenderer = debugTextView != null
? new DebugTrackRenderer(debugTextView, videoRenderer, videoSampleSource) : null;
// Invoke the callback.
String[][] trackNames = new String[DemoPlayer.RENDERER_COUNT][];
trackNames[DemoPlayer.TYPE_AUDIO] = audioTrackNames;
MultiTrackChunkSource[] multiTrackChunkSources =
new MultiTrackChunkSource[DemoPlayer.RENDERER_COUNT];
multiTrackChunkSources[DemoPlayer.TYPE_AUDIO] = audioChunkSource;
TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_DEBUG] = debugRenderer;
callback.onRenderers(trackNames, multiTrackChunkSources, renderers);
} |
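The parallel arrays handed to callback.onRenderers above are indexed by the DemoPlayer.TYPE_* constants. A hedged sketch of how a receiver could unpack them follows; the method body is an assumption, since DemoPlayer itself is not shown in this data.

// Hypothetical receiver-side sketch; the indexing mirrors the TYPE_* constants used above.
public void onRenderers(String[][] trackNames, MultiTrackChunkSource[] multiTrackChunkSources,
    TrackRenderer[] renderers) {
  TrackRenderer videoRenderer = renderers[DemoPlayer.TYPE_VIDEO];
  // Audio entries are null when audioRepresentationsList was empty in the builder above.
  TrackRenderer audioRenderer = renderers[DemoPlayer.TYPE_AUDIO];
  String[] audioTrackNames = trackNames[DemoPlayer.TYPE_AUDIO];
  // A real implementation would now hand the renderers to the player; omitted in this sketch.
}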
17,588 | 0 | /**
* Add a line to the list of loaded feeds to record the snapshot and which feed the snapshot replicates.
*/ | private void registerSnapshot () {
try {
Statement statement = connection.createStatement();
// TODO copy over feed_id and feed_version from source namespace?
// FIXME do the following only on databases that support schemas.
// SQLite does not support them. Is there any advantage of schemas over flat tables?
statement.execute("create schema " + tablePrefix);
// TODO: Record total snapshot processing time?
// Simply insert into feeds table (no need for table creation) because making a snapshot presumes that the
// feeds table already exists.
PreparedStatement insertStatement = connection.prepareStatement(
"insert into feeds values (?, null, null, null, null, null, current_timestamp, ?)");
insertStatement.setString(1, tablePrefix);
insertStatement.setString(2, feedIdToSnapshot);
insertStatement.execute();
connection.commit();
LOG.info("Created new snapshot namespace: {}", insertStatement);
} catch (Exception ex) {
LOG.error("Exception while registering snapshot namespace in feeds table: {}", ex.getMessage());
DbUtils.closeQuietly(connection);
}
} | NONSATD | true | private void registerSnapshot () {
try {
Statement statement = connection.createStatement();
// TODO copy over feed_id and feed_version from source namespace?
// FIXME do the following only on databases that support schemas.
// SQLite does not support them. Is there any advantage of schemas over flat tables?
statement.execute("create schema " + tablePrefix);
// TODO: Record total snapshot processing time?
// Simply insert into feeds table (no need for table creation) because making a snapshot presumes that the
// feeds table already exists.
PreparedStatement insertStatement = connection.prepareStatement(
"insert into feeds values (?, null, null, null, null, null, current_timestamp, ?)");
insertStatement.setString(1, tablePrefix);
insertStatement.setString(2, feedIdToSnapshot);
insertStatement.execute();
connection.commit();
LOG.info("Created new snapshot namespace: {}", insertStatement);
} catch (Exception ex) {
LOG.error("Exception while registering snapshot namespace in feeds table: {}", ex.getMessage());
DbUtils.closeQuietly(connection);
}
} | private void registerSnapshot () {
try {
Statement statement = connection.createStatement();
// TODO copy over feed_id and feed_version from source namespace?
// FIXME do the following only on databases that support schemas.
// SQLite does not support them. Is there any advantage of schemas over flat tables?
statement.execute("create schema " + tablePrefix);
// TODO: Record total snapshot processing time?
// Simply insert into feeds table (no need for table creation) because making a snapshot presumes that the
// feeds table already exists.
PreparedStatement insertStatement = connection.prepareStatement(
"insert into feeds values (?, null, null, null, null, null, current_timestamp, ?)");
insertStatement.setString(1, tablePrefix);
insertStatement.setString(2, feedIdToSnapshot);
insertStatement.execute();
connection.commit();
LOG.info("Created new snapshot namespace: {}", insertStatement);
} catch (Exception ex) {
LOG.error("Exception while registering snapshot namespace in feeds table: {}", ex.getMessage());
DbUtils.closeQuietly(connection);
}
} | private void registerSnapshot () {
try {
Statement statement = connection.createStatement();
// TODO copy over feed_id and feed_version from source namespace?
// FIXME do the following only on databases that support schemas.
// SQLite does not support them. Is there any advantage of schemas over flat tables?
statement.execute("create schema " + tablePrefix);
// TODO: Record total snapshot processing time?
// Simply insert into feeds table (no need for table creation) because making a snapshot presumes that the
// feeds table already exists.
PreparedStatement insertStatement = connection.prepareStatement(
"insert into feeds values (?, null, null, null, null, null, current_timestamp, ?)");
insertStatement.setString(1, tablePrefix);
insertStatement.setString(2, feedIdToSnapshot);
insertStatement.execute();
connection.commit();
LOG.info("Created new snapshot namespace: {}", insertStatement);
} catch (Exception ex) {
LOG.error("Exception while registering snapshot namespace in feeds table: {}", ex.getMessage());
DbUtils.closeQuietly(connection);
}
} |
17,588 | 1 | // TODO copy over feed_id and feed_version from source namespace?
// FIXME do the following only on databases that support schemas.
// SQLite does not support them. Is there any advantage of schemas over flat tables? | private void registerSnapshot () {
try {
Statement statement = connection.createStatement();
// TODO copy over feed_id and feed_version from source namespace?
// FIXME do the following only on databases that support schemas.
// SQLite does not support them. Is there any advantage of schemas over flat tables?
statement.execute("create schema " + tablePrefix);
// TODO: Record total snapshot processing time?
// Simply insert into feeds table (no need for table creation) because making a snapshot presumes that the
// feeds table already exists.
PreparedStatement insertStatement = connection.prepareStatement(
"insert into feeds values (?, null, null, null, null, null, current_timestamp, ?)");
insertStatement.setString(1, tablePrefix);
insertStatement.setString(2, feedIdToSnapshot);
insertStatement.execute();
connection.commit();
LOG.info("Created new snapshot namespace: {}", insertStatement);
} catch (Exception ex) {
LOG.error("Exception while registering snapshot namespace in feeds table: {}", ex.getMessage());
DbUtils.closeQuietly(connection);
}
} | DESIGN | true | try {
Statement statement = connection.createStatement();
// TODO copy over feed_id and feed_version from source namespace?
// FIXME do the following only on databases that support schemas.
// SQLite does not support them. Is there any advantage of schemas over flat tables?
statement.execute("create schema " + tablePrefix);
// TODO: Record total snapshot processing time? | private void registerSnapshot () {
try {
Statement statement = connection.createStatement();
// TODO copy over feed_id and feed_version from source namespace?
// FIXME do the following only on databases that support schemas.
// SQLite does not support them. Is there any advantage of schemas over flat tables?
statement.execute("create schema " + tablePrefix);
// TODO: Record total snapshot processing time?
// Simply insert into feeds table (no need for table creation) because making a snapshot presumes that the
// feeds table already exists.
PreparedStatement insertStatement = connection.prepareStatement(
"insert into feeds values (?, null, null, null, null, null, current_timestamp, ?)");
insertStatement.setString(1, tablePrefix);
insertStatement.setString(2, feedIdToSnapshot);
insertStatement.execute();
connection.commit(); | private void registerSnapshot () {
try {
Statement statement = connection.createStatement();
// TODO copy over feed_id and feed_version from source namespace?
// FIXME do the following only on databases that support schemas.
// SQLite does not support them. Is there any advantage of schemas over flat tables?
statement.execute("create schema " + tablePrefix);
// TODO: Record total snapshot processing time?
// Simply insert into feeds table (no need for table creation) because making a snapshot presumes that the
// feeds table already exists.
PreparedStatement insertStatement = connection.prepareStatement(
"insert into feeds values (?, null, null, null, null, null, current_timestamp, ?)");
insertStatement.setString(1, tablePrefix);
insertStatement.setString(2, feedIdToSnapshot);
insertStatement.execute();
connection.commit();
LOG.info("Created new snapshot namespace: {}", insertStatement);
} catch (Exception ex) {
LOG.error("Exception while registering snapshot namespace in feeds table: {}", ex.getMessage());
DbUtils.closeQuietly(connection);
}
} |
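For the FIXME above about schema support, one database-portable shape is to probe JDBC metadata before issuing the schema DDL. A sketch follows, under the assumption that on schema-less databases the flat tables would instead be named with the prefix joined by an underscore; the original code only implements the schema path, so the fallback convention here is hypothetical.

import java.sql.Connection;
import java.sql.SQLException;

// Hypothetical sketch of the database-dependent branch the FIXME asks for.
// supportsSchemasInTableDefinitions() is standard JDBC metadata; the underscore
// naming convention for the flat-table fallback is an assumption.
private static void createNamespace(Connection connection, String tablePrefix) throws SQLException {
  if (connection.getMetaData().supportsSchemasInTableDefinitions()) {
    connection.createStatement().execute("create schema " + tablePrefix);
  }
  // Otherwise (e.g. SQLite) skip schema creation and qualify table names directly,
  // e.g. tablePrefix + "_routes" instead of tablePrefix + ".routes".
}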
17,588 | 2 | // TODO: Record total snapshot processing time?
// Simply insert into feeds table (no need for table creation) because making a snapshot presumes that the
// feeds table already exists. | private void registerSnapshot () {
try {
Statement statement = connection.createStatement();
// TODO copy over feed_id and feed_version from source namespace?
// FIXME do the following only on databases that support schemas.
// SQLite does not support them. Is there any advantage of schemas over flat tables?
statement.execute("create schema " + tablePrefix);
// TODO: Record total snapshot processing time?
// Simply insert into feeds table (no need for table creation) because making a snapshot presumes that the
// feeds table already exists.
PreparedStatement insertStatement = connection.prepareStatement(
"insert into feeds values (?, null, null, null, null, null, current_timestamp, ?)");
insertStatement.setString(1, tablePrefix);
insertStatement.setString(2, feedIdToSnapshot);
insertStatement.execute();
connection.commit();
LOG.info("Created new snapshot namespace: {}", insertStatement);
} catch (Exception ex) {
LOG.error("Exception while registering snapshot namespace in feeds table: {}", ex.getMessage());
DbUtils.closeQuietly(connection);
}
} | IMPLEMENTATION | true | // SQLite does not support them. Is there any advantage of schemas over flat tables?
statement.execute("create schema " + tablePrefix);
// TODO: Record total snapshot processing time?
// Simply insert into feeds table (no need for table creation) because making a snapshot presumes that the
// feeds table already exists.
PreparedStatement insertStatement = connection.prepareStatement(
"insert into feeds values (?, null, null, null, null, null, current_timestamp, ?)"); | private void registerSnapshot () {
try {
Statement statement = connection.createStatement();
// TODO copy over feed_id and feed_version from source namespace?
// FIXME do the following only on databases that support schemas.
// SQLite does not support them. Is there any advantage of schemas over flat tables?
statement.execute("create schema " + tablePrefix);
// TODO: Record total snapshot processing time?
// Simply insert into feeds table (no need for table creation) because making a snapshot presumes that the
// feeds table already exists.
PreparedStatement insertStatement = connection.prepareStatement(
"insert into feeds values (?, null, null, null, null, null, current_timestamp, ?)");
insertStatement.setString(1, tablePrefix);
insertStatement.setString(2, feedIdToSnapshot);
insertStatement.execute();
connection.commit();
LOG.info("Created new snapshot namespace: {}", insertStatement);
} catch (Exception ex) {
LOG.error("Exception while registering snapshot namespace in feeds table: {}", ex.getMessage());
DbUtils.closeQuietly(connection); | private void registerSnapshot () {
try {
Statement statement = connection.createStatement();
// TODO copy over feed_id and feed_version from source namespace?
// FIXME do the following only on databases that support schemas.
// SQLite does not support them. Is there any advantage of schemas over flat tables?
statement.execute("create schema " + tablePrefix);
// TODO: Record total snapshot processing time?
// Simply insert into feeds table (no need for table creation) because making a snapshot presumes that the
// feeds table already exists.
PreparedStatement insertStatement = connection.prepareStatement(
"insert into feeds values (?, null, null, null, null, null, current_timestamp, ?)");
insertStatement.setString(1, tablePrefix);
insertStatement.setString(2, feedIdToSnapshot);
insertStatement.execute();
connection.commit();
LOG.info("Created new snapshot namespace: {}", insertStatement);
} catch (Exception ex) {
LOG.error("Exception while registering snapshot namespace in feeds table: {}", ex.getMessage());
DbUtils.closeQuietly(connection);
}
} |
25,782 | 0 | // TODO - cleanup redundancies | public static List<JSONObject> constructRokuNativeElements(JSONObject elementObj) {
List<JSONObject> elements = new ArrayList<>();
JSONArray valueArr = (JSONArray) elementObj.get("value");
if (valueArr == null) {
return elements;
}
for (int i = 0; i < valueArr.size(); i++) {
String[] boundsComponents = { "0", "0", "0", "0" };
String text = "";
JSONObject valueObj = (JSONObject) valueArr.get(i);
JSONArray attrArr = (JSONArray) valueObj.get("Attrs");
for (int i2 = 0; i2 < attrArr.size(); i2++) {
JSONObject attrObj = (JSONObject) attrArr.get(i2);
JSONObject nameObj = (JSONObject) attrObj.get("Name");
if (nameObj.containsValue("bounds")) {
String boundsStr = (String) attrObj.get("Value");
boundsStr = boundsStr.replace("{", "").replace("}", "");
boundsComponents = boundsStr.split(", ");
}
if (nameObj.containsValue("text")) {
text = (String) attrObj.get("Value");
}
}
elements.add(constructRokuNativeElementJSON(elementObj, text, boundsComponents));
}
return elements;
} | IMPLEMENTATION | true | public static List<JSONObject> constructRokuNativeElements(JSONObject elementObj) {
List<JSONObject> elements = new ArrayList<>();
JSONArray valueArr = (JSONArray) elementObj.get("value");
if (valueArr == null) {
return elements;
}
for (int i = 0; i < valueArr.size(); i++) {
String[] boundsComponents = { "0", "0", "0", "0" };
String text = "";
JSONObject valueObj = (JSONObject) valueArr.get(i);
JSONArray attrArr = (JSONArray) valueObj.get("Attrs");
for (int i2 = 0; i2 < attrArr.size(); i2++) {
JSONObject attrObj = (JSONObject) attrArr.get(i2);
JSONObject nameObj = (JSONObject) attrObj.get("Name");
if (nameObj.containsValue("bounds")) {
String boundsStr = (String) attrObj.get("Value");
boundsStr = boundsStr.replace("{", "").replace("}", "");
boundsComponents = boundsStr.split(", ");
}
if (nameObj.containsValue("text")) {
text = (String) attrObj.get("Value");
}
}
elements.add(constructRokuNativeElementJSON(elementObj, text, boundsComponents));
}
return elements;
} | public static List<JSONObject> constructRokuNativeElements(JSONObject elementObj) {
List<JSONObject> elements = new ArrayList<>();
JSONArray valueArr = (JSONArray) elementObj.get("value");
if (valueArr == null) {
return elements;
}
for (int i = 0; i < valueArr.size(); i++) {
String[] boundsComponents = { "0", "0", "0", "0" };
String text = "";
JSONObject valueObj = (JSONObject) valueArr.get(i);
JSONArray attrArr = (JSONArray) valueObj.get("Attrs");
for (int i2 = 0; i2 < attrArr.size(); i2++) {
JSONObject attrObj = (JSONObject) attrArr.get(i2);
JSONObject nameObj = (JSONObject) attrObj.get("Name");
if (nameObj.containsValue("bounds")) {
String boundsStr = (String) attrObj.get("Value");
boundsStr = boundsStr.replace("{", "").replace("}", "");
boundsComponents = boundsStr.split(", ");
}
if (nameObj.containsValue("text")) {
text = (String) attrObj.get("Value");
}
}
elements.add(constructRokuNativeElementJSON(elementObj, text, boundsComponents));
}
return elements;
} | public static List<JSONObject> constructRokuNativeElements(JSONObject elementObj) {
List<JSONObject> elements = new ArrayList<>();
JSONArray valueArr = (JSONArray) elementObj.get("value");
if (valueArr == null) {
return elements;
}
for (int i = 0; i < valueArr.size(); i++) {
String[] boundsComponents = { "0", "0", "0", "0" };
String text = "";
JSONObject valueObj = (JSONObject) valueArr.get(i);
JSONArray attrArr = (JSONArray) valueObj.get("Attrs");
for (int i2 = 0; i2 < attrArr.size(); i2++) {
JSONObject attrObj = (JSONObject) attrArr.get(i2);
JSONObject nameObj = (JSONObject) attrObj.get("Name");
if (nameObj.containsValue("bounds")) {
String boundsStr = (String) attrObj.get("Value");
boundsStr = boundsStr.replace("{", "").replace("}", "");
boundsComponents = boundsStr.split(", ");
}
if (nameObj.containsValue("text")) {
text = (String) attrObj.get("Value");
}
}
elements.add(constructRokuNativeElementJSON(elementObj, text, boundsComponents));
}
return elements;
} |
25,790 | 0 | /**
* This is really testing how the NonProductionConfig works - how can this be
* targeted to the init config?
*/ | @Test
public void testBlowingUpWithDuplicateLoaders() {
KeyValuePairLoader kvpl = new KeyValuePairLoader();
kvpl.setKeyValuePairs(cmdLineArgsWFullClassName);
try {
AndHowConfiguration config = AndHowTestConfig.instance()
.setLoaders(kvpl, kvpl)
.addOverrideGroups(configPtGroups);
AndHow.setConfig(config);
AndHow.instance();
fail(); //The line above should throw an error
} catch (AppFatalException ce) {
assertEquals(1, ce.getProblems().filter(ConstructionProblem.class).size());
assertTrue(ce.getProblems().filter(ConstructionProblem.class).get(0) instanceof ConstructionProblem.DuplicateLoader);
ConstructionProblem.DuplicateLoader dl = (ConstructionProblem.DuplicateLoader)ce.getProblems().filter(ConstructionProblem.class).get(0);
assertEquals(kvpl, dl.getLoader());
assertTrue(ce.getSampleDirectory().length() > 0);
File sampleDir = new File(ce.getSampleDirectory());
assertTrue(sampleDir.exists());
assertTrue(sampleDir.listFiles().length > 0);
}
} | DESIGN | true | @Test
public void testBlowingUpWithDuplicateLoaders() {
KeyValuePairLoader kvpl = new KeyValuePairLoader();
kvpl.setKeyValuePairs(cmdLineArgsWFullClassName);
try {
AndHowConfiguration config = AndHowTestConfig.instance()
.setLoaders(kvpl, kvpl)
.addOverrideGroups(configPtGroups);
AndHow.setConfig(config);
AndHow.instance();
fail(); //The line above should throw an error
} catch (AppFatalException ce) {
assertEquals(1, ce.getProblems().filter(ConstructionProblem.class).size());
assertTrue(ce.getProblems().filter(ConstructionProblem.class).get(0) instanceof ConstructionProblem.DuplicateLoader);
ConstructionProblem.DuplicateLoader dl = (ConstructionProblem.DuplicateLoader)ce.getProblems().filter(ConstructionProblem.class).get(0);
assertEquals(kvpl, dl.getLoader());
assertTrue(ce.getSampleDirectory().length() > 0);
File sampleDir = new File(ce.getSampleDirectory());
assertTrue(sampleDir.exists());
assertTrue(sampleDir.listFiles().length > 0);
}
} | @Test
public void testBlowingUpWithDuplicateLoaders() {
KeyValuePairLoader kvpl = new KeyValuePairLoader();
kvpl.setKeyValuePairs(cmdLineArgsWFullClassName);
try {
AndHowConfiguration config = AndHowTestConfig.instance()
.setLoaders(kvpl, kvpl)
.addOverrideGroups(configPtGroups);
AndHow.setConfig(config);
AndHow.instance();
fail(); //The line above should throw an error
} catch (AppFatalException ce) {
assertEquals(1, ce.getProblems().filter(ConstructionProblem.class).size());
assertTrue(ce.getProblems().filter(ConstructionProblem.class).get(0) instanceof ConstructionProblem.DuplicateLoader);
ConstructionProblem.DuplicateLoader dl = (ConstructionProblem.DuplicateLoader)ce.getProblems().filter(ConstructionProblem.class).get(0);
assertEquals(kvpl, dl.getLoader());
assertTrue(ce.getSampleDirectory().length() > 0);
File sampleDir = new File(ce.getSampleDirectory());
assertTrue(sampleDir.exists());
assertTrue(sampleDir.listFiles().length > 0);
}
} | @Test
public void testBlowingUpWithDuplicateLoaders() {
KeyValuePairLoader kvpl = new KeyValuePairLoader();
kvpl.setKeyValuePairs(cmdLineArgsWFullClassName);
try {
AndHowConfiguration config = AndHowTestConfig.instance()
.setLoaders(kvpl, kvpl)
.addOverrideGroups(configPtGroups);
AndHow.setConfig(config);
AndHow.instance();
fail(); //The line above should throw an error
} catch (AppFatalException ce) {
assertEquals(1, ce.getProblems().filter(ConstructionProblem.class).size());
assertTrue(ce.getProblems().filter(ConstructionProblem.class).get(0) instanceof ConstructionProblem.DuplicateLoader);
ConstructionProblem.DuplicateLoader dl = (ConstructionProblem.DuplicateLoader)ce.getProblems().filter(ConstructionProblem.class).get(0);
assertEquals(kvpl, dl.getLoader());
assertTrue(ce.getSampleDirectory().length() > 0);
File sampleDir = new File(ce.getSampleDirectory());
assertTrue(sampleDir.exists());
assertTrue(sampleDir.listFiles().length > 0);
}
} |
25,790 | 1 | //The line above should throw an error | @Test
public void testBlowingUpWithDuplicateLoaders() {
KeyValuePairLoader kvpl = new KeyValuePairLoader();
kvpl.setKeyValuePairs(cmdLineArgsWFullClassName);
try {
AndHowConfiguration config = AndHowTestConfig.instance()
.setLoaders(kvpl, kvpl)
.addOverrideGroups(configPtGroups);
AndHow.setConfig(config);
AndHow.instance();
fail(); //The line above should throw an error
} catch (AppFatalException ce) {
assertEquals(1, ce.getProblems().filter(ConstructionProblem.class).size());
assertTrue(ce.getProblems().filter(ConstructionProblem.class).get(0) instanceof ConstructionProblem.DuplicateLoader);
ConstructionProblem.DuplicateLoader dl = (ConstructionProblem.DuplicateLoader)ce.getProblems().filter(ConstructionProblem.class).get(0);
assertEquals(kvpl, dl.getLoader());
assertTrue(ce.getSampleDirectory().length() > 0);
File sampleDir = new File(ce.getSampleDirectory());
assertTrue(sampleDir.exists());
assertTrue(sampleDir.listFiles().length > 0);
}
} | NONSATD | true | AndHow.setConfig(config);
AndHow.instance();
fail(); //The line above should throw an error
} catch (AppFatalException ce) {
assertEquals(1, ce.getProblems().filter(ConstructionProblem.class).size()); | @Test
public void testBlowingUpWithDuplicateLoaders() {
KeyValuePairLoader kvpl = new KeyValuePairLoader();
kvpl.setKeyValuePairs(cmdLineArgsWFullClassName);
try {
AndHowConfiguration config = AndHowTestConfig.instance()
.setLoaders(kvpl, kvpl)
.addOverrideGroups(configPtGroups);
AndHow.setConfig(config);
AndHow.instance();
fail(); //The line above should throw an error
} catch (AppFatalException ce) {
assertEquals(1, ce.getProblems().filter(ConstructionProblem.class).size());
assertTrue(ce.getProblems().filter(ConstructionProblem.class).get(0) instanceof ConstructionProblem.DuplicateLoader);
ConstructionProblem.DuplicateLoader dl = (ConstructionProblem.DuplicateLoader)ce.getProblems().filter(ConstructionProblem.class).get(0);
assertEquals(kvpl, dl.getLoader());
assertTrue(ce.getSampleDirectory().length() > 0);
File sampleDir = new File(ce.getSampleDirectory());
assertTrue(sampleDir.exists());
assertTrue(sampleDir.listFiles().length > 0);
} | @Test
public void testBlowingUpWithDuplicateLoaders() {
KeyValuePairLoader kvpl = new KeyValuePairLoader();
kvpl.setKeyValuePairs(cmdLineArgsWFullClassName);
try {
AndHowConfiguration config = AndHowTestConfig.instance()
.setLoaders(kvpl, kvpl)
.addOverrideGroups(configPtGroups);
AndHow.setConfig(config);
AndHow.instance();
fail(); //The line above should throw an error
} catch (AppFatalException ce) {
assertEquals(1, ce.getProblems().filter(ConstructionProblem.class).size());
assertTrue(ce.getProblems().filter(ConstructionProblem.class).get(0) instanceof ConstructionProblem.DuplicateLoader);
ConstructionProblem.DuplicateLoader dl = (ConstructionProblem.DuplicateLoader)ce.getProblems().filter(ConstructionProblem.class).get(0);
assertEquals(kvpl, dl.getLoader());
assertTrue(ce.getSampleDirectory().length() > 0);
File sampleDir = new File(ce.getSampleDirectory());
assertTrue(sampleDir.exists());
assertTrue(sampleDir.listFiles().length > 0);
}
} |
33,993 | 0 | /*
* (a: SubjectType) is Type
*/ | private static void checkTypeCompatibility(
@NotNull ExpressionTypingContext context,
@Nullable JetType type,
@NotNull JetType subjectType,
@NotNull JetElement reportErrorOn
) {
// TODO : Take smart casts into account?
if (type == null) {
return;
}
if (isIntersectionEmpty(type, subjectType)) {
context.trace.report(INCOMPATIBLE_TYPES.on(reportErrorOn, type, subjectType));
return;
}
// check if the pattern is essentially a 'null' expression
if (KotlinBuiltIns.isNullableNothing(type) && !TypeUtils.isNullableType(subjectType)) {
context.trace.report(SENSELESS_NULL_IN_WHEN.on(reportErrorOn));
}
} | NONSATD | true | private static void checkTypeCompatibility(
@NotNull ExpressionTypingContext context,
@Nullable JetType type,
@NotNull JetType subjectType,
@NotNull JetElement reportErrorOn
) {
// TODO : Take smart casts into account?
if (type == null) {
return;
}
if (isIntersectionEmpty(type, subjectType)) {
context.trace.report(INCOMPATIBLE_TYPES.on(reportErrorOn, type, subjectType));
return;
}
// check if the pattern is essentially a 'null' expression
if (KotlinBuiltIns.isNullableNothing(type) && !TypeUtils.isNullableType(subjectType)) {
context.trace.report(SENSELESS_NULL_IN_WHEN.on(reportErrorOn));
}
} | private static void checkTypeCompatibility(
@NotNull ExpressionTypingContext context,
@Nullable JetType type,
@NotNull JetType subjectType,
@NotNull JetElement reportErrorOn
) {
// TODO : Take smart casts into account?
if (type == null) {
return;
}
if (isIntersectionEmpty(type, subjectType)) {
context.trace.report(INCOMPATIBLE_TYPES.on(reportErrorOn, type, subjectType));
return;
}
// check if the pattern is essentially a 'null' expression
if (KotlinBuiltIns.isNullableNothing(type) && !TypeUtils.isNullableType(subjectType)) {
context.trace.report(SENSELESS_NULL_IN_WHEN.on(reportErrorOn));
}
} | private static void checkTypeCompatibility(
@NotNull ExpressionTypingContext context,
@Nullable JetType type,
@NotNull JetType subjectType,
@NotNull JetElement reportErrorOn
) {
// TODO : Take smart casts into account?
if (type == null) {
return;
}
if (isIntersectionEmpty(type, subjectType)) {
context.trace.report(INCOMPATIBLE_TYPES.on(reportErrorOn, type, subjectType));
return;
}
// check if the pattern is essentially a 'null' expression
if (KotlinBuiltIns.isNullableNothing(type) && !TypeUtils.isNullableType(subjectType)) {
context.trace.report(SENSELESS_NULL_IN_WHEN.on(reportErrorOn));
}
} |
33,993 | 1 | // TODO : Take smart casts into account? | private static void checkTypeCompatibility(
@NotNull ExpressionTypingContext context,
@Nullable JetType type,
@NotNull JetType subjectType,
@NotNull JetElement reportErrorOn
) {
// TODO : Take smart casts into account?
if (type == null) {
return;
}
if (isIntersectionEmpty(type, subjectType)) {
context.trace.report(INCOMPATIBLE_TYPES.on(reportErrorOn, type, subjectType));
return;
}
// check if the pattern is essentially a 'null' expression
if (KotlinBuiltIns.isNullableNothing(type) && !TypeUtils.isNullableType(subjectType)) {
context.trace.report(SENSELESS_NULL_IN_WHEN.on(reportErrorOn));
}
} | DESIGN | true | @NotNull JetElement reportErrorOn
) {
// TODO : Take smart casts into account?
if (type == null) {
return; | private static void checkTypeCompatibility(
@NotNull ExpressionTypingContext context,
@Nullable JetType type,
@NotNull JetType subjectType,
@NotNull JetElement reportErrorOn
) {
// TODO : Take smart casts into account?
if (type == null) {
return;
}
if (isIntersectionEmpty(type, subjectType)) {
context.trace.report(INCOMPATIBLE_TYPES.on(reportErrorOn, type, subjectType));
return;
}
// check if the pattern is essentially a 'null' expression
if (KotlinBuiltIns.isNullableNothing(type) && !TypeUtils.isNullableType(subjectType)) {
context.trace.report(SENSELESS_NULL_IN_WHEN.on(reportErrorOn)); | private static void checkTypeCompatibility(
@NotNull ExpressionTypingContext context,
@Nullable JetType type,
@NotNull JetType subjectType,
@NotNull JetElement reportErrorOn
) {
// TODO : Take smart casts into account?
if (type == null) {
return;
}
if (isIntersectionEmpty(type, subjectType)) {
context.trace.report(INCOMPATIBLE_TYPES.on(reportErrorOn, type, subjectType));
return;
}
// check if the pattern is essentially a 'null' expression
if (KotlinBuiltIns.isNullableNothing(type) && !TypeUtils.isNullableType(subjectType)) {
context.trace.report(SENSELESS_NULL_IN_WHEN.on(reportErrorOn));
}
} |
33,993 | 2 | // check if the pattern is essentially a 'null' expression | private static void checkTypeCompatibility(
@NotNull ExpressionTypingContext context,
@Nullable JetType type,
@NotNull JetType subjectType,
@NotNull JetElement reportErrorOn
) {
// TODO : Take smart casts into account?
if (type == null) {
return;
}
if (isIntersectionEmpty(type, subjectType)) {
context.trace.report(INCOMPATIBLE_TYPES.on(reportErrorOn, type, subjectType));
return;
}
// check if the pattern is essentially a 'null' expression
if (KotlinBuiltIns.isNullableNothing(type) && !TypeUtils.isNullableType(subjectType)) {
context.trace.report(SENSELESS_NULL_IN_WHEN.on(reportErrorOn));
}
} | NONSATD | true | return;
}
// check if the pattern is essentially a 'null' expression
if (KotlinBuiltIns.isNullableNothing(type) && !TypeUtils.isNullableType(subjectType)) {
context.trace.report(SENSELESS_NULL_IN_WHEN.on(reportErrorOn)); | @NotNull JetElement reportErrorOn
) {
// TODO : Take smart casts into account?
if (type == null) {
return;
}
if (isIntersectionEmpty(type, subjectType)) {
context.trace.report(INCOMPATIBLE_TYPES.on(reportErrorOn, type, subjectType));
return;
}
// check if the pattern is essentially a 'null' expression
if (KotlinBuiltIns.isNullableNothing(type) && !TypeUtils.isNullableType(subjectType)) {
context.trace.report(SENSELESS_NULL_IN_WHEN.on(reportErrorOn));
}
} | private static void checkTypeCompatibility(
@NotNull ExpressionTypingContext context,
@Nullable JetType type,
@NotNull JetType subjectType,
@NotNull JetElement reportErrorOn
) {
// TODO : Take smart casts into account?
if (type == null) {
return;
}
if (isIntersectionEmpty(type, subjectType)) {
context.trace.report(INCOMPATIBLE_TYPES.on(reportErrorOn, type, subjectType));
return;
}
// check if the pattern is essentially a 'null' expression
if (KotlinBuiltIns.isNullableNothing(type) && !TypeUtils.isNullableType(subjectType)) {
context.trace.report(SENSELESS_NULL_IN_WHEN.on(reportErrorOn));
}
} |
1,229 | 0 | // TODO (lwhite): These tests don't fail. What was their intent? | @Test
public void testReadFailure() throws IOException {
// TODO (lwhite): These tests don't fail. What was their intent?
Table table1 =
Table.read()
.csv(CsvReadOptions.builder("../data/read_failure_test.csv").minimizeColumnSizes());
table1.structure(); // just make sure the import completed
ShortColumn test = table1.shortColumn("Test");
// TODO(lwhite): Better tests
assertNotNull(test.summary());
} | TEST | true | @Test
public void testReadFailure() throws IOException {
// TODO (lwhite): These tests don't fail. What was their intent?
Table table1 =
Table.read() | @Test
public void testReadFailure() throws IOException {
// TODO (lwhite): These tests don't fail. What was their intent?
Table table1 =
Table.read()
.csv(CsvReadOptions.builder("../data/read_failure_test.csv").minimizeColumnSizes());
table1.structure(); // just make sure the import completed
ShortColumn test = table1.shortColumn("Test");
// TODO(lwhite): Better tests
assertNotNull(test.summary());
} | @Test
public void testReadFailure() throws IOException {
// TODO (lwhite): These tests don't fail. What was their intent?
Table table1 =
Table.read()
.csv(CsvReadOptions.builder("../data/read_failure_test.csv").minimizeColumnSizes());
table1.structure(); // just make sure the import completed
ShortColumn test = table1.shortColumn("Test");
// TODO(lwhite): Better tests
assertNotNull(test.summary());
} |
1,229 | 1 | // just make sure the import completed | @Test
public void testReadFailure() throws IOException {
// TODO (lwhite): These tests don't fail. What was their intent?
Table table1 =
Table.read()
.csv(CsvReadOptions.builder("../data/read_failure_test.csv").minimizeColumnSizes());
table1.structure(); // just make sure the import completed
ShortColumn test = table1.shortColumn("Test");
// TODO(lwhite): Better tests
assertNotNull(test.summary());
} | NONSATD | true | Table.read()
.csv(CsvReadOptions.builder("../data/read_failure_test.csv").minimizeColumnSizes());
table1.structure(); // just make sure the import completed
ShortColumn test = table1.shortColumn("Test");
// TODO(lwhite): Better tests | @Test
public void testReadFailure() throws IOException {
// TODO (lwhite): These tests don't fail. What was their intent?
Table table1 =
Table.read()
.csv(CsvReadOptions.builder("../data/read_failure_test.csv").minimizeColumnSizes());
table1.structure(); // just make sure the import completed
ShortColumn test = table1.shortColumn("Test");
// TODO(lwhite): Better tests
assertNotNull(test.summary());
} | @Test
public void testReadFailure() throws IOException {
// TODO (lwhite): These tests don't fail. What was their intent?
Table table1 =
Table.read()
.csv(CsvReadOptions.builder("../data/read_failure_test.csv").minimizeColumnSizes());
table1.structure(); // just make sure the import completed
ShortColumn test = table1.shortColumn("Test");
// TODO(lwhite): Better tests
assertNotNull(test.summary());
} |
1,229 | 2 | // TODO(lwhite): Better tests | @Test
public void testReadFailure() throws IOException {
// TODO (lwhite): These tests don't fail. What was their intent?
Table table1 =
Table.read()
.csv(CsvReadOptions.builder("../data/read_failure_test.csv").minimizeColumnSizes());
table1.structure(); // just make sure the import completed
ShortColumn test = table1.shortColumn("Test");
// TODO(lwhite): Better tests
assertNotNull(test.summary());
} | TEST | true | table1.structure(); // just make sure the import completed
ShortColumn test = table1.shortColumn("Test");
// TODO(lwhite): Better tests
assertNotNull(test.summary());
} | @Test
public void testReadFailure() throws IOException {
// TODO (lwhite): These tests don't fail. What was their intent?
Table table1 =
Table.read()
.csv(CsvReadOptions.builder("../data/read_failure_test.csv").minimizeColumnSizes());
table1.structure(); // just make sure the import completed
ShortColumn test = table1.shortColumn("Test");
// TODO(lwhite): Better tests
assertNotNull(test.summary());
} | @Test
public void testReadFailure() throws IOException {
// TODO (lwhite): These tests don't fail. What was their intent?
Table table1 =
Table.read()
.csv(CsvReadOptions.builder("../data/read_failure_test.csv").minimizeColumnSizes());
table1.structure(); // just make sure the import completed
ShortColumn test = table1.shortColumn("Test");
// TODO(lwhite): Better tests
assertNotNull(test.summary());
} |
1,232 | 0 | // TODO: Add deleteSource | protected void deleteAllInstancesForConcept(ActionContext pAc, CeConcept pConcept) {
// Note that instances are not referenced by any other model entity, so
// they can be safely deleted here
// without leaving any references in place.
// If there are textual references to the instance (i.e. in a property
// that refers to it by name) then
// these references must be manually cleaned up separately by the code
// that calls this method.
// First remove the instances individually from the list of all
// instances
ArrayList<CeInstance> allInsts = getAllInstancesForConcept(pConcept);
for (CeInstance thisInst : allInsts) {
deleteInstanceNoRefs(thisInst);
}
// Then remove all of the instances for the specified concept from the
// other list
this.instancesByConcept.remove(pConcept);
// Then remove the unused sentences and sources that may be left as a
// result
pAc.getModelBuilder().removeUnusedSentencesAndSources(pAc);
} | IMPLEMENTATION | true | protected void deleteAllInstancesForConcept(ActionContext pAc, CeConcept pConcept) {
// Note that instances are not referenced by any other model entity, so
// they can be safely deleted here
// without leaving any references in place.
// If there are textual references to the instance (i.e. in a property
// that refers to it by name) then
// these references must be manually cleaned up separately by the code
// that calls this method.
// First remove the instances individually from the list of all
// instances
ArrayList<CeInstance> allInsts = getAllInstancesForConcept(pConcept);
for (CeInstance thisInst : allInsts) {
deleteInstanceNoRefs(thisInst);
}
// Then remove all of the instances for the specified concept from the
// other list
this.instancesByConcept.remove(pConcept);
// Then remove the unused sentences and sources that may be left as a
// result
pAc.getModelBuilder().removeUnusedSentencesAndSources(pAc);
} | protected void deleteAllInstancesForConcept(ActionContext pAc, CeConcept pConcept) {
// Note that instances are not referenced by any other model entity, so
// they can be safely deleted here
// without leaving any references in place.
// If there are textual references to the instance (i.e. in a property
// that refers to it by name) then
// these references must be manually cleaned up separately by the code
// that calls this method.
// First remove the instances individually from the list of all
// instances
ArrayList<CeInstance> allInsts = getAllInstancesForConcept(pConcept);
for (CeInstance thisInst : allInsts) {
deleteInstanceNoRefs(thisInst);
}
// Then remove all of the instances for the specified concept from the
// other list
this.instancesByConcept.remove(pConcept);
// Then remove the unused sentences and sources that may be left as a
// result
pAc.getModelBuilder().removeUnusedSentencesAndSources(pAc);
} | protected void deleteAllInstancesForConcept(ActionContext pAc, CeConcept pConcept) {
// Note that instances are not referenced by any other model entity, so
// they can be safely deleted here
// without leaving any references in place.
// If there are textual references to the instance (i.e. in a property
// that refers to it by name) then
// these references must be manually cleaned up separately by the code
// that calls this method.
// First remove the instances individually from the list of all
// instances
ArrayList<CeInstance> allInsts = getAllInstancesForConcept(pConcept);
for (CeInstance thisInst : allInsts) {
deleteInstanceNoRefs(thisInst);
}
// Then remove all of the instances for the specified concept from the
// other list
this.instancesByConcept.remove(pConcept);
// Then remove the unused sentences and sources that may be left as a
// result
pAc.getModelBuilder().removeUnusedSentencesAndSources(pAc);
} |
1,232 | 1 | // Note that instances are not referenced by any other model entity, so
// they can be safely deleted here
// without leaving any references in place.
// If there are textual references to the instance (i.e. in a property
// that refers to it by name) then
// these references must be manually cleaned up separately by the code
// that calls this method.
// First remove the instances individually from the list of all
// instances | protected void deleteAllInstancesForConcept(ActionContext pAc, CeConcept pConcept) {
// Note that instances are not referenced by any other model entity, so
// they can be safely deleted here
// without leaving any references in place.
// If there are textual references to the instance (i.e. in a property
// that refers to it by name) then
// these references must be manually cleaned up separately by the code
// that calls this method.
// First remove the instances individually from the list of all
// instances
ArrayList<CeInstance> allInsts = getAllInstancesForConcept(pConcept);
for (CeInstance thisInst : allInsts) {
deleteInstanceNoRefs(thisInst);
}
// Then remove all of the instances for the specified concept from the
// other list
this.instancesByConcept.remove(pConcept);
// Then remove the unused sentences and sources that may be left as a
// result
pAc.getModelBuilder().removeUnusedSentencesAndSources(pAc);
} | NONSATD | true | protected void deleteAllInstancesForConcept(ActionContext pAc, CeConcept pConcept) {
// Note that instances are not referenced by any other model entity, so
// they can be safely deleted here
// without leaving any references in place.
// If there are textual references to the instance (i.e. in a property
// that refers to it by name) then
// these references must be manually cleaned up separately by the code
// that calls this method.
// First remove the instances individually from the list of all
// instances
ArrayList<CeInstance> allInsts = getAllInstancesForConcept(pConcept);
for (CeInstance thisInst : allInsts) { | protected void deleteAllInstancesForConcept(ActionContext pAc, CeConcept pConcept) {
// Note that instances are not referenced by any other model entity, so
// they can be safely deleted here
// without leaving any references in place.
// If there are textual references to the instance (i.e. in a property
// that refers to it by name) then
// these references must be manually cleaned up separately by the code
// that calls this method.
// First remove the instances individually from the list of all
// instances
ArrayList<CeInstance> allInsts = getAllInstancesForConcept(pConcept);
for (CeInstance thisInst : allInsts) {
deleteInstanceNoRefs(thisInst);
}
// Then remove all of the instances for the specified concept from the
// other list
this.instancesByConcept.remove(pConcept);
// Then remove the unused sentences and sources that may be left as a
// result
pAc.getModelBuilder().removeUnusedSentencesAndSources(pAc); | protected void deleteAllInstancesForConcept(ActionContext pAc, CeConcept pConcept) {
// Note that instances are not referenced by any other model entity, so
// they can be safely deleted here
// without leaving any references in place.
// If there are textual references to the instance (i.e. in a property
// that refers to it by name) then
// these references must be manually cleaned up separately by the code
// that calls this method.
// First remove the instances individually from the list of all
// instances
ArrayList<CeInstance> allInsts = getAllInstancesForConcept(pConcept);
for (CeInstance thisInst : allInsts) {
deleteInstanceNoRefs(thisInst);
}
// Then remove all of the instances for the specified concept from the
// other list
this.instancesByConcept.remove(pConcept);
// Then remove the unused sentences and sources that may be left as a
// result
pAc.getModelBuilder().removeUnusedSentencesAndSources(pAc);
} |
1,232 | 2 | // Then remove all of the instances for the specified concept from the
// other list | protected void deleteAllInstancesForConcept(ActionContext pAc, CeConcept pConcept) {
// Note that instances are not referenced by any other model entity, so
// they can be safely deleted here
// without leaving any references in place.
// If there are textual references to the instance (i.e. in a property
// that refers to it by name) then
// these references must be manually cleaned up separately by the code
// that calls this method.
// First remove the instances individually from the list of all
// instances
ArrayList<CeInstance> allInsts = getAllInstancesForConcept(pConcept);
for (CeInstance thisInst : allInsts) {
deleteInstanceNoRefs(thisInst);
}
// Then remove all of the instances for the specified concept from the
// other list
this.instancesByConcept.remove(pConcept);
// Then remove the unused sentences and sources that may be left as a
// result
pAc.getModelBuilder().removeUnusedSentencesAndSources(pAc);
} | NONSATD | true | deleteInstanceNoRefs(thisInst);
}
// Then remove all of the instances for the specified concept from the
// other list
this.instancesByConcept.remove(pConcept);
// Then remove the unused sentences and sources that may be left as a | // If there are textual references to the instance (i.e. in a property
// that refers to it by name) then
// these references must be manually cleaned up separately by the code
// that calls this method.
// First remove the instances individually from the list of all
// instances
ArrayList<CeInstance> allInsts = getAllInstancesForConcept(pConcept);
for (CeInstance thisInst : allInsts) {
deleteInstanceNoRefs(thisInst);
}
// Then remove all of the instances for the specified concept from the
// other list
this.instancesByConcept.remove(pConcept);
// Then remove the unused sentences and sources that may be left as a
// result
pAc.getModelBuilder().removeUnusedSentencesAndSources(pAc);
} | protected void deleteAllInstancesForConcept(ActionContext pAc, CeConcept pConcept) {
// Note that instances are not referenced by any other model entity, so
// they can be safely deleted here
// without leaving any references in place.
// If there are textual references to the instance (i.e. in a property
// that refers to it by name) then
// these references must be manually cleaned up separately by the code
// that calls this method.
// First remove the instances individually from the list of all
// instances
ArrayList<CeInstance> allInsts = getAllInstancesForConcept(pConcept);
for (CeInstance thisInst : allInsts) {
deleteInstanceNoRefs(thisInst);
}
// Then remove all of the instances for the specified concept from the
// other list
this.instancesByConcept.remove(pConcept);
// Then remove the unused sentences and sources that may be left as a
// result
pAc.getModelBuilder().removeUnusedSentencesAndSources(pAc);
} |
1,232 | 3 | // Then remove the unused sentences and sources that may be left as a
// result | protected void deleteAllInstancesForConcept(ActionContext pAc, CeConcept pConcept) {
// Note that instances are not referenced by any other model entity, so
// they can be safely deleted here
// without leaving any references in place.
// If there are textual references to the instance (i.e. in a property
// that refers to it by name) then
// these references must be manually cleaned up separately by the code
// that calls this method.
// First remove the instances individually from the list of all
// instances
ArrayList<CeInstance> allInsts = getAllInstancesForConcept(pConcept);
for (CeInstance thisInst : allInsts) {
deleteInstanceNoRefs(thisInst);
}
// Then remove all of the instances for the specified concept from the
// other list
this.instancesByConcept.remove(pConcept);
// Then remove the unused sentences and sources that may be left as a
// result
pAc.getModelBuilder().removeUnusedSentencesAndSources(pAc);
} | NONSATD | true | // other list
this.instancesByConcept.remove(pConcept);
// Then remove the unused sentences and sources that may be left as a
// result
pAc.getModelBuilder().removeUnusedSentencesAndSources(pAc);
} | // that calls this method.
// First remove the instances individually from the list of all
// instances
ArrayList<CeInstance> allInsts = getAllInstancesForConcept(pConcept);
for (CeInstance thisInst : allInsts) {
deleteInstanceNoRefs(thisInst);
}
// Then remove all of the instances for the specified concept from the
// other list
this.instancesByConcept.remove(pConcept);
// Then remove the unused sentences and sources that may be left as a
// result
pAc.getModelBuilder().removeUnusedSentencesAndSources(pAc);
} | protected void deleteAllInstancesForConcept(ActionContext pAc, CeConcept pConcept) {
// Note that instances are not referenced by any other model entity, so
// they can be safely deleted here
// without leaving any references in place.
// If there are textual references to the instance (i.e. in a property
// that refers to it by name) then
// these references must be manually cleaned up separately by the code
// that calls this method.
// First remove the instances individually from the list of all
// instances
ArrayList<CeInstance> allInsts = getAllInstancesForConcept(pConcept);
for (CeInstance thisInst : allInsts) {
deleteInstanceNoRefs(thisInst);
}
// Then remove all of the instances for the specified concept from the
// other list
this.instancesByConcept.remove(pConcept);
// Then remove the unused sentences and sources that may be left as a
// result
pAc.getModelBuilder().removeUnusedSentencesAndSources(pAc);
} |
1,238 | 0 | /* DSW0 */ | public void handler() {
PORT_START(); /* DSW0 */
/* According to the manual, 0x04, 0x08 and 0x10 should always be off,
but... */
PORT_DIPNAME( 0x07, 0x00, "Rank" ); PORT_DIPSETTING( 0x00, "A" ); PORT_DIPSETTING( 0x01, "B" ); PORT_DIPSETTING( 0x02, "C" ); PORT_DIPSETTING( 0x03, "D" ); PORT_DIPSETTING( 0x04, "E" ); PORT_DIPSETTING( 0x05, "F" ); PORT_DIPSETTING( 0x06, "G" ); PORT_DIPSETTING( 0x07, "H" ); PORT_DIPNAME( 0x18, 0x00, DEF_STR( "Coin_B") );
PORT_DIPSETTING( 0x18, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x08, DEF_STR( "1C_5C"));
PORT_DIPSETTING( 0x10, DEF_STR( "1C_7C") );
PORT_DIPNAME( 0x20, 0x00, DEF_STR( "Demo_Sounds") );
PORT_DIPSETTING( 0x20, DEF_STR( "Off") );
PORT_DIPSETTING( 0x00, DEF_STR( "On") );
PORT_BITX( 0x40, 0x00, IPT_DIPSWITCH_NAME | IPF_CHEAT, "Rack Test", KEYCODE_F1, IP_JOY_NONE ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x40, DEF_STR( "On") );
PORT_DIPNAME( 0x80, 0x00, "Freeze" ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x80, DEF_STR( "On") );
PORT_START(); /* DSW1 */
PORT_DIPNAME( 0x07, 0x00, DEF_STR( "Coin_A") );
PORT_DIPSETTING( 0x06, DEF_STR( "3C_1C") );
PORT_DIPSETTING( 0x04, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x07, DEF_STR( "3C_2C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x05, DEF_STR( "2C_3C") );
PORT_DIPSETTING( 0x01, DEF_STR( "1C_2C") );
PORT_DIPSETTING( 0x02, DEF_STR( "1C_3C") );
PORT_DIPSETTING( 0x03, DEF_STR( "1C_6C") );
/* TODO: bonus scores are different for 5 lives */
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "20k 70k and every 70k" ); PORT_DIPSETTING( 0x30, "20k 80k and every 80k" ); PORT_DIPSETTING( 0x08, "20k 60k" ); PORT_DIPSETTING( 0x00, "20k 70k" ); PORT_DIPSETTING( 0x10, "20k 80k" ); PORT_DIPSETTING( 0x18, "30k 100k" ); PORT_DIPSETTING( 0x20, "20k" ); PORT_DIPSETTING( 0x38, "None" );/* those are the bonus with 5 lives
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "30k 100k and every 100k" ); PORT_DIPSETTING( 0x30, "40k 120k and every 120k" ); PORT_DIPSETTING( 0x00, "30k 80k" ); PORT_DIPSETTING( 0x08, "30k 100k" ); PORT_DIPSETTING( 0x10, "30k 120k" ); PORT_DIPSETTING( 0x18, "30k" ); PORT_DIPSETTING( 0x20, "40k" ); PORT_DIPSETTING( 0x38, "None" );*/
PORT_DIPNAME( 0xc0, 0x00, DEF_STR( "Lives") );
PORT_DIPSETTING( 0x80, "1" ); PORT_DIPSETTING( 0xc0, "2" ); PORT_DIPSETTING( 0x00, "3" ); PORT_DIPSETTING( 0x40, "5" );
PORT_START(); /* DSW2 */
PORT_BIT( 0x03, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_DIPNAME( 0x04, 0x00, DEF_STR( "Cabinet") );
PORT_DIPSETTING( 0x00, DEF_STR( "Upright") );
PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") );
PORT_SERVICE( 0x08, IP_ACTIVE_HIGH ); PORT_BIT( 0xf0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
/* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */
PORT_BIT( 0x01, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY ); PORT_BIT( 0x04, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT_IMPULSE( 0x01, IP_ACTIVE_HIGH, IPT_COIN1, 1 );/* Coin 2 is not working */
PORT_BIT_IMPULSE( 0x02, IP_ACTIVE_HIGH, IPT_COIN2, 1 ); PORT_BIT( 0x0c, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_START1, 1 ); PORT_BIT_IMPULSE( 0x20, IP_ACTIVE_HIGH, IPT_START2, 1 ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS );
INPUT_PORTS_END(); } | NONSATD | true | public void handler() {
PORT_START(); /* DSW0 */
/* According to the manual, 0x04, 0x08 and 0x10 should always be off,
but... */ | public void handler() {
PORT_START(); /* DSW0 */
/* According to the manual, 0x04, 0x08 and 0x10 should always be off,
but... */
PORT_DIPNAME( 0x07, 0x00, "Rank" ); PORT_DIPSETTING( 0x00, "A" ); PORT_DIPSETTING( 0x01, "B" ); PORT_DIPSETTING( 0x02, "C" ); PORT_DIPSETTING( 0x03, "D" ); PORT_DIPSETTING( 0x04, "E" ); PORT_DIPSETTING( 0x05, "F" ); PORT_DIPSETTING( 0x06, "G" ); PORT_DIPSETTING( 0x07, "H" ); PORT_DIPNAME( 0x18, 0x00, DEF_STR( "Coin_B") );
PORT_DIPSETTING( 0x18, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x08, DEF_STR( "1C_5C"));
PORT_DIPSETTING( 0x10, DEF_STR( "1C_7C") );
PORT_DIPNAME( 0x20, 0x00, DEF_STR( "Demo_Sounds") );
PORT_DIPSETTING( 0x20, DEF_STR( "Off") );
PORT_DIPSETTING( 0x00, DEF_STR( "On") ); | public void handler() {
PORT_START(); /* DSW0 */
/* According to the manual, 0x04, 0x08 and 0x10 should always be off,
but... */
PORT_DIPNAME( 0x07, 0x00, "Rank" ); PORT_DIPSETTING( 0x00, "A" ); PORT_DIPSETTING( 0x01, "B" ); PORT_DIPSETTING( 0x02, "C" ); PORT_DIPSETTING( 0x03, "D" ); PORT_DIPSETTING( 0x04, "E" ); PORT_DIPSETTING( 0x05, "F" ); PORT_DIPSETTING( 0x06, "G" ); PORT_DIPSETTING( 0x07, "H" ); PORT_DIPNAME( 0x18, 0x00, DEF_STR( "Coin_B") );
PORT_DIPSETTING( 0x18, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x08, DEF_STR( "1C_5C"));
PORT_DIPSETTING( 0x10, DEF_STR( "1C_7C") );
PORT_DIPNAME( 0x20, 0x00, DEF_STR( "Demo_Sounds") );
PORT_DIPSETTING( 0x20, DEF_STR( "Off") );
PORT_DIPSETTING( 0x00, DEF_STR( "On") );
PORT_BITX( 0x40, 0x00, IPT_DIPSWITCH_NAME | IPF_CHEAT, "Rack Test", KEYCODE_F1, IP_JOY_NONE ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x40, DEF_STR( "On") );
PORT_DIPNAME( 0x80, 0x00, "Freeze" ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x80, DEF_STR( "On") );
PORT_START(); /* DSW1 */
PORT_DIPNAME( 0x07, 0x00, DEF_STR( "Coin_A") );
PORT_DIPSETTING( 0x06, DEF_STR( "3C_1C") );
PORT_DIPSETTING( 0x04, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x07, DEF_STR( "3C_2C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") ); |
1,238 | 1 | /* According to the manual, 0x04, 0x08 and 0x10 should always be off,
but... */ | public void handler() {
PORT_START(); /* DSW0 */
/* According to the manual, 0x04, 0x08 and 0x10 should always be off,
but... */
PORT_DIPNAME( 0x07, 0x00, "Rank" ); PORT_DIPSETTING( 0x00, "A" ); PORT_DIPSETTING( 0x01, "B" ); PORT_DIPSETTING( 0x02, "C" ); PORT_DIPSETTING( 0x03, "D" ); PORT_DIPSETTING( 0x04, "E" ); PORT_DIPSETTING( 0x05, "F" ); PORT_DIPSETTING( 0x06, "G" ); PORT_DIPSETTING( 0x07, "H" ); PORT_DIPNAME( 0x18, 0x00, DEF_STR( "Coin_B") );
PORT_DIPSETTING( 0x18, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x08, DEF_STR( "1C_5C"));
PORT_DIPSETTING( 0x10, DEF_STR( "1C_7C") );
PORT_DIPNAME( 0x20, 0x00, DEF_STR( "Demo_Sounds") );
PORT_DIPSETTING( 0x20, DEF_STR( "Off") );
PORT_DIPSETTING( 0x00, DEF_STR( "On") );
PORT_BITX( 0x40, 0x00, IPT_DIPSWITCH_NAME | IPF_CHEAT, "Rack Test", KEYCODE_F1, IP_JOY_NONE ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x40, DEF_STR( "On") );
PORT_DIPNAME( 0x80, 0x00, "Freeze" ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x80, DEF_STR( "On") );
PORT_START(); /* DSW1 */
PORT_DIPNAME( 0x07, 0x00, DEF_STR( "Coin_A") );
PORT_DIPSETTING( 0x06, DEF_STR( "3C_1C") );
PORT_DIPSETTING( 0x04, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x07, DEF_STR( "3C_2C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x05, DEF_STR( "2C_3C") );
PORT_DIPSETTING( 0x01, DEF_STR( "1C_2C") );
PORT_DIPSETTING( 0x02, DEF_STR( "1C_3C") );
PORT_DIPSETTING( 0x03, DEF_STR( "1C_6C") );
/* TODO: bonus scores are different for 5 lives */
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "20k 70k and every 70k" ); PORT_DIPSETTING( 0x30, "20k 80k and every 80k" ); PORT_DIPSETTING( 0x08, "20k 60k" ); PORT_DIPSETTING( 0x00, "20k 70k" ); PORT_DIPSETTING( 0x10, "20k 80k" ); PORT_DIPSETTING( 0x18, "30k 100k" ); PORT_DIPSETTING( 0x20, "20k" ); PORT_DIPSETTING( 0x38, "None" );/* those are the bonus with 5 lives
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "30k 100k and every 100k" ); PORT_DIPSETTING( 0x30, "40k 120k and every 120k" ); PORT_DIPSETTING( 0x00, "30k 80k" ); PORT_DIPSETTING( 0x08, "30k 100k" ); PORT_DIPSETTING( 0x10, "30k 120k" ); PORT_DIPSETTING( 0x18, "30k" ); PORT_DIPSETTING( 0x20, "40k" ); PORT_DIPSETTING( 0x38, "None" );*/
PORT_DIPNAME( 0xc0, 0x00, DEF_STR( "Lives") );
PORT_DIPSETTING( 0x80, "1" ); PORT_DIPSETTING( 0xc0, "2" ); PORT_DIPSETTING( 0x00, "3" ); PORT_DIPSETTING( 0x40, "5" );
PORT_START(); /* DSW2 */
PORT_BIT( 0x03, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_DIPNAME( 0x04, 0x00, DEF_STR( "Cabinet") );
PORT_DIPSETTING( 0x00, DEF_STR( "Upright") );
PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") );
PORT_SERVICE( 0x08, IP_ACTIVE_HIGH ); PORT_BIT( 0xf0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
/* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */
PORT_BIT( 0x01, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY ); PORT_BIT( 0x04, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT_IMPULSE( 0x01, IP_ACTIVE_HIGH, IPT_COIN1, 1 );/* Coin 2 is not working */
PORT_BIT_IMPULSE( 0x02, IP_ACTIVE_HIGH, IPT_COIN2, 1 ); PORT_BIT( 0x0c, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_START1, 1 ); PORT_BIT_IMPULSE( 0x20, IP_ACTIVE_HIGH, IPT_START2, 1 ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS );
INPUT_PORTS_END(); } | NONSATD | true | public void handler() {
PORT_START(); /* DSW0 */
/* According to the manual, 0x04, 0x08 and 0x10 should always be off,
but... */
PORT_DIPNAME( 0x07, 0x00, "Rank" ); PORT_DIPSETTING( 0x00, "A" ); PORT_DIPSETTING( 0x01, "B" ); PORT_DIPSETTING( 0x02, "C" ); PORT_DIPSETTING( 0x03, "D" ); PORT_DIPSETTING( 0x04, "E" ); PORT_DIPSETTING( 0x05, "F" ); PORT_DIPSETTING( 0x06, "G" ); PORT_DIPSETTING( 0x07, "H" ); PORT_DIPNAME( 0x18, 0x00, DEF_STR( "Coin_B") );
PORT_DIPSETTING( 0x18, DEF_STR( "2C_1C") ); | public void handler() {
PORT_START(); /* DSW0 */
/* According to the manual, 0x04, 0x08 and 0x10 should always be off,
but... */
PORT_DIPNAME( 0x07, 0x00, "Rank" ); PORT_DIPSETTING( 0x00, "A" ); PORT_DIPSETTING( 0x01, "B" ); PORT_DIPSETTING( 0x02, "C" ); PORT_DIPSETTING( 0x03, "D" ); PORT_DIPSETTING( 0x04, "E" ); PORT_DIPSETTING( 0x05, "F" ); PORT_DIPSETTING( 0x06, "G" ); PORT_DIPSETTING( 0x07, "H" ); PORT_DIPNAME( 0x18, 0x00, DEF_STR( "Coin_B") );
PORT_DIPSETTING( 0x18, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x08, DEF_STR( "1C_5C"));
PORT_DIPSETTING( 0x10, DEF_STR( "1C_7C") );
PORT_DIPNAME( 0x20, 0x00, DEF_STR( "Demo_Sounds") );
PORT_DIPSETTING( 0x20, DEF_STR( "Off") );
PORT_DIPSETTING( 0x00, DEF_STR( "On") );
PORT_BITX( 0x40, 0x00, IPT_DIPSWITCH_NAME | IPF_CHEAT, "Rack Test", KEYCODE_F1, IP_JOY_NONE ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x40, DEF_STR( "On") ); | public void handler() {
PORT_START(); /* DSW0 */
/* According to the manual, 0x04, 0x08 and 0x10 should always be off,
but... */
PORT_DIPNAME( 0x07, 0x00, "Rank" ); PORT_DIPSETTING( 0x00, "A" ); PORT_DIPSETTING( 0x01, "B" ); PORT_DIPSETTING( 0x02, "C" ); PORT_DIPSETTING( 0x03, "D" ); PORT_DIPSETTING( 0x04, "E" ); PORT_DIPSETTING( 0x05, "F" ); PORT_DIPSETTING( 0x06, "G" ); PORT_DIPSETTING( 0x07, "H" ); PORT_DIPNAME( 0x18, 0x00, DEF_STR( "Coin_B") );
PORT_DIPSETTING( 0x18, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x08, DEF_STR( "1C_5C"));
PORT_DIPSETTING( 0x10, DEF_STR( "1C_7C") );
PORT_DIPNAME( 0x20, 0x00, DEF_STR( "Demo_Sounds") );
PORT_DIPSETTING( 0x20, DEF_STR( "Off") );
PORT_DIPSETTING( 0x00, DEF_STR( "On") );
PORT_BITX( 0x40, 0x00, IPT_DIPSWITCH_NAME | IPF_CHEAT, "Rack Test", KEYCODE_F1, IP_JOY_NONE ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x40, DEF_STR( "On") );
PORT_DIPNAME( 0x80, 0x00, "Freeze" ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x80, DEF_STR( "On") );
PORT_START(); /* DSW1 */
PORT_DIPNAME( 0x07, 0x00, DEF_STR( "Coin_A") );
PORT_DIPSETTING( 0x06, DEF_STR( "3C_1C") );
PORT_DIPSETTING( 0x04, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x07, DEF_STR( "3C_2C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x05, DEF_STR( "2C_3C") );
PORT_DIPSETTING( 0x01, DEF_STR( "1C_2C") ); |
1,238 | 2 | /* DSW1 */ | public void handler() {
PORT_START(); /* DSW0 */
/* According to the manual, 0x04, 0x08 and 0x10 should always be off,
but... */
PORT_DIPNAME( 0x07, 0x00, "Rank" ); PORT_DIPSETTING( 0x00, "A" ); PORT_DIPSETTING( 0x01, "B" ); PORT_DIPSETTING( 0x02, "C" ); PORT_DIPSETTING( 0x03, "D" ); PORT_DIPSETTING( 0x04, "E" ); PORT_DIPSETTING( 0x05, "F" ); PORT_DIPSETTING( 0x06, "G" ); PORT_DIPSETTING( 0x07, "H" ); PORT_DIPNAME( 0x18, 0x00, DEF_STR( "Coin_B") );
PORT_DIPSETTING( 0x18, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x08, DEF_STR( "1C_5C"));
PORT_DIPSETTING( 0x10, DEF_STR( "1C_7C") );
PORT_DIPNAME( 0x20, 0x00, DEF_STR( "Demo_Sounds") );
PORT_DIPSETTING( 0x20, DEF_STR( "Off") );
PORT_DIPSETTING( 0x00, DEF_STR( "On") );
PORT_BITX( 0x40, 0x00, IPT_DIPSWITCH_NAME | IPF_CHEAT, "Rack Test", KEYCODE_F1, IP_JOY_NONE ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x40, DEF_STR( "On") );
PORT_DIPNAME( 0x80, 0x00, "Freeze" ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x80, DEF_STR( "On") );
PORT_START(); /* DSW1 */
PORT_DIPNAME( 0x07, 0x00, DEF_STR( "Coin_A") );
PORT_DIPSETTING( 0x06, DEF_STR( "3C_1C") );
PORT_DIPSETTING( 0x04, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x07, DEF_STR( "3C_2C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x05, DEF_STR( "2C_3C") );
PORT_DIPSETTING( 0x01, DEF_STR( "1C_2C") );
PORT_DIPSETTING( 0x02, DEF_STR( "1C_3C") );
PORT_DIPSETTING( 0x03, DEF_STR( "1C_6C") );
/* TODO: bonus scores are different for 5 lives */
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "20k 70k and every 70k" ); PORT_DIPSETTING( 0x30, "20k 80k and every 80k" ); PORT_DIPSETTING( 0x08, "20k 60k" ); PORT_DIPSETTING( 0x00, "20k 70k" ); PORT_DIPSETTING( 0x10, "20k 80k" ); PORT_DIPSETTING( 0x18, "30k 100k" ); PORT_DIPSETTING( 0x20, "20k" ); PORT_DIPSETTING( 0x38, "None" );/* those are the bonus with 5 lives
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "30k 100k and every 100k" ); PORT_DIPSETTING( 0x30, "40k 120k and every 120k" ); PORT_DIPSETTING( 0x00, "30k 80k" ); PORT_DIPSETTING( 0x08, "30k 100k" ); PORT_DIPSETTING( 0x10, "30k 120k" ); PORT_DIPSETTING( 0x18, "30k" ); PORT_DIPSETTING( 0x20, "40k" ); PORT_DIPSETTING( 0x38, "None" );*/
PORT_DIPNAME( 0xc0, 0x00, DEF_STR( "Lives") );
PORT_DIPSETTING( 0x80, "1" ); PORT_DIPSETTING( 0xc0, "2" ); PORT_DIPSETTING( 0x00, "3" ); PORT_DIPSETTING( 0x40, "5" );
PORT_START(); /* DSW2 */
PORT_BIT( 0x03, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_DIPNAME( 0x04, 0x00, DEF_STR( "Cabinet") );
PORT_DIPSETTING( 0x00, DEF_STR( "Upright") );
PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") );
PORT_SERVICE( 0x08, IP_ACTIVE_HIGH ); PORT_BIT( 0xf0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
/* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */
PORT_BIT( 0x01, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY ); PORT_BIT( 0x04, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT_IMPULSE( 0x01, IP_ACTIVE_HIGH, IPT_COIN1, 1 );/* Coin 2 is not working */
PORT_BIT_IMPULSE( 0x02, IP_ACTIVE_HIGH, IPT_COIN2, 1 ); PORT_BIT( 0x0c, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_START1, 1 ); PORT_BIT_IMPULSE( 0x20, IP_ACTIVE_HIGH, IPT_START2, 1 ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS );
INPUT_PORTS_END(); } | NONSATD | true | PORT_DIPNAME( 0x80, 0x00, "Freeze" ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x80, DEF_STR( "On") );
PORT_START(); /* DSW1 */
PORT_DIPNAME( 0x07, 0x00, DEF_STR( "Coin_A") );
PORT_DIPSETTING( 0x06, DEF_STR( "3C_1C") ); | PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x08, DEF_STR( "1C_5C"));
PORT_DIPSETTING( 0x10, DEF_STR( "1C_7C") );
PORT_DIPNAME( 0x20, 0x00, DEF_STR( "Demo_Sounds") );
PORT_DIPSETTING( 0x20, DEF_STR( "Off") );
PORT_DIPSETTING( 0x00, DEF_STR( "On") );
PORT_BITX( 0x40, 0x00, IPT_DIPSWITCH_NAME | IPF_CHEAT, "Rack Test", KEYCODE_F1, IP_JOY_NONE ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x40, DEF_STR( "On") );
PORT_DIPNAME( 0x80, 0x00, "Freeze" ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x80, DEF_STR( "On") );
PORT_START(); /* DSW1 */
PORT_DIPNAME( 0x07, 0x00, DEF_STR( "Coin_A") );
PORT_DIPSETTING( 0x06, DEF_STR( "3C_1C") );
PORT_DIPSETTING( 0x04, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x07, DEF_STR( "3C_2C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x05, DEF_STR( "2C_3C") );
PORT_DIPSETTING( 0x01, DEF_STR( "1C_2C") );
PORT_DIPSETTING( 0x02, DEF_STR( "1C_3C") );
PORT_DIPSETTING( 0x03, DEF_STR( "1C_6C") );
/* TODO: bonus scores are different for 5 lives */ | public void handler() {
PORT_START(); /* DSW0 */
/* According to the manual, 0x04, 0x08 and 0x10 should always be off,
but... */
PORT_DIPNAME( 0x07, 0x00, "Rank" ); PORT_DIPSETTING( 0x00, "A" ); PORT_DIPSETTING( 0x01, "B" ); PORT_DIPSETTING( 0x02, "C" ); PORT_DIPSETTING( 0x03, "D" ); PORT_DIPSETTING( 0x04, "E" ); PORT_DIPSETTING( 0x05, "F" ); PORT_DIPSETTING( 0x06, "G" ); PORT_DIPSETTING( 0x07, "H" ); PORT_DIPNAME( 0x18, 0x00, DEF_STR( "Coin_B") );
PORT_DIPSETTING( 0x18, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x08, DEF_STR( "1C_5C"));
PORT_DIPSETTING( 0x10, DEF_STR( "1C_7C") );
PORT_DIPNAME( 0x20, 0x00, DEF_STR( "Demo_Sounds") );
PORT_DIPSETTING( 0x20, DEF_STR( "Off") );
PORT_DIPSETTING( 0x00, DEF_STR( "On") );
PORT_BITX( 0x40, 0x00, IPT_DIPSWITCH_NAME | IPF_CHEAT, "Rack Test", KEYCODE_F1, IP_JOY_NONE ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x40, DEF_STR( "On") );
PORT_DIPNAME( 0x80, 0x00, "Freeze" ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x80, DEF_STR( "On") );
PORT_START(); /* DSW1 */
PORT_DIPNAME( 0x07, 0x00, DEF_STR( "Coin_A") );
PORT_DIPSETTING( 0x06, DEF_STR( "3C_1C") );
PORT_DIPSETTING( 0x04, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x07, DEF_STR( "3C_2C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x05, DEF_STR( "2C_3C") );
PORT_DIPSETTING( 0x01, DEF_STR( "1C_2C") );
PORT_DIPSETTING( 0x02, DEF_STR( "1C_3C") );
PORT_DIPSETTING( 0x03, DEF_STR( "1C_6C") );
/* TODO: bonus scores are different for 5 lives */
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "20k 70k and every 70k" ); PORT_DIPSETTING( 0x30, "20k 80k and every 80k" ); PORT_DIPSETTING( 0x08, "20k 60k" ); PORT_DIPSETTING( 0x00, "20k 70k" ); PORT_DIPSETTING( 0x10, "20k 80k" ); PORT_DIPSETTING( 0x18, "30k 100k" ); PORT_DIPSETTING( 0x20, "20k" ); PORT_DIPSETTING( 0x38, "None" );/* those are the bonus with 5 lives
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "30k 100k and every 100k" ); PORT_DIPSETTING( 0x30, "40k 120k and every 120k" ); PORT_DIPSETTING( 0x00, "30k 80k" ); PORT_DIPSETTING( 0x08, "30k 100k" ); PORT_DIPSETTING( 0x10, "30k 120k" ); PORT_DIPSETTING( 0x18, "30k" ); PORT_DIPSETTING( 0x20, "40k" ); PORT_DIPSETTING( 0x38, "None" );*/
PORT_DIPNAME( 0xc0, 0x00, DEF_STR( "Lives") );
PORT_DIPSETTING( 0x80, "1" ); PORT_DIPSETTING( 0xc0, "2" ); PORT_DIPSETTING( 0x00, "3" ); PORT_DIPSETTING( 0x40, "5" );
PORT_START(); /* DSW2 */
PORT_BIT( 0x03, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_DIPNAME( 0x04, 0x00, DEF_STR( "Cabinet") );
PORT_DIPSETTING( 0x00, DEF_STR( "Upright") );
PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") ); |
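A note on the macros that dominate these rows: PORT_DIPNAME declares one DIP-switch group as a bit mask over the port byte, and each following PORT_DIPSETTING pairs a masked value with its menu label. A minimal Java sketch of that mask/value model — DipField, decode(), and DipDemo are hypothetical illustrations, not arcadeflex API:

import java.util.LinkedHashMap;
import java.util.Map;

class DipField {
    final int mask;          // bits of the port byte owned by this switch group
    final String name;       // e.g. "Coin_A", "Lives", "Freeze"
    final Map<Integer, String> settings = new LinkedHashMap<>();

    DipField(int mask, String name) { this.mask = mask; this.name = name; }

    DipField setting(int value, String label) { settings.put(value, label); return this; }

    // Extract this field's value from the raw DIP byte and look up its label.
    String decode(int portByte) { return settings.get(portByte & mask); }
}

public class DipDemo {
    public static void main(String[] args) {
        DipField lives = new DipField(0xc0, "Lives")
                .setting(0x80, "1").setting(0xc0, "2")
                .setting(0x00, "3").setting(0x40, "5");
        System.out.println(lives.name + " = " + lives.decode(0x40)); // Lives = 5
    }
}

Decoding 0x40 under the 0xc0 mask yields the "5"-lives entry, the setting that the TODO comments in the rows below are about.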
1,238 | 3 | /* TODO: bonus scores are different for 5 lives */ |
public void handler() {
PORT_START(); /* DSW0 */
/* According to the manual, 0x04, 0x08 and 0x10 should always be off,
but... */
PORT_DIPNAME( 0x07, 0x00, "Rank" ); PORT_DIPSETTING( 0x00, "A" ); PORT_DIPSETTING( 0x01, "B" ); PORT_DIPSETTING( 0x02, "C" ); PORT_DIPSETTING( 0x03, "D" ); PORT_DIPSETTING( 0x04, "E" ); PORT_DIPSETTING( 0x05, "F" ); PORT_DIPSETTING( 0x06, "G" ); PORT_DIPSETTING( 0x07, "H" ); PORT_DIPNAME( 0x18, 0x00, DEF_STR( "Coin_B") );
PORT_DIPSETTING( 0x18, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x08, DEF_STR( "1C_5C"));
PORT_DIPSETTING( 0x10, DEF_STR( "1C_7C") );
PORT_DIPNAME( 0x20, 0x00, DEF_STR( "Demo_Sounds") );
PORT_DIPSETTING( 0x20, DEF_STR( "Off") );
PORT_DIPSETTING( 0x00, DEF_STR( "On") );
PORT_BITX( 0x40, 0x00, IPT_DIPSWITCH_NAME | IPF_CHEAT, "Rack Test", KEYCODE_F1, IP_JOY_NONE );
PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x40, DEF_STR( "On") );
PORT_DIPNAME( 0x80, 0x00, "Freeze" );
PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x80, DEF_STR( "On") );
PORT_START(); /* DSW1 */
PORT_DIPNAME( 0x07, 0x00, DEF_STR( "Coin_A") );
PORT_DIPSETTING( 0x06, DEF_STR( "3C_1C") );
PORT_DIPSETTING( 0x04, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x07, DEF_STR( "3C_2C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x05, DEF_STR( "2C_3C") );
PORT_DIPSETTING( 0x01, DEF_STR( "1C_2C") );
PORT_DIPSETTING( 0x02, DEF_STR( "1C_3C") );
PORT_DIPSETTING( 0x03, DEF_STR( "1C_6C") );
/* TODO: bonus scores are different for 5 lives */
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "20k 70k and every 70k" ); PORT_DIPSETTING( 0x30, "20k 80k and every 80k" ); PORT_DIPSETTING( 0x08, "20k 60k" ); PORT_DIPSETTING( 0x00, "20k 70k" ); PORT_DIPSETTING( 0x10, "20k 80k" ); PORT_DIPSETTING( 0x18, "30k 100k" ); PORT_DIPSETTING( 0x20, "20k" ); PORT_DIPSETTING( 0x38, "None" );/* those are the bonus with 5 lives
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "30k 100k and every 100k" ); PORT_DIPSETTING( 0x30, "40k 120k and every 120k" ); PORT_DIPSETTING( 0x00, "30k 80k" ); PORT_DIPSETTING( 0x08, "30k 100k" ); PORT_DIPSETTING( 0x10, "30k 120k" ); PORT_DIPSETTING( 0x18, "30k" ); PORT_DIPSETTING( 0x20, "40k" ); PORT_DIPSETTING( 0x38, "None" );*/
PORT_DIPNAME( 0xc0, 0x00, DEF_STR( "Lives") );
PORT_DIPSETTING( 0x80, "1" ); PORT_DIPSETTING( 0xc0, "2" ); PORT_DIPSETTING( 0x00, "3" ); PORT_DIPSETTING( 0x40, "5" );
PORT_START(); /* DSW2 */
PORT_BIT( 0x03, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_DIPNAME( 0x04, 0x00, DEF_STR( "Cabinet") );
PORT_DIPSETTING( 0x00, DEF_STR( "Upright") );
PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") );
PORT_SERVICE( 0x08, IP_ACTIVE_HIGH );
PORT_BIT( 0xf0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
/* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */
PORT_BIT( 0x01, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY );
PORT_BIT( 0x04, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY );
PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1, 1 );
PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS );
PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT_IMPULSE( 0x01, IP_ACTIVE_HIGH, IPT_COIN1, 1 );/* Coin 2 is not working */
PORT_BIT_IMPULSE( 0x02, IP_ACTIVE_HIGH, IPT_COIN2, 1 );
PORT_BIT( 0x0c, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_START1, 1 );
PORT_BIT_IMPULSE( 0x20, IP_ACTIVE_HIGH, IPT_START2, 1 );
PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY | IPF_COCKTAIL );
PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY | IPF_COCKTAIL );
PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, 1 );
PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS );
INPUT_PORTS_END(); }
| IMPLEMENTATION | true |
PORT_DIPSETTING( 0x02, DEF_STR( "1C_3C") );
PORT_DIPSETTING( 0x03, DEF_STR( "1C_6C") );
/* TODO: bonus scores are different for 5 lives */
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "20k 70k and every 70k" ); PORT_DIPSETTING( 0x30, "20k 80k and every 80k" ); PORT_DIPSETTING( 0x08, "20k 60k" ); PORT_DIPSETTING( 0x00, "20k 70k" ); PORT_DIPSETTING( 0x10, "20k 80k" ); PORT_DIPSETTING( 0x18, "30k 100k" ); PORT_DIPSETTING( 0x20, "20k" ); PORT_DIPSETTING( 0x38, "None" );/* those are the bonus with 5 lives | PORT_START(); /* DSW1 */
PORT_DIPNAME( 0x07, 0x00, DEF_STR( "Coin_A") );
PORT_DIPSETTING( 0x06, DEF_STR( "3C_1C") );
PORT_DIPSETTING( 0x04, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x07, DEF_STR( "3C_2C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x05, DEF_STR( "2C_3C") );
PORT_DIPSETTING( 0x01, DEF_STR( "1C_2C") );
PORT_DIPSETTING( 0x02, DEF_STR( "1C_3C") );
PORT_DIPSETTING( 0x03, DEF_STR( "1C_6C") );
/* TODO: bonus scores are different for 5 lives */
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "20k 70k and every 70k" ); PORT_DIPSETTING( 0x30, "20k 80k and every 80k" ); PORT_DIPSETTING( 0x08, "20k 60k" ); PORT_DIPSETTING( 0x00, "20k 70k" ); PORT_DIPSETTING( 0x10, "20k 80k" ); PORT_DIPSETTING( 0x18, "30k 100k" ); PORT_DIPSETTING( 0x20, "20k" ); PORT_DIPSETTING( 0x38, "None" );/* those are the bonus with 5 lives
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "30k 100k and every 100k" ); PORT_DIPSETTING( 0x30, "40k 120k and every 120k" ); PORT_DIPSETTING( 0x00, "30k 80k" ); PORT_DIPSETTING( 0x08, "30k 100k" ); PORT_DIPSETTING( 0x10, "30k 120k" ); PORT_DIPSETTING( 0x18, "30k" ); PORT_DIPSETTING( 0x20, "40k" ); PORT_DIPSETTING( 0x38, "None" );*/
PORT_DIPNAME( 0xc0, 0x00, DEF_STR( "Lives") );
PORT_DIPSETTING( 0x80, "1" ); PORT_DIPSETTING( 0xc0, "2" ); PORT_DIPSETTING( 0x00, "3" ); PORT_DIPSETTING( 0x40, "5" );
PORT_START(); /* DSW2 */
PORT_BIT( 0x03, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_DIPNAME( 0x04, 0x00, DEF_STR( "Cabinet") );
PORT_DIPSETTING( 0x00, DEF_STR( "Upright") );
PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") ); | PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x08, DEF_STR( "1C_5C"));
PORT_DIPSETTING( 0x10, DEF_STR( "1C_7C") );
PORT_DIPNAME( 0x20, 0x00, DEF_STR( "Demo_Sounds") );
PORT_DIPSETTING( 0x20, DEF_STR( "Off") );
PORT_DIPSETTING( 0x00, DEF_STR( "On") );
PORT_BITX( 0x40, 0x00, IPT_DIPSWITCH_NAME | IPF_CHEAT, "Rack Test", KEYCODE_F1, IP_JOY_NONE ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x40, DEF_STR( "On") );
PORT_DIPNAME( 0x80, 0x00, "Freeze" ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x80, DEF_STR( "On") );
PORT_START(); /* DSW1 */
PORT_DIPNAME( 0x07, 0x00, DEF_STR( "Coin_A") );
PORT_DIPSETTING( 0x06, DEF_STR( "3C_1C") );
PORT_DIPSETTING( 0x04, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x07, DEF_STR( "3C_2C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x05, DEF_STR( "2C_3C") );
PORT_DIPSETTING( 0x01, DEF_STR( "1C_2C") );
PORT_DIPSETTING( 0x02, DEF_STR( "1C_3C") );
PORT_DIPSETTING( 0x03, DEF_STR( "1C_6C") );
/* TODO: bonus scores are different for 5 lives */
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "20k 70k and every 70k" ); PORT_DIPSETTING( 0x30, "20k 80k and every 80k" ); PORT_DIPSETTING( 0x08, "20k 60k" ); PORT_DIPSETTING( 0x00, "20k 70k" ); PORT_DIPSETTING( 0x10, "20k 80k" ); PORT_DIPSETTING( 0x18, "30k 100k" ); PORT_DIPSETTING( 0x20, "20k" ); PORT_DIPSETTING( 0x38, "None" );/* those are the bonus with 5 lives
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "30k 100k and every 100k" ); PORT_DIPSETTING( 0x30, "40k 120k and every 120k" ); PORT_DIPSETTING( 0x00, "30k 80k" ); PORT_DIPSETTING( 0x08, "30k 100k" ); PORT_DIPSETTING( 0x10, "30k 120k" ); PORT_DIPSETTING( 0x18, "30k" ); PORT_DIPSETTING( 0x20, "40k" ); PORT_DIPSETTING( 0x38, "None" );*/
PORT_DIPNAME( 0xc0, 0x00, DEF_STR( "Lives") );
PORT_DIPSETTING( 0x80, "1" ); PORT_DIPSETTING( 0xc0, "2" ); PORT_DIPSETTING( 0x00, "3" ); PORT_DIPSETTING( 0x40, "5" );
PORT_START(); /* DSW2 */
PORT_BIT( 0x03, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_DIPNAME( 0x04, 0x00, DEF_STR( "Cabinet") );
PORT_DIPSETTING( 0x00, DEF_STR( "Upright") );
PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") );
PORT_SERVICE( 0x08, IP_ACTIVE_HIGH ); PORT_BIT( 0xf0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
/* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */
PORT_BIT( 0x01, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY ); PORT_BIT( 0x04, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT_IMPULSE( 0x01, IP_ACTIVE_HIGH, IPT_COIN1, 1 );/* Coin 2 is not working */
PORT_BIT_IMPULSE( 0x02, IP_ACTIVE_HIGH, IPT_COIN2, 1 ); PORT_BIT( 0x0c, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_START1, 1 ); PORT_BIT_IMPULSE( 0x20, IP_ACTIVE_HIGH, IPT_START2, 1 ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS ); |
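The row above carries the SATD comment "TODO: bonus scores are different for 5 lives", and the code keeps the 5-lives bonus table commented out next to the 3-lives one. One plausible shape for resolving it, sketched under the assumption that the Lives bits (mask 0xc0, with 0x40 selecting 5 lives) should pick the table; this is an illustration, not the project's actual fix:

public class BonusTables {
    static final String[][] THREE_LIVES = {
        {"0x28", "20k 70k and every 70k"}, {"0x30", "20k 80k and every 80k"},
        {"0x08", "20k 60k"}, {"0x00", "20k 70k"}, {"0x10", "20k 80k"},
        {"0x18", "30k 100k"}, {"0x20", "20k"}, {"0x38", "None"},
    };
    static final String[][] FIVE_LIVES = {
        {"0x28", "30k 100k and every 100k"}, {"0x30", "40k 120k and every 120k"},
        {"0x00", "30k 80k"}, {"0x08", "30k 100k"}, {"0x10", "30k 120k"},
        {"0x18", "30k"}, {"0x20", "40k"}, {"0x38", "None"},
    };

    // Bits 0xc0 of DSW1 select the lives count; 0x40 is the 5-lives position.
    static String[][] bonusTableFor(int dsw1) {
        return (dsw1 & 0xc0) == 0x40 ? FIVE_LIVES : THREE_LIVES;
    }

    public static void main(String[] args) {
        System.out.println(bonusTableFor(0x40)[0][1]); // 30k 100k and every 100k
    }
}

Both tables are copied verbatim from the blocks above; only the selection logic is invented.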
1,238 | 4 | /* those are the bonus with 5 lives
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "30k 100k and every 100k" );
PORT_DIPSETTING( 0x30, "40k 120k and every 120k" );
PORT_DIPSETTING( 0x00, "30k 80k" );
PORT_DIPSETTING( 0x08, "30k 100k" );
PORT_DIPSETTING( 0x10, "30k 120k" );
PORT_DIPSETTING( 0x18, "30k" );
PORT_DIPSETTING( 0x20, "40k" );
PORT_DIPSETTING( 0x38, "None" ); */ |
public void handler() {
PORT_START(); /* DSW0 */
/* According to the manual, 0x04, 0x08 and 0x10 should always be off,
but... */
PORT_DIPNAME( 0x07, 0x00, "Rank" ); PORT_DIPSETTING( 0x00, "A" ); PORT_DIPSETTING( 0x01, "B" ); PORT_DIPSETTING( 0x02, "C" ); PORT_DIPSETTING( 0x03, "D" ); PORT_DIPSETTING( 0x04, "E" ); PORT_DIPSETTING( 0x05, "F" ); PORT_DIPSETTING( 0x06, "G" ); PORT_DIPSETTING( 0x07, "H" ); PORT_DIPNAME( 0x18, 0x00, DEF_STR( "Coin_B") );
PORT_DIPSETTING( 0x18, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x08, DEF_STR( "1C_5C"));
PORT_DIPSETTING( 0x10, DEF_STR( "1C_7C") );
PORT_DIPNAME( 0x20, 0x00, DEF_STR( "Demo_Sounds") );
PORT_DIPSETTING( 0x20, DEF_STR( "Off") );
PORT_DIPSETTING( 0x00, DEF_STR( "On") );
PORT_BITX( 0x40, 0x00, IPT_DIPSWITCH_NAME | IPF_CHEAT, "Rack Test", KEYCODE_F1, IP_JOY_NONE ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x40, DEF_STR( "On") );
PORT_DIPNAME( 0x80, 0x00, "Freeze" ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x80, DEF_STR( "On") );
PORT_START(); /* DSW1 */
PORT_DIPNAME( 0x07, 0x00, DEF_STR( "Coin_A") );
PORT_DIPSETTING( 0x06, DEF_STR( "3C_1C") );
PORT_DIPSETTING( 0x04, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x07, DEF_STR( "3C_2C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x05, DEF_STR( "2C_3C") );
PORT_DIPSETTING( 0x01, DEF_STR( "1C_2C") );
PORT_DIPSETTING( 0x02, DEF_STR( "1C_3C") );
PORT_DIPSETTING( 0x03, DEF_STR( "1C_6C") );
/* TODO: bonus scores are different for 5 lives */
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "20k 70k and every 70k" ); PORT_DIPSETTING( 0x30, "20k 80k and every 80k" ); PORT_DIPSETTING( 0x08, "20k 60k" ); PORT_DIPSETTING( 0x00, "20k 70k" ); PORT_DIPSETTING( 0x10, "20k 80k" ); PORT_DIPSETTING( 0x18, "30k 100k" ); PORT_DIPSETTING( 0x20, "20k" ); PORT_DIPSETTING( 0x38, "None" );/* those are the bonus with 5 lives
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "30k 100k and every 100k" ); PORT_DIPSETTING( 0x30, "40k 120k and every 120k" ); PORT_DIPSETTING( 0x00, "30k 80k" ); PORT_DIPSETTING( 0x08, "30k 100k" ); PORT_DIPSETTING( 0x10, "30k 120k" ); PORT_DIPSETTING( 0x18, "30k" ); PORT_DIPSETTING( 0x20, "40k" ); PORT_DIPSETTING( 0x38, "None" );*/
PORT_DIPNAME( 0xc0, 0x00, DEF_STR( "Lives") );
PORT_DIPSETTING( 0x80, "1" ); PORT_DIPSETTING( 0xc0, "2" ); PORT_DIPSETTING( 0x00, "3" ); PORT_DIPSETTING( 0x40, "5" );
PORT_START(); /* DSW2 */
PORT_BIT( 0x03, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_DIPNAME( 0x04, 0x00, DEF_STR( "Cabinet") );
PORT_DIPSETTING( 0x00, DEF_STR( "Upright") );
PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") );
PORT_SERVICE( 0x08, IP_ACTIVE_HIGH ); PORT_BIT( 0xf0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
/* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */
PORT_BIT( 0x01, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY ); PORT_BIT( 0x04, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT_IMPULSE( 0x01, IP_ACTIVE_HIGH, IPT_COIN1, 1 );/* Coin 2 is not working */
PORT_BIT_IMPULSE( 0x02, IP_ACTIVE_HIGH, IPT_COIN2, 1 ); PORT_BIT( 0x0c, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_START1, 1 ); PORT_BIT_IMPULSE( 0x20, IP_ACTIVE_HIGH, IPT_START2, 1 ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS );
INPUT_PORTS_END(); }
| NONSATD | true |
/* TODO: bonus scores are different for 5 lives */
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "20k 70k and every 70k" ); PORT_DIPSETTING( 0x30, "20k 80k and every 80k" ); PORT_DIPSETTING( 0x08, "20k 60k" ); PORT_DIPSETTING( 0x00, "20k 70k" ); PORT_DIPSETTING( 0x10, "20k 80k" ); PORT_DIPSETTING( 0x18, "30k 100k" ); PORT_DIPSETTING( 0x20, "20k" ); PORT_DIPSETTING( 0x38, "None" );/* those are the bonus with 5 lives
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "30k 100k and every 100k" ); PORT_DIPSETTING( 0x30, "40k 120k and every 120k" ); PORT_DIPSETTING( 0x00, "30k 80k" ); PORT_DIPSETTING( 0x08, "30k 100k" ); PORT_DIPSETTING( 0x10, "30k 120k" ); PORT_DIPSETTING( 0x18, "30k" ); PORT_DIPSETTING( 0x20, "40k" ); PORT_DIPSETTING( 0x38, "None" );*/
PORT_DIPNAME( 0xc0, 0x00, DEF_STR( "Lives") );
PORT_DIPSETTING( 0x80, "1" ); PORT_DIPSETTING( 0xc0, "2" ); PORT_DIPSETTING( 0x00, "3" ); PORT_DIPSETTING( 0x40, "5" ); | PORT_DIPSETTING( 0x06, DEF_STR( "3C_1C") );
PORT_DIPSETTING( 0x04, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x07, DEF_STR( "3C_2C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x05, DEF_STR( "2C_3C") );
PORT_DIPSETTING( 0x01, DEF_STR( "1C_2C") );
PORT_DIPSETTING( 0x02, DEF_STR( "1C_3C") );
PORT_DIPSETTING( 0x03, DEF_STR( "1C_6C") );
/* TODO: bonus scores are different for 5 lives */
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "20k 70k and every 70k" ); PORT_DIPSETTING( 0x30, "20k 80k and every 80k" ); PORT_DIPSETTING( 0x08, "20k 60k" ); PORT_DIPSETTING( 0x00, "20k 70k" ); PORT_DIPSETTING( 0x10, "20k 80k" ); PORT_DIPSETTING( 0x18, "30k 100k" ); PORT_DIPSETTING( 0x20, "20k" ); PORT_DIPSETTING( 0x38, "None" );/* those are the bonus with 5 lives
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "30k 100k and every 100k" ); PORT_DIPSETTING( 0x30, "40k 120k and every 120k" ); PORT_DIPSETTING( 0x00, "30k 80k" ); PORT_DIPSETTING( 0x08, "30k 100k" ); PORT_DIPSETTING( 0x10, "30k 120k" ); PORT_DIPSETTING( 0x18, "30k" ); PORT_DIPSETTING( 0x20, "40k" ); PORT_DIPSETTING( 0x38, "None" );*/
PORT_DIPNAME( 0xc0, 0x00, DEF_STR( "Lives") );
PORT_DIPSETTING( 0x80, "1" ); PORT_DIPSETTING( 0xc0, "2" ); PORT_DIPSETTING( 0x00, "3" ); PORT_DIPSETTING( 0x40, "5" );
PORT_START(); /* DSW2 */
PORT_BIT( 0x03, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_DIPNAME( 0x04, 0x00, DEF_STR( "Cabinet") );
PORT_DIPSETTING( 0x00, DEF_STR( "Upright") );
PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") );
PORT_SERVICE( 0x08, IP_ACTIVE_HIGH ); PORT_BIT( 0xf0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
/* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */ | PORT_DIPSETTING( 0x10, DEF_STR( "1C_7C") );
PORT_DIPNAME( 0x20, 0x00, DEF_STR( "Demo_Sounds") );
PORT_DIPSETTING( 0x20, DEF_STR( "Off") );
PORT_DIPSETTING( 0x00, DEF_STR( "On") );
PORT_BITX( 0x40, 0x00, IPT_DIPSWITCH_NAME | IPF_CHEAT, "Rack Test", KEYCODE_F1, IP_JOY_NONE ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x40, DEF_STR( "On") );
PORT_DIPNAME( 0x80, 0x00, "Freeze" ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x80, DEF_STR( "On") );
PORT_START(); /* DSW1 */
PORT_DIPNAME( 0x07, 0x00, DEF_STR( "Coin_A") );
PORT_DIPSETTING( 0x06, DEF_STR( "3C_1C") );
PORT_DIPSETTING( 0x04, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x07, DEF_STR( "3C_2C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x05, DEF_STR( "2C_3C") );
PORT_DIPSETTING( 0x01, DEF_STR( "1C_2C") );
PORT_DIPSETTING( 0x02, DEF_STR( "1C_3C") );
PORT_DIPSETTING( 0x03, DEF_STR( "1C_6C") );
/* TODO: bonus scores are different for 5 lives */
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "20k 70k and every 70k" ); PORT_DIPSETTING( 0x30, "20k 80k and every 80k" ); PORT_DIPSETTING( 0x08, "20k 60k" ); PORT_DIPSETTING( 0x00, "20k 70k" ); PORT_DIPSETTING( 0x10, "20k 80k" ); PORT_DIPSETTING( 0x18, "30k 100k" ); PORT_DIPSETTING( 0x20, "20k" ); PORT_DIPSETTING( 0x38, "None" );/* those are the bonus with 5 lives
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "30k 100k and every 100k" ); PORT_DIPSETTING( 0x30, "40k 120k and every 120k" ); PORT_DIPSETTING( 0x00, "30k 80k" ); PORT_DIPSETTING( 0x08, "30k 100k" ); PORT_DIPSETTING( 0x10, "30k 120k" ); PORT_DIPSETTING( 0x18, "30k" ); PORT_DIPSETTING( 0x20, "40k" ); PORT_DIPSETTING( 0x38, "None" );*/
PORT_DIPNAME( 0xc0, 0x00, DEF_STR( "Lives") );
PORT_DIPSETTING( 0x80, "1" ); PORT_DIPSETTING( 0xc0, "2" ); PORT_DIPSETTING( 0x00, "3" ); PORT_DIPSETTING( 0x40, "5" );
PORT_START(); /* DSW2 */
PORT_BIT( 0x03, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_DIPNAME( 0x04, 0x00, DEF_STR( "Cabinet") );
PORT_DIPSETTING( 0x00, DEF_STR( "Upright") );
PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") );
PORT_SERVICE( 0x08, IP_ACTIVE_HIGH ); PORT_BIT( 0xf0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
/* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */
PORT_BIT( 0x01, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY ); PORT_BIT( 0x04, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT_IMPULSE( 0x01, IP_ACTIVE_HIGH, IPT_COIN1, 1 );/* Coin 2 is not working */
PORT_BIT_IMPULSE( 0x02, IP_ACTIVE_HIGH, IPT_COIN2, 1 ); PORT_BIT( 0x0c, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_START1, 1 ); PORT_BIT_IMPULSE( 0x20, IP_ACTIVE_HIGH, IPT_START2, 1 ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS );
INPUT_PORTS_END(); } |
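Several rows above declare bits as IP_ACTIVE_HIGH. The semantics are simple: a port bit counts as asserted when the sampled line level equals its declared active level. A small self-contained sketch — constant and method names here are illustrative, not arcadeflex's:

public class ActiveLevel {
    static final int ACTIVE_HIGH = 1;
    static final int ACTIVE_LOW  = 0;

    // Returns 1 ("pressed"/"on") when the raw line level matches the active level.
    static int normalize(int rawLevel, int activeLevel) {
        return rawLevel == activeLevel ? 1 : 0;
    }

    public static void main(String[] args) {
        // The DSW2 service bit (0x08) above is declared IP_ACTIVE_HIGH:
        int raw = 1; // switch closed, line pulled high
        System.out.println(normalize(raw, ACTIVE_HIGH)); // 1 => service mode on
    }
}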
1,238 | 5 | /* DSW2 */ |
public void handler() {
PORT_START(); /* DSW0 */
/* According to the manual, 0x04, 0x08 and 0x10 should always be off,
but... */
PORT_DIPNAME( 0x07, 0x00, "Rank" ); PORT_DIPSETTING( 0x00, "A" ); PORT_DIPSETTING( 0x01, "B" ); PORT_DIPSETTING( 0x02, "C" ); PORT_DIPSETTING( 0x03, "D" ); PORT_DIPSETTING( 0x04, "E" ); PORT_DIPSETTING( 0x05, "F" ); PORT_DIPSETTING( 0x06, "G" ); PORT_DIPSETTING( 0x07, "H" ); PORT_DIPNAME( 0x18, 0x00, DEF_STR( "Coin_B") );
PORT_DIPSETTING( 0x18, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x08, DEF_STR( "1C_5C"));
PORT_DIPSETTING( 0x10, DEF_STR( "1C_7C") );
PORT_DIPNAME( 0x20, 0x00, DEF_STR( "Demo_Sounds") );
PORT_DIPSETTING( 0x20, DEF_STR( "Off") );
PORT_DIPSETTING( 0x00, DEF_STR( "On") );
PORT_BITX( 0x40, 0x00, IPT_DIPSWITCH_NAME | IPF_CHEAT, "Rack Test", KEYCODE_F1, IP_JOY_NONE ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x40, DEF_STR( "On") );
PORT_DIPNAME( 0x80, 0x00, "Freeze" ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x80, DEF_STR( "On") );
PORT_START(); /* DSW1 */
PORT_DIPNAME( 0x07, 0x00, DEF_STR( "Coin_A") );
PORT_DIPSETTING( 0x06, DEF_STR( "3C_1C") );
PORT_DIPSETTING( 0x04, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x07, DEF_STR( "3C_2C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x05, DEF_STR( "2C_3C") );
PORT_DIPSETTING( 0x01, DEF_STR( "1C_2C") );
PORT_DIPSETTING( 0x02, DEF_STR( "1C_3C") );
PORT_DIPSETTING( 0x03, DEF_STR( "1C_6C") );
/* TODO: bonus scores are different for 5 lives */
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "20k 70k and every 70k" ); PORT_DIPSETTING( 0x30, "20k 80k and every 80k" ); PORT_DIPSETTING( 0x08, "20k 60k" ); PORT_DIPSETTING( 0x00, "20k 70k" ); PORT_DIPSETTING( 0x10, "20k 80k" ); PORT_DIPSETTING( 0x18, "30k 100k" ); PORT_DIPSETTING( 0x20, "20k" ); PORT_DIPSETTING( 0x38, "None" );/* those are the bonus with 5 lives
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "30k 100k and every 100k" ); PORT_DIPSETTING( 0x30, "40k 120k and every 120k" ); PORT_DIPSETTING( 0x00, "30k 80k" ); PORT_DIPSETTING( 0x08, "30k 100k" ); PORT_DIPSETTING( 0x10, "30k 120k" ); PORT_DIPSETTING( 0x18, "30k" ); PORT_DIPSETTING( 0x20, "40k" ); PORT_DIPSETTING( 0x38, "None" );*/
PORT_DIPNAME( 0xc0, 0x00, DEF_STR( "Lives") );
PORT_DIPSETTING( 0x80, "1" ); PORT_DIPSETTING( 0xc0, "2" ); PORT_DIPSETTING( 0x00, "3" ); PORT_DIPSETTING( 0x40, "5" );
PORT_START(); /* DSW2 */
PORT_BIT( 0x03, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_DIPNAME( 0x04, 0x00, DEF_STR( "Cabinet") );
PORT_DIPSETTING( 0x00, DEF_STR( "Upright") );
PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") );
PORT_SERVICE( 0x08, IP_ACTIVE_HIGH ); PORT_BIT( 0xf0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
/* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */
PORT_BIT( 0x01, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY ); PORT_BIT( 0x04, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT_IMPULSE( 0x01, IP_ACTIVE_HIGH, IPT_COIN1, 1 );/* Coin 2 is not working */
PORT_BIT_IMPULSE( 0x02, IP_ACTIVE_HIGH, IPT_COIN2, 1 ); PORT_BIT( 0x0c, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_START1, 1 ); PORT_BIT_IMPULSE( 0x20, IP_ACTIVE_HIGH, IPT_START2, 1 ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS );
INPUT_PORTS_END(); }
| NONSATD | true |
PORT_DIPNAME( 0xc0, 0x00, DEF_STR( "Lives") );
PORT_DIPSETTING( 0x80, "1" ); PORT_DIPSETTING( 0xc0, "2" ); PORT_DIPSETTING( 0x00, "3" ); PORT_DIPSETTING( 0x40, "5" );
PORT_START(); /* DSW2 */
PORT_BIT( 0x03, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_DIPNAME( 0x04, 0x00, DEF_STR( "Cabinet") );
PORT_DIPSETTING( 0x00, DEF_STR( "Upright") ); | PORT_DIPSETTING( 0x01, DEF_STR( "1C_2C") );
PORT_DIPSETTING( 0x02, DEF_STR( "1C_3C") );
PORT_DIPSETTING( 0x03, DEF_STR( "1C_6C") );
/* TODO: bonus scores are different for 5 lives */
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "20k 70k and every 70k" ); PORT_DIPSETTING( 0x30, "20k 80k and every 80k" ); PORT_DIPSETTING( 0x08, "20k 60k" ); PORT_DIPSETTING( 0x00, "20k 70k" ); PORT_DIPSETTING( 0x10, "20k 80k" ); PORT_DIPSETTING( 0x18, "30k 100k" ); PORT_DIPSETTING( 0x20, "20k" ); PORT_DIPSETTING( 0x38, "None" );/* those are the bonus with 5 lives
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "30k 100k and every 100k" ); PORT_DIPSETTING( 0x30, "40k 120k and every 120k" ); PORT_DIPSETTING( 0x00, "30k 80k" ); PORT_DIPSETTING( 0x08, "30k 100k" ); PORT_DIPSETTING( 0x10, "30k 120k" ); PORT_DIPSETTING( 0x18, "30k" ); PORT_DIPSETTING( 0x20, "40k" ); PORT_DIPSETTING( 0x38, "None" );*/
PORT_DIPNAME( 0xc0, 0x00, DEF_STR( "Lives") );
PORT_DIPSETTING( 0x80, "1" ); PORT_DIPSETTING( 0xc0, "2" ); PORT_DIPSETTING( 0x00, "3" ); PORT_DIPSETTING( 0x40, "5" );
PORT_START(); /* DSW2 */
PORT_BIT( 0x03, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_DIPNAME( 0x04, 0x00, DEF_STR( "Cabinet") );
PORT_DIPSETTING( 0x00, DEF_STR( "Upright") );
PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") );
PORT_SERVICE( 0x08, IP_ACTIVE_HIGH ); PORT_BIT( 0xf0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
/* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */
PORT_BIT( 0x01, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY ); PORT_BIT( 0x04, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT_IMPULSE( 0x01, IP_ACTIVE_HIGH, IPT_COIN1, 1 );/* Coin 2 is not working */ | PORT_DIPSETTING( 0x40, DEF_STR( "On") );
PORT_DIPNAME( 0x80, 0x00, "Freeze" ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x80, DEF_STR( "On") );
PORT_START(); /* DSW1 */
PORT_DIPNAME( 0x07, 0x00, DEF_STR( "Coin_A") );
PORT_DIPSETTING( 0x06, DEF_STR( "3C_1C") );
PORT_DIPSETTING( 0x04, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x07, DEF_STR( "3C_2C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x05, DEF_STR( "2C_3C") );
PORT_DIPSETTING( 0x01, DEF_STR( "1C_2C") );
PORT_DIPSETTING( 0x02, DEF_STR( "1C_3C") );
PORT_DIPSETTING( 0x03, DEF_STR( "1C_6C") );
/* TODO: bonus scores are different for 5 lives */
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "20k 70k and every 70k" ); PORT_DIPSETTING( 0x30, "20k 80k and every 80k" ); PORT_DIPSETTING( 0x08, "20k 60k" ); PORT_DIPSETTING( 0x00, "20k 70k" ); PORT_DIPSETTING( 0x10, "20k 80k" ); PORT_DIPSETTING( 0x18, "30k 100k" ); PORT_DIPSETTING( 0x20, "20k" ); PORT_DIPSETTING( 0x38, "None" );/* those are the bonus with 5 lives
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "30k 100k and every 100k" ); PORT_DIPSETTING( 0x30, "40k 120k and every 120k" ); PORT_DIPSETTING( 0x00, "30k 80k" ); PORT_DIPSETTING( 0x08, "30k 100k" ); PORT_DIPSETTING( 0x10, "30k 120k" ); PORT_DIPSETTING( 0x18, "30k" ); PORT_DIPSETTING( 0x20, "40k" ); PORT_DIPSETTING( 0x38, "None" );*/
PORT_DIPNAME( 0xc0, 0x00, DEF_STR( "Lives") );
PORT_DIPSETTING( 0x80, "1" ); PORT_DIPSETTING( 0xc0, "2" ); PORT_DIPSETTING( 0x00, "3" ); PORT_DIPSETTING( 0x40, "5" );
PORT_START(); /* DSW2 */
PORT_BIT( 0x03, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_DIPNAME( 0x04, 0x00, DEF_STR( "Cabinet") );
PORT_DIPSETTING( 0x00, DEF_STR( "Upright") );
PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") );
PORT_SERVICE( 0x08, IP_ACTIVE_HIGH ); PORT_BIT( 0xf0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
/* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */
PORT_BIT( 0x01, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY ); PORT_BIT( 0x04, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT_IMPULSE( 0x01, IP_ACTIVE_HIGH, IPT_COIN1, 1 );/* Coin 2 is not working */
PORT_BIT_IMPULSE( 0x02, IP_ACTIVE_HIGH, IPT_COIN2, 1 ); PORT_BIT( 0x0c, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_START1, 1 ); PORT_BIT_IMPULSE( 0x20, IP_ACTIVE_HIGH, IPT_START2, 1 ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS );
INPUT_PORTS_END(); } |
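The /* FAKE */ ports in these rows exist because, as the in-code comments state, the player inputs go through a custom I/O chip rather than the CPU's memory map, so the emulator keeps them in synthetic ports that a read handler repacks. A hedged sketch of that repacking idea — the real mappy_customio_data_r() is not reproduced here, and readFakePort() is a stand-in for the framework's input polling:

public class CustomIoSketch {
    // Stand-ins for the FAKE ports above: joystick, coins/starts, cocktail side.
    static int[] fakePorts = {0x02 /* right held */, 0x10 /* start1 */, 0x00};

    static int readFakePort(int index) { return fakePorts[index]; }

    // A custom-I/O read might hand the CPU one 4-bit slice per offset.
    static int customIoRead(int offset) {
        int port = readFakePort(offset >> 1);
        return (offset & 1) == 0 ? (port & 0x0f) : ((port >> 4) & 0x0f);
    }

    public static void main(String[] args) {
        System.out.printf("offset 0 -> %x%n", customIoRead(0)); // low nibble: 2
    }
}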
1,238 | 6 | /* FAKE */ |
public void handler() {
PORT_START(); /* DSW0 */
/* According to the manual, 0x04, 0x08 and 0x10 should always be off,
but... */
PORT_DIPNAME( 0x07, 0x00, "Rank" ); PORT_DIPSETTING( 0x00, "A" ); PORT_DIPSETTING( 0x01, "B" ); PORT_DIPSETTING( 0x02, "C" ); PORT_DIPSETTING( 0x03, "D" ); PORT_DIPSETTING( 0x04, "E" ); PORT_DIPSETTING( 0x05, "F" ); PORT_DIPSETTING( 0x06, "G" ); PORT_DIPSETTING( 0x07, "H" ); PORT_DIPNAME( 0x18, 0x00, DEF_STR( "Coin_B") );
PORT_DIPSETTING( 0x18, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x08, DEF_STR( "1C_5C"));
PORT_DIPSETTING( 0x10, DEF_STR( "1C_7C") );
PORT_DIPNAME( 0x20, 0x00, DEF_STR( "Demo_Sounds") );
PORT_DIPSETTING( 0x20, DEF_STR( "Off") );
PORT_DIPSETTING( 0x00, DEF_STR( "On") );
PORT_BITX( 0x40, 0x00, IPT_DIPSWITCH_NAME | IPF_CHEAT, "Rack Test", KEYCODE_F1, IP_JOY_NONE ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x40, DEF_STR( "On") );
PORT_DIPNAME( 0x80, 0x00, "Freeze" ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x80, DEF_STR( "On") );
PORT_START(); /* DSW1 */
PORT_DIPNAME( 0x07, 0x00, DEF_STR( "Coin_A") );
PORT_DIPSETTING( 0x06, DEF_STR( "3C_1C") );
PORT_DIPSETTING( 0x04, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x07, DEF_STR( "3C_2C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x05, DEF_STR( "2C_3C") );
PORT_DIPSETTING( 0x01, DEF_STR( "1C_2C") );
PORT_DIPSETTING( 0x02, DEF_STR( "1C_3C") );
PORT_DIPSETTING( 0x03, DEF_STR( "1C_6C") );
/* TODO: bonus scores are different for 5 lives */
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "20k 70k and every 70k" ); PORT_DIPSETTING( 0x30, "20k 80k and every 80k" ); PORT_DIPSETTING( 0x08, "20k 60k" ); PORT_DIPSETTING( 0x00, "20k 70k" ); PORT_DIPSETTING( 0x10, "20k 80k" ); PORT_DIPSETTING( 0x18, "30k 100k" ); PORT_DIPSETTING( 0x20, "20k" ); PORT_DIPSETTING( 0x38, "None" );/* those are the bonus with 5 lives
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "30k 100k and every 100k" ); PORT_DIPSETTING( 0x30, "40k 120k and every 120k" ); PORT_DIPSETTING( 0x00, "30k 80k" ); PORT_DIPSETTING( 0x08, "30k 100k" ); PORT_DIPSETTING( 0x10, "30k 120k" ); PORT_DIPSETTING( 0x18, "30k" ); PORT_DIPSETTING( 0x20, "40k" ); PORT_DIPSETTING( 0x38, "None" );*/
PORT_DIPNAME( 0xc0, 0x00, DEF_STR( "Lives") );
PORT_DIPSETTING( 0x80, "1" ); PORT_DIPSETTING( 0xc0, "2" ); PORT_DIPSETTING( 0x00, "3" ); PORT_DIPSETTING( 0x40, "5" );
PORT_START(); /* DSW2 */
PORT_BIT( 0x03, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_DIPNAME( 0x04, 0x00, DEF_STR( "Cabinet") );
PORT_DIPSETTING( 0x00, DEF_STR( "Upright") );
PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") );
PORT_SERVICE( 0x08, IP_ACTIVE_HIGH ); PORT_BIT( 0xf0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
/* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */
PORT_BIT( 0x01, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY ); PORT_BIT( 0x04, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT_IMPULSE( 0x01, IP_ACTIVE_HIGH, IPT_COIN1, 1 );/* Coin 2 is not working */
PORT_BIT_IMPULSE( 0x02, IP_ACTIVE_HIGH, IPT_COIN2, 1 ); PORT_BIT( 0x0c, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_START1, 1 ); PORT_BIT_IMPULSE( 0x20, IP_ACTIVE_HIGH, IPT_START2, 1 ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS );
INPUT_PORTS_END(); }
| NONSATD | true |
PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") );
PORT_SERVICE( 0x08, IP_ACTIVE_HIGH ); PORT_BIT( 0xf0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
/* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */ | PORT_DIPSETTING( 0x28, "20k 70k and every 70k" ); PORT_DIPSETTING( 0x30, "20k 80k and every 80k" ); PORT_DIPSETTING( 0x08, "20k 60k" ); PORT_DIPSETTING( 0x00, "20k 70k" ); PORT_DIPSETTING( 0x10, "20k 80k" ); PORT_DIPSETTING( 0x18, "30k 100k" ); PORT_DIPSETTING( 0x20, "20k" ); PORT_DIPSETTING( 0x38, "None" );/* those are the bonus with 5 lives
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "30k 100k and every 100k" ); PORT_DIPSETTING( 0x30, "40k 120k and every 120k" ); PORT_DIPSETTING( 0x00, "30k 80k" ); PORT_DIPSETTING( 0x08, "30k 100k" ); PORT_DIPSETTING( 0x10, "30k 120k" ); PORT_DIPSETTING( 0x18, "30k" ); PORT_DIPSETTING( 0x20, "40k" ); PORT_DIPSETTING( 0x38, "None" );*/
PORT_DIPNAME( 0xc0, 0x00, DEF_STR( "Lives") );
PORT_DIPSETTING( 0x80, "1" ); PORT_DIPSETTING( 0xc0, "2" ); PORT_DIPSETTING( 0x00, "3" ); PORT_DIPSETTING( 0x40, "5" );
PORT_START(); /* DSW2 */
PORT_BIT( 0x03, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_DIPNAME( 0x04, 0x00, DEF_STR( "Cabinet") );
PORT_DIPSETTING( 0x00, DEF_STR( "Upright") );
PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") );
PORT_SERVICE( 0x08, IP_ACTIVE_HIGH ); PORT_BIT( 0xf0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
/* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */
PORT_BIT( 0x01, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY ); PORT_BIT( 0x04, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT_IMPULSE( 0x01, IP_ACTIVE_HIGH, IPT_COIN1, 1 );/* Coin 2 is not working */
PORT_BIT_IMPULSE( 0x02, IP_ACTIVE_HIGH, IPT_COIN2, 1 ); PORT_BIT( 0x0c, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_START1, 1 ); PORT_BIT_IMPULSE( 0x20, IP_ACTIVE_HIGH, IPT_START2, 1 ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS );
INPUT_PORTS_END(); } | PORT_DIPSETTING( 0x06, DEF_STR( "3C_1C") );
PORT_DIPSETTING( 0x04, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x07, DEF_STR( "3C_2C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x05, DEF_STR( "2C_3C") );
PORT_DIPSETTING( 0x01, DEF_STR( "1C_2C") );
PORT_DIPSETTING( 0x02, DEF_STR( "1C_3C") );
PORT_DIPSETTING( 0x03, DEF_STR( "1C_6C") );
/* TODO: bonus scores are different for 5 lives */
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "20k 70k and every 70k" ); PORT_DIPSETTING( 0x30, "20k 80k and every 80k" ); PORT_DIPSETTING( 0x08, "20k 60k" ); PORT_DIPSETTING( 0x00, "20k 70k" ); PORT_DIPSETTING( 0x10, "20k 80k" ); PORT_DIPSETTING( 0x18, "30k 100k" ); PORT_DIPSETTING( 0x20, "20k" ); PORT_DIPSETTING( 0x38, "None" );/* those are the bonus with 5 lives
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "30k 100k and every 100k" ); PORT_DIPSETTING( 0x30, "40k 120k and every 120k" ); PORT_DIPSETTING( 0x00, "30k 80k" ); PORT_DIPSETTING( 0x08, "30k 100k" ); PORT_DIPSETTING( 0x10, "30k 120k" ); PORT_DIPSETTING( 0x18, "30k" ); PORT_DIPSETTING( 0x20, "40k" ); PORT_DIPSETTING( 0x38, "None" );*/
PORT_DIPNAME( 0xc0, 0x00, DEF_STR( "Lives") );
PORT_DIPSETTING( 0x80, "1" ); PORT_DIPSETTING( 0xc0, "2" ); PORT_DIPSETTING( 0x00, "3" ); PORT_DIPSETTING( 0x40, "5" );
PORT_START(); /* DSW2 */
PORT_BIT( 0x03, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_DIPNAME( 0x04, 0x00, DEF_STR( "Cabinet") );
PORT_DIPSETTING( 0x00, DEF_STR( "Upright") );
PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") );
PORT_SERVICE( 0x08, IP_ACTIVE_HIGH ); PORT_BIT( 0xf0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
/* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */
PORT_BIT( 0x01, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY ); PORT_BIT( 0x04, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT_IMPULSE( 0x01, IP_ACTIVE_HIGH, IPT_COIN1, 1 );/* Coin 2 is not working */
PORT_BIT_IMPULSE( 0x02, IP_ACTIVE_HIGH, IPT_COIN2, 1 ); PORT_BIT( 0x0c, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_START1, 1 ); PORT_BIT_IMPULSE( 0x20, IP_ACTIVE_HIGH, IPT_START2, 1 ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS );
INPUT_PORTS_END(); } |
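PORT_BIT_IMPULSE( ..., 1 ) appears on the coin and start bits above; an impulse input reads as active for a fixed number of frames after being triggered, then clears itself. A minimal self-contained sketch of that behavior (class and field names are illustrative):

public class ImpulseInput {
    private int framesLeft = 0;
    private final int impulseFrames;

    ImpulseInput(int impulseFrames) { this.impulseFrames = impulseFrames; }

    void press() { framesLeft = impulseFrames; }

    // Called once per emulated frame; returns 1 while the impulse is live.
    int readAndTick() {
        if (framesLeft == 0) return 0;
        framesLeft--;
        return 1;
    }

    public static void main(String[] args) {
        ImpulseInput coin = new ImpulseInput(1); // matches the ", 1 )" above
        coin.press();
        System.out.println(coin.readAndTick()); // 1: seen for exactly one frame
        System.out.println(coin.readAndTick()); // 0: already consumed
    }
}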
1,238 | 7 | /* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */ |
public void handler() {
PORT_START(); /* DSW0 */
/* According to the manual, 0x04, 0x08 and 0x10 should always be off,
but... */
PORT_DIPNAME( 0x07, 0x00, "Rank" ); PORT_DIPSETTING( 0x00, "A" ); PORT_DIPSETTING( 0x01, "B" ); PORT_DIPSETTING( 0x02, "C" ); PORT_DIPSETTING( 0x03, "D" ); PORT_DIPSETTING( 0x04, "E" ); PORT_DIPSETTING( 0x05, "F" ); PORT_DIPSETTING( 0x06, "G" ); PORT_DIPSETTING( 0x07, "H" ); PORT_DIPNAME( 0x18, 0x00, DEF_STR( "Coin_B") );
PORT_DIPSETTING( 0x18, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x08, DEF_STR( "1C_5C"));
PORT_DIPSETTING( 0x10, DEF_STR( "1C_7C") );
PORT_DIPNAME( 0x20, 0x00, DEF_STR( "Demo_Sounds") );
PORT_DIPSETTING( 0x20, DEF_STR( "Off") );
PORT_DIPSETTING( 0x00, DEF_STR( "On") );
PORT_BITX( 0x40, 0x00, IPT_DIPSWITCH_NAME | IPF_CHEAT, "Rack Test", KEYCODE_F1, IP_JOY_NONE ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x40, DEF_STR( "On") );
PORT_DIPNAME( 0x80, 0x00, "Freeze" ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x80, DEF_STR( "On") );
PORT_START(); /* DSW1 */
PORT_DIPNAME( 0x07, 0x00, DEF_STR( "Coin_A") );
PORT_DIPSETTING( 0x06, DEF_STR( "3C_1C") );
PORT_DIPSETTING( 0x04, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x07, DEF_STR( "3C_2C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x05, DEF_STR( "2C_3C") );
PORT_DIPSETTING( 0x01, DEF_STR( "1C_2C") );
PORT_DIPSETTING( 0x02, DEF_STR( "1C_3C") );
PORT_DIPSETTING( 0x03, DEF_STR( "1C_6C") );
/* TODO: bonus scores are different for 5 lives */
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "20k 70k and every 70k" ); PORT_DIPSETTING( 0x30, "20k 80k and every 80k" ); PORT_DIPSETTING( 0x08, "20k 60k" ); PORT_DIPSETTING( 0x00, "20k 70k" ); PORT_DIPSETTING( 0x10, "20k 80k" ); PORT_DIPSETTING( 0x18, "30k 100k" ); PORT_DIPSETTING( 0x20, "20k" ); PORT_DIPSETTING( 0x38, "None" );/* those are the bonus with 5 lives
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "30k 100k and every 100k" ); PORT_DIPSETTING( 0x30, "40k 120k and every 120k" ); PORT_DIPSETTING( 0x00, "30k 80k" ); PORT_DIPSETTING( 0x08, "30k 100k" ); PORT_DIPSETTING( 0x10, "30k 120k" ); PORT_DIPSETTING( 0x18, "30k" ); PORT_DIPSETTING( 0x20, "40k" ); PORT_DIPSETTING( 0x38, "None" );*/
PORT_DIPNAME( 0xc0, 0x00, DEF_STR( "Lives") );
PORT_DIPSETTING( 0x80, "1" ); PORT_DIPSETTING( 0xc0, "2" ); PORT_DIPSETTING( 0x00, "3" ); PORT_DIPSETTING( 0x40, "5" );
PORT_START(); /* DSW2 */
PORT_BIT( 0x03, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_DIPNAME( 0x04, 0x00, DEF_STR( "Cabinet") );
PORT_DIPSETTING( 0x00, DEF_STR( "Upright") );
PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") );
PORT_SERVICE( 0x08, IP_ACTIVE_HIGH ); PORT_BIT( 0xf0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
/* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */
PORT_BIT( 0x01, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY ); PORT_BIT( 0x04, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT_IMPULSE( 0x01, IP_ACTIVE_HIGH, IPT_COIN1, 1 );/* Coin 2 is not working */
PORT_BIT_IMPULSE( 0x02, IP_ACTIVE_HIGH, IPT_COIN2, 1 ); PORT_BIT( 0x0c, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_START1, 1 ); PORT_BIT_IMPULSE( 0x20, IP_ACTIVE_HIGH, IPT_START2, 1 ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS );
INPUT_PORTS_END(); }
| NONSATD | true |
PORT_SERVICE( 0x08, IP_ACTIVE_HIGH );
PORT_BIT( 0xf0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
/* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */
PORT_BIT( 0x01, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY ); PORT_BIT( 0x04, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */ | PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "30k 100k and every 100k" ); PORT_DIPSETTING( 0x30, "40k 120k and every 120k" ); PORT_DIPSETTING( 0x00, "30k 80k" ); PORT_DIPSETTING( 0x08, "30k 100k" ); PORT_DIPSETTING( 0x10, "30k 120k" ); PORT_DIPSETTING( 0x18, "30k" ); PORT_DIPSETTING( 0x20, "40k" ); PORT_DIPSETTING( 0x38, "None" );*/
PORT_DIPNAME( 0xc0, 0x00, DEF_STR( "Lives") );
PORT_DIPSETTING( 0x80, "1" ); PORT_DIPSETTING( 0xc0, "2" ); PORT_DIPSETTING( 0x00, "3" ); PORT_DIPSETTING( 0x40, "5" );
PORT_START(); /* DSW2 */
PORT_BIT( 0x03, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_DIPNAME( 0x04, 0x00, DEF_STR( "Cabinet") );
PORT_DIPSETTING( 0x00, DEF_STR( "Upright") );
PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") );
PORT_SERVICE( 0x08, IP_ACTIVE_HIGH ); PORT_BIT( 0xf0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
/* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */
PORT_BIT( 0x01, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY ); PORT_BIT( 0x04, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT_IMPULSE( 0x01, IP_ACTIVE_HIGH, IPT_COIN1, 1 );/* Coin 2 is not working */
PORT_BIT_IMPULSE( 0x02, IP_ACTIVE_HIGH, IPT_COIN2, 1 ); PORT_BIT( 0x0c, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_START1, 1 ); PORT_BIT_IMPULSE( 0x20, IP_ACTIVE_HIGH, IPT_START2, 1 ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS );
INPUT_PORTS_END(); } | PORT_DIPSETTING( 0x04, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x07, DEF_STR( "3C_2C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x05, DEF_STR( "2C_3C") );
PORT_DIPSETTING( 0x01, DEF_STR( "1C_2C") );
PORT_DIPSETTING( 0x02, DEF_STR( "1C_3C") );
PORT_DIPSETTING( 0x03, DEF_STR( "1C_6C") );
/* TODO: bonus scores are different for 5 lives */
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "20k 70k and every 70k" ); PORT_DIPSETTING( 0x30, "20k 80k and every 80k" ); PORT_DIPSETTING( 0x08, "20k 60k" ); PORT_DIPSETTING( 0x00, "20k 70k" ); PORT_DIPSETTING( 0x10, "20k 80k" ); PORT_DIPSETTING( 0x18, "30k 100k" ); PORT_DIPSETTING( 0x20, "20k" ); PORT_DIPSETTING( 0x38, "None" );/* those are the bonus with 5 lives
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "30k 100k and every 100k" ); PORT_DIPSETTING( 0x30, "40k 120k and every 120k" ); PORT_DIPSETTING( 0x00, "30k 80k" ); PORT_DIPSETTING( 0x08, "30k 100k" ); PORT_DIPSETTING( 0x10, "30k 120k" ); PORT_DIPSETTING( 0x18, "30k" ); PORT_DIPSETTING( 0x20, "40k" ); PORT_DIPSETTING( 0x38, "None" );*/
PORT_DIPNAME( 0xc0, 0x00, DEF_STR( "Lives") );
PORT_DIPSETTING( 0x80, "1" ); PORT_DIPSETTING( 0xc0, "2" ); PORT_DIPSETTING( 0x00, "3" ); PORT_DIPSETTING( 0x40, "5" );
PORT_START(); /* DSW2 */
PORT_BIT( 0x03, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_DIPNAME( 0x04, 0x00, DEF_STR( "Cabinet") );
PORT_DIPSETTING( 0x00, DEF_STR( "Upright") );
PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") );
PORT_SERVICE( 0x08, IP_ACTIVE_HIGH ); PORT_BIT( 0xf0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
/* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */
PORT_BIT( 0x01, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY ); PORT_BIT( 0x04, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT_IMPULSE( 0x01, IP_ACTIVE_HIGH, IPT_COIN1, 1 );/* Coin 2 is not working */
PORT_BIT_IMPULSE( 0x02, IP_ACTIVE_HIGH, IPT_COIN2, 1 ); PORT_BIT( 0x0c, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_START1, 1 ); PORT_BIT_IMPULSE( 0x20, IP_ACTIVE_HIGH, IPT_START2, 1 ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS );
INPUT_PORTS_END(); } |
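The IPF_COCKTAIL-flagged duplicates of the joystick bits mirror player 1's controls for the second player, who sits on the opposite side of a cocktail cabinet. A toy sketch of how an emulator might select between the two port sets — the names and sample values are illustrative only:

public class CocktailSelect {
    static int upright = 0x02;   // player 1 port: right held
    static int cocktail = 0x08;  // mirrored port: left held (same physical lever)

    // Use the mirrored set only when the Cabinet DIP says cocktail and P2 is up.
    static int activeJoystickPort(boolean cocktailCabinet, int playerUp) {
        return (cocktailCabinet && playerUp == 2) ? cocktail : upright;
    }

    public static void main(String[] args) {
        System.out.printf("%02x%n", activeJoystickPort(true, 2));  // 08
        System.out.printf("%02x%n", activeJoystickPort(false, 2)); // 02
    }
}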
1,238 | 8 | /* FAKE */ |
public void handler() {
PORT_START(); /* DSW0 */
/* According to the manual, 0x04, 0x08 and 0x10 should always be off,
but... */
PORT_DIPNAME( 0x07, 0x00, "Rank" ); PORT_DIPSETTING( 0x00, "A" ); PORT_DIPSETTING( 0x01, "B" ); PORT_DIPSETTING( 0x02, "C" ); PORT_DIPSETTING( 0x03, "D" ); PORT_DIPSETTING( 0x04, "E" ); PORT_DIPSETTING( 0x05, "F" ); PORT_DIPSETTING( 0x06, "G" ); PORT_DIPSETTING( 0x07, "H" ); PORT_DIPNAME( 0x18, 0x00, DEF_STR( "Coin_B") );
PORT_DIPSETTING( 0x18, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x08, DEF_STR( "1C_5C"));
PORT_DIPSETTING( 0x10, DEF_STR( "1C_7C") );
PORT_DIPNAME( 0x20, 0x00, DEF_STR( "Demo_Sounds") );
PORT_DIPSETTING( 0x20, DEF_STR( "Off") );
PORT_DIPSETTING( 0x00, DEF_STR( "On") );
PORT_BITX( 0x40, 0x00, IPT_DIPSWITCH_NAME | IPF_CHEAT, "Rack Test", KEYCODE_F1, IP_JOY_NONE ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x40, DEF_STR( "On") );
PORT_DIPNAME( 0x80, 0x00, "Freeze" ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x80, DEF_STR( "On") );
PORT_START(); /* DSW1 */
PORT_DIPNAME( 0x07, 0x00, DEF_STR( "Coin_A") );
PORT_DIPSETTING( 0x06, DEF_STR( "3C_1C") );
PORT_DIPSETTING( 0x04, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x07, DEF_STR( "3C_2C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x05, DEF_STR( "2C_3C") );
PORT_DIPSETTING( 0x01, DEF_STR( "1C_2C") );
PORT_DIPSETTING( 0x02, DEF_STR( "1C_3C") );
PORT_DIPSETTING( 0x03, DEF_STR( "1C_6C") );
/* TODO: bonus scores are different for 5 lives */
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "20k 70k and every 70k" ); PORT_DIPSETTING( 0x30, "20k 80k and every 80k" ); PORT_DIPSETTING( 0x08, "20k 60k" ); PORT_DIPSETTING( 0x00, "20k 70k" ); PORT_DIPSETTING( 0x10, "20k 80k" ); PORT_DIPSETTING( 0x18, "30k 100k" ); PORT_DIPSETTING( 0x20, "20k" ); PORT_DIPSETTING( 0x38, "None" );/* those are the bonus with 5 lives
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "30k 100k and every 100k" ); PORT_DIPSETTING( 0x30, "40k 120k and every 120k" ); PORT_DIPSETTING( 0x00, "30k 80k" ); PORT_DIPSETTING( 0x08, "30k 100k" ); PORT_DIPSETTING( 0x10, "30k 120k" ); PORT_DIPSETTING( 0x18, "30k" ); PORT_DIPSETTING( 0x20, "40k" ); PORT_DIPSETTING( 0x38, "None" );*/
PORT_DIPNAME( 0xc0, 0x00, DEF_STR( "Lives") );
PORT_DIPSETTING( 0x80, "1" ); PORT_DIPSETTING( 0xc0, "2" ); PORT_DIPSETTING( 0x00, "3" ); PORT_DIPSETTING( 0x40, "5" );
PORT_START(); /* DSW2 */
PORT_BIT( 0x03, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_DIPNAME( 0x04, 0x00, DEF_STR( "Cabinet") );
PORT_DIPSETTING( 0x00, DEF_STR( "Upright") );
PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") );
PORT_SERVICE( 0x08, IP_ACTIVE_HIGH ); PORT_BIT( 0xf0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
/* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */
PORT_BIT( 0x01, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY ); PORT_BIT( 0x04, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT_IMPULSE( 0x01, IP_ACTIVE_HIGH, IPT_COIN1, 1 );/* Coin 2 is not working */
PORT_BIT_IMPULSE( 0x02, IP_ACTIVE_HIGH, IPT_COIN2, 1 ); PORT_BIT( 0x0c, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_START1, 1 ); PORT_BIT_IMPULSE( 0x20, IP_ACTIVE_HIGH, IPT_START2, 1 ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS );
INPUT_PORTS_END(); }
| NONSATD | true |
PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") );
PORT_SERVICE( 0x08, IP_ACTIVE_HIGH ); PORT_BIT( 0xf0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
/* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */ | PORT_DIPSETTING( 0x28, "20k 70k and every 70k" ); PORT_DIPSETTING( 0x30, "20k 80k and every 80k" ); PORT_DIPSETTING( 0x08, "20k 60k" ); PORT_DIPSETTING( 0x00, "20k 70k" ); PORT_DIPSETTING( 0x10, "20k 80k" ); PORT_DIPSETTING( 0x18, "30k 100k" ); PORT_DIPSETTING( 0x20, "20k" ); PORT_DIPSETTING( 0x38, "None" );/* those are the bonus with 5 lives
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "30k 100k and every 100k" ); PORT_DIPSETTING( 0x30, "40k 120k and every 120k" ); PORT_DIPSETTING( 0x00, "30k 80k" ); PORT_DIPSETTING( 0x08, "30k 100k" ); PORT_DIPSETTING( 0x10, "30k 120k" ); PORT_DIPSETTING( 0x18, "30k" ); PORT_DIPSETTING( 0x20, "40k" ); PORT_DIPSETTING( 0x38, "None" );*/
PORT_DIPNAME( 0xc0, 0x00, DEF_STR( "Lives") );
PORT_DIPSETTING( 0x80, "1" ); PORT_DIPSETTING( 0xc0, "2" ); PORT_DIPSETTING( 0x00, "3" ); PORT_DIPSETTING( 0x40, "5" );
PORT_START(); /* DSW2 */
PORT_BIT( 0x03, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_DIPNAME( 0x04, 0x00, DEF_STR( "Cabinet") );
PORT_DIPSETTING( 0x00, DEF_STR( "Upright") );
PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") );
PORT_SERVICE( 0x08, IP_ACTIVE_HIGH ); PORT_BIT( 0xf0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
/* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */
PORT_BIT( 0x01, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY ); PORT_BIT( 0x04, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT_IMPULSE( 0x01, IP_ACTIVE_HIGH, IPT_COIN1, 1 );/* Coin 2 is not working */
PORT_BIT_IMPULSE( 0x02, IP_ACTIVE_HIGH, IPT_COIN2, 1 ); PORT_BIT( 0x0c, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_START1, 1 ); PORT_BIT_IMPULSE( 0x20, IP_ACTIVE_HIGH, IPT_START2, 1 ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS );
INPUT_PORTS_END(); } | PORT_DIPSETTING( 0x06, DEF_STR( "3C_1C") );
PORT_DIPSETTING( 0x04, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x07, DEF_STR( "3C_2C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x05, DEF_STR( "2C_3C") );
PORT_DIPSETTING( 0x01, DEF_STR( "1C_2C") );
PORT_DIPSETTING( 0x02, DEF_STR( "1C_3C") );
PORT_DIPSETTING( 0x03, DEF_STR( "1C_6C") );
/* TODO: bonus scores are different for 5 lives */
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "20k 70k and every 70k" ); PORT_DIPSETTING( 0x30, "20k 80k and every 80k" ); PORT_DIPSETTING( 0x08, "20k 60k" ); PORT_DIPSETTING( 0x00, "20k 70k" ); PORT_DIPSETTING( 0x10, "20k 80k" ); PORT_DIPSETTING( 0x18, "30k 100k" ); PORT_DIPSETTING( 0x20, "20k" ); PORT_DIPSETTING( 0x38, "None" );/* those are the bonus with 5 lives
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "30k 100k and every 100k" ); PORT_DIPSETTING( 0x30, "40k 120k and every 120k" ); PORT_DIPSETTING( 0x00, "30k 80k" ); PORT_DIPSETTING( 0x08, "30k 100k" ); PORT_DIPSETTING( 0x10, "30k 120k" ); PORT_DIPSETTING( 0x18, "30k" ); PORT_DIPSETTING( 0x20, "40k" ); PORT_DIPSETTING( 0x38, "None" );*/
PORT_DIPNAME( 0xc0, 0x00, DEF_STR( "Lives") );
PORT_DIPSETTING( 0x80, "1" ); PORT_DIPSETTING( 0xc0, "2" ); PORT_DIPSETTING( 0x00, "3" ); PORT_DIPSETTING( 0x40, "5" );
PORT_START(); /* DSW2 */
PORT_BIT( 0x03, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_DIPNAME( 0x04, 0x00, DEF_STR( "Cabinet") );
PORT_DIPSETTING( 0x00, DEF_STR( "Upright") );
PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") );
PORT_SERVICE( 0x08, IP_ACTIVE_HIGH ); PORT_BIT( 0xf0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
/* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */
PORT_BIT( 0x01, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY ); PORT_BIT( 0x04, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT_IMPULSE( 0x01, IP_ACTIVE_HIGH, IPT_COIN1, 1 );/* Coin 2 is not working */
PORT_BIT_IMPULSE( 0x02, IP_ACTIVE_HIGH, IPT_COIN2, 1 ); PORT_BIT( 0x0c, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_START1, 1 ); PORT_BIT_IMPULSE( 0x20, IP_ACTIVE_HIGH, IPT_START2, 1 ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS );
INPUT_PORTS_END(); } |
1238 | 9 | /* Coin 2 is not working */ | public void handler() {
PORT_START(); /* DSW0 */
/* According to the manual, 0x04, 0x08 and 0x10 should always be off,
but... */
PORT_DIPNAME( 0x07, 0x00, "Rank" ); PORT_DIPSETTING( 0x00, "A" ); PORT_DIPSETTING( 0x01, "B" ); PORT_DIPSETTING( 0x02, "C" ); PORT_DIPSETTING( 0x03, "D" ); PORT_DIPSETTING( 0x04, "E" ); PORT_DIPSETTING( 0x05, "F" ); PORT_DIPSETTING( 0x06, "G" ); PORT_DIPSETTING( 0x07, "H" ); PORT_DIPNAME( 0x18, 0x00, DEF_STR( "Coin_B") );
PORT_DIPSETTING( 0x18, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x08, DEF_STR( "1C_5C"));
PORT_DIPSETTING( 0x10, DEF_STR( "1C_7C") );
PORT_DIPNAME( 0x20, 0x00, DEF_STR( "Demo_Sounds") );
PORT_DIPSETTING( 0x20, DEF_STR( "Off") );
PORT_DIPSETTING( 0x00, DEF_STR( "On") );
PORT_BITX( 0x40, 0x00, IPT_DIPSWITCH_NAME | IPF_CHEAT, "Rack Test", KEYCODE_F1, IP_JOY_NONE ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x40, DEF_STR( "On") );
PORT_DIPNAME( 0x80, 0x00, "Freeze" ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x80, DEF_STR( "On") );
PORT_START(); /* DSW1 */
PORT_DIPNAME( 0x07, 0x00, DEF_STR( "Coin_A") );
PORT_DIPSETTING( 0x06, DEF_STR( "3C_1C") );
PORT_DIPSETTING( 0x04, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x07, DEF_STR( "3C_2C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x05, DEF_STR( "2C_3C") );
PORT_DIPSETTING( 0x01, DEF_STR( "1C_2C") );
PORT_DIPSETTING( 0x02, DEF_STR( "1C_3C") );
PORT_DIPSETTING( 0x03, DEF_STR( "1C_6C") );
/* TODO: bonus scores are different for 5 lives */
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "20k 70k and every 70k" ); PORT_DIPSETTING( 0x30, "20k 80k and every 80k" ); PORT_DIPSETTING( 0x08, "20k 60k" ); PORT_DIPSETTING( 0x00, "20k 70k" ); PORT_DIPSETTING( 0x10, "20k 80k" ); PORT_DIPSETTING( 0x18, "30k 100k" ); PORT_DIPSETTING( 0x20, "20k" ); PORT_DIPSETTING( 0x38, "None" );/* those are the bonus with 5 lives
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "30k 100k and every 100k" ); PORT_DIPSETTING( 0x30, "40k 120k and every 120k" ); PORT_DIPSETTING( 0x00, "30k 80k" ); PORT_DIPSETTING( 0x08, "30k 100k" ); PORT_DIPSETTING( 0x10, "30k 120k" ); PORT_DIPSETTING( 0x18, "30k" ); PORT_DIPSETTING( 0x20, "40k" ); PORT_DIPSETTING( 0x38, "None" );*/
PORT_DIPNAME( 0xc0, 0x00, DEF_STR( "Lives") );
PORT_DIPSETTING( 0x80, "1" ); PORT_DIPSETTING( 0xc0, "2" ); PORT_DIPSETTING( 0x00, "3" ); PORT_DIPSETTING( 0x40, "5" );
PORT_START(); /* DSW2 */
PORT_BIT( 0x03, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_DIPNAME( 0x04, 0x00, DEF_STR( "Cabinet") );
PORT_DIPSETTING( 0x00, DEF_STR( "Upright") );
PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") );
PORT_SERVICE( 0x08, IP_ACTIVE_HIGH ); PORT_BIT( 0xf0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
/* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */
PORT_BIT( 0x01, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY ); PORT_BIT( 0x04, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT_IMPULSE( 0x01, IP_ACTIVE_HIGH, IPT_COIN1, 1 );/* Coin 2 is not working */
PORT_BIT_IMPULSE( 0x02, IP_ACTIVE_HIGH, IPT_COIN2, 1 ); PORT_BIT( 0x0c, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_START1, 1 ); PORT_BIT_IMPULSE( 0x20, IP_ACTIVE_HIGH, IPT_START2, 1 ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS );
INPUT_PORTS_END(); } | NONSATD | true | PORT_BIT( 0x01, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY ); PORT_BIT( 0x04, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT_IMPULSE( 0x01, IP_ACTIVE_HIGH, IPT_COIN1, 1 );/* Coin 2 is not working */
PORT_BIT_IMPULSE( 0x02, IP_ACTIVE_HIGH, IPT_COIN2, 1 ); PORT_BIT( 0x0c, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_START1, 1 ); PORT_BIT_IMPULSE( 0x20, IP_ACTIVE_HIGH, IPT_START2, 1 ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */ | PORT_START(); /* DSW2 */
PORT_BIT( 0x03, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_DIPNAME( 0x04, 0x00, DEF_STR( "Cabinet") );
PORT_DIPSETTING( 0x00, DEF_STR( "Upright") );
PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") );
PORT_SERVICE( 0x08, IP_ACTIVE_HIGH ); PORT_BIT( 0xf0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
/* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */
PORT_BIT( 0x01, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY ); PORT_BIT( 0x04, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT_IMPULSE( 0x01, IP_ACTIVE_HIGH, IPT_COIN1, 1 );/* Coin 2 is not working */
PORT_BIT_IMPULSE( 0x02, IP_ACTIVE_HIGH, IPT_COIN2, 1 ); PORT_BIT( 0x0c, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_START1, 1 ); PORT_BIT_IMPULSE( 0x20, IP_ACTIVE_HIGH, IPT_START2, 1 ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS );
INPUT_PORTS_END(); } | PORT_DIPSETTING( 0x01, DEF_STR( "1C_2C") );
PORT_DIPSETTING( 0x02, DEF_STR( "1C_3C") );
PORT_DIPSETTING( 0x03, DEF_STR( "1C_6C") );
/* TODO: bonus scores are different for 5 lives */
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "20k 70k and every 70k" ); PORT_DIPSETTING( 0x30, "20k 80k and every 80k" ); PORT_DIPSETTING( 0x08, "20k 60k" ); PORT_DIPSETTING( 0x00, "20k 70k" ); PORT_DIPSETTING( 0x10, "20k 80k" ); PORT_DIPSETTING( 0x18, "30k 100k" ); PORT_DIPSETTING( 0x20, "20k" ); PORT_DIPSETTING( 0x38, "None" );/* those are the bonus with 5 lives
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "30k 100k and every 100k" ); PORT_DIPSETTING( 0x30, "40k 120k and every 120k" ); PORT_DIPSETTING( 0x00, "30k 80k" ); PORT_DIPSETTING( 0x08, "30k 100k" ); PORT_DIPSETTING( 0x10, "30k 120k" ); PORT_DIPSETTING( 0x18, "30k" ); PORT_DIPSETTING( 0x20, "40k" ); PORT_DIPSETTING( 0x38, "None" );*/
PORT_DIPNAME( 0xc0, 0x00, DEF_STR( "Lives") );
PORT_DIPSETTING( 0x80, "1" ); PORT_DIPSETTING( 0xc0, "2" ); PORT_DIPSETTING( 0x00, "3" ); PORT_DIPSETTING( 0x40, "5" );
PORT_START(); /* DSW2 */
PORT_BIT( 0x03, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_DIPNAME( 0x04, 0x00, DEF_STR( "Cabinet") );
PORT_DIPSETTING( 0x00, DEF_STR( "Upright") );
PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") );
PORT_SERVICE( 0x08, IP_ACTIVE_HIGH ); PORT_BIT( 0xf0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
/* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */
PORT_BIT( 0x01, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY ); PORT_BIT( 0x04, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT_IMPULSE( 0x01, IP_ACTIVE_HIGH, IPT_COIN1, 1 );/* Coin 2 is not working */
PORT_BIT_IMPULSE( 0x02, IP_ACTIVE_HIGH, IPT_COIN2, 1 ); PORT_BIT( 0x0c, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_START1, 1 ); PORT_BIT_IMPULSE( 0x20, IP_ACTIVE_HIGH, IPT_START2, 1 ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS );
INPUT_PORTS_END(); } |
1238 | 10 | /* FAKE */ | public void handler() {
PORT_START(); /* DSW0 */
/* According to the manual, 0x04, 0x08 and 0x10 should always be off,
but... */
PORT_DIPNAME( 0x07, 0x00, "Rank" ); PORT_DIPSETTING( 0x00, "A" ); PORT_DIPSETTING( 0x01, "B" ); PORT_DIPSETTING( 0x02, "C" ); PORT_DIPSETTING( 0x03, "D" ); PORT_DIPSETTING( 0x04, "E" ); PORT_DIPSETTING( 0x05, "F" ); PORT_DIPSETTING( 0x06, "G" ); PORT_DIPSETTING( 0x07, "H" ); PORT_DIPNAME( 0x18, 0x00, DEF_STR( "Coin_B") );
PORT_DIPSETTING( 0x18, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x08, DEF_STR( "1C_5C"));
PORT_DIPSETTING( 0x10, DEF_STR( "1C_7C") );
PORT_DIPNAME( 0x20, 0x00, DEF_STR( "Demo_Sounds") );
PORT_DIPSETTING( 0x20, DEF_STR( "Off") );
PORT_DIPSETTING( 0x00, DEF_STR( "On") );
PORT_BITX( 0x40, 0x00, IPT_DIPSWITCH_NAME | IPF_CHEAT, "Rack Test", KEYCODE_F1, IP_JOY_NONE ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x40, DEF_STR( "On") );
PORT_DIPNAME( 0x80, 0x00, "Freeze" ); PORT_DIPSETTING( 0x00, DEF_STR( "Off") );
PORT_DIPSETTING( 0x80, DEF_STR( "On") );
PORT_START(); /* DSW1 */
PORT_DIPNAME( 0x07, 0x00, DEF_STR( "Coin_A") );
PORT_DIPSETTING( 0x06, DEF_STR( "3C_1C") );
PORT_DIPSETTING( 0x04, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x07, DEF_STR( "3C_2C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x05, DEF_STR( "2C_3C") );
PORT_DIPSETTING( 0x01, DEF_STR( "1C_2C") );
PORT_DIPSETTING( 0x02, DEF_STR( "1C_3C") );
PORT_DIPSETTING( 0x03, DEF_STR( "1C_6C") );
/* TODO: bonus scores are different for 5 lives */
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "20k 70k and every 70k" ); PORT_DIPSETTING( 0x30, "20k 80k and every 80k" ); PORT_DIPSETTING( 0x08, "20k 60k" ); PORT_DIPSETTING( 0x00, "20k 70k" ); PORT_DIPSETTING( 0x10, "20k 80k" ); PORT_DIPSETTING( 0x18, "30k 100k" ); PORT_DIPSETTING( 0x20, "20k" ); PORT_DIPSETTING( 0x38, "None" );/* those are the bonus with 5 lives
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "30k 100k and every 100k" ); PORT_DIPSETTING( 0x30, "40k 120k and every 120k" ); PORT_DIPSETTING( 0x00, "30k 80k" ); PORT_DIPSETTING( 0x08, "30k 100k" ); PORT_DIPSETTING( 0x10, "30k 120k" ); PORT_DIPSETTING( 0x18, "30k" ); PORT_DIPSETTING( 0x20, "40k" ); PORT_DIPSETTING( 0x38, "None" );*/
PORT_DIPNAME( 0xc0, 0x00, DEF_STR( "Lives") );
PORT_DIPSETTING( 0x80, "1" ); PORT_DIPSETTING( 0xc0, "2" ); PORT_DIPSETTING( 0x00, "3" ); PORT_DIPSETTING( 0x40, "5" );
PORT_START(); /* DSW2 */
PORT_BIT( 0x03, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_DIPNAME( 0x04, 0x00, DEF_STR( "Cabinet") );
PORT_DIPSETTING( 0x00, DEF_STR( "Upright") );
PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") );
PORT_SERVICE( 0x08, IP_ACTIVE_HIGH ); PORT_BIT( 0xf0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
/* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */
PORT_BIT( 0x01, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY ); PORT_BIT( 0x04, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT_IMPULSE( 0x01, IP_ACTIVE_HIGH, IPT_COIN1, 1 );/* Coin 2 is not working */
PORT_BIT_IMPULSE( 0x02, IP_ACTIVE_HIGH, IPT_COIN2, 1 ); PORT_BIT( 0x0c, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_START1, 1 ); PORT_BIT_IMPULSE( 0x20, IP_ACTIVE_HIGH, IPT_START2, 1 ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS );
INPUT_PORTS_END(); } | NONSATD | true | PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") );
PORT_SERVICE( 0x08, IP_ACTIVE_HIGH ); PORT_BIT( 0xf0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
/* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */ | PORT_DIPSETTING( 0x28, "20k 70k and every 70k" ); PORT_DIPSETTING( 0x30, "20k 80k and every 80k" ); PORT_DIPSETTING( 0x08, "20k 60k" ); PORT_DIPSETTING( 0x00, "20k 70k" ); PORT_DIPSETTING( 0x10, "20k 80k" ); PORT_DIPSETTING( 0x18, "30k 100k" ); PORT_DIPSETTING( 0x20, "20k" ); PORT_DIPSETTING( 0x38, "None" );/* those are the bonus with 5 lives
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "30k 100k and every 100k" ); PORT_DIPSETTING( 0x30, "40k 120k and every 120k" ); PORT_DIPSETTING( 0x00, "30k 80k" ); PORT_DIPSETTING( 0x08, "30k 100k" ); PORT_DIPSETTING( 0x10, "30k 120k" ); PORT_DIPSETTING( 0x18, "30k" ); PORT_DIPSETTING( 0x20, "40k" ); PORT_DIPSETTING( 0x38, "None" );*/
PORT_DIPNAME( 0xc0, 0x00, DEF_STR( "Lives") );
PORT_DIPSETTING( 0x80, "1" ); PORT_DIPSETTING( 0xc0, "2" ); PORT_DIPSETTING( 0x00, "3" ); PORT_DIPSETTING( 0x40, "5" );
PORT_START(); /* DSW2 */
PORT_BIT( 0x03, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_DIPNAME( 0x04, 0x00, DEF_STR( "Cabinet") );
PORT_DIPSETTING( 0x00, DEF_STR( "Upright") );
PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") );
PORT_SERVICE( 0x08, IP_ACTIVE_HIGH ); PORT_BIT( 0xf0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
/* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */
PORT_BIT( 0x01, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY ); PORT_BIT( 0x04, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT_IMPULSE( 0x01, IP_ACTIVE_HIGH, IPT_COIN1, 1 );/* Coin 2 is not working */
PORT_BIT_IMPULSE( 0x02, IP_ACTIVE_HIGH, IPT_COIN2, 1 ); PORT_BIT( 0x0c, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_START1, 1 ); PORT_BIT_IMPULSE( 0x20, IP_ACTIVE_HIGH, IPT_START2, 1 ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS );
INPUT_PORTS_END(); } | PORT_DIPSETTING( 0x06, DEF_STR( "3C_1C") );
PORT_DIPSETTING( 0x04, DEF_STR( "2C_1C") );
PORT_DIPSETTING( 0x07, DEF_STR( "3C_2C") );
PORT_DIPSETTING( 0x00, DEF_STR( "1C_1C") );
PORT_DIPSETTING( 0x05, DEF_STR( "2C_3C") );
PORT_DIPSETTING( 0x01, DEF_STR( "1C_2C") );
PORT_DIPSETTING( 0x02, DEF_STR( "1C_3C") );
PORT_DIPSETTING( 0x03, DEF_STR( "1C_6C") );
/* TODO: bonus scores are different for 5 lives */
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "20k 70k and every 70k" ); PORT_DIPSETTING( 0x30, "20k 80k and every 80k" ); PORT_DIPSETTING( 0x08, "20k 60k" ); PORT_DIPSETTING( 0x00, "20k 70k" ); PORT_DIPSETTING( 0x10, "20k 80k" ); PORT_DIPSETTING( 0x18, "30k 100k" ); PORT_DIPSETTING( 0x20, "20k" ); PORT_DIPSETTING( 0x38, "None" );/* those are the bonus with 5 lives
PORT_DIPNAME( 0x38, 0x00, DEF_STR( "Bonus_Life") );
PORT_DIPSETTING( 0x28, "30k 100k and every 100k" ); PORT_DIPSETTING( 0x30, "40k 120k and every 120k" ); PORT_DIPSETTING( 0x00, "30k 80k" ); PORT_DIPSETTING( 0x08, "30k 100k" ); PORT_DIPSETTING( 0x10, "30k 120k" ); PORT_DIPSETTING( 0x18, "30k" ); PORT_DIPSETTING( 0x20, "40k" ); PORT_DIPSETTING( 0x38, "None" );*/
PORT_DIPNAME( 0xc0, 0x00, DEF_STR( "Lives") );
PORT_DIPSETTING( 0x80, "1" ); PORT_DIPSETTING( 0xc0, "2" ); PORT_DIPSETTING( 0x00, "3" ); PORT_DIPSETTING( 0x40, "5" );
PORT_START(); /* DSW2 */
PORT_BIT( 0x03, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_DIPNAME( 0x04, 0x00, DEF_STR( "Cabinet") );
PORT_DIPSETTING( 0x00, DEF_STR( "Upright") );
PORT_DIPSETTING( 0x04, DEF_STR( "Cocktail") );
PORT_SERVICE( 0x08, IP_ACTIVE_HIGH ); PORT_BIT( 0xf0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
/* The player inputs are not memory mapped, they are handled by an I/O chip. */
/* These fake input ports are read by mappy_customio_data_r() */
PORT_BIT( 0x01, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY ); PORT_BIT( 0x04, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT_IMPULSE( 0x01, IP_ACTIVE_HIGH, IPT_COIN1, 1 );/* Coin 2 is not working */
PORT_BIT_IMPULSE( 0x02, IP_ACTIVE_HIGH, IPT_COIN2, 1 ); PORT_BIT( 0x0c, IP_ACTIVE_HIGH, IPT_UNUSED ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_START1, 1 ); PORT_BIT_IMPULSE( 0x20, IP_ACTIVE_HIGH, IPT_START2, 1 ); PORT_BIT( 0xc0, IP_ACTIVE_HIGH, IPT_UNUSED );
PORT_START(); /* FAKE */
PORT_BIT( 0x02, IP_ACTIVE_HIGH, IPT_JOYSTICK_RIGHT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT( 0x08, IP_ACTIVE_HIGH, IPT_JOYSTICK_LEFT | IPF_2WAY | IPF_COCKTAIL ); PORT_BIT_IMPULSE( 0x10, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, 1 ); PORT_BITX(0x20, IP_ACTIVE_HIGH, IPT_BUTTON1 | IPF_COCKTAIL, null, IP_KEY_PREVIOUS, IP_JOY_PREVIOUS );
INPUT_PORTS_END(); } |
25814 | 0 | // this code used to be smart about which validator was used at which java-path, and provide only the contexts for that particular
// java-path to the processor; but that doesn't work in SEER*DMS where some edits are persisted but not registered to the engine! | private void updateProcessorsContexts(Map<Long, Map<String, Object>> allContexts) {
// this code used to be smart about which validator was used at which java-path, and provide only the contexts for that particular
// java-path to the processor; but that doesn't work in SEER*DMS where some edits are persisted but not registered to the engine!
for (ValidatingProcessor p : _processors.values())
p.setContexts(allContexts);
} | DESIGN | true | private void updateProcessorsContexts(Map<Long, Map<String, Object>> allContexts) {
// this code used to be smart about which validator was used at which java-path, and provide only the contexts for that particular
// java-path to the processor; but that doesn't work in SEER*DMS where some edits are persisted but not registered to the engine!
for (ValidatingProcessor p : _processors.values())
p.setContexts(allContexts); | private void updateProcessorsContexts(Map<Long, Map<String, Object>> allContexts) {
// this code used to be smart about which validator was used at which java-path, and provide only the contexts for that particular
// java-path to the processor; but that doesn't work in SEER*DMS where some edits are persisted but not registered to the engine!
for (ValidatingProcessor p : _processors.values())
p.setContexts(allContexts);
} | private void updateProcessorsContexts(Map<Long, Map<String, Object>> allContexts) {
// this code used to be smart about which validator was used at which java-path, and provide only the contexts for that particular
// java-path to the processor; but that doesn't work in SEER*DMS where some edits are persisted but not registered to the engine!
for (ValidatingProcessor p : _processors.values())
p.setContexts(allContexts);
} |
17632 | 0 | // Retrieve view type | private void readPreference(String file, AndroidView root, Integer preferenceId) {
Document doc;
try {
DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
doc = dBuilder.parse(file);
} catch (Exception ex) {
throw new RuntimeException(ex);
}
Element rootElement = doc.getDocumentElement();
String rootTagName = rootElement.getTagName();
if (!(rootTagName.equals("PreferenceScreen")
|| rootTagName.equals("preference-headers")
|| rootTagName.equals("android.support.v7.preference.PreferenceScreen")
|| rootTagName.equals("android.support.v7.preference.preference-headers"))) {
return;
}
LinkedList<Pair<Node, AndroidView>> work = Lists.newLinkedList();
work.add(new Pair<>(rootElement, root));
while (!work.isEmpty()) {
Pair<Node, AndroidView> p = work.removeFirst();
Node node = p.getO1();
AndroidView view = p.getO2();
view.setOrigin(file);
NamedNodeMap attrMap = node.getAttributes();
if (attrMap == null) {
System.out.println(file + "!!!" + node.getClass() + "!!!"
+ node.toString() + "!!!" + node.getTextContent());
}
Node keyNode = attrMap.getNamedItem(KEY_ATTR);
if (keyNode != null) {
String key = keyNode.getTextContent();
view.addAttr(KEY_ATTR, key);
HashMap<Integer, String> maps = preferencesMap.get("preference-screen");
if (maps == null) {
maps = Maps.newHashMap();
}
maps.put(preferenceId, FilenameUtils.removeExtension(new File(file).getName()));
preferencesMap.put("preference-screen", maps);
Set<String> preferenceKeys = preferenceKeyMaps.get(preferenceId);
if (preferenceKeys == null) {
preferenceKeys = new HashSet<>();
preferenceKeyMaps.put(preferenceId, preferenceKeys);
}
preferenceKeys.add(key);
}
Node fragmentNode = attrMap.getNamedItem(FRAGMENT_ATTR);
if (fragmentNode != null) {
view.addAttr(FRAGMENT_ATTR, fragmentNode.getTextContent());
HashMap<Integer, String> maps = preferencesMap.get("preference-header");
if (maps == null) {
maps = Maps.newHashMap();
}
maps.put(preferenceId, FilenameUtils.removeExtension(new File(file).getName()));
preferencesMap.put("preference-header", maps);
}
int guiId = -1;
// Retrieve view type
String guiName = node.getNodeName();
String title = readAndroidTextOrTitle(attrMap, "title");
String summary = readAndroidTextOrTitle(attrMap, "summary");
String tooltip = readAndroidTextOrTitle(attrMap, "tooltipText");
String contentDescription = readAndroidTextOrTitle(attrMap, "contentDescription");
String images = readAndroidImageResource(attrMap);
view.save(guiId, title, summary, tooltip, contentDescription, images, guiName);
//view.save(guiId, title, "", guiName);
NodeList children = node.getChildNodes();
for (int i = 0; i < children.getLength(); i++) {
Node newNode = children.item(i);
String nodeName = newNode.getNodeName();
if ("#comment".equals(nodeName)) {
continue;
}
if ("#text".equals(nodeName)) {
// possible for XML files created on a different operating system
// than the one our analysis is run on
continue;
}
AndroidView newView = new AndroidView();
// FIXME: we assume that every node has attributes, may be wrong
if (!newNode.hasAttributes()) {
Logger.verb("WARNING", "xml node " + newNode + " has no attributes");
// Fixed: this is wrong for the case group item -> menu -> item
//continue;
} else {
NamedNodeMap attrs = newNode.getAttributes();
for (int idx = 0; idx < attrs.getLength(); idx += 1) {
Node attr = attrs.item(idx);
String name = attr.getNodeName();
String value = attr.getNodeValue();
newView.addAttr(name, value);
}
}
newView.setParent(view);
work.add(new Pair<>(newNode, newView));
}
}
} | NONSATD | true | }
int guiId = -1;
// Retrieve view type
String guiName = node.getNodeName();
String title = readAndroidTextOrTitle(attrMap, "title"); | if (fragmentNode != null) {
view.addAttr(FRAGMENT_ATTR, fragmentNode.getTextContent());
HashMap<Integer, String> maps = preferencesMap.get("preference-header");
if (maps == null) {
maps = Maps.newHashMap();
}
maps.put(preferenceId, FilenameUtils.removeExtension(new File(file).getName()));
preferencesMap.put("preference-header", maps);
}
int guiId = -1;
// Retrieve view type
String guiName = node.getNodeName();
String title = readAndroidTextOrTitle(attrMap, "title");
String summary = readAndroidTextOrTitle(attrMap, "summary");
String tooltip = readAndroidTextOrTitle(attrMap, "tooltipText");
String contentDescription = readAndroidTextOrTitle(attrMap, "contentDescription");
String images = readAndroidImageResource(attrMap);
view.save(guiId, title, summary, tooltip, contentDescription, images, guiName);
//view.save(guiId, title, "", guiName);
NodeList children = node.getChildNodes();
for (int i = 0; i < children.getLength(); i++) { | maps.put(preferenceId, FilenameUtils.removeExtension(new File(file).getName()));
preferencesMap.put("preference-screen", maps);
Set<String> preferenceKeys = preferenceKeyMaps.get(preferenceId);
if (preferenceKeys == null) {
preferenceKeys = new HashSet<>();
preferenceKeyMaps.put(preferenceId, preferenceKeys);
}
preferenceKeys.add(key);
}
Node fragmentNode = attrMap.getNamedItem(FRAGMENT_ATTR);
if (fragmentNode != null) {
view.addAttr(FRAGMENT_ATTR, fragmentNode.getTextContent());
HashMap<Integer, String> maps = preferencesMap.get("preference-header");
if (maps == null) {
maps = Maps.newHashMap();
}
maps.put(preferenceId, FilenameUtils.removeExtension(new File(file).getName()));
preferencesMap.put("preference-header", maps);
}
int guiId = -1;
// Retrieve view type
String guiName = node.getNodeName();
String title = readAndroidTextOrTitle(attrMap, "title");
String summary = readAndroidTextOrTitle(attrMap, "summary");
String tooltip = readAndroidTextOrTitle(attrMap, "tooltipText");
String contentDescription = readAndroidTextOrTitle(attrMap, "contentDescription");
String images = readAndroidImageResource(attrMap);
view.save(guiId, title, summary, tooltip, contentDescription, images, guiName);
//view.save(guiId, title, "", guiName);
NodeList children = node.getChildNodes();
for (int i = 0; i < children.getLength(); i++) {
Node newNode = children.item(i);
String nodeName = newNode.getNodeName();
if ("#comment".equals(nodeName)) {
continue;
}
if ("#text".equals(nodeName)) {
// possible for XML files created on a different operating system
// than the one our analysis is run on
continue;
} |
17632 | 1 | //view.save(guiId, title, "", guiName); | private void readPreference(String file, AndroidView root, Integer preferenceId) {
Document doc;
try {
DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
doc = dBuilder.parse(file);
} catch (Exception ex) {
throw new RuntimeException(ex);
}
Element rootElement = doc.getDocumentElement();
String rootTagName = rootElement.getTagName();
if (!(rootTagName.equals("PreferenceScreen")
|| rootTagName.equals("preference-headers")
|| rootTagName.equals("android.support.v7.preference.PreferenceScreen")
|| rootTagName.equals("android.support.v7.preference.preference-headers"))) {
return;
}
LinkedList<Pair<Node, AndroidView>> work = Lists.newLinkedList();
work.add(new Pair<>(rootElement, root));
while (!work.isEmpty()) {
Pair<Node, AndroidView> p = work.removeFirst();
Node node = p.getO1();
AndroidView view = p.getO2();
view.setOrigin(file);
NamedNodeMap attrMap = node.getAttributes();
if (attrMap == null) {
System.out.println(file + "!!!" + node.getClass() + "!!!"
+ node.toString() + "!!!" + node.getTextContent());
}
Node keyNode = attrMap.getNamedItem(KEY_ATTR);
if (keyNode != null) {
String key = keyNode.getTextContent();
view.addAttr(KEY_ATTR, key);
HashMap<Integer, String> maps = preferencesMap.get("preference-screen");
if (maps == null) {
maps = Maps.newHashMap();
}
maps.put(preferenceId, FilenameUtils.removeExtension(new File(file).getName()));
preferencesMap.put("preference-screen", maps);
Set<String> preferenceKeys = preferenceKeyMaps.get(preferenceId);
if (preferenceKeys == null) {
preferenceKeys = new HashSet<>();
preferenceKeyMaps.put(preferenceId, preferenceKeys);
}
preferenceKeys.add(key);
}
Node fragmentNode = attrMap.getNamedItem(FRAGMENT_ATTR);
if (fragmentNode != null) {
view.addAttr(FRAGMENT_ATTR, fragmentNode.getTextContent());
HashMap<Integer, String> maps = preferencesMap.get("preference-header");
if (maps == null) {
maps = Maps.newHashMap();
}
maps.put(preferenceId, FilenameUtils.removeExtension(new File(file).getName()));
preferencesMap.put("preference-header", maps);
}
int guiId = -1;
// Retrieve view type
String guiName = node.getNodeName();
String title = readAndroidTextOrTitle(attrMap, "title");
String summary = readAndroidTextOrTitle(attrMap, "summary");
String tooltip = readAndroidTextOrTitle(attrMap, "tooltipText");
String contentDescription = readAndroidTextOrTitle(attrMap, "contentDescription");
String images = readAndroidImageResource(attrMap);
view.save(guiId, title, summary, tooltip, contentDescription, images, guiName);
//view.save(guiId, title, "", guiName);
NodeList children = node.getChildNodes();
for (int i = 0; i < children.getLength(); i++) {
Node newNode = children.item(i);
String nodeName = newNode.getNodeName();
if ("#comment".equals(nodeName)) {
continue;
}
if ("#text".equals(nodeName)) {
// possible for XML files created on a different operating system
// than the one our analysis is run on
continue;
}
AndroidView newView = new AndroidView();
// FIXME: we assume that every node has attributes, may be wrong
if (!newNode.hasAttributes()) {
Logger.verb("WARNING", "xml node " + newNode + " has no attributes");
// Fixed: this is wrong for the case group item -> menu -> item
//continue;
} else {
NamedNodeMap attrs = newNode.getAttributes();
for (int idx = 0; idx < attrs.getLength(); idx += 1) {
Node attr = attrs.item(idx);
String name = attr.getNodeName();
String value = attr.getNodeValue();
newView.addAttr(name, value);
}
}
newView.setParent(view);
work.add(new Pair<>(newNode, newView));
}
}
} | NONSATD | true | String images = readAndroidImageResource(attrMap);
view.save(guiId, title, summary, tooltip, contentDescription, images, guiName);
//view.save(guiId, title, "", guiName);
NodeList children = node.getChildNodes();
for (int i = 0; i < children.getLength(); i++) { | }
int guiId = -1;
// Retrieve view type
String guiName = node.getNodeName();
String title = readAndroidTextOrTitle(attrMap, "title");
String summary = readAndroidTextOrTitle(attrMap, "summary");
String tooltip = readAndroidTextOrTitle(attrMap, "tooltipText");
String contentDescription = readAndroidTextOrTitle(attrMap, "contentDescription");
String images = readAndroidImageResource(attrMap);
view.save(guiId, title, summary, tooltip, contentDescription, images, guiName);
//view.save(guiId, title, "", guiName);
NodeList children = node.getChildNodes();
for (int i = 0; i < children.getLength(); i++) {
Node newNode = children.item(i);
String nodeName = newNode.getNodeName();
if ("#comment".equals(nodeName)) {
continue;
}
if ("#text".equals(nodeName)) {
// possible for XML files created on a different operating system
// than the one our analysis is run on | }
Node fragmentNode = attrMap.getNamedItem(FRAGMENT_ATTR);
if (fragmentNode != null) {
view.addAttr(FRAGMENT_ATTR, fragmentNode.getTextContent());
HashMap<Integer, String> maps = preferencesMap.get("preference-header");
if (maps == null) {
maps = Maps.newHashMap();
}
maps.put(preferenceId, FilenameUtils.removeExtension(new File(file).getName()));
preferencesMap.put("preference-header", maps);
}
int guiId = -1;
// Retrieve view type
String guiName = node.getNodeName();
String title = readAndroidTextOrTitle(attrMap, "title");
String summary = readAndroidTextOrTitle(attrMap, "summary");
String tooltip = readAndroidTextOrTitle(attrMap, "tooltipText");
String contentDescription = readAndroidTextOrTitle(attrMap, "contentDescription");
String images = readAndroidImageResource(attrMap);
view.save(guiId, title, summary, tooltip, contentDescription, images, guiName);
//view.save(guiId, title, "", guiName);
NodeList children = node.getChildNodes();
for (int i = 0; i < children.getLength(); i++) {
Node newNode = children.item(i);
String nodeName = newNode.getNodeName();
if ("#comment".equals(nodeName)) {
continue;
}
if ("#text".equals(nodeName)) {
// possible for XML files created on a different operating system
// than the one our analysis is run on
continue;
}
AndroidView newView = new AndroidView();
// FIXME: we assume that every node has attributes, may be wrong
if (!newNode.hasAttributes()) {
Logger.verb("WARNING", "xml node " + newNode + " has no attributes");
// Fixed: this is wrong for the case group item -> menu -> item
//continue;
} else {
NamedNodeMap attrs = newNode.getAttributes(); |
17632 | 2 | // possible for XML files created on a different operating system
// than the one our analysis is run on | private void readPreference(String file, AndroidView root, Integer preferenceId) {
Document doc;
try {
DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
doc = dBuilder.parse(file);
} catch (Exception ex) {
throw new RuntimeException(ex);
}
Element rootElement = doc.getDocumentElement();
String rootTagName = rootElement.getTagName();
if (!(rootTagName.equals("PreferenceScreen")
|| rootTagName.equals("preference-headers")
|| rootTagName.equals("android.support.v7.preference.PreferenceScreen")
|| rootTagName.equals("android.support.v7.preference.preference-headers"))) {
return;
}
LinkedList<Pair<Node, AndroidView>> work = Lists.newLinkedList();
work.add(new Pair<>(rootElement, root));
while (!work.isEmpty()) {
Pair<Node, AndroidView> p = work.removeFirst();
Node node = p.getO1();
AndroidView view = p.getO2();
view.setOrigin(file);
NamedNodeMap attrMap = node.getAttributes();
if (attrMap == null) {
System.out.println(file + "!!!" + node.getClass() + "!!!"
+ node.toString() + "!!!" + node.getTextContent());
}
Node keyNode = attrMap.getNamedItem(KEY_ATTR);
if (keyNode != null) {
String key = keyNode.getTextContent();
view.addAttr(KEY_ATTR, key);
HashMap<Integer, String> maps = preferencesMap.get("preference-screen");
if (maps == null) {
maps = Maps.newHashMap();
}
maps.put(preferenceId, FilenameUtils.removeExtension(new File(file).getName()));
preferencesMap.put("preference-screen", maps);
Set<String> preferenceKeys = preferenceKeyMaps.get(preferenceId);
if (preferenceKeys == null) {
preferenceKeys = new HashSet<>();
preferenceKeyMaps.put(preferenceId, preferenceKeys);
}
preferenceKeys.add(key);
}
Node fragmentNode = attrMap.getNamedItem(FRAGMENT_ATTR);
if (fragmentNode != null) {
view.addAttr(FRAGMENT_ATTR, fragmentNode.getTextContent());
HashMap<Integer, String> maps = preferencesMap.get("preference-header");
if (maps == null) {
maps = Maps.newHashMap();
}
maps.put(preferenceId, FilenameUtils.removeExtension(new File(file).getName()));
preferencesMap.put("preference-header", maps);
}
int guiId = -1;
// Retrieve view type
String guiName = node.getNodeName();
String title = readAndroidTextOrTitle(attrMap, "title");
String summary = readAndroidTextOrTitle(attrMap, "summary");
String tooltip = readAndroidTextOrTitle(attrMap, "tooltipText");
String contentDescription = readAndroidTextOrTitle(attrMap, "contentDescription");
String images = readAndroidImageResource(attrMap);
view.save(guiId, title, summary, tooltip, contentDescription, images, guiName);
//view.save(guiId, title, "", guiName);
NodeList children = node.getChildNodes();
for (int i = 0; i < children.getLength(); i++) {
Node newNode = children.item(i);
String nodeName = newNode.getNodeName();
if ("#comment".equals(nodeName)) {
continue;
}
if ("#text".equals(nodeName)) {
// possible for XML files created on a different operating system
// than the one our analysis is run on
continue;
}
AndroidView newView = new AndroidView();
// FIXME: we assume that every node has attributes, may be wrong
if (!newNode.hasAttributes()) {
Logger.verb("WARNING", "xml node " + newNode + " has no attributes");
// Fixed: this is wrong for the case group item -> menu -> item
//continue;
} else {
NamedNodeMap attrs = newNode.getAttributes();
for (int idx = 0; idx < attrs.getLength(); idx += 1) {
Node attr = attrs.item(idx);
String name = attr.getNodeName();
String value = attr.getNodeValue();
newView.addAttr(name, value);
}
}
newView.setParent(view);
work.add(new Pair<>(newNode, newView));
}
}
} | NONSATD | true | }
if ("#text".equals(nodeName)) {
// possible for XML files created on a different operating system
// than the one our analysis is run on
continue;
} | view.save(guiId, title, summary, tooltip, contentDescription, images, guiName);
//view.save(guiId, title, "", guiName);
NodeList children = node.getChildNodes();
for (int i = 0; i < children.getLength(); i++) {
Node newNode = children.item(i);
String nodeName = newNode.getNodeName();
if ("#comment".equals(nodeName)) {
continue;
}
if ("#text".equals(nodeName)) {
// possible for XML files created on a different operating system
// than the one our analysis is run on
continue;
}
AndroidView newView = new AndroidView();
// FIXME: we assume that every node has attributes, may be wrong
if (!newNode.hasAttributes()) {
Logger.verb("WARNING", "xml node " + newNode + " has no attributes");
// Fixed: this is wrong for the case group item -> menu -> item
//continue;
} else {
NamedNodeMap attrs = newNode.getAttributes(); | preferencesMap.put("preference-header", maps);
}
int guiId = -1;
// Retrieve view type
String guiName = node.getNodeName();
String title = readAndroidTextOrTitle(attrMap, "title");
String summary = readAndroidTextOrTitle(attrMap, "summary");
String tooltip = readAndroidTextOrTitle(attrMap, "tooltipText");
String contentDescription = readAndroidTextOrTitle(attrMap, "contentDescription");
String images = readAndroidImageResource(attrMap);
view.save(guiId, title, summary, tooltip, contentDescription, images, guiName);
//view.save(guiId, title, "", guiName);
NodeList children = node.getChildNodes();
for (int i = 0; i < children.getLength(); i++) {
Node newNode = children.item(i);
String nodeName = newNode.getNodeName();
if ("#comment".equals(nodeName)) {
continue;
}
if ("#text".equals(nodeName)) {
// possible for XML files created on a different operating system
// than the one our analysis is run on
continue;
}
AndroidView newView = new AndroidView();
// FIXME: we assume that every node has attributes, may be wrong
if (!newNode.hasAttributes()) {
Logger.verb("WARNING", "xml node " + newNode + " has no attributes");
// Fixed: this is wrong for the case group item -> menu -> item
//continue;
} else {
NamedNodeMap attrs = newNode.getAttributes();
for (int idx = 0; idx < attrs.getLength(); idx += 1) {
Node attr = attrs.item(idx);
String name = attr.getNodeName();
String value = attr.getNodeValue();
newView.addAttr(name, value);
}
}
newView.setParent(view);
work.add(new Pair<>(newNode, newView));
} |
17632 | 3 | // FIXME: we assume that every node has attributes, may be wrong | private void readPreference(String file, AndroidView root, Integer preferenceId) {
Document doc;
try {
DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
doc = dBuilder.parse(file);
} catch (Exception ex) {
throw new RuntimeException(ex);
}
Element rootElement = doc.getDocumentElement();
String rootTagName = rootElement.getTagName();
if (!(rootTagName.equals("PreferenceScreen")
|| rootTagName.equals("preference-headers")
|| rootTagName.equals("android.support.v7.preference.PreferenceScreen")
|| rootTagName.equals("android.support.v7.preference.preference-headers"))) {
return;
}
LinkedList<Pair<Node, AndroidView>> work = Lists.newLinkedList();
work.add(new Pair<>(rootElement, root));
while (!work.isEmpty()) {
Pair<Node, AndroidView> p = work.removeFirst();
Node node = p.getO1();
AndroidView view = p.getO2();
view.setOrigin(file);
NamedNodeMap attrMap = node.getAttributes();
if (attrMap == null) {
System.out.println(file + "!!!" + node.getClass() + "!!!"
+ node.toString() + "!!!" + node.getTextContent());
}
Node keyNode = attrMap.getNamedItem(KEY_ATTR);
if (keyNode != null) {
String key = keyNode.getTextContent();
view.addAttr(KEY_ATTR, key);
HashMap<Integer, String> maps = preferencesMap.get("preference-screen");
if (maps == null) {
maps = Maps.newHashMap();
}
maps.put(preferenceId, FilenameUtils.removeExtension(new File(file).getName()));
preferencesMap.put("preference-screen", maps);
Set<String> preferenceKeys = preferenceKeyMaps.get(preferenceId);
if (preferenceKeys == null) {
preferenceKeys = new HashSet<>();
preferenceKeyMaps.put(preferenceId, preferenceKeys);
}
preferenceKeys.add(key);
}
Node fragmentNode = attrMap.getNamedItem(FRAGMENT_ATTR);
if (fragmentNode != null) {
view.addAttr(FRAGMENT_ATTR, fragmentNode.getTextContent());
HashMap<Integer, String> maps = preferencesMap.get("preference-header");
if (maps == null) {
maps = Maps.newHashMap();
}
maps.put(preferenceId, FilenameUtils.removeExtension(new File(file).getName()));
preferencesMap.put("preference-header", maps);
}
int guiId = -1;
// Retrieve view type
String guiName = node.getNodeName();
String title = readAndroidTextOrTitle(attrMap, "title");
String summary = readAndroidTextOrTitle(attrMap, "summary");
String tooltip = readAndroidTextOrTitle(attrMap, "tooltipText");
String contentDescription = readAndroidTextOrTitle(attrMap, "contentDescription");
String images = readAndroidImageResource(attrMap);
view.save(guiId, title, summary, tooltip, contentDescription, images, guiName);
//view.save(guiId, title, "", guiName);
NodeList children = node.getChildNodes();
for (int i = 0; i < children.getLength(); i++) {
Node newNode = children.item(i);
String nodeName = newNode.getNodeName();
if ("#comment".equals(nodeName)) {
continue;
}
if ("#text".equals(nodeName)) {
// possible for XML files created on a different operating system
// than the one our analysis is run on
continue;
}
AndroidView newView = new AndroidView();
// FIXME: we assume that every node has attributes, may be wrong
if (!newNode.hasAttributes()) {
Logger.verb("WARNING", "xml node " + newNode + " has no attributes");
// Fixed: this is wrong for the case group item -> menu -> item
//continue;
} else {
NamedNodeMap attrs = newNode.getAttributes();
for (int idx = 0; idx < attrs.getLength(); idx += 1) {
Node attr = attrs.item(idx);
String name = attr.getNodeName();
String value = attr.getNodeValue();
newView.addAttr(name, value);
}
}
newView.setParent(view);
work.add(new Pair<>(newNode, newView));
}
}
} | DEFECT | true | }
AndroidView newView = new AndroidView();
// FIXME: we assume that every node has attributes, may be wrong
if (!newNode.hasAttributes()) {
Logger.verb("WARNING", "xml node " + newNode + " has no attributes"); | String nodeName = newNode.getNodeName();
if ("#comment".equals(nodeName)) {
continue;
}
if ("#text".equals(nodeName)) {
// possible for XML files created on a different operating system
// than the one our analysis is run on
continue;
}
AndroidView newView = new AndroidView();
// FIXME: we assume that every node has attributes, may be wrong
if (!newNode.hasAttributes()) {
Logger.verb("WARNING", "xml node " + newNode + " has no attributes");
// Fixed: this is wrong for the case group item -> menu -> item
//continue;
} else {
NamedNodeMap attrs = newNode.getAttributes();
for (int idx = 0; idx < attrs.getLength(); idx += 1) {
Node attr = attrs.item(idx);
String name = attr.getNodeName();
String value = attr.getNodeValue(); | String title = readAndroidTextOrTitle(attrMap, "title");
String summary = readAndroidTextOrTitle(attrMap, "summary");
String tooltip = readAndroidTextOrTitle(attrMap, "tooltipText");
String contentDescription = readAndroidTextOrTitle(attrMap, "contentDescription");
String images = readAndroidImageResource(attrMap);
view.save(guiId, title, summary, tooltip, contentDescription, images, guiName);
//view.save(guiId, title, "", guiName);
NodeList children = node.getChildNodes();
for (int i = 0; i < children.getLength(); i++) {
Node newNode = children.item(i);
String nodeName = newNode.getNodeName();
if ("#comment".equals(nodeName)) {
continue;
}
if ("#text".equals(nodeName)) {
// possible for XML files created on a different operating system
// than the one our analysis is run on
continue;
}
AndroidView newView = new AndroidView();
// FIXME: we assume that every node has attributes, may be wrong
if (!newNode.hasAttributes()) {
Logger.verb("WARNING", "xml node " + newNode + " has no attributes");
// Fixed: this is wrong for the case group item -> menu -> item
//continue;
} else {
NamedNodeMap attrs = newNode.getAttributes();
for (int idx = 0; idx < attrs.getLength(); idx += 1) {
Node attr = attrs.item(idx);
String name = attr.getNodeName();
String value = attr.getNodeValue();
newView.addAttr(name, value);
}
}
newView.setParent(view);
work.add(new Pair<>(newNode, newView));
}
}
} |
17632 | 4 | // Fixed: this is wrong for the case group item -> menu -> item
//continue; | private void readPreference(String file, AndroidView root, Integer preferenceId) {
Document doc;
try {
DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
doc = dBuilder.parse(file);
} catch (Exception ex) {
throw new RuntimeException(ex);
}
Element rootElement = doc.getDocumentElement();
String rootTagName = rootElement.getTagName();
if (!(rootTagName.equals("PreferenceScreen")
|| rootTagName.equals("preference-headers")
|| rootTagName.equals("android.support.v7.preference.PreferenceScreen")
|| rootTagName.equals("android.support.v7.preference.preference-headers"))) {
return;
}
LinkedList<Pair<Node, AndroidView>> work = Lists.newLinkedList();
work.add(new Pair<>(rootElement, root));
while (!work.isEmpty()) {
Pair<Node, AndroidView> p = work.removeFirst();
Node node = p.getO1();
AndroidView view = p.getO2();
view.setOrigin(file);
NamedNodeMap attrMap = node.getAttributes();
if (attrMap == null) {
System.out.println(file + "!!!" + node.getClass() + "!!!"
+ node.toString() + "!!!" + node.getTextContent());
}
Node keyNode = attrMap.getNamedItem(KEY_ATTR);
if (keyNode != null) {
String key = keyNode.getTextContent();
view.addAttr(KEY_ATTR, key);
HashMap<Integer, String> maps = preferencesMap.get("preference-screen");
if (maps == null) {
maps = Maps.newHashMap();
}
maps.put(preferenceId, FilenameUtils.removeExtension(new File(file).getName()));
preferencesMap.put("preference-screen", maps);
Set<String> preferenceKeys = preferenceKeyMaps.get(preferenceId);
if (preferenceKeys == null) {
preferenceKeys = new HashSet<>();
preferenceKeyMaps.put(preferenceId, preferenceKeys);
}
preferenceKeys.add(key);
}
Node fragmentNode = attrMap.getNamedItem(FRAGMENT_ATTR);
if (fragmentNode != null) {
view.addAttr(FRAGMENT_ATTR, fragmentNode.getTextContent());
HashMap<Integer, String> maps = preferencesMap.get("preference-header");
if (maps == null) {
maps = Maps.newHashMap();
}
maps.put(preferenceId, FilenameUtils.removeExtension(new File(file).getName()));
preferencesMap.put("preference-header", maps);
}
int guiId = -1;
// Retrieve view type
String guiName = node.getNodeName();
String title = readAndroidTextOrTitle(attrMap, "title");
String summary = readAndroidTextOrTitle(attrMap, "summary");
String tooltip = readAndroidTextOrTitle(attrMap, "tooltipText");
String contentDescription = readAndroidTextOrTitle(attrMap, "contentDescription");
String images = readAndroidImageResource(attrMap);
view.save(guiId, title, summary, tooltip, contentDescription, images, guiName);
//view.save(guiId, title, "", guiName);
NodeList children = node.getChildNodes();
for (int i = 0; i < children.getLength(); i++) {
Node newNode = children.item(i);
String nodeName = newNode.getNodeName();
if ("#comment".equals(nodeName)) {
continue;
}
if ("#text".equals(nodeName)) {
// possible for XML files created on a different operating system
// than the one our analysis is run on
continue;
}
AndroidView newView = new AndroidView();
// FIXME: we assume that every node has attributes, may be wrong
if (!newNode.hasAttributes()) {
Logger.verb("WARNING", "xml node " + newNode + " has no attributes");
// Fixed: this is wrong for the case group item -> menu -> item
//continue;
} else {
NamedNodeMap attrs = newNode.getAttributes();
for (int idx = 0; idx < attrs.getLength(); idx += 1) {
Node attr = attrs.item(idx);
String name = attr.getNodeName();
String value = attr.getNodeValue();
newView.addAttr(name, value);
}
}
newView.setParent(view);
work.add(new Pair<>(newNode, newView));
}
}
} | DEFECT | true | if (!newNode.hasAttributes()) {
Logger.verb("WARNING", "xml node " + newNode + " has no attributes");
// Fixed: this is wrong for the case group item -> menu -> item
//continue;
} else {
NamedNodeMap attrs = newNode.getAttributes(); | }
if ("#text".equals(nodeName)) {
// possible for XML files created on a different operating system
// than the one our analysis is run on
continue;
}
AndroidView newView = new AndroidView();
// FIXME: we assume that every node has attributes, may be wrong
if (!newNode.hasAttributes()) {
Logger.verb("WARNING", "xml node " + newNode + " has no attributes");
// Fixed: this is wrong for the case group item -> menu -> item
//continue;
} else {
NamedNodeMap attrs = newNode.getAttributes();
for (int idx = 0; idx < attrs.getLength(); idx += 1) {
Node attr = attrs.item(idx);
String name = attr.getNodeName();
String value = attr.getNodeValue();
newView.addAttr(name, value);
}
}
newView.setParent(view); | String contentDescription = readAndroidTextOrTitle(attrMap, "contentDescription");
String images = readAndroidImageResource(attrMap);
view.save(guiId, title, summary, tooltip, contentDescription, images, guiName);
//view.save(guiId, title, "", guiName);
NodeList children = node.getChildNodes();
for (int i = 0; i < children.getLength(); i++) {
Node newNode = children.item(i);
String nodeName = newNode.getNodeName();
if ("#comment".equals(nodeName)) {
continue;
}
if ("#text".equals(nodeName)) {
// possible for XML files created on a different operating system
// than the one our analysis is run on
continue;
}
AndroidView newView = new AndroidView();
// FIXME: we assume that every node has attributes, may be wrong
if (!newNode.hasAttributes()) {
Logger.verb("WARNING", "xml node " + newNode + " has no attributes");
// Fixed: this is wrong for the case group item -> menu -> item
//continue;
} else {
NamedNodeMap attrs = newNode.getAttributes();
for (int idx = 0; idx < attrs.getLength(); idx += 1) {
Node attr = attrs.item(idx);
String name = attr.getNodeName();
String value = attr.getNodeValue();
newView.addAttr(name, value);
}
}
newView.setParent(view);
work.add(new Pair<>(newNode, newView));
}
}
} |
17634 | 0 | // TODO: due to the way we implement resolveIncludes(), now we need
// to change findViewById. | private void resolveIncludes(String resRoot, HashMap<Integer, String> nameMap,
HashMap<Integer, AndroidView> viewMap, boolean isSys) {
HashMap<String, AndroidView> name2View = Maps.newHashMap();
for (Map.Entry<Integer, String> entry : nameMap.entrySet()) {
String name = entry.getValue();
AndroidView view = viewMap.get(entry.getKey());
name2View.put(name, view);
}
// boolean isSys = (viewMap == sysId2View);
LinkedList<AndroidView> work = Lists.newLinkedList();
work.addAll(viewMap.values());
while (!work.isEmpty()) {
AndroidView view = work.remove();
for (int i = 0; i < view.getNumberOfChildren(); i++) {
IAndroidView child = view.getChildInternal(i);
if (child instanceof AndroidView) {
work.add((AndroidView) child);
continue;
}
IncludeAndroidView iav = (IncludeAndroidView) child;
String layoutId = iav.layoutId;
AndroidView tgt = name2View.get(layoutId);
if (tgt != null) {
tgt = (AndroidView) tgt.deepCopy();
tgt.setParent(view, i);
} else if (getLayoutFilePath(resRoot, layoutId, isSys) != null) {
// not exist, let's get it on-demand
String file = getLayoutFilePath(resRoot, layoutId, isSys);
tgt = new AndroidView();
tgt.setParent(view, i);
tgt.setOrigin(file);
readLayout(file, tgt, isSys);
int newId = nonRId--;
viewMap.put(newId, tgt);
nameMap.put(newId, layoutId);
} else if (sysRGeneralIdMap.get("layout").containsKey(layoutId)
    && sysId2View.containsKey(sysRGeneralIdMap.get("layout").get(layoutId))) {
// <include> is used with a built-in android layout id
tgt = (AndroidView) sysId2View.get(sysRGeneralIdMap.get("layout").get(layoutId)).deepCopy();
tgt.setParent(view, i);
} else {
Logger.warn(this.getClass().getSimpleName(), "Unknown layout " + layoutId
+ " included by " + view.getOrigin());
continue;
}
Integer includeeId = iav.includeeId;
if (includeeId != null) {
tgt.setId(includeeId.intValue());
}
work.add(tgt);
}
}
} | DESIGN | true | private void resolveIncludes(String resRoot, HashMap<Integer, String> nameMap,
HashMap<Integer, AndroidView> viewMap, boolean isSys) {
HashMap<String, AndroidView> name2View = Maps.newHashMap();
for (Map.Entry<Integer, String> entry : nameMap.entrySet()) {
String name = entry.getValue();
AndroidView view = viewMap.get(entry.getKey());
name2View.put(name, view);
}
// boolean isSys = (viewMap == sysId2View);
LinkedList<AndroidView> work = Lists.newLinkedList();
work.addAll(viewMap.values());
while (!work.isEmpty()) {
AndroidView view = work.remove();
for (int i = 0; i < view.getNumberOfChildren(); i++) {
IAndroidView child = view.getChildInternal(i);
if (child instanceof AndroidView) {
work.add((AndroidView) child);
continue;
}
IncludeAndroidView iav = (IncludeAndroidView) child;
String layoutId = iav.layoutId;
AndroidView tgt = name2View.get(layoutId);
if (tgt != null) {
tgt = (AndroidView) tgt.deepCopy();
tgt.setParent(view, i);
} else if (getLayoutFilePath(resRoot, layoutId, isSys) != null) {
// not exist, let's get it on-demand
String file = getLayoutFilePath(resRoot, layoutId, isSys);
tgt = new AndroidView();
tgt.setParent(view, i);
tgt.setOrigin(file);
readLayout(file, tgt, isSys);
int newId = nonRId--;
viewMap.put(newId, tgt);
nameMap.put(newId, layoutId);
} else if (sysRGeneralIdMap.get("layout").containsKey(layoutId)
    && sysId2View.containsKey(sysRGeneralIdMap.get("layout").get(layoutId))) {
// <include> is used with a built-in android layout id
tgt = (AndroidView) sysId2View.get(sysRGeneralIdMap.get("layout").get(layoutId)).deepCopy();
tgt.setParent(view, i);
} else {
Logger.warn(this.getClass().getSimpleName(), "Unknown layout " + layoutId
+ " included by " + view.getOrigin());
continue;
}
Integer includeeId = iav.includeeId;
if (includeeId != null) {
tgt.setId(includeeId.intValue());
}
work.add(tgt);
}
}
} | private void resolveIncludes(String resRoot, HashMap<Integer, String> nameMap,
HashMap<Integer, AndroidView> viewMap, boolean isSys) {
HashMap<String, AndroidView> name2View = Maps.newHashMap();
for (Map.Entry<Integer, String> entry : nameMap.entrySet()) {
String name = entry.getValue();
AndroidView view = viewMap.get(entry.getKey());
name2View.put(name, view);
}
// boolean isSys = (viewMap == sysId2View);
LinkedList<AndroidView> work = Lists.newLinkedList();
work.addAll(viewMap.values());
while (!work.isEmpty()) {
AndroidView view = work.remove();
for (int i = 0; i < view.getNumberOfChildren(); i++) {
IAndroidView child = view.getChildInternal(i);
if (child instanceof AndroidView) {
work.add((AndroidView) child);
continue;
}
IncludeAndroidView iav = (IncludeAndroidView) child;
String layoutId = iav.layoutId;
AndroidView tgt = name2View.get(layoutId);
if (tgt != null) {
tgt = (AndroidView) tgt.deepCopy();
tgt.setParent(view, i);
} else if (getLayoutFilePath(resRoot, layoutId, isSys) != null) {
// not exist, let's get it on-demand
String file = getLayoutFilePath(resRoot, layoutId, isSys);
tgt = new AndroidView();
tgt.setParent(view, i);
tgt.setOrigin(file);
readLayout(file, tgt, isSys);
int newId = nonRId--;
viewMap.put(newId, tgt);
nameMap.put(newId, layoutId);
} else if (sysRGeneralIdMap.get("layout").containsKey(layoutId)
    && sysId2View.containsKey(sysRGeneralIdMap.get("layout").get(layoutId))) {
// <include> is used with a built-in android layout id
tgt = (AndroidView) sysId2View.get(sysRGeneralIdMap.get("layout").get(layoutId)).deepCopy();
tgt.setParent(view, i);
} else {
Logger.warn(this.getClass().getSimpleName(), "Unknown layout " + layoutId
+ " included by " + view.getOrigin());
continue;
}
Integer includeeId = iav.includeeId;
if (includeeId != null) {
tgt.setId(includeeId.intValue());
}
work.add(tgt);
}
}
} | private void resolveIncludes(String resRoot, HashMap<Integer, String> nameMap,
HashMap<Integer, AndroidView> viewMap, boolean isSys) {
HashMap<String, AndroidView> name2View = Maps.newHashMap();
for (Map.Entry<Integer, String> entry : nameMap.entrySet()) {
String name = entry.getValue();
AndroidView view = viewMap.get(entry.getKey());
name2View.put(name, view);
}
// boolean isSys = (viewMap == sysId2View);
LinkedList<AndroidView> work = Lists.newLinkedList();
work.addAll(viewMap.values());
while (!work.isEmpty()) {
AndroidView view = work.remove();
for (int i = 0; i < view.getNumberOfChildren(); i++) {
IAndroidView child = view.getChildInternal(i);
if (child instanceof AndroidView) {
work.add((AndroidView) child);
continue;
}
IncludeAndroidView iav = (IncludeAndroidView) child;
String layoutId = iav.layoutId;
AndroidView tgt = name2View.get(layoutId);
if (tgt != null) {
tgt = (AndroidView) tgt.deepCopy();
tgt.setParent(view, i);
} else if (getLayoutFilePath(resRoot, layoutId, isSys) != null) {
// not exist, let's get it on-demand
String file = getLayoutFilePath(resRoot, layoutId, isSys);
tgt = new AndroidView();
tgt.setParent(view, i);
tgt.setOrigin(file);
readLayout(file, tgt, isSys);
int newId = nonRId--;
viewMap.put(newId, tgt);
nameMap.put(newId, layoutId);
} else if (sysRGeneralIdMap.get("layout").containsKey(layoutId)
    && sysId2View.containsKey(sysRGeneralIdMap.get("layout").get(layoutId))) {
// <include> is used with a built-in android layout id
tgt = (AndroidView) sysId2View.get(sysRGeneralIdMap.get("layout").get(layoutId)).deepCopy();
tgt.setParent(view, i);
} else {
Logger.warn(this.getClass().getSimpleName(), "Unknown layout " + layoutId
+ " included by " + view.getOrigin());
continue;
}
Integer includeeId = iav.includeeId;
if (includeeId != null) {
tgt.setId(includeeId.intValue());
}
work.add(tgt);
}
}
} |
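
The DESIGN-flagged TODO in the row above notes that once resolveIncludes() splices a deep copy of an included layout into the parent tree (optionally overriding the copy's root id with the includee id), a findViewById-style lookup has to search those spliced subtrees too. A small hedged sketch of that interaction follows; the miniature View type and findViewById here are illustrative assumptions, not the project's real AndroidView API.

import java.util.*;

public class IncludeResolve {
  // Hypothetical miniature of a view tree node: an id, children, and deep copy.
  static class View {
    int id;
    final List<View> children = new ArrayList<>();
    View(int id) { this.id = id; }
    View deepCopy() {
      View copy = new View(id);
      for (View child : children) copy.children.add(child.deepCopy());
      return copy;
    }
  }

  // After include resolution the lookup must recurse into spliced subtrees.
  static View findViewById(View root, int id) {
    if (root.id == id) return root;
    for (View child : root.children) {
      View hit = findViewById(child, id);
      if (hit != null) return hit;
    }
    return null;
  }

  public static void main(String[] args) {
    View included = new View(100);        // a reusable layout's root
    View parent = new View(1);
    View spliced = included.deepCopy();   // what resolveIncludes() inserts
    spliced.id = 42;                      // includee id overrides the root id
    parent.children.add(spliced);
    System.out.println(findViewById(parent, 42).id); // 42, found in the splice
  }
}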
17,634 | 1 | // boolean isSys = (viewMap == sysId2View); | private void resolveIncludes(String resRoot, HashMap<Integer, String> nameMap,
HashMap<Integer, AndroidView> viewMap, boolean isSys) {
HashMap<String, AndroidView> name2View = Maps.newHashMap();
for (Map.Entry<Integer, String> entry : nameMap.entrySet()) {
String name = entry.getValue();
AndroidView view = viewMap.get(entry.getKey());
name2View.put(name, view);
}
// boolean isSys = (viewMap == sysId2View);
LinkedList<AndroidView> work = Lists.newLinkedList();
work.addAll(viewMap.values());
while (!work.isEmpty()) {
AndroidView view = work.remove();
for (int i = 0; i < view.getNumberOfChildren(); i++) {
IAndroidView child = view.getChildInternal(i);
if (child instanceof AndroidView) {
work.add((AndroidView) child);
continue;
}
IncludeAndroidView iav = (IncludeAndroidView) child;
String layoutId = iav.layoutId;
AndroidView tgt = name2View.get(layoutId);
if (tgt != null) {
tgt = (AndroidView) tgt.deepCopy();
tgt.setParent(view, i);
} else if (getLayoutFilePath(resRoot, layoutId, isSys) != null) {
// not exist, let's get it on-demand
String file = getLayoutFilePath(resRoot, layoutId, isSys);
tgt = new AndroidView();
tgt.setParent(view, i);
tgt.setOrigin(file);
readLayout(file, tgt, isSys);
int newId = nonRId--;
viewMap.put(newId, tgt);
nameMap.put(newId, layoutId);
} else if (sysRGeneralIdMap.get("layout").containsKey(layoutId)
    && sysId2View.containsKey(sysRGeneralIdMap.get("layout").get(layoutId))) {
// <include> is used with a built-in android layout id
tgt = (AndroidView) sysId2View.get(sysRGeneralIdMap.get("layout").get(layoutId)).deepCopy();
tgt.setParent(view, i);
} else {
Logger.warn(this.getClass().getSimpleName(), "Unknown layout " + layoutId
+ " included by " + view.getOrigin());
continue;
}
Integer includeeId = iav.includeeId;
if (includeeId != null) {
tgt.setId(includeeId.intValue());
}
work.add(tgt);
}
}
} | NONSATD | true | name2View.put(name, view);
}
// boolean isSys = (viewMap == sysId2View);
LinkedList<AndroidView> work = Lists.newLinkedList();
work.addAll(viewMap.values()); | private void resolveIncludes(String resRoot, HashMap<Integer, String> nameMap,
HashMap<Integer, AndroidView> viewMap, boolean isSys) {
HashMap<String, AndroidView> name2View = Maps.newHashMap();
for (Map.Entry<Integer, String> entry : nameMap.entrySet()) {
String name = entry.getValue();
AndroidView view = viewMap.get(entry.getKey());
name2View.put(name, view);
}
// boolean isSys = (viewMap == sysId2View);
LinkedList<AndroidView> work = Lists.newLinkedList();
work.addAll(viewMap.values());
while (!work.isEmpty()) {
AndroidView view = work.remove();
for (int i = 0; i < view.getNumberOfChildren(); i++) {
IAndroidView child = view.getChildInternal(i);
if (child instanceof AndroidView) {
work.add((AndroidView) child);
continue;
} | private void resolveIncludes(String resRoot, HashMap<Integer, String> nameMap,
HashMap<Integer, AndroidView> viewMap, boolean isSys) {
HashMap<String, AndroidView> name2View = Maps.newHashMap();
for (Map.Entry<Integer, String> entry : nameMap.entrySet()) {
String name = entry.getValue();
AndroidView view = viewMap.get(entry.getKey());
name2View.put(name, view);
}
// boolean isSys = (viewMap == sysId2View);
LinkedList<AndroidView> work = Lists.newLinkedList();
work.addAll(viewMap.values());
while (!work.isEmpty()) {
AndroidView view = work.remove();
for (int i = 0; i < view.getNumberOfChildren(); i++) {
IAndroidView child = view.getChildInternal(i);
if (child instanceof AndroidView) {
work.add((AndroidView) child);
continue;
}
IncludeAndroidView iav = (IncludeAndroidView) child;
String layoutId = iav.layoutId;
AndroidView tgt = name2View.get(layoutId);
if (tgt != null) {
tgt = (AndroidView) tgt.deepCopy();
tgt.setParent(view, i);
} else if (getLayoutFilePath(resRoot, layoutId, isSys) != null) {
// not exist, let's get it on-demand
String file = getLayoutFilePath(resRoot, layoutId, isSys);
tgt = new AndroidView(); |
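
The NONSATD row above preserves a commented-out line showing how isSys was once derived by reference comparison against a field (viewMap == sysId2View) before it became an explicit parameter. A tiny sketch of the two styles, with hypothetical names; the identity check silently yields false when a caller passes a defensive copy of the system map, which is one plausible reason to prefer the parameter.

import java.util.*;

public class FlagRefactor {
  static final Map<Integer, String> sysId2View = new HashMap<>();

  // Before: flag derived from reference identity with a field.
  static boolean resolveOld(Map<Integer, String> viewMap) {
    return viewMap == sysId2View; // brittle: false for any copy of the map
  }

  // After: callers state their intent explicitly.
  static boolean resolveNew(Map<Integer, String> viewMap, boolean isSys) {
    return isSys;
  }

  public static void main(String[] args) {
    System.out.println(resolveOld(new HashMap<>(sysId2View)));       // false
    System.out.println(resolveNew(new HashMap<>(sysId2View), true)); // true
  }
}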
17,634 | 2 | // not exist, let's get it on-demand | private void resolveIncludes(String resRoot, HashMap<Integer, String> nameMap,
HashMap<Integer, AndroidView> viewMap, boolean isSys) {
HashMap<String, AndroidView> name2View = Maps.newHashMap();
for (Map.Entry<Integer, String> entry : nameMap.entrySet()) {
String name = entry.getValue();
AndroidView view = viewMap.get(entry.getKey());
name2View.put(name, view);
}
// boolean isSys = (viewMap == sysId2View);
LinkedList<AndroidView> work = Lists.newLinkedList();
work.addAll(viewMap.values());
while (!work.isEmpty()) {
AndroidView view = work.remove();
for (int i = 0; i < view.getNumberOfChildren(); i++) {
IAndroidView child = view.getChildInternal(i);
if (child instanceof AndroidView) {
work.add((AndroidView) child);
continue;
}
IncludeAndroidView iav = (IncludeAndroidView) child;
String layoutId = iav.layoutId;
AndroidView tgt = name2View.get(layoutId);
if (tgt != null) {
tgt = (AndroidView) tgt.deepCopy();
tgt.setParent(view, i);
} else if (getLayoutFilePath(resRoot, layoutId, isSys) != null) {
// not exist, let's get it on-demand
String file = getLayoutFilePath(resRoot, layoutId, isSys);
tgt = new AndroidView();
tgt.setParent(view, i);
tgt.setOrigin(file);
readLayout(file, tgt, isSys);
int newId = nonRId--;
viewMap.put(newId, tgt);
nameMap.put(newId, layoutId);
} else if (sysRGeneralIdMap.get("layout").containsKey(layoutId)
    && sysId2View.containsKey(sysRGeneralIdMap.get("layout").get(layoutId))) {
// <include> is used with a built-in android layout id
tgt = (AndroidView) sysId2View.get(sysRGeneralIdMap.get("layout").get(layoutId)).deepCopy();
tgt.setParent(view, i);
} else {
Logger.warn(this.getClass().getSimpleName(), "Unknown layout " + layoutId
+ " included by " + view.getOrigin());
continue;
}
Integer includeeId = iav.includeeId;
if (includeeId != null) {
tgt.setId(includeeId.intValue());
}
work.add(tgt);
}
}
} | NONSATD | true | tgt.setParent(view, i);
} else if (getLayoutFilePath(resRoot, layoutId, isSys) != null) {
// not exist, let's get it on-demand
String file = getLayoutFilePath(resRoot, layoutId, isSys);
tgt = new AndroidView(); | work.add((AndroidView) child);
continue;
}
IncludeAndroidView iav = (IncludeAndroidView) child;
String layoutId = iav.layoutId;
AndroidView tgt = name2View.get(layoutId);
if (tgt != null) {
tgt = (AndroidView) tgt.deepCopy();
tgt.setParent(view, i);
} else if (getLayoutFilePath(resRoot, layoutId, isSys) != null) {
// not exist, let's get it on-demand
String file = getLayoutFilePath(resRoot, layoutId, isSys);
tgt = new AndroidView();
tgt.setParent(view, i);
tgt.setOrigin(file);
readLayout(file, tgt, isSys);
int newId = nonRId--;
viewMap.put(newId, tgt);
nameMap.put(newId, layoutId);
} else if (sysRGeneralIdMap.get("layout").containsKey(layoutId) && sysId2View.containsKey
(sysRGeneralIdMap.get("layout").get(layoutId) | name2View.put(name, view);
}
// boolean isSys = (viewMap == sysId2View);
LinkedList<AndroidView> work = Lists.newLinkedList();
work.addAll(viewMap.values());
while (!work.isEmpty()) {
AndroidView view = work.remove();
for (int i = 0; i < view.getNumberOfChildren(); i++) {
IAndroidView child = view.getChildInternal(i);
if (child instanceof AndroidView) {
work.add((AndroidView) child);
continue;
}
IncludeAndroidView iav = (IncludeAndroidView) child;
String layoutId = iav.layoutId;
AndroidView tgt = name2View.get(layoutId);
if (tgt != null) {
tgt = (AndroidView) tgt.deepCopy();
tgt.setParent(view, i);
} else if (getLayoutFilePath(resRoot, layoutId, isSys) != null) {
// not exist, let's get it on-demand
String file = getLayoutFilePath(resRoot, layoutId, isSys);
tgt = new AndroidView();
tgt.setParent(view, i);
tgt.setOrigin(file);
readLayout(file, tgt, isSys);
int newId = nonRId--;
viewMap.put(newId, tgt);
nameMap.put(newId, layoutId);
} else if (sysRGeneralIdMap.get("layout").containsKey(layoutId)
    && sysId2View.containsKey(sysRGeneralIdMap.get("layout").get(layoutId))) {
// <include> is used with a built-in android layout id
tgt = (AndroidView) sysId2View.get(sysRGeneralIdMap.get("layout").get(layoutId)).deepCopy();
tgt.setParent(view, i);
} else {
Logger.warn(this.getClass().getSimpleName(), "Unknown layout " + layoutId
+ " included by " + view.getOrigin());
continue;
}
Integer includeeId = iav.includeeId; |
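
This row's comment marks the branch that, when a layout name is not yet known, locates its file, parses it on demand via readLayout, and registers it under a fresh synthetic id. A compact sketch of the same load-on-miss pattern using computeIfAbsent; the OnDemandLayouts class and its loader function are hypothetical and only illustrate the caching idea, not the code above.

import java.util.*;
import java.util.function.Function;

public class OnDemandLayouts {
  private final Map<String, String> cache = new HashMap<>();
  private final Function<String, String> loader; // e.g. layout name -> parsed tree

  OnDemandLayouts(Function<String, String> loader) { this.loader = loader; }

  // Load-on-miss: parse and register a layout only the first time it is requested.
  String get(String layoutId) {
    return cache.computeIfAbsent(layoutId, id -> {
      System.out.println("parsing layout on demand: " + id);
      return loader.apply(id);
    });
  }

  public static void main(String[] args) {
    OnDemandLayouts layouts = new OnDemandLayouts(id -> "<tree for " + id + ">");
    layouts.get("row_item"); // triggers a parse
    layouts.get("row_item"); // served from the cache, no second parse
  }
}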