prefix
stringlengths 82
32.6k
| middle
stringlengths 5
470
| suffix
stringlengths 0
81.2k
| file_path
stringlengths 6
168
| repo_name
stringlengths 16
77
| context
listlengths 5
5
| lang
stringclasses 4
values | ground_truth
stringlengths 5
470
|
---|---|---|---|---|---|---|---|
/*
* Created on Sun Mar 26 2023
*
* Copyright (c) storycraft. Licensed under the Apache Licence 2.0.
*/
package sh.pancake.serdemc.data.nbt.io;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import javax.annotation.Nullable;
import lombok.AllArgsConstructor;
import sh.pancake.serdemc.data.nbt.NbtRootCompound;
import sh.pancake.serdemc.data.nbt.NbtTagCompound;
import sh.pancake.serdemc.data.nbt.NbtTagList;
import sh.pancake.serdemc.data.nbt.NbtTagValue;
import sh.pancake.serdemc.io.DataReader;
@AllArgsConstructor
public class NbtReader {
private final DataReader reader;
public byte readByte() throws IOException {
return reader.readByte();
}
public short readShort() throws IOException {
return reader.readShort();
}
public int readInt() throws IOException {
return reader.readInt();
}
public long readLong() throws IOException {
return reader.readLong();
}
public float readFloat() throws IOException {
return reader.readFloat();
}
public double readDouble() throws IOException {
return reader.readDouble();
}
public byte[] readByteArray() throws IOException {
int length = reader.readInt();
byte[] data = new byte[length];
reader.readBytes(data);
return data;
}
public String readString() throws IOException {
| int length = reader.readShort(); |
byte[] data = new byte[length];
reader.readBytes(data);
return new String(data, StandardCharsets.UTF_8);
}
@SuppressWarnings("unchecked")
public NbtTagList<?> readList() throws IOException {
byte type = reader.readByte();
int length = reader.readInt();
NbtTagList<Object> list = new NbtTagList<>(type, length);
for (int i = 0; i < length; i++) {
list.add((NbtTagValue<Object>) readTag(type));
}
return list;
}
public NbtTagCompound readCompound() throws IOException {
NbtTagCompound compound = new NbtTagCompound();
for (byte type; (type = reader.readByte()) != NbtTagValue.TAG_END;) {
compound.put(readString(), readTag(type));
}
return compound;
}
public @Nullable NbtRootCompound readRootCompoundOptional() throws IOException {
byte type = reader.readByte();
if (type == NbtTagValue.TAG_END) return null;
return readRootCompoundInner(type);
}
public NbtRootCompound readRootCompound() throws IOException {
return readRootCompoundInner(reader.readByte());
}
private NbtRootCompound readRootCompoundInner(byte type) throws IOException {
if (type != NbtTagValue.TAG_COMPOUND) {
throw new RuntimeException("Root is not TAG_Compound type");
}
return new NbtRootCompound(readString(), readCompound());
}
public int[] readIntArray() throws IOException {
int length = reader.readInt();
int[] value = new int[length];
for (int i = 0; i < length; i++) {
value[i] = reader.readInt();
}
return value;
}
public long[] readLongArray() throws IOException {
int length = reader.readInt();
long[] value = new long[length];
for (int i = 0; i < length; i++) {
value[i] = reader.readLong();
}
return value;
}
public NbtTagValue<?> readTag(byte type) throws IOException {
switch (type) {
case NbtTagValue.TAG_BYTE: return NbtTagValue.fromByte(readByte());
case NbtTagValue.TAG_SHORT: return NbtTagValue.fromShort(readShort());
case NbtTagValue.TAG_INT: return NbtTagValue.fromInt(readInt());
case NbtTagValue.TAG_LONG: return NbtTagValue.fromLong(readLong());
case NbtTagValue.TAG_FLOAT: return NbtTagValue.fromFloat(readFloat());
case NbtTagValue.TAG_DOUBLE: return NbtTagValue.fromDouble(readDouble());
case NbtTagValue.TAG_BYTE_ARRAY: return NbtTagValue.fromByteArray(readByteArray());
case NbtTagValue.TAG_STRING: return NbtTagValue.fromString(readString());
case NbtTagValue.TAG_LIST: return NbtTagValue.fromList(readList());
case NbtTagValue.TAG_COMPOUND: return NbtTagValue.fromCompound(readCompound());
case NbtTagValue.TAG_INT_ARRAY: return NbtTagValue.fromIntArray(readIntArray());
case NbtTagValue.TAG_LONG_ARRAY: return NbtTagValue.fromLongArray(readLongArray());
default: throw new RuntimeException("Unknown NBT type: " + type);
}
}
}
| api/src/main/java/sh/pancake/serdemc/data/nbt/io/NbtReader.java | storycraft-serde-mc-757a9e4 | [
{
"filename": "api/src/main/java/sh/pancake/serdemc/network/io/PacketDataReader.java",
"retrieved_chunk": " public double readDouble() throws IOException {\n return reader.readDouble();\n }\n public String readString() throws IOException {\n return readString(32767);\n }\n public String readString(int maxLength) throws IOException {\n int length = readVarInt();\n if (length > maxLength) {\n throw new RuntimeException(\"String exceed max length. maxLength: \" + maxLength + \" length: \" + length);",
"score": 41.81059711513789
},
{
"filename": "api/src/main/java/sh/pancake/serdemc/network/io/PacketDataReader.java",
"retrieved_chunk": " }\n byte[] buf = new byte[length];\n reader.readBytes(buf);\n return new String(buf, StandardCharsets.UTF_8);\n }\n public String readChat() throws IOException {\n return readString(262144);\n }\n public String readIdentifier() throws IOException {\n return readString(32767);",
"score": 37.294003430930424
},
{
"filename": "api/src/main/java/sh/pancake/serdemc/network/io/PacketDataReader.java",
"retrieved_chunk": " }\n public short readShort() throws IOException {\n return reader.readShort();\n }\n public int readShortUnsigned() throws IOException {\n return reader.readShortUnsigned();\n }\n public int readInt() throws IOException {\n return reader.readInt();\n }",
"score": 31.683053474276292
},
{
"filename": "api/src/main/java/sh/pancake/serdemc/io/DataReader.java",
"retrieved_chunk": " return Byte.toUnsignedInt(readByte());\n }\n int readBytes(byte[] dst, int offset, int length) throws IOException;\n default int readBytes(byte[] dst, int offset) throws IOException {\n return readBytes(dst, offset, dst.length);\n }\n default int readBytes(byte[] dst) throws IOException {\n return readBytes(dst, 0, dst.length);\n }\n short readShort() throws IOException;",
"score": 30.414892465856255
},
{
"filename": "api/src/main/java/sh/pancake/serdemc/network/io/PacketDataReader.java",
"retrieved_chunk": " return reader.readByteUnsigned();\n }\n public boolean readBoolean() throws IOException {\n if (reader.readByte() == 0x00) {\n return false;\n }\n return true;\n }\n public int readBytes(byte[] dst) throws IOException {\n return reader.readBytes(dst);",
"score": 27.43445009839653
}
] | java | int length = reader.readShort(); |
/*
* Copyright Strimzi authors.
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
*/
package io.strimzi.kafka.bridge.mqtt.mapper;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Responsible for handling all the topic mapping using named placeholders instead of regular expressions.
*
* @see MappingRule
* @see MqttKafkaMapper
* @see MqttKafkaRegexMapper
*/
public class MqttKafkaSimpleMapper extends MqttKafkaMapper {
// find any word inside a curly bracket. E.g. {something}, this is known as a placeholder.
private static final String MQTT_TOPIC_PLACEHOLDER_REGEX = "\\{\\w+\\}";
// identifies a single level wildcard character in the mqtt pattern. E.g. sensors/+/data
private static final String MQTT_TOPIC_SINGLE_LEVEL_WILDCARD_CHARACTER = "+";
// Regex expression used to replace the + in the mqtt pattern.
private static final String SINGLE_LEVEL_WILDCARD_REGEX = "[^/]+";
// identifies a multi level wildcard character in the mqtt pattern. E.g. sensors/#
private static final String MQTT_TOPIC_MULTI_LEVEL_WILDCARD_CHARACTER = "#";
// used to replace the # in the mqtt pattern.
public static final String WILDCARD_REGEX = "(?:\\/.*)?$";
/**
* Constructor.
*
* @param rules the list of mapping rules.
*/
public MqttKafkaSimpleMapper(List<MappingRule> rules) {
super(rules, Pattern.compile(MQTT_TOPIC_PLACEHOLDER_REGEX));
}
@Override
public MappingResult map(String mqttTopic) {
for (MappingRule rule : this.rules) {
Matcher matcher = this.patterns.get(this.rules.indexOf(rule)).matcher(mqttTopic);
if (matcher.matches()) {
HashMap<String, String> placeholders = new HashMap<>();
String mappedKafkaTopic = rule.getKafkaTopicTemplate();
String kafkaKey = rule.getKafkaKeyTemplate();
// find MQTT_TOPIC_PLACEHOLDER_REGEX in the kafkaTopicTemplate.
Matcher placeholderMatcher = this.placeholderPattern | .matcher(rule.getKafkaTopicTemplate()); |
while (placeholderMatcher.find()) {
String placeholderKey = placeholderMatcher.group();
placeholders.put(placeholderKey, null);
}
// find MQTT_TOPIC_PLACEHOLDER_REGEX in the kafkaKey
if (kafkaKey != null) {
placeholderMatcher = this.placeholderPattern.matcher(kafkaKey);
while (placeholderMatcher.find()) {
String placeholderKey = placeholderMatcher.group();
placeholders.put(placeholderKey, null);
}
}
if (!placeholders.isEmpty()) {
Matcher mqttTopicMatcher = this.placeholderPattern.matcher(rule.getMqttTopicPattern());
// find the placeholders in the mqtt topic pattern and assign them a value.
while (mqttTopicMatcher.find()) {
String placeholderKey = mqttTopicMatcher.group();
String placeholderValue = matcher.group(removeBrackets(placeholderKey));
placeholders.put(placeholderKey, placeholderValue);
}
// build the Kafka topic using the placeholders.
for (Map.Entry<String, String> entry : placeholders.entrySet()) {
if (entry.getValue() != null) {
mappedKafkaTopic = mappedKafkaTopic.replace(entry.getKey(), entry.getValue());
kafkaKey = kafkaKey != null ? kafkaKey.replace(entry.getKey(), entry.getValue()) : null;
} else {
throw new IllegalArgumentException("The placeholder " + entry.getKey() + " was not found assigned any value.");
}
}
}
return new MappingResult(mappedKafkaTopic, kafkaKey);
}
}
return new MappingResult(MqttKafkaMapper.DEFAULT_KAFKA_TOPIC, null);
}
@Override
protected void buildOrCompilePatterns() {
// convert the mqtt patterns to a valid regex expression.
// the mqtt pattern can contain placeholders like {something}, + and #.
// if the mqtt topic contains a +, we replace it with @singleLevelWildcardRegex
// if the mqtt topic contains a #, we replace it with @multiLevelWildcardRegex
// if the mqtt topic contains a placeholder (pattern \{\w+\}), we replace it with @placeholderRegex
String[] mqttTopicPatternParts;
StringBuilder ruleRegex;
for (MappingRule rule : this.rules) {
mqttTopicPatternParts = rule.getMqttTopicPattern().split(MQTT_TOPIC_SEPARATOR);
ruleRegex = new StringBuilder();
for (String part : mqttTopicPatternParts) {
if (part.matches(MQTT_TOPIC_PLACEHOLDER_REGEX)) {
ruleRegex.append(buildNamedRegexExpression(part));
} else if (part.equals(MQTT_TOPIC_SINGLE_LEVEL_WILDCARD_CHARACTER)) {
ruleRegex.append(SINGLE_LEVEL_WILDCARD_REGEX);
} else if (part.equals(MQTT_TOPIC_MULTI_LEVEL_WILDCARD_CHARACTER)) {
if (ruleRegex.length() > 1) {
ruleRegex.deleteCharAt(ruleRegex.length() - 1);
}
ruleRegex.append(WILDCARD_REGEX);
} else {
ruleRegex.append(part);
}
ruleRegex.append(MQTT_TOPIC_SEPARATOR);
}
// remove the last slash
ruleRegex.deleteCharAt(ruleRegex.length() - 1);
// compile the regex expression for the rule.
patterns.add(Pattern.compile(ruleRegex.toString()));
}
}
/**
* Helper method for building a named regex expression.
* A named regex expression is a regex expression that contains a named capturing group.
* E.g. (?<groupName>regexExpression)
*
* @param placeholder represents a placeholder in the mqtt pattern.
* @return a named regex expression.
*/
private String buildNamedRegexExpression(String placeholder) {
String groupName = removeBrackets(placeholder);
return "(?<" + groupName + ">[^/]+)";
}
/**
* Helper method for removing the curly brackets from a placeholder.
*
* @param placeholder represents a placeholder in the pattern.
* @return a placeholder without the curly brackets.
*/
private String removeBrackets(String placeholder) {
return placeholder.replaceAll("\\{+|\\}+", "");
}
}
| src/main/java/io/strimzi/kafka/bridge/mqtt/mapper/MqttKafkaSimpleMapper.java | strimzi-strimzi-mqtt-bridge-cb0a4b8 | [
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/mapper/MqttKafkaRegexMapper.java",
"retrieved_chunk": " }\n @Override\n public MappingResult map(String mqttTopic) {\n for (MappingRule rule : this.rules) {\n Matcher matcher = this.patterns.get(this.rules.indexOf(rule)).matcher(mqttTopic);\n if (matcher.matches()) {\n String mappedKafkaTopic = rule.getKafkaTopicTemplate();\n String kafkaKey = rule.getKafkaKeyTemplate();\n for (int i = 1; i < matcher.groupCount() + 1; i++) {\n mappedKafkaTopic = mappedKafkaTopic.replace(\"$\" + i, matcher.group(i));",
"score": 138.77007651249207
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/mapper/MqttKafkaRegexMapper.java",
"retrieved_chunk": " *\n * @param template the placeholder to check.\n */\n private void checkPlaceholder(String template) {\n Matcher matcher = this.placeholderPattern.matcher(template);\n if (matcher.find()) {\n throw new IllegalArgumentException(\"The placeholder \" + matcher.group() + \" was not found or assigned any value.\");\n }\n }\n}",
"score": 63.850668432384786
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/mapper/MqttKafkaRegexMapper.java",
"retrieved_chunk": " }\n }\n return new MappingResult(MqttKafkaMapper.DEFAULT_KAFKA_TOPIC, null);\n }\n @Override\n protected void buildOrCompilePatterns() {\n this.rules.forEach(rule-> this.patterns.add(Pattern.compile(rule.getMqttTopicPattern())));\n }\n /**\n * Checks if there are any pending placeholders in the Kafka topic or Kafka key template.",
"score": 55.29970138190611
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/mapper/MqttKafkaRegexMapper.java",
"retrieved_chunk": " kafkaKey = kafkaKey != null ? kafkaKey.replace(\"$\" + i, matcher.group(i)) : null;\n }\n // check for pending placeholders replacement in the Kafka topic\n checkPlaceholder(mappedKafkaTopic);\n if (kafkaKey != null) {\n // check for pending placeholders replacement in the Kafka key.\n checkPlaceholder(kafkaKey);\n }\n // return the first match\n return new MappingResult(mappedKafkaTopic, kafkaKey);",
"score": 44.75117490981899
},
{
"filename": "src/test/java/io/strimzi/kafka/bridge/mqtt/mapper/MappingRulesLoaderTest.java",
"retrieved_chunk": " rules.size(), is(7));\n assertThat(\"Should not have null values\",\n rules.stream().anyMatch(rule -> rule.getMqttTopicPattern() == null || rule.getKafkaTopicTemplate() == null), is(false));\n verify(loader, atMostOnce()).init(filePath);\n }\n /**\n * Test for initializing mapping rules loader more than once.\n */\n @Test\n public void testInitMoreThanOnce() {",
"score": 41.50317795377812
}
] | java | .matcher(rule.getKafkaTopicTemplate()); |
/*
* Copyright Strimzi authors.
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
*/
package io.strimzi.kafka.bridge.mqtt.kafka;
import io.strimzi.kafka.bridge.mqtt.config.KafkaConfig;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.apache.kafka.common.serialization.StringSerializer;
import java.util.Properties;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionStage;
/**
* Represents a Kafka producer for the Bridge.
*/
public class KafkaBridgeProducer {
private final Producer<String, byte[]> noAckProducer;
private final Producer<String, byte[]> ackOneProducer;
/**
* Constructor
*/
public KafkaBridgeProducer(KafkaConfig config) {
this.noAckProducer = createProducer(config, KafkaProducerAckLevel.ZERO);
this.ackOneProducer = createProducer(config, KafkaProducerAckLevel.ONE);
}
/**
* Send the given record to the Kafka topic
*
* @param record record to be sent
* @return a future which completes when the record is acknowledged
*/
public CompletionStage<RecordMetadata> send(ProducerRecord<String, byte[]> record) {
CompletableFuture<RecordMetadata> promise = new CompletableFuture<>();
this.ackOneProducer.send(record, (metadata, exception) -> {
if (exception != null) {
promise.completeExceptionally(exception);
} else {
promise.complete(metadata);
}
});
return promise;
}
/**
* Send the given record to the Kafka topic
*
* @param record record to be sent
*/
public void sendNoAck(ProducerRecord<String, byte[]> record) {
this.noAckProducer.send(record);
}
/**
* Create the Kafka producer client with the given configuration
*/
private Producer<String, byte[]> createProducer(KafkaConfig kafkaConfig, KafkaProducerAckLevel producerAckLevel) {
Properties props = new Properties();
props.putAll(kafkaConfig.getConfig());
props.putAll( | kafkaConfig.getProducerConfig().getConfig()); |
props.put(ProducerConfig.ACKS_CONFIG, String.valueOf(producerAckLevel.getValue()));
return new KafkaProducer<>(props, new StringSerializer(), new ByteArraySerializer());
}
/**
* Close the producer
*/
public void close() {
if (this.noAckProducer != null) {
this.noAckProducer.flush();
this.noAckProducer.close();
}
if (this.ackOneProducer != null) {
this.ackOneProducer.flush();
this.ackOneProducer.close();
}
}
}
| src/main/java/io/strimzi/kafka/bridge/mqtt/kafka/KafkaBridgeProducer.java | strimzi-strimzi-mqtt-bridge-cb0a4b8 | [
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/config/ConfigRetriever.java",
"retrieved_chunk": " * @param additionalConfig additional configuration to add\n * @return configuration as key-value pairs\n * @throws IOException when not possible to get the properties file\n */\n public static Map<String, Object> getConfig(String path, Map<String, String> additionalConfig) throws IOException {\n Map<String, Object> configuration;\n try (InputStream is = new FileInputStream(path)) {\n Properties props = new Properties();\n props.load(is);\n configuration =",
"score": 41.88070534951264
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/config/BridgeConfig.java",
"retrieved_chunk": " final KafkaConfig kafkaConfig = KafkaConfig.fromMap(map);\n return new BridgeConfig(map.entrySet().stream()\n .filter(entry -> entry.getKey().startsWith(BridgeConfig.BRIDGE_CONFIG_PREFIX))\n .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)), mqttConfig, kafkaConfig);\n }\n /**\n * @return the Kafka configuration properties\n */\n public KafkaConfig getKafkaConfig() {\n return this.kafkaConfig;",
"score": 31.637930476691988
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/config/ConfigRetriever.java",
"retrieved_chunk": " props.entrySet().stream().collect(\n Collectors.toMap(\n e -> String.valueOf(e.getKey()),\n e -> String.valueOf(e.getValue()),\n (prev, next) -> next, HashMap::new\n ));\n }\n configuration.putAll(additionalConfig);\n return configuration;\n }",
"score": 31.139426451498057
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/config/BridgeConfig.java",
"retrieved_chunk": " /**\n * Constructor\n *\n * @param config configuration parameters map\n * @param mqttConfig MQTT configuration properties\n * @param kafkaConfig Kafka configuration properties\n */\n public BridgeConfig(Map<String, Object> config, MqttConfig mqttConfig, KafkaConfig kafkaConfig) {\n super(config);\n this.mqttConfig = mqttConfig;",
"score": 28.158717898284074
},
{
"filename": "src/test/java/io/strimzi/kafka/bridge/mqtt/kafka/KafkaBridgeProducerTest.java",
"retrieved_chunk": " * Test the {@link KafkaBridgeProducer#send(ProducerRecord)}} method\n */\n @Test\n public void testSend() {\n // mock the producer\n KafkaBridgeProducer producer = mock(KafkaBridgeProducer.class);\n String kafkaTopic = \"test-topic\";\n ProducerRecord<String, byte[]> record = new ProducerRecord<>(kafkaTopic, \"test\".getBytes());\n // simulate the send method with ack\n when(producer.send(any(ProducerRecord.class)))",
"score": 25.722913826052086
}
] | java | kafkaConfig.getProducerConfig().getConfig()); |
/*
* Created on Sun Mar 26 2023
*
* Copyright (c) storycraft. Licensed under the Apache Licence 2.0.
*/
package sh.pancake.serdemc.data.nbt.io;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import javax.annotation.Nullable;
import lombok.AllArgsConstructor;
import sh.pancake.serdemc.data.nbt.NbtRootCompound;
import sh.pancake.serdemc.data.nbt.NbtTagCompound;
import sh.pancake.serdemc.data.nbt.NbtTagList;
import sh.pancake.serdemc.data.nbt.NbtTagValue;
import sh.pancake.serdemc.io.DataReader;
@AllArgsConstructor
public class NbtReader {
private final DataReader reader;
public byte readByte() throws IOException {
return reader.readByte();
}
public short readShort() throws IOException {
return reader.readShort();
}
public int readInt() throws IOException {
return reader.readInt();
}
public long readLong() throws IOException {
return reader.readLong();
}
public float readFloat() throws IOException {
return reader.readFloat();
}
public double readDouble() throws IOException {
return reader.readDouble();
}
public byte[] readByteArray() throws IOException {
int length = reader.readInt();
byte[] data = new byte[length];
reader.readBytes(data);
return data;
}
public String readString() throws IOException {
int length = reader.readShort();
byte[] data = new byte[length];
reader.readBytes(data);
return new String(data, StandardCharsets.UTF_8);
}
@SuppressWarnings("unchecked")
public NbtTagList<?> readList() throws IOException {
byte type = reader.readByte();
int | length = reader.readInt(); |
NbtTagList<Object> list = new NbtTagList<>(type, length);
for (int i = 0; i < length; i++) {
list.add((NbtTagValue<Object>) readTag(type));
}
return list;
}
public NbtTagCompound readCompound() throws IOException {
NbtTagCompound compound = new NbtTagCompound();
for (byte type; (type = reader.readByte()) != NbtTagValue.TAG_END;) {
compound.put(readString(), readTag(type));
}
return compound;
}
public @Nullable NbtRootCompound readRootCompoundOptional() throws IOException {
byte type = reader.readByte();
if (type == NbtTagValue.TAG_END) return null;
return readRootCompoundInner(type);
}
public NbtRootCompound readRootCompound() throws IOException {
return readRootCompoundInner(reader.readByte());
}
private NbtRootCompound readRootCompoundInner(byte type) throws IOException {
if (type != NbtTagValue.TAG_COMPOUND) {
throw new RuntimeException("Root is not TAG_Compound type");
}
return new NbtRootCompound(readString(), readCompound());
}
public int[] readIntArray() throws IOException {
int length = reader.readInt();
int[] value = new int[length];
for (int i = 0; i < length; i++) {
value[i] = reader.readInt();
}
return value;
}
public long[] readLongArray() throws IOException {
int length = reader.readInt();
long[] value = new long[length];
for (int i = 0; i < length; i++) {
value[i] = reader.readLong();
}
return value;
}
public NbtTagValue<?> readTag(byte type) throws IOException {
switch (type) {
case NbtTagValue.TAG_BYTE: return NbtTagValue.fromByte(readByte());
case NbtTagValue.TAG_SHORT: return NbtTagValue.fromShort(readShort());
case NbtTagValue.TAG_INT: return NbtTagValue.fromInt(readInt());
case NbtTagValue.TAG_LONG: return NbtTagValue.fromLong(readLong());
case NbtTagValue.TAG_FLOAT: return NbtTagValue.fromFloat(readFloat());
case NbtTagValue.TAG_DOUBLE: return NbtTagValue.fromDouble(readDouble());
case NbtTagValue.TAG_BYTE_ARRAY: return NbtTagValue.fromByteArray(readByteArray());
case NbtTagValue.TAG_STRING: return NbtTagValue.fromString(readString());
case NbtTagValue.TAG_LIST: return NbtTagValue.fromList(readList());
case NbtTagValue.TAG_COMPOUND: return NbtTagValue.fromCompound(readCompound());
case NbtTagValue.TAG_INT_ARRAY: return NbtTagValue.fromIntArray(readIntArray());
case NbtTagValue.TAG_LONG_ARRAY: return NbtTagValue.fromLongArray(readLongArray());
default: throw new RuntimeException("Unknown NBT type: " + type);
}
}
}
| api/src/main/java/sh/pancake/serdemc/data/nbt/io/NbtReader.java | storycraft-serde-mc-757a9e4 | [
{
"filename": "api/src/main/java/sh/pancake/serdemc/network/io/PacketDataReader.java",
"retrieved_chunk": " }\n byte[] buf = new byte[length];\n reader.readBytes(buf);\n return new String(buf, StandardCharsets.UTF_8);\n }\n public String readChat() throws IOException {\n return readString(262144);\n }\n public String readIdentifier() throws IOException {\n return readString(32767);",
"score": 39.31047260323987
},
{
"filename": "api/src/main/java/sh/pancake/serdemc/io/DataReader.java",
"retrieved_chunk": " return Byte.toUnsignedInt(readByte());\n }\n int readBytes(byte[] dst, int offset, int length) throws IOException;\n default int readBytes(byte[] dst, int offset) throws IOException {\n return readBytes(dst, offset, dst.length);\n }\n default int readBytes(byte[] dst) throws IOException {\n return readBytes(dst, 0, dst.length);\n }\n short readShort() throws IOException;",
"score": 30.90249089120818
},
{
"filename": "api/src/main/java/sh/pancake/serdemc/data/nbt/NbtTagList.java",
"retrieved_chunk": " @Getter\n private final byte type;\n private final ArrayList<NbtTagValue<T>> list;\n public NbtTagList(byte type) {\n this.type = type;\n this.list = new ArrayList<>();\n }\n public NbtTagList(byte type, int length) {\n this.type = type;\n this.list = new ArrayList<>(length);",
"score": 28.689720956196535
},
{
"filename": "api/src/main/java/sh/pancake/serdemc/network/io/PacketDataReader.java",
"retrieved_chunk": " public double readDouble() throws IOException {\n return reader.readDouble();\n }\n public String readString() throws IOException {\n return readString(32767);\n }\n public String readString(int maxLength) throws IOException {\n int length = readVarInt();\n if (length > maxLength) {\n throw new RuntimeException(\"String exceed max length. maxLength: \" + maxLength + \" length: \" + length);",
"score": 28.28852540610698
},
{
"filename": "api/src/main/java/sh/pancake/serdemc/network/io/PacketDataReader.java",
"retrieved_chunk": " }\n public short readShort() throws IOException {\n return reader.readShort();\n }\n public int readShortUnsigned() throws IOException {\n return reader.readShortUnsigned();\n }\n public int readInt() throws IOException {\n return reader.readInt();\n }",
"score": 28.15946917696219
}
] | java | length = reader.readInt(); |
/*
* Copyright Strimzi authors.
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
*/
package io.strimzi.kafka.bridge.mqtt.kafka;
import io.strimzi.kafka.bridge.mqtt.config.KafkaConfig;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.apache.kafka.common.serialization.StringSerializer;
import java.util.Properties;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionStage;
/**
* Represents a Kafka producer for the Bridge.
*/
public class KafkaBridgeProducer {
private final Producer<String, byte[]> noAckProducer;
private final Producer<String, byte[]> ackOneProducer;
/**
* Constructor
*/
public KafkaBridgeProducer(KafkaConfig config) {
this.noAckProducer = createProducer(config, KafkaProducerAckLevel.ZERO);
this.ackOneProducer = createProducer(config, KafkaProducerAckLevel.ONE);
}
/**
* Send the given record to the Kafka topic
*
* @param record record to be sent
* @return a future which completes when the record is acknowledged
*/
public CompletionStage<RecordMetadata> send(ProducerRecord<String, byte[]> record) {
CompletableFuture<RecordMetadata> promise = new CompletableFuture<>();
this.ackOneProducer.send(record, (metadata, exception) -> {
if (exception != null) {
promise.completeExceptionally(exception);
} else {
promise.complete(metadata);
}
});
return promise;
}
/**
* Send the given record to the Kafka topic
*
* @param record record to be sent
*/
public void sendNoAck(ProducerRecord<String, byte[]> record) {
this.noAckProducer.send(record);
}
/**
* Create the Kafka producer client with the given configuration
*/
private Producer<String, byte[]> createProducer(KafkaConfig kafkaConfig, KafkaProducerAckLevel producerAckLevel) {
Properties props = new Properties();
props.putAll(kafkaConfig.getConfig());
props.putAll(kafkaConfig.getProducerConfig().getConfig());
props.put(ProducerConfig. | ACKS_CONFIG, String.valueOf(producerAckLevel.getValue())); |
return new KafkaProducer<>(props, new StringSerializer(), new ByteArraySerializer());
}
/**
* Close the producer
*/
public void close() {
if (this.noAckProducer != null) {
this.noAckProducer.flush();
this.noAckProducer.close();
}
if (this.ackOneProducer != null) {
this.ackOneProducer.flush();
this.ackOneProducer.close();
}
}
}
| src/main/java/io/strimzi/kafka/bridge/mqtt/kafka/KafkaBridgeProducer.java | strimzi-strimzi-mqtt-bridge-cb0a4b8 | [
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/config/ConfigRetriever.java",
"retrieved_chunk": " * @param additionalConfig additional configuration to add\n * @return configuration as key-value pairs\n * @throws IOException when not possible to get the properties file\n */\n public static Map<String, Object> getConfig(String path, Map<String, String> additionalConfig) throws IOException {\n Map<String, Object> configuration;\n try (InputStream is = new FileInputStream(path)) {\n Properties props = new Properties();\n props.load(is);\n configuration =",
"score": 48.83912053864105
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/config/ConfigRetriever.java",
"retrieved_chunk": " props.entrySet().stream().collect(\n Collectors.toMap(\n e -> String.valueOf(e.getKey()),\n e -> String.valueOf(e.getValue()),\n (prev, next) -> next, HashMap::new\n ));\n }\n configuration.putAll(additionalConfig);\n return configuration;\n }",
"score": 48.118766794315
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/config/BridgeConfig.java",
"retrieved_chunk": " final KafkaConfig kafkaConfig = KafkaConfig.fromMap(map);\n return new BridgeConfig(map.entrySet().stream()\n .filter(entry -> entry.getKey().startsWith(BridgeConfig.BRIDGE_CONFIG_PREFIX))\n .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)), mqttConfig, kafkaConfig);\n }\n /**\n * @return the Kafka configuration properties\n */\n public KafkaConfig getKafkaConfig() {\n return this.kafkaConfig;",
"score": 33.371046712978234
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/config/BridgeConfig.java",
"retrieved_chunk": " /**\n * Constructor\n *\n * @param config configuration parameters map\n * @param mqttConfig MQTT configuration properties\n * @param kafkaConfig Kafka configuration properties\n */\n public BridgeConfig(Map<String, Object> config, MqttConfig mqttConfig, KafkaConfig kafkaConfig) {\n super(config);\n this.mqttConfig = mqttConfig;",
"score": 27.05617680125356
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/config/BridgeConfig.java",
"retrieved_chunk": " * @see MqttConfig\n * @see KafkaConfig\n */\npublic class BridgeConfig extends AbstractConfig {\n // Prefix for all the specific configuration parameters for the bridge\n public static final String BRIDGE_CONFIG_PREFIX = \"bridge.\";\n // Bridge identification number\n public static final String BRIDGE_ID = BRIDGE_CONFIG_PREFIX + \"id\";\n private final MqttConfig mqttConfig;\n private final KafkaConfig kafkaConfig;",
"score": 24.421023978964097
}
] | java | ACKS_CONFIG, String.valueOf(producerAckLevel.getValue())); |
/*
* Created on Sun Mar 26 2023
*
* Copyright (c) storycraft. Licensed under the Apache Licence 2.0.
*/
package sh.pancake.serdemc.data.nbt.io;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import javax.annotation.Nullable;
import lombok.AllArgsConstructor;
import sh.pancake.serdemc.data.nbt.NbtRootCompound;
import sh.pancake.serdemc.data.nbt.NbtTagCompound;
import sh.pancake.serdemc.data.nbt.NbtTagList;
import sh.pancake.serdemc.data.nbt.NbtTagValue;
import sh.pancake.serdemc.io.DataReader;
@AllArgsConstructor
public class NbtReader {
private final DataReader reader;
public byte readByte() throws IOException {
return reader.readByte();
}
public short readShort() throws IOException {
return reader.readShort();
}
public int readInt() throws IOException {
return reader.readInt();
}
public long readLong() throws IOException {
return reader.readLong();
}
public float readFloat() throws IOException {
return reader.readFloat();
}
public double readDouble() throws IOException {
return reader.readDouble();
}
public byte[] readByteArray() throws IOException {
int length = reader.readInt();
byte[] data = new byte[length];
reader.readBytes(data);
return data;
}
public String readString() throws IOException {
int length = reader.readShort();
byte[] data = new byte[length];
reader.readBytes(data);
return new String(data, StandardCharsets.UTF_8);
}
@SuppressWarnings("unchecked")
public NbtTagList<?> readList() throws IOException {
byte type = reader.readByte();
int length = reader.readInt();
NbtTagList<Object> list = new NbtTagList<>(type, length);
for (int i = 0; i < length; i++) {
list.add((NbtTagValue<Object>) readTag(type));
}
return list;
}
public NbtTagCompound readCompound() throws IOException {
NbtTagCompound compound = new NbtTagCompound();
for (byte type; ( | type = reader.readByte()) != NbtTagValue.TAG_END; | ) {
compound.put(readString(), readTag(type));
}
return compound;
}
public @Nullable NbtRootCompound readRootCompoundOptional() throws IOException {
byte type = reader.readByte();
if (type == NbtTagValue.TAG_END) return null;
return readRootCompoundInner(type);
}
public NbtRootCompound readRootCompound() throws IOException {
return readRootCompoundInner(reader.readByte());
}
private NbtRootCompound readRootCompoundInner(byte type) throws IOException {
if (type != NbtTagValue.TAG_COMPOUND) {
throw new RuntimeException("Root is not TAG_Compound type");
}
return new NbtRootCompound(readString(), readCompound());
}
public int[] readIntArray() throws IOException {
int length = reader.readInt();
int[] value = new int[length];
for (int i = 0; i < length; i++) {
value[i] = reader.readInt();
}
return value;
}
public long[] readLongArray() throws IOException {
int length = reader.readInt();
long[] value = new long[length];
for (int i = 0; i < length; i++) {
value[i] = reader.readLong();
}
return value;
}
public NbtTagValue<?> readTag(byte type) throws IOException {
switch (type) {
case NbtTagValue.TAG_BYTE: return NbtTagValue.fromByte(readByte());
case NbtTagValue.TAG_SHORT: return NbtTagValue.fromShort(readShort());
case NbtTagValue.TAG_INT: return NbtTagValue.fromInt(readInt());
case NbtTagValue.TAG_LONG: return NbtTagValue.fromLong(readLong());
case NbtTagValue.TAG_FLOAT: return NbtTagValue.fromFloat(readFloat());
case NbtTagValue.TAG_DOUBLE: return NbtTagValue.fromDouble(readDouble());
case NbtTagValue.TAG_BYTE_ARRAY: return NbtTagValue.fromByteArray(readByteArray());
case NbtTagValue.TAG_STRING: return NbtTagValue.fromString(readString());
case NbtTagValue.TAG_LIST: return NbtTagValue.fromList(readList());
case NbtTagValue.TAG_COMPOUND: return NbtTagValue.fromCompound(readCompound());
case NbtTagValue.TAG_INT_ARRAY: return NbtTagValue.fromIntArray(readIntArray());
case NbtTagValue.TAG_LONG_ARRAY: return NbtTagValue.fromLongArray(readLongArray());
default: throw new RuntimeException("Unknown NBT type: " + type);
}
}
}
| api/src/main/java/sh/pancake/serdemc/data/nbt/io/NbtReader.java | storycraft-serde-mc-757a9e4 | [
{
"filename": "api/src/main/java/sh/pancake/serdemc/data/nbt/io/NbtWriter.java",
"retrieved_chunk": " writer.writeInt(length);\n for (int i = 0; i < length; i++) {\n writeLong(value[i]);\n }\n }\n @SuppressWarnings(\"unchecked\")\n public void writeTag(NbtTagValue<?> tag) throws IOException {\n byte type = tag.getType();\n Object value = tag.getValue();\n switch (type) {",
"score": 62.49761157221048
},
{
"filename": "api/src/main/java/sh/pancake/serdemc/data/nbt/NbtTagList.java",
"retrieved_chunk": " @Getter\n private final byte type;\n private final ArrayList<NbtTagValue<T>> list;\n public NbtTagList(byte type) {\n this.type = type;\n this.list = new ArrayList<>();\n }\n public NbtTagList(byte type, int length) {\n this.type = type;\n this.list = new ArrayList<>(length);",
"score": 61.15800283222864
},
{
"filename": "api/src/main/java/sh/pancake/serdemc/data/nbt/io/NbtWriter.java",
"retrieved_chunk": " }\n public void writeIntArray(int[] value) throws IOException {\n int length = value.length;\n writer.writeInt(length);\n for (int i = 0; i < length; i++) {\n writeInt(value[i]);\n }\n }\n public void writeLongArray(long[] value) throws IOException {\n int length = value.length;",
"score": 42.08966281012771
},
{
"filename": "api/src/main/java/sh/pancake/serdemc/data/nbt/io/NbtWriter.java",
"retrieved_chunk": " writer.writeShortUnsigned(value.length());\n writer.writeBytes(value.getBytes());\n }\n public <Item> void writeList(NbtTagList<Item> list) throws IOException {\n writer.writeByte(list.getType());\n writer.writeInt(list.size());\n for (NbtTagValue<?> tag : list) {\n writeTag(tag);\n }\n }",
"score": 40.73736753600622
},
{
"filename": "api/src/main/java/sh/pancake/serdemc/data/metadata/codec/MetadataCodec.java",
"retrieved_chunk": " }\n return new Particle(type, data);\n }\n @SuppressWarnings(\"unchecked\")\n public void write(PacketDataWriter writer, Particle value) throws IOException {\n int type = value.getId();\n writer.writeVarInt(type);\n ParticleDataCodec<Object> codec = (ParticleDataCodec<Object>) ParticleDataCodec.getCodec(type);\n if (codec == null) {\n return;",
"score": 30.252268086078306
}
] | java | type = reader.readByte()) != NbtTagValue.TAG_END; |
/*
* Created on Sun Mar 26 2023
*
* Copyright (c) storycraft. Licensed under the Apache Licence 2.0.
*/
package sh.pancake.serdemc.network.io;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import javax.annotation.Nullable;
import sh.pancake.serdemc.data.BlockPosition;
import sh.pancake.serdemc.data.ItemStack;
import sh.pancake.serdemc.data.metadata.MetadataValue;
import sh.pancake.serdemc.data.metadata.codec.MetadataCodec;
import sh.pancake.serdemc.data.nbt.NbtRootCompound;
import sh.pancake.serdemc.data.nbt.io.NbtReader;
import sh.pancake.serdemc.io.DataReader;
public class PacketDataReader {
private final DataReader reader;
private final NbtReader nbtReader;
public PacketDataReader(DataReader reader) {
this.reader = reader;
this.nbtReader = new NbtReader(reader);
}
public byte readByte() throws IOException {
return reader.readByte();
}
public int readByteUnsigned() throws IOException {
return reader.readByteUnsigned();
}
public boolean readBoolean() throws IOException {
if (reader.readByte() == 0x00) {
return false;
}
return true;
}
public int readBytes(byte[] dst) throws IOException {
return reader.readBytes(dst);
}
public short readShort() throws IOException {
return reader.readShort();
}
public int readShortUnsigned() throws IOException {
return reader.readShortUnsigned();
}
public int readInt() throws IOException {
return reader.readInt();
}
public int readVarInt() throws IOException {
int value = 0;
for (int position = 0;; position += 7) {
if (position >= 32) throw new RuntimeException("VarInt is too big");
byte current = readByte();
value |= (current & 0x7F) << position;
if ((current & 0x80) == 0) {
break;
}
}
return value;
}
public long readLong() throws IOException {
return reader.readLong();
}
public long readVarLong() throws IOException {
long value = 0;
for (int position = 0;; position += 7) {
if (position >= 64) throw new RuntimeException("VarLong is too big");
byte current = readByte();
value |= (long) (current & 0x7F) << position;
if ((current & 0x80) == 0) {
break;
}
}
return value;
}
public float readFloat() throws IOException {
return reader.readFloat();
}
public double readDouble() throws IOException {
return reader.readDouble();
}
public String readString() throws IOException {
return readString(32767);
}
public String readString(int maxLength) throws IOException {
int length = readVarInt();
if (length > maxLength) {
throw new RuntimeException("String exceed max length. maxLength: " + maxLength + " length: " + length);
}
byte[] buf = new byte[length];
reader.readBytes(buf);
return new String(buf, StandardCharsets.UTF_8);
}
public String readChat() throws IOException {
return readString(262144);
}
public String readIdentifier() throws IOException {
return readString(32767);
}
public UUID readUUID() throws IOException {
return new UUID(reader.readLong(), reader.readLong());
}
public @Nullable ItemStack readSlot() throws IOException {
if (!readBoolean()) {
return null;
}
return new ItemStack(readVarInt(), readByte(), readNbt());
}
public BlockPosition readPosition() throws IOException {
long pos = reader.readLong();
return new BlockPosition(
(int) (pos >>> 38L),
(int) ((pos >>> 12L) & 67108863L),
(int) (pos & 4095L)
);
}
public NbtRootCompound readNbt() throws IOException {
return nbtReader.readRootCompound();
}
public @Nullable NbtRootCompound readNbtOptional() throws IOException {
return nbtReader.readRootCompoundOptional();
}
public Map<Byte, MetadataValue> readEntityMetadata() throws IOException {
Map<Byte, MetadataValue> map = new HashMap<>();
for (int index; (index = readByteUnsigned()) != 0xFF;) {
int type = readVarInt();
MetadataCodec< | ?> codec = MetadataCodec.getCodec(type); |
if (codec == null) {
throw new RuntimeException("Unknown metadata type: " + type);
}
map.put((byte) index, new MetadataValue(type, codec.read(this)));
}
return map;
}
}
| api/src/main/java/sh/pancake/serdemc/network/io/PacketDataReader.java | storycraft-serde-mc-757a9e4 | [
{
"filename": "api/src/main/java/sh/pancake/serdemc/network/io/PacketDataWriter.java",
"retrieved_chunk": " }\n public void readNbtOptional(@Nullable NbtRootCompound compound) throws IOException {\n nbtWriter.writeRootCompoundOptional(compound);\n }\n @SuppressWarnings(\"unchecked\")\n public void writeEntityMetadata(Map<Byte, MetadataValue> metadata) throws IOException {\n for (Byte index : metadata.keySet()) {\n if (index.byteValue() == (byte) 0xFF) {\n throw new RuntimeException(\"Index cannot be 255\");\n }",
"score": 51.971806703182146
},
{
"filename": "api/src/main/java/sh/pancake/serdemc/network/io/PacketDataWriter.java",
"retrieved_chunk": " MetadataValue value = metadata.get(index);\n int type = value.getType();\n MetadataCodec<Object> codec = (MetadataCodec<Object>) MetadataCodec.getCodec(type);\n if (codec == null) {\n throw new RuntimeException(\"Unknown metadata type: \" + type);\n }\n writeVarInt(type);\n codec.write(this, value.getValue());\n }\n writeByte((byte) 0xFF);",
"score": 44.91934319983293
},
{
"filename": "api/src/main/java/sh/pancake/serdemc/data/metadata/codec/MetadataCodec.java",
"retrieved_chunk": " public static final MetadataCodec<UUID> UUID = create(PacketDataReader::readUUID, PacketDataWriter::writeUUID);\n public static final MetadataCodec<NbtRootCompound> NBT = create(PacketDataReader::readNbt, PacketDataWriter::writeNbt);\n public static final MetadataCodec<Particle> PARTICLE = new MetadataCodec<>() {\n @Override\n public Particle read(PacketDataReader reader) throws IOException {\n int type = reader.readVarInt();\n Object data = null;\n ParticleDataCodec<?> codec = ParticleDataCodec.getCodec(type);\n if (codec != null) {\n data = codec.read(reader);",
"score": 29.14077332587495
},
{
"filename": "api/src/main/java/sh/pancake/serdemc/data/nbt/io/NbtReader.java",
"retrieved_chunk": " for (byte type; (type = reader.readByte()) != NbtTagValue.TAG_END;) {\n compound.put(readString(), readTag(type));\n }\n return compound;\n }\n public @Nullable NbtRootCompound readRootCompoundOptional() throws IOException {\n byte type = reader.readByte();\n if (type == NbtTagValue.TAG_END) return null;\n return readRootCompoundInner(type);\n }",
"score": 24.604256806285733
},
{
"filename": "api/src/main/java/sh/pancake/serdemc/data/nbt/NbtTagCompound.java",
"retrieved_chunk": "public class NbtTagCompound implements Map<String, NbtTagValue<?>> {\n private final HashMap<String, NbtTagValue<?>> map;\n public NbtTagCompound() {\n this.map = new HashMap<>();\n }\n public NbtTagCompound(int capacity) {\n this.map = new HashMap<>(capacity);\n }\n @Override\n public Set<Entry<String, NbtTagValue<?>>> entrySet() {",
"score": 23.875832890950623
}
] | java | ?> codec = MetadataCodec.getCodec(type); |
/*
* Created on Sun Mar 26 2023
*
* Copyright (c) storycraft. Licensed under the Apache Licence 2.0.
*/
package sh.pancake.serdemc.network.io;
import java.io.IOException;
import java.util.Map;
import java.util.UUID;
import javax.annotation.Nullable;
import sh.pancake.serdemc.data.BlockPosition;
import sh.pancake.serdemc.data.ItemStack;
import sh.pancake.serdemc.data.metadata.MetadataValue;
import sh.pancake.serdemc.data.metadata.codec.MetadataCodec;
import sh.pancake.serdemc.data.nbt.NbtRootCompound;
import sh.pancake.serdemc.data.nbt.io.NbtWriter;
import sh.pancake.serdemc.io.DataWriter;
public class PacketDataWriter {
private final DataWriter writer;
private final NbtWriter nbtWriter;
public PacketDataWriter(DataWriter writer) {
this.writer = writer;
this.nbtWriter = new NbtWriter(writer);
}
public void writeByte(byte value) throws IOException {
writer.writeByte(value);
}
public void writeByteUnsigned(int value) throws IOException {
writer.writeByteUnsigned(value);
}
public void writeBoolean(boolean value) throws IOException {
writer.writeByte((byte) (value ? 1 : 0));
}
public void writeBytes(byte[] dst) throws IOException {
writer.writeBytes(dst);
}
public void writeShort(short value) throws IOException {
writer.writeShort(value);
}
public void writeShortUnsigned(int value) throws IOException {
writer.writeShortUnsigned(value);
}
public void writeInt(int value) throws IOException {
writer.writeInt(value);
}
public void writeVarInt(int value) throws IOException {
while (true) {
if ((value & ~0x7F) == 0) {
writeByte((byte) value);
return;
}
writeByte((byte) ((value & 0x7F) | 0x80));
value >>>= 7;
}
}
public void writeLong(long value) throws IOException {
writer.writeLong(value);
}
public void writeVarLong(long value) throws IOException {
while (true) {
if ((value & ~0x7FL) == 0) {
writeByte((byte) value);
return;
}
writeByte((byte) ((value & 0x7F) | 0x80));
value >>>= 7;
}
}
public void writeFloat(float value) throws IOException {
writer.writeFloat(value);
}
public void writeDouble(double value) throws IOException {
writer.writeDouble(value);
}
public void writeString(String value) throws IOException {
writeString(value, 32767);
}
public void writeString(String value, int maxLength) throws IOException {
int length = value.length();
if (length > maxLength) {
throw new RuntimeException("String exceed max length. maxLength: " + maxLength + " length: " + length);
}
writeVarInt(length);
writeBytes(value.getBytes());
}
public void writeChat(String value) throws IOException {
writeString(value, 262144);
}
public void writeIdentifier(String value) throws IOException {
writeString(value, 32767);
}
public void writeUUID(UUID uuid) throws IOException {
writer.writeLong(uuid.getMostSignificantBits());
writer.writeLong(uuid.getLeastSignificantBits());
}
public void writeSlot(@Nullable ItemStack item) throws IOException {
if (item == null) {
writeBoolean(false);
return;
}
writeBoolean(true);
writeVarInt(item.getId());
writeByte(item.getCount());
writeNbt(item.getNbt());
}
public void writePosition(BlockPosition position) throws IOException {
writer.writeLong(position.getX() << 38L | (position.getZ() & 0x3FFFFFFL) << 12L | position.getY() & 0xFFFL);
}
public void writeNbt(NbtRootCompound compound) throws IOException {
nbtWriter.writeRootCompound(compound);
}
public void readNbtOptional(@Nullable NbtRootCompound compound) throws IOException {
nbtWriter.writeRootCompoundOptional(compound);
}
@SuppressWarnings("unchecked")
public void writeEntityMetadata(Map<Byte, MetadataValue> metadata) throws IOException {
for (Byte index : metadata.keySet()) {
if (index.byteValue() == (byte) 0xFF) {
throw new RuntimeException("Index cannot be 255");
}
MetadataValue value = metadata.get(index);
int type = value.getType();
MetadataCodec<Object | > codec = (MetadataCodec<Object>) MetadataCodec.getCodec(type); |
if (codec == null) {
throw new RuntimeException("Unknown metadata type: " + type);
}
writeVarInt(type);
codec.write(this, value.getValue());
}
writeByte((byte) 0xFF);
}
}
| api/src/main/java/sh/pancake/serdemc/network/io/PacketDataWriter.java | storycraft-serde-mc-757a9e4 | [
{
"filename": "api/src/main/java/sh/pancake/serdemc/network/io/PacketDataReader.java",
"retrieved_chunk": " }\n public @Nullable NbtRootCompound readNbtOptional() throws IOException {\n return nbtReader.readRootCompoundOptional();\n }\n public Map<Byte, MetadataValue> readEntityMetadata() throws IOException {\n Map<Byte, MetadataValue> map = new HashMap<>();\n for (int index; (index = readByteUnsigned()) != 0xFF;) {\n int type = readVarInt();\n MetadataCodec<?> codec = MetadataCodec.getCodec(type);\n if (codec == null) {",
"score": 75.04400804784811
},
{
"filename": "api/src/main/java/sh/pancake/serdemc/network/io/PacketDataReader.java",
"retrieved_chunk": " throw new RuntimeException(\"Unknown metadata type: \" + type);\n }\n map.put((byte) index, new MetadataValue(type, codec.read(this)));\n }\n return map;\n }\n}",
"score": 57.302696721137515
},
{
"filename": "api/src/main/java/sh/pancake/serdemc/data/metadata/codec/MetadataCodec.java",
"retrieved_chunk": " }\n return new Particle(type, data);\n }\n @SuppressWarnings(\"unchecked\")\n public void write(PacketDataWriter writer, Particle value) throws IOException {\n int type = value.getId();\n writer.writeVarInt(type);\n ParticleDataCodec<Object> codec = (ParticleDataCodec<Object>) ParticleDataCodec.getCodec(type);\n if (codec == null) {\n return;",
"score": 36.68736730071677
},
{
"filename": "api/src/main/java/sh/pancake/serdemc/data/metadata/codec/MetadataCodec.java",
"retrieved_chunk": " public static final MetadataCodec<UUID> UUID = create(PacketDataReader::readUUID, PacketDataWriter::writeUUID);\n public static final MetadataCodec<NbtRootCompound> NBT = create(PacketDataReader::readNbt, PacketDataWriter::writeNbt);\n public static final MetadataCodec<Particle> PARTICLE = new MetadataCodec<>() {\n @Override\n public Particle read(PacketDataReader reader) throws IOException {\n int type = reader.readVarInt();\n Object data = null;\n ParticleDataCodec<?> codec = ParticleDataCodec.getCodec(type);\n if (codec != null) {\n data = codec.read(reader);",
"score": 31.924078317280816
},
{
"filename": "api/src/main/java/sh/pancake/serdemc/data/nbt/io/NbtWriter.java",
"retrieved_chunk": " writer.writeInt(length);\n for (int i = 0; i < length; i++) {\n writeLong(value[i]);\n }\n }\n @SuppressWarnings(\"unchecked\")\n public void writeTag(NbtTagValue<?> tag) throws IOException {\n byte type = tag.getType();\n Object value = tag.getValue();\n switch (type) {",
"score": 29.965142057014056
}
] | java | > codec = (MetadataCodec<Object>) MetadataCodec.getCodec(type); |
package com.github.stupdit1t.jackson.expand.cache;
import com.github.stupdit1t.jackson.expand.config.JacksonExpandProperties;
import com.github.stupdit1t.jackson.expand.util.SpringUtil;
import com.github.stupdit1t.jackson.expand.serializer.ExpandSerializer;
import org.springframework.util.AntPathMatcher;
import java.time.Duration;
import java.util.Set;
import java.util.StringJoiner;
/**
* 缓存抽象
*/
public interface ExpandCache {
/**
* 放入缓存
*
* @param key
* @param value
* @param <T>
*/
<T> void put(String key, T value, Duration timeout);
/**
* 获取缓存
*
* @param key
* @return
*/
<T> T get(String key);
/**
* 列出匹配的的key
*
* @param pattern
* @return
*/
Set<String> keys(String pattern);
/**
* 清空缓存
*/
void clear();
/**
* 删除缓存
*
* @param key
*/
void delete(String key);
/**
* 按照bean删除缓存
*/
default void delete(String beanName, String method, Object bindData, Object... annotationVal) {
JacksonExpandProperties properties = SpringUtil.getBean(JacksonExpandProperties.class);
StringJoiner key = new StringJoiner("-");
key.add(String.valueOf(bindData));
for (Object subVal : annotationVal) {
key.add(String.valueOf(subVal));
}
String cacheKey = | properties.getCachePrefix() + ":" + beanName + ":" + method + ":%s:" + key.toString(); |
delete(String.format(cacheKey, ExpandSerializer.OK));
delete(String.format(cacheKey, ExpandSerializer.FAIL));
}
/**
* 模糊匹配key
*
* @param pattern
* @param key
*/
default boolean matchKey(String pattern, String key) {
AntPathMatcher antPathMatcher = new AntPathMatcher();
// *
if ("*".equals(pattern)) {
return true;
}
// h?llo
if (pattern.contains("?")) {
if (antPathMatcher.match(pattern, key)) {
return true;
}
}
// h*llo
if (pattern.contains("*")) {
if (antPathMatcher.match(pattern, key)) {
return true;
}
}
// h[ae]llo
if (pattern.contains("[") && pattern.contains("]")) {
return key.matches(pattern);
}
return false;
}
}
| src/main/java/com/github/stupdit1t/jackson/expand/cache/ExpandCache.java | stupdit1t-jackson-expand-boot-starter-b492b74 | [
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/handler/params/ParamsHandler.java",
"retrieved_chunk": " if (annotationVal == null) {\n return val.toString();\n }\n StringJoiner key = new StringJoiner(\"-\");\n key.add(String.valueOf(val));\n for (Object subVal : annotationVal) {\n key.add(String.valueOf(subVal));\n }\n return key.toString();\n }",
"score": 85.08322283116021
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/serializer/ExpandSerializer.java",
"retrieved_chunk": " }\n if (bindData == null || loadService == null) {\n gen.writeObject(bindData);\n return;\n }\n // 获取缓存KEY\n Object[] args = params.getRemoteParams();\n int argsLength = args == null ? 0 : args.length;\n String cacheKey = jacksonExpandProperties.getCachePrefix() + \":\" + beanName + \":\" + method + \":%s:\" + paramsHandler.getCacheKey(bindData, args);\n Object result = getCacheInfo(cacheKey);",
"score": 30.049857949322742
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/serializer/ExpandSerializer.java",
"retrieved_chunk": " this();\n this.loadService = SpringUtil.getBean(beanName);\n this.method = method;\n this.params = params;\n this.responseHandler = otherResponseHandler;\n this.paramsHandler = paramsHandler;\n this.beanName = beanName;\n }\n @Override\n public void serialize(Object bindData, JsonGenerator gen, SerializerProvider serializers) throws IOException {",
"score": 26.66487884813099
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/cache/LocalCache.java",
"retrieved_chunk": " public void delete(String key) {\n this.cacheMap.remove(key);\n }\n /**\n * 计时到期\n *\n * @param key\n * @param expirationTimeMillis\n */\n private void scheduleExpiration(String key, long expirationTimeMillis) {",
"score": 24.97174601417519
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/serializer/ExpandSerializer.java",
"retrieved_chunk": " if (cache == null) {\n synchronized (ExpandSerializer.class) {\n if (cache == null) {\n cache = SpringUtil.getBean(ExpandCache.class);\n jacksonExpandProperties = SpringUtil.getBean(JacksonExpandProperties.class);\n }\n }\n }\n }\n public ExpandSerializer(String beanName, String method, SerializerParam params, ParamsHandler paramsHandler, ResponseHandler otherResponseHandler) {",
"score": 23.933270164336893
}
] | java | properties.getCachePrefix() + ":" + beanName + ":" + method + ":%s:" + key.toString(); |
package com.solodroid.ads.sdkdemo.activity;
import static com.solodroid.ads.sdk.util.Constant.ADMOB;
import static com.solodroid.ads.sdk.util.Constant.AD_STATUS_ON;
import static com.solodroid.ads.sdk.util.Constant.APPLOVIN;
import static com.solodroid.ads.sdk.util.Constant.APPLOVIN_MAX;
import static com.solodroid.ads.sdk.util.Constant.GOOGLE_AD_MANAGER;
import static com.solodroid.ads.sdk.util.Constant.WORTISE;
import android.app.Application;
import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import com.solodroid.ads.sdk.format.AdNetwork;
import com.solodroid.ads.sdk.format.AppOpenAd;
import com.solodroid.ads.sdkdemo.BuildConfig;
import com.solodroid.ads.sdkdemo.R;
import com.solodroid.ads.sdkdemo.application.MyApplication;
import com.solodroid.ads.sdkdemo.callback.CallbackConfig;
import com.solodroid.ads.sdkdemo.data.Constant;
import com.solodroid.ads.sdkdemo.database.SharedPref;
import com.solodroid.ads.sdkdemo.rest.RestAdapter;
import java.util.Arrays;
import java.util.List;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;
@SuppressWarnings("ConstantConditions")
public class ActivitySplash extends AppCompatActivity {
private static final String TAG = "ActivitySplash";
Call<CallbackConfig> callbackConfigCall = null;
public static int DELAY_PROGRESS = 1500;
AdNetwork.Initialize adNetwork;
AppOpenAd.Builder appOpenAdBuilder;
SharedPref sharedPref;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_splash);
sharedPref = new SharedPref(this);
initAds();
if (Constant.AD_STATUS.equals(AD_STATUS_ON) && Constant.OPEN_ADS_ON_START) {
if (!Constant.FORCE_TO_SHOW_APP_OPEN_AD_ON_START) {
new Handler(Looper.getMainLooper()).postDelayed(() -> {
switch (Constant.AD_NETWORK) {
case ADMOB:
if (!Constant.ADMOB_APP_OPEN_AD_ID.equals("0")) {
((MyApplication) getApplication()).showAdIfAvailable(ActivitySplash.this, this::requestConfig);
} else {
requestConfig();
}
break;
case GOOGLE_AD_MANAGER:
if (!Constant.GOOGLE_AD_MANAGER_APP_OPEN_AD_ID.equals("0")) {
((MyApplication) getApplication()).showAdIfAvailable(ActivitySplash.this, this::requestConfig);
} else {
requestConfig();
}
break;
case APPLOVIN:
case APPLOVIN_MAX:
if (!Constant.APPLOVIN_APP_OPEN_AP_ID.equals("0")) {
((MyApplication) getApplication()).showAdIfAvailable(ActivitySplash.this, this::requestConfig);
} else {
requestConfig();
}
break;
case WORTISE:
if (!Constant.WORTISE_APP_OPEN_AD_ID.equals("0")) {
((MyApplication) getApplication()).showAdIfAvailable(ActivitySplash.this, this::requestConfig);
} else {
requestConfig();
}
break;
default:
requestConfig();
break;
}
}, DELAY_PROGRESS);
} else {
requestConfig();
}
} else {
requestConfig();
}
}
private void requestConfig() {
requestAPI("https://raw.githubusercontent.com/solodroidev/content/uploads/json/android.json");
}
private void requestAPI(@SuppressWarnings("SameParameterValue") String url) {
if (url.startsWith("http://") || url.startsWith("https://")) {
if (url.contains("https://drive.google.com")) {
String driveUrl = url.replace("https://", "").replace("http://", "");
List<String> data = Arrays.asList(driveUrl.split("/"));
String googleDriveFileId = data.get(3);
callbackConfigCall = RestAdapter.createApi().getDriveJsonFileId(googleDriveFileId);
} else {
| callbackConfigCall = RestAdapter.createApi().getJsonUrl(url); |
}
} else {
callbackConfigCall = RestAdapter.createApi().getDriveJsonFileId(url);
}
callbackConfigCall.enqueue(new Callback<CallbackConfig>() {
public void onResponse(@NonNull Call<CallbackConfig> call, @NonNull Response<CallbackConfig> response) {
CallbackConfig resp = response.body();
if (resp != null) {
sharedPref.savePostList(resp.android);
loadOpenAds();
Log.d(TAG, "responses success");
} else {
loadOpenAds();
Log.d(TAG, "responses null");
}
}
public void onFailure(@NonNull Call<CallbackConfig> call, @NonNull Throwable th) {
Log.d(TAG, "responses failed: " + th.getMessage());
loadOpenAds();
}
});
}
private void initAds() {
adNetwork = new AdNetwork.Initialize(this)
.setAdStatus(Constant.AD_STATUS)
.setAdNetwork(Constant.AD_NETWORK)
.setBackupAdNetwork(Constant.BACKUP_AD_NETWORK)
.setAdMobAppId(null)
.setStartappAppId(Constant.STARTAPP_APP_ID)
.setUnityGameId(Constant.UNITY_GAME_ID)
.setAppLovinSdkKey(getResources().getString(R.string.applovin_sdk_key))
.setIronSourceAppKey(Constant.IRONSOURCE_APP_KEY)
.setWortiseAppId(Constant.WORTISE_APP_ID)
.setDebug(BuildConfig.DEBUG)
.build();
}
private void loadOpenAds() {
if (Constant.FORCE_TO_SHOW_APP_OPEN_AD_ON_START && Constant.OPEN_ADS_ON_START) {
appOpenAdBuilder = new AppOpenAd.Builder(this)
.setAdStatus(Constant.AD_STATUS)
.setAdNetwork(Constant.AD_NETWORK)
.setBackupAdNetwork(Constant.BACKUP_AD_NETWORK)
.setAdMobAppOpenId(Constant.ADMOB_APP_OPEN_AD_ID)
.setAdManagerAppOpenId(Constant.GOOGLE_AD_MANAGER_APP_OPEN_AD_ID)
.setApplovinAppOpenId(Constant.APPLOVIN_APP_OPEN_AP_ID)
.setWortiseAppOpenId(Constant.WORTISE_APP_OPEN_AD_ID)
.build(this::startMainActivity);
} else {
startMainActivity();
}
}
public void startMainActivity() {
new Handler().postDelayed(() -> {
Intent intent = new Intent(this, MainActivity.class);
startActivity(intent);
finish();
}, DELAY_PROGRESS);
}
}
| demo/src/main/java/com/solodroid/ads/sdkdemo/activity/ActivitySplash.java | solodroidx-multi-ads-sdk-4f23cdf | [
{
"filename": "demo/src/main/java/com/solodroid/ads/sdkdemo/rest/ApiInterface.java",
"retrieved_chunk": " @Headers({CACHE, AGENT})\n @GET\n Call<CallbackConfig> getJsonUrl(\n @Url String url\n );\n @Headers({CACHE, AGENT})\n @GET(\"uc?export=download\")\n Call<CallbackConfig> getDriveJsonFileId(\n @Query(\"id\") String id\n );",
"score": 72.58242207079864
},
{
"filename": "demo/src/main/java/com/solodroid/ads/sdkdemo/rest/ApiInterface.java",
"retrieved_chunk": "package com.solodroid.ads.sdkdemo.rest;\nimport com.solodroid.ads.sdkdemo.callback.CallbackConfig;\nimport retrofit2.Call;\nimport retrofit2.http.GET;\nimport retrofit2.http.Headers;\nimport retrofit2.http.Query;\nimport retrofit2.http.Url;\npublic interface ApiInterface {\n String CACHE = \"Cache-Control: max-age=0\";\n String AGENT = \"Data-Agent: Solodroid\";",
"score": 33.11691042788432
},
{
"filename": "demo/src/main/java/com/solodroid/ads/sdkdemo/rest/RestAdapter.java",
"retrieved_chunk": " OkHttpClient okHttpClient = new OkHttpClient.Builder()\n .connectTimeout(5, TimeUnit.SECONDS)\n .writeTimeout(10, TimeUnit.SECONDS)\n .readTimeout(30, TimeUnit.SECONDS)\n .cache(null)\n .build();\n Retrofit retrofit = new Retrofit.Builder()\n .baseUrl(\"https://drive.google.com/\")\n .addConverterFactory(GsonConverterFactory.create())\n .client(okHttpClient)",
"score": 30.09464460375129
},
{
"filename": "demo/src/main/java/com/solodroid/ads/sdkdemo/rest/RestAdapter.java",
"retrieved_chunk": "package com.solodroid.ads.sdkdemo.rest;\nimport java.util.concurrent.TimeUnit;\nimport okhttp3.OkHttpClient;\nimport okhttp3.logging.HttpLoggingInterceptor;\nimport retrofit2.Retrofit;\nimport retrofit2.converter.gson.GsonConverterFactory;\npublic class RestAdapter {\n public static ApiInterface createApi() {\n HttpLoggingInterceptor logging = new HttpLoggingInterceptor();\n logging.setLevel(HttpLoggingInterceptor.Level.BODY);",
"score": 24.91657569719876
},
{
"filename": "library/src/main/java/com/solodroid/ads/sdk/format/BannerAd.java",
"retrieved_chunk": " private com.wortise.ads.banner.BannerAd wortiseBannerAd;\n FrameLayout wortiseBannerView;\n private String adStatus = \"\";\n private String adNetwork = \"\";\n private String backupAdNetwork = \"\";\n private String adMobBannerId = \"\";\n private String googleAdManagerBannerId = \"\";\n private String fanBannerId = \"\";\n private String unityBannerId = \"\";\n private String appLovinBannerId = \"\";",
"score": 22.899305040685576
}
] | java | callbackConfigCall = RestAdapter.createApi().getJsonUrl(url); |
package com.github.stupdit1t.jackson.expand.serializer;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonStreamContext;
import com.fasterxml.jackson.databind.BeanProperty;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.ser.ContextualSerializer;
import com.github.stupdit1t.jackson.expand.annotation.Expand;
import com.github.stupdit1t.jackson.expand.cache.ExpandCache;
import com.github.stupdit1t.jackson.expand.cache.LocalCache;
import com.github.stupdit1t.jackson.expand.config.JacksonExpandProperties;
import com.github.stupdit1t.jackson.expand.domain.ExpandStrategy;
import com.github.stupdit1t.jackson.expand.domain.SerializerParam;
import com.github.stupdit1t.jackson.expand.handler.params.ParamsHandler;
import com.github.stupdit1t.jackson.expand.handler.rsp.ResponseHandler;
import com.github.stupdit1t.jackson.expand.util.ReflectUtil;
import com.github.stupdit1t.jackson.expand.util.SpringUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.ReflectionUtils;
import org.springframework.util.StringUtils;
import org.springframework.web.context.request.RequestAttributes;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
import javax.servlet.ServletRequest;
import java.io.IOException;
import java.lang.reflect.Field;
import java.time.Duration;
import java.util.*;
import java.util.concurrent.locks.StampedLock;
import java.util.stream.Collectors;
public class ExpandSerializer extends JsonSerializer<Object> implements ContextualSerializer {
private static final Logger LOG = LoggerFactory.getLogger(ExpandSerializer.class);
/**
* 成功数据
*/
public static final String OK = "OK";
/**
* 失败数据
*/
public static final String FAIL = "FAIL";
/**
* 缓存
*/
private static ExpandCache cache;
/**
* 配置
*/
private static JacksonExpandProperties jacksonExpandProperties;
/**
* 本地锁缓存,防止同时查询
*/
private static final LocalCache lockCache = new LocalCache();
/**
* 远程调用服务
*/
private Object loadService;
/**
* 方法
*/
private String method;
/**
* 注解参数处理
*/
private SerializerParam params;
/**
* 返回结果处理类
*/
private ParamsHandler paramsHandler;
/**
* 返回结果处理类
*/
private ResponseHandler responseHandler;
/**
* bean名称
*/
private String beanName;
public ExpandSerializer() {
super();
if (cache == null) {
synchronized (ExpandSerializer.class) {
if (cache == null) {
cache = SpringUtil.getBean(ExpandCache.class);
jacksonExpandProperties = SpringUtil.getBean(JacksonExpandProperties.class);
}
}
}
}
public ExpandSerializer(String beanName, String method, SerializerParam params, ParamsHandler paramsHandler, ResponseHandler otherResponseHandler) {
this();
this.loadService = SpringUtil.getBean(beanName);
this.method = method;
this.params = params;
this.responseHandler = otherResponseHandler;
this.paramsHandler = paramsHandler;
this.beanName = beanName;
}
@Override
public void serialize(Object bindData, JsonGenerator gen, SerializerProvider serializers) throws IOException {
String writeFieldPath = getFieldPath(gen.getOutputContext());
// 统一path替换
String dynamicExpandCommonPrefix = jacksonExpandProperties.getDynamicExpandCommonPrefix();
if (StringUtils.hasText(dynamicExpandCommonPrefix) && writeFieldPath.startsWith(dynamicExpandCommonPrefix)) {
writeFieldPath = writeFieldPath.substring(dynamicExpandCommonPrefix.length() + 1);
}
// 是否展开
boolean expand;
// 动态展开开启,判断是否展开
boolean dynamicExpand = jacksonExpandProperties.isDynamicExpand();
if (dynamicExpand) {
Set<String> needExpandField = getParam(jacksonExpandProperties.getDynamicExpandParameterName());
// 如果代码里设置不展开,动态展开也不生效
expand = needExpandField.contains(writeFieldPath) && params.isOpen();
} else {
expand = params.isOpen();
}
if (!expand) {
gen.writeObject(bindData);
return;
}
// 判断要写入的字段
String writeField = gen.getOutputContext().getCurrentName();
if (jacksonExpandProperties.getExpandStrategy() == ExpandStrategy.COVER) {
writeField = gen.getOutputContext().getCurrentName();
} else if (jacksonExpandProperties.getExpandStrategy() == ExpandStrategy.COPY) {
writeField = String.format(jacksonExpandProperties.getCopyStrategyFormat(), gen.getOutputContext().getCurrentName());
}
// 自定义要写入的优先级最高
if (StringUtils.hasText(params.getWriteField())) {
writeField = params.getWriteField();
}
// 设置理论上的响应类型,要不要使用取决于 ResponseHandler 要不要处理,比如只能写入数据对象存在的对象,默认是忽略存不存在
Class<?> writeClass = null;
if (params.getWriteField() != null && StringUtils.hasText(params.getWriteField())) {
Field field = ReflectionUtils.findField(gen.getCurrentValue().getClass(), params.getWriteField());
if (field != null) {
writeClass = field.getType();
}
}
// 关闭不存在字段扩展,被写入的字段类型找不到,不扩展
if (!jacksonExpandProperties.isCanExpandToNotExistField() && writeClass == null) {
gen.writeObject(bindData);
return;
}
// 翻译为非当前字段,先写入当前字段值再翻译
boolean currField = gen.getOutputContext().getCurrentName().equals(writeField);
if (!currField) {
gen.writeObject(bindData);
gen.writeFieldName(writeField);
}
if (bindData == null || loadService == null) {
gen.writeObject(bindData);
return;
}
// 获取缓存KEY
Object[] args = params.getRemoteParams();
int argsLength = args == null ? 0 : args.length;
String cacheKey = jacksonExpandProperties.getCachePrefix() + ":" + beanName + ":" + method + ":%s:" + paramsHandler.getCacheKey(bindData, args);
Object result = getCacheInfo(cacheKey);
if (result != null) {
LOG.info("{} Expand cache 命中: {}", beanName, result);
gen.writeObject(result);
return;
}
StampedLock lock = lockCache.get(cacheKey, new StampedLock(), Duration.ofSeconds(300));
// 写锁避免同一业务ID重复查询
long stamp = lock.writeLock();
Integer cacheTime = params.getCacheTime();
try {
// 多参数组装
Object[] objectParams = new Object[argsLength + 1];
objectParams[0] = paramsHandler.handleVal(bindData);
if(objectParams.length > 1){
System.arraycopy(args, 0, objectParams, 1, argsLength);
}
// 请求翻译结果
Object loadResult = ReflectUtil.invoke(loadService, method, objectParams);
if (loadResult != null) {
result = this.responseHandler.handle(this.beanName, method, loadResult, writeClass, objectParams);
cache.put(String.format(cacheKey, OK), result, Duration.ofSeconds(cacheTime));
} else {
LOG.error("【{}】 Expand失败,未找到:{}", beanName, bindData);
| cache.put(String.format(cacheKey, FAIL), bindData, Duration.ofSeconds(cacheTime)); |
result = bindData;
}
} catch (Exception e) {
LOG.error("【{}】 Expand异常:", beanName, e);
cache.put(String.format(cacheKey, FAIL), bindData, Duration.ofSeconds(cacheTime));
result = bindData;
} finally {
lock.unlockWrite(stamp);
}
gen.writeObject(result);
}
/**
* 获取当前字段的path路径
*
* @param outputContext
* @return
*/
private String getFieldPath(JsonStreamContext outputContext) {
List<String> path = new ArrayList<>(4);
while (outputContext != null) {
String currentName = outputContext.getCurrentName();
if (StringUtils.hasText(currentName)) {
path.add(currentName);
}
outputContext = outputContext.getParent();
}
Collections.reverse(path);
return String.join(".", path);
}
/**
* 获取厍信息
*
* @param cacheKey 缓存的KEY
* @return
*/
private Object getCacheInfo(String cacheKey) {
Object result = cache.get(String.format(cacheKey, OK));
if (result == null) {
result = cache.get(String.format(cacheKey, FAIL));
}
return result;
}
@Override
public JsonSerializer<?> createContextual(SerializerProvider prov, BeanProperty property) throws JsonMappingException {
if (property != null) {
Expand load = property.getAnnotation(Expand.class);
if (load == null) {
throw new RuntimeException("未注解相关 @Expand 注解");
}
String bean = load.bean();
Class<? extends ParamsHandler> paramsHandlerClass = load.paramsHandler();
Class<? extends ResponseHandler> responseHandlerClass = load.responseHandler();
String method = load.method();
try {
ParamsHandler paramsHandler = paramsHandlerClass.getDeclaredConstructor().newInstance();
ResponseHandler responseHandler = responseHandlerClass.getDeclaredConstructor().newInstance();
int cacheTime = load.cacheTime();
// 额外参数处理
SerializerParam params = paramsHandler.handleAnnotation(property);
// 参数处理器没设置,且父注设置了,以父注解为主
if (params.getCacheTime() == null && cacheTime != -1) {
params.setCacheTime(cacheTime);
}
// 缓存时间未设置,取默认
if (params.getCacheTime() == null) {
params.setCacheTime(jacksonExpandProperties.getCacheTimeout());
}
if (params.isOpen() == null) {
params.setExpand(load.expand());
}
return new ExpandSerializer(bean, method, params, paramsHandler, responseHandler);
} catch (Exception e) {
LOG.error("@Expand error: ", e);
}
}
return prov.findNullValueSerializer(null);
}
/**
* 获取展开参数
*
* @param key
* @return
*/
private Set<String> getParam(String key) {
RequestAttributes attributes = RequestContextHolder.getRequestAttributes();
if (attributes == null) {
return Collections.emptySet();
}
ServletRequest request = ((ServletRequestAttributes) attributes).getRequest();
String[] parameterValues = request.getParameterValues(key);
if (parameterValues == null) {
return Collections.emptySet();
}
return Arrays.stream(parameterValues).flatMap(o -> Arrays.stream(o.split(",")))
.collect(Collectors.toSet());
}
}
| src/main/java/com/github/stupdit1t/jackson/expand/serializer/ExpandSerializer.java | stupdit1t-jackson-expand-boot-starter-b492b74 | [
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/cache/ExpandCache.java",
"retrieved_chunk": " key.add(String.valueOf(bindData));\n for (Object subVal : annotationVal) {\n key.add(String.valueOf(subVal));\n }\n String cacheKey = properties.getCachePrefix() + \":\" + beanName + \":\" + method + \":%s:\" + key.toString();\n delete(String.format(cacheKey, ExpandSerializer.OK));\n delete(String.format(cacheKey, ExpandSerializer.FAIL));\n }\n /**\n * 模糊匹配key",
"score": 41.010035714750344
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/util/ReflectUtil.java",
"retrieved_chunk": " *\n * @param obj\n * @param methodName\n * @param args\n * @return\n */\n public static Object invoke(Object obj, String methodName, Object[] args) throws InvocationTargetException, IllegalAccessException {\n String cacheKey = obj.getClass().getName() + methodName;\n final Method method = METHODS_CACHE.computeIfAbsent(cacheKey, (key) -> getMethod(obj.getClass(), methodName, args));\n if (null == method) {",
"score": 23.190344941793256
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/cache/ExpandCache.java",
"retrieved_chunk": " *\n * @param key\n */\n void delete(String key);\n /**\n * 按照bean删除缓存\n */\n default void delete(String beanName, String method, Object bindData, Object... annotationVal) {\n JacksonExpandProperties properties = SpringUtil.getBean(JacksonExpandProperties.class);\n StringJoiner key = new StringJoiner(\"-\");",
"score": 18.749095534069642
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/util/ReflectUtil.java",
"retrieved_chunk": " private static Class<?> unWrap(Class<?> clazz) {\n if (null == clazz || clazz.isPrimitive()) {\n return clazz;\n }\n Class<?> result = WRAPPER_PRIMITIVE_MAP.get(clazz);\n return (null == result) ? clazz : result;\n }\n}",
"score": 17.023859699899084
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/domain/SerializerParam.java",
"retrieved_chunk": " public void setWriteField(String writeField) {\n this.writeField = writeField;\n }\n public Integer getCacheTime() {\n return cacheTime;\n }\n public void setCacheTime(Integer cacheTime) {\n this.cacheTime = cacheTime;\n }\n public Boolean isOpen() {",
"score": 15.982710380612094
}
] | java | cache.put(String.format(cacheKey, FAIL), bindData, Duration.ofSeconds(cacheTime)); |
/*
* Copyright Strimzi authors.
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
*/
package io.strimzi.kafka.bridge.mqtt.mapper;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Responsible for handling all the topic mapping using named placeholders instead of regular expressions.
*
* @see MappingRule
* @see MqttKafkaMapper
* @see MqttKafkaRegexMapper
*/
public class MqttKafkaSimpleMapper extends MqttKafkaMapper {
// find any word inside a curly bracket. E.g. {something}, this is known as a placeholder.
private static final String MQTT_TOPIC_PLACEHOLDER_REGEX = "\\{\\w+\\}";
// identifies a single level wildcard character in the mqtt pattern. E.g. sensors/+/data
private static final String MQTT_TOPIC_SINGLE_LEVEL_WILDCARD_CHARACTER = "+";
// Regex expression used to replace the + in the mqtt pattern.
private static final String SINGLE_LEVEL_WILDCARD_REGEX = "[^/]+";
// identifies a multi level wildcard character in the mqtt pattern. E.g. sensors/#
private static final String MQTT_TOPIC_MULTI_LEVEL_WILDCARD_CHARACTER = "#";
// used to replace the # in the mqtt pattern.
public static final String WILDCARD_REGEX = "(?:\\/.*)?$";
/**
* Constructor.
*
* @param rules the list of mapping rules.
*/
public MqttKafkaSimpleMapper(List<MappingRule> rules) {
super(rules, Pattern.compile(MQTT_TOPIC_PLACEHOLDER_REGEX));
}
@Override
public MappingResult map(String mqttTopic) {
for (MappingRule rule : this.rules) {
Matcher matcher = this.patterns.get(this.rules.indexOf(rule)).matcher(mqttTopic);
if (matcher.matches()) {
HashMap<String, String> placeholders = new HashMap<>();
String mappedKafkaTopic = rule.getKafkaTopicTemplate();
String kafkaKey = rule.getKafkaKeyTemplate();
// find MQTT_TOPIC_PLACEHOLDER_REGEX in the kafkaTopicTemplate.
Matcher placeholderMatcher = this.placeholderPattern.matcher(rule.getKafkaTopicTemplate());
while (placeholderMatcher.find()) {
String placeholderKey = placeholderMatcher.group();
placeholders.put(placeholderKey, null);
}
// find MQTT_TOPIC_PLACEHOLDER_REGEX in the kafkaKey
if (kafkaKey != null) {
placeholderMatcher = this.placeholderPattern.matcher(kafkaKey);
while (placeholderMatcher.find()) {
String placeholderKey = placeholderMatcher.group();
placeholders.put(placeholderKey, null);
}
}
if (!placeholders.isEmpty()) {
Matcher mqttTopicMatcher = this.placeholderPattern. | matcher(rule.getMqttTopicPattern()); |
// find the placeholders in the mqtt topic pattern and assign them a value.
while (mqttTopicMatcher.find()) {
String placeholderKey = mqttTopicMatcher.group();
String placeholderValue = matcher.group(removeBrackets(placeholderKey));
placeholders.put(placeholderKey, placeholderValue);
}
// build the Kafka topic using the placeholders.
for (Map.Entry<String, String> entry : placeholders.entrySet()) {
if (entry.getValue() != null) {
mappedKafkaTopic = mappedKafkaTopic.replace(entry.getKey(), entry.getValue());
kafkaKey = kafkaKey != null ? kafkaKey.replace(entry.getKey(), entry.getValue()) : null;
} else {
throw new IllegalArgumentException("The placeholder " + entry.getKey() + " was not found assigned any value.");
}
}
}
return new MappingResult(mappedKafkaTopic, kafkaKey);
}
}
return new MappingResult(MqttKafkaMapper.DEFAULT_KAFKA_TOPIC, null);
}
@Override
protected void buildOrCompilePatterns() {
// convert the mqtt patterns to a valid regex expression.
// the mqtt pattern can contain placeholders like {something}, + and #.
// if the mqtt topic contains a +, we replace it with @singleLevelWildcardRegex
// if the mqtt topic contains a #, we replace it with @multiLevelWildcardRegex
// if the mqtt topic contains a placeholder (pattern \{\w+\}), we replace it with @placeholderRegex
String[] mqttTopicPatternParts;
StringBuilder ruleRegex;
for (MappingRule rule : this.rules) {
mqttTopicPatternParts = rule.getMqttTopicPattern().split(MQTT_TOPIC_SEPARATOR);
ruleRegex = new StringBuilder();
for (String part : mqttTopicPatternParts) {
if (part.matches(MQTT_TOPIC_PLACEHOLDER_REGEX)) {
ruleRegex.append(buildNamedRegexExpression(part));
} else if (part.equals(MQTT_TOPIC_SINGLE_LEVEL_WILDCARD_CHARACTER)) {
ruleRegex.append(SINGLE_LEVEL_WILDCARD_REGEX);
} else if (part.equals(MQTT_TOPIC_MULTI_LEVEL_WILDCARD_CHARACTER)) {
if (ruleRegex.length() > 1) {
ruleRegex.deleteCharAt(ruleRegex.length() - 1);
}
ruleRegex.append(WILDCARD_REGEX);
} else {
ruleRegex.append(part);
}
ruleRegex.append(MQTT_TOPIC_SEPARATOR);
}
// remove the last slash
ruleRegex.deleteCharAt(ruleRegex.length() - 1);
// compile the regex expression for the rule.
patterns.add(Pattern.compile(ruleRegex.toString()));
}
}
/**
* Helper method for building a named regex expression.
* A named regex expression is a regex expression that contains a named capturing group.
* E.g. (?<groupName>regexExpression)
*
* @param placeholder represents a placeholder in the mqtt pattern.
* @return a named regex expression.
*/
private String buildNamedRegexExpression(String placeholder) {
String groupName = removeBrackets(placeholder);
return "(?<" + groupName + ">[^/]+)";
}
/**
* Helper method for removing the curly brackets from a placeholder.
*
* @param placeholder represents a placeholder in the pattern.
* @return a placeholder without the curly brackets.
*/
private String removeBrackets(String placeholder) {
return placeholder.replaceAll("\\{+|\\}+", "");
}
}
| src/main/java/io/strimzi/kafka/bridge/mqtt/mapper/MqttKafkaSimpleMapper.java | strimzi-strimzi-mqtt-bridge-cb0a4b8 | [
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/mapper/MqttKafkaRegexMapper.java",
"retrieved_chunk": " *\n * @param template the placeholder to check.\n */\n private void checkPlaceholder(String template) {\n Matcher matcher = this.placeholderPattern.matcher(template);\n if (matcher.find()) {\n throw new IllegalArgumentException(\"The placeholder \" + matcher.group() + \" was not found or assigned any value.\");\n }\n }\n}",
"score": 47.284244919036546
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/mapper/MqttKafkaRegexMapper.java",
"retrieved_chunk": " }\n @Override\n public MappingResult map(String mqttTopic) {\n for (MappingRule rule : this.rules) {\n Matcher matcher = this.patterns.get(this.rules.indexOf(rule)).matcher(mqttTopic);\n if (matcher.matches()) {\n String mappedKafkaTopic = rule.getKafkaTopicTemplate();\n String kafkaKey = rule.getKafkaKeyTemplate();\n for (int i = 1; i < matcher.groupCount() + 1; i++) {\n mappedKafkaTopic = mappedKafkaTopic.replace(\"$\" + i, matcher.group(i));",
"score": 42.0336322370591
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/mapper/MqttKafkaRegexMapper.java",
"retrieved_chunk": " kafkaKey = kafkaKey != null ? kafkaKey.replace(\"$\" + i, matcher.group(i)) : null;\n }\n // check for pending placeholders replacement in the Kafka topic\n checkPlaceholder(mappedKafkaTopic);\n if (kafkaKey != null) {\n // check for pending placeholders replacement in the Kafka key.\n checkPlaceholder(kafkaKey);\n }\n // return the first match\n return new MappingResult(mappedKafkaTopic, kafkaKey);",
"score": 39.772571445802804
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/mapper/MqttKafkaMapper.java",
"retrieved_chunk": " * Constructor\n *\n * @param rules the list of mapping rules.\n * @param placeholderPattern the pattern used to find placeholders.\n * @see MappingRule\n */\n protected MqttKafkaMapper(List<MappingRule> rules, Pattern placeholderPattern) {\n this.rules = rules;\n this.placeholderPattern = placeholderPattern;\n this.buildOrCompilePatterns();",
"score": 34.47959483150466
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/mapper/MqttKafkaRegexMapper.java",
"retrieved_chunk": " }\n }\n return new MappingResult(MqttKafkaMapper.DEFAULT_KAFKA_TOPIC, null);\n }\n @Override\n protected void buildOrCompilePatterns() {\n this.rules.forEach(rule-> this.patterns.add(Pattern.compile(rule.getMqttTopicPattern())));\n }\n /**\n * Checks if there are any pending placeholders in the Kafka topic or Kafka key template.",
"score": 30.86198699381398
}
] | java | matcher(rule.getMqttTopicPattern()); |
package com.solodroid.ads.sdkdemo.activity;
import static com.solodroid.ads.sdk.util.Constant.ADMOB;
import static com.solodroid.ads.sdk.util.Constant.AD_STATUS_ON;
import static com.solodroid.ads.sdk.util.Constant.APPLOVIN;
import static com.solodroid.ads.sdk.util.Constant.APPLOVIN_MAX;
import static com.solodroid.ads.sdk.util.Constant.GOOGLE_AD_MANAGER;
import static com.solodroid.ads.sdk.util.Constant.WORTISE;
import android.app.Application;
import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import com.solodroid.ads.sdk.format.AdNetwork;
import com.solodroid.ads.sdk.format.AppOpenAd;
import com.solodroid.ads.sdkdemo.BuildConfig;
import com.solodroid.ads.sdkdemo.R;
import com.solodroid.ads.sdkdemo.application.MyApplication;
import com.solodroid.ads.sdkdemo.callback.CallbackConfig;
import com.solodroid.ads.sdkdemo.data.Constant;
import com.solodroid.ads.sdkdemo.database.SharedPref;
import com.solodroid.ads.sdkdemo.rest.RestAdapter;
import java.util.Arrays;
import java.util.List;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;
@SuppressWarnings("ConstantConditions")
public class ActivitySplash extends AppCompatActivity {
private static final String TAG = "ActivitySplash";
Call<CallbackConfig> callbackConfigCall = null;
public static int DELAY_PROGRESS = 1500;
AdNetwork.Initialize adNetwork;
AppOpenAd.Builder appOpenAdBuilder;
SharedPref sharedPref;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_splash);
sharedPref = new SharedPref(this);
initAds();
if (Constant.AD_STATUS.equals(AD_STATUS_ON) && Constant.OPEN_ADS_ON_START) {
if (!Constant.FORCE_TO_SHOW_APP_OPEN_AD_ON_START) {
new Handler(Looper.getMainLooper()).postDelayed(() -> {
switch (Constant.AD_NETWORK) {
case ADMOB:
if (!Constant.ADMOB_APP_OPEN_AD_ID.equals("0")) {
((MyApplication) getApplication()).showAdIfAvailable(ActivitySplash.this, this::requestConfig);
} else {
requestConfig();
}
break;
case GOOGLE_AD_MANAGER:
if (!Constant.GOOGLE_AD_MANAGER_APP_OPEN_AD_ID.equals("0")) {
((MyApplication) getApplication()).showAdIfAvailable(ActivitySplash.this, this::requestConfig);
} else {
requestConfig();
}
break;
case APPLOVIN:
case APPLOVIN_MAX:
if (!Constant.APPLOVIN_APP_OPEN_AP_ID.equals("0")) {
((MyApplication) getApplication()).showAdIfAvailable(ActivitySplash.this, this::requestConfig);
} else {
requestConfig();
}
break;
case WORTISE:
if (!Constant.WORTISE_APP_OPEN_AD_ID.equals("0")) {
((MyApplication) getApplication()).showAdIfAvailable(ActivitySplash.this, this::requestConfig);
} else {
requestConfig();
}
break;
default:
requestConfig();
break;
}
}, DELAY_PROGRESS);
} else {
requestConfig();
}
} else {
requestConfig();
}
}
private void requestConfig() {
requestAPI("https://raw.githubusercontent.com/solodroidev/content/uploads/json/android.json");
}
private void requestAPI(@SuppressWarnings("SameParameterValue") String url) {
if (url.startsWith("http://") || url.startsWith("https://")) {
if (url.contains("https://drive.google.com")) {
String driveUrl = url.replace("https://", "").replace("http://", "");
List<String> data = Arrays.asList(driveUrl.split("/"));
String googleDriveFileId = data.get(3);
callbackConfigCall = RestAdapter.createApi().getDriveJsonFileId(googleDriveFileId);
} else {
callbackConfigCall = RestAdapter.createApi().getJsonUrl(url);
}
} else {
callbackConfigCall = | RestAdapter.createApi().getDriveJsonFileId(url); |
}
callbackConfigCall.enqueue(new Callback<CallbackConfig>() {
public void onResponse(@NonNull Call<CallbackConfig> call, @NonNull Response<CallbackConfig> response) {
CallbackConfig resp = response.body();
if (resp != null) {
sharedPref.savePostList(resp.android);
loadOpenAds();
Log.d(TAG, "responses success");
} else {
loadOpenAds();
Log.d(TAG, "responses null");
}
}
public void onFailure(@NonNull Call<CallbackConfig> call, @NonNull Throwable th) {
Log.d(TAG, "responses failed: " + th.getMessage());
loadOpenAds();
}
});
}
private void initAds() {
adNetwork = new AdNetwork.Initialize(this)
.setAdStatus(Constant.AD_STATUS)
.setAdNetwork(Constant.AD_NETWORK)
.setBackupAdNetwork(Constant.BACKUP_AD_NETWORK)
.setAdMobAppId(null)
.setStartappAppId(Constant.STARTAPP_APP_ID)
.setUnityGameId(Constant.UNITY_GAME_ID)
.setAppLovinSdkKey(getResources().getString(R.string.applovin_sdk_key))
.setIronSourceAppKey(Constant.IRONSOURCE_APP_KEY)
.setWortiseAppId(Constant.WORTISE_APP_ID)
.setDebug(BuildConfig.DEBUG)
.build();
}
private void loadOpenAds() {
if (Constant.FORCE_TO_SHOW_APP_OPEN_AD_ON_START && Constant.OPEN_ADS_ON_START) {
appOpenAdBuilder = new AppOpenAd.Builder(this)
.setAdStatus(Constant.AD_STATUS)
.setAdNetwork(Constant.AD_NETWORK)
.setBackupAdNetwork(Constant.BACKUP_AD_NETWORK)
.setAdMobAppOpenId(Constant.ADMOB_APP_OPEN_AD_ID)
.setAdManagerAppOpenId(Constant.GOOGLE_AD_MANAGER_APP_OPEN_AD_ID)
.setApplovinAppOpenId(Constant.APPLOVIN_APP_OPEN_AP_ID)
.setWortiseAppOpenId(Constant.WORTISE_APP_OPEN_AD_ID)
.build(this::startMainActivity);
} else {
startMainActivity();
}
}
public void startMainActivity() {
new Handler().postDelayed(() -> {
Intent intent = new Intent(this, MainActivity.class);
startActivity(intent);
finish();
}, DELAY_PROGRESS);
}
}
| demo/src/main/java/com/solodroid/ads/sdkdemo/activity/ActivitySplash.java | solodroidx-multi-ads-sdk-4f23cdf | [
{
"filename": "demo/src/main/java/com/solodroid/ads/sdkdemo/rest/ApiInterface.java",
"retrieved_chunk": " @Headers({CACHE, AGENT})\n @GET\n Call<CallbackConfig> getJsonUrl(\n @Url String url\n );\n @Headers({CACHE, AGENT})\n @GET(\"uc?export=download\")\n Call<CallbackConfig> getDriveJsonFileId(\n @Query(\"id\") String id\n );",
"score": 54.436816553098986
},
{
"filename": "demo/src/main/java/com/solodroid/ads/sdkdemo/rest/RestAdapter.java",
"retrieved_chunk": "package com.solodroid.ads.sdkdemo.rest;\nimport java.util.concurrent.TimeUnit;\nimport okhttp3.OkHttpClient;\nimport okhttp3.logging.HttpLoggingInterceptor;\nimport retrofit2.Retrofit;\nimport retrofit2.converter.gson.GsonConverterFactory;\npublic class RestAdapter {\n public static ApiInterface createApi() {\n HttpLoggingInterceptor logging = new HttpLoggingInterceptor();\n logging.setLevel(HttpLoggingInterceptor.Level.BODY);",
"score": 35.90378757879124
},
{
"filename": "demo/src/main/java/com/solodroid/ads/sdkdemo/rest/ApiInterface.java",
"retrieved_chunk": "package com.solodroid.ads.sdkdemo.rest;\nimport com.solodroid.ads.sdkdemo.callback.CallbackConfig;\nimport retrofit2.Call;\nimport retrofit2.http.GET;\nimport retrofit2.http.Headers;\nimport retrofit2.http.Query;\nimport retrofit2.http.Url;\npublic interface ApiInterface {\n String CACHE = \"Cache-Control: max-age=0\";\n String AGENT = \"Data-Agent: Solodroid\";",
"score": 18.274268263798586
},
{
"filename": "demo/src/main/java/com/solodroid/ads/sdkdemo/data/Constant.java",
"retrieved_chunk": "package com.solodroid.ads.sdkdemo.data;\npublic class Constant {\n public static final String AD_STATUS = \"1\";\n public static String AD_NETWORK = \"admob\";\n public static final String BACKUP_AD_NETWORK = \"none\";\n public static final String ADMOB_BANNER_ID = \"ca-app-pub-3940256099942544/6300978111\";\n public static final String ADMOB_INTERSTITIAL_ID = \"ca-app-pub-3940256099942544/1033173712\";\n public static final String ADMOB_REWARDED_ID = \"ca-app-pub-3940256099942544/5224354917\";\n public static final String ADMOB_NATIVE_ID = \"ca-app-pub-3940256099942544/2247696110\";\n public static final String ADMOB_APP_OPEN_AD_ID = \"ca-app-pub-3940256099942544/3419835294\";",
"score": 16.96253438315635
},
{
"filename": "demo/src/main/java/com/solodroid/ads/sdkdemo/activity/MainActivity.java",
"retrieved_chunk": "import static com.solodroid.ads.sdkdemo.data.Constant.STYLE_DEFAULT;\nimport static com.solodroid.ads.sdkdemo.data.Constant.STYLE_NEWS;\nimport static com.solodroid.ads.sdkdemo.data.Constant.STYLE_RADIO;\nimport static com.solodroid.ads.sdkdemo.data.Constant.STYLE_VIDEO_LARGE;\nimport static com.solodroid.ads.sdkdemo.data.Constant.STYLE_VIDEO_SMALL;\nimport android.content.Intent;\nimport android.os.Bundle;\nimport android.os.Handler;\nimport android.os.Looper;\nimport android.util.Log;",
"score": 16.878667156158105
}
] | java | RestAdapter.createApi().getDriveJsonFileId(url); |
package com.github.stupdit1t.jackson.expand.serializer;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonStreamContext;
import com.fasterxml.jackson.databind.BeanProperty;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.ser.ContextualSerializer;
import com.github.stupdit1t.jackson.expand.annotation.Expand;
import com.github.stupdit1t.jackson.expand.cache.ExpandCache;
import com.github.stupdit1t.jackson.expand.cache.LocalCache;
import com.github.stupdit1t.jackson.expand.config.JacksonExpandProperties;
import com.github.stupdit1t.jackson.expand.domain.ExpandStrategy;
import com.github.stupdit1t.jackson.expand.domain.SerializerParam;
import com.github.stupdit1t.jackson.expand.handler.params.ParamsHandler;
import com.github.stupdit1t.jackson.expand.handler.rsp.ResponseHandler;
import com.github.stupdit1t.jackson.expand.util.ReflectUtil;
import com.github.stupdit1t.jackson.expand.util.SpringUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.ReflectionUtils;
import org.springframework.util.StringUtils;
import org.springframework.web.context.request.RequestAttributes;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
import javax.servlet.ServletRequest;
import java.io.IOException;
import java.lang.reflect.Field;
import java.time.Duration;
import java.util.*;
import java.util.concurrent.locks.StampedLock;
import java.util.stream.Collectors;
public class ExpandSerializer extends JsonSerializer<Object> implements ContextualSerializer {
private static final Logger LOG = LoggerFactory.getLogger(ExpandSerializer.class);
/**
* 成功数据
*/
public static final String OK = "OK";
/**
* 失败数据
*/
public static final String FAIL = "FAIL";
/**
* 缓存
*/
private static ExpandCache cache;
/**
* 配置
*/
private static JacksonExpandProperties jacksonExpandProperties;
/**
* 本地锁缓存,防止同时查询
*/
private static final LocalCache lockCache = new LocalCache();
/**
* 远程调用服务
*/
private Object loadService;
/**
* 方法
*/
private String method;
/**
* 注解参数处理
*/
private SerializerParam params;
/**
* 返回结果处理类
*/
private ParamsHandler paramsHandler;
/**
* 返回结果处理类
*/
private ResponseHandler responseHandler;
/**
* bean名称
*/
private String beanName;
public ExpandSerializer() {
super();
if (cache == null) {
synchronized (ExpandSerializer.class) {
if (cache == null) {
cache = SpringUtil.getBean(ExpandCache.class);
jacksonExpandProperties = SpringUtil.getBean(JacksonExpandProperties.class);
}
}
}
}
public ExpandSerializer(String beanName, String method, SerializerParam params, ParamsHandler paramsHandler, ResponseHandler otherResponseHandler) {
this();
this.loadService = SpringUtil.getBean(beanName);
this.method = method;
this.params = params;
this.responseHandler = otherResponseHandler;
this.paramsHandler = paramsHandler;
this.beanName = beanName;
}
@Override
public void serialize(Object bindData, JsonGenerator gen, SerializerProvider serializers) throws IOException {
String writeFieldPath = getFieldPath(gen.getOutputContext());
// 统一path替换
String dynamicExpandCommonPrefix = jacksonExpandProperties.getDynamicExpandCommonPrefix();
if (StringUtils.hasText(dynamicExpandCommonPrefix) && writeFieldPath.startsWith(dynamicExpandCommonPrefix)) {
writeFieldPath = writeFieldPath.substring(dynamicExpandCommonPrefix.length() + 1);
}
// 是否展开
boolean expand;
// 动态展开开启,判断是否展开
boolean dynamicExpand = jacksonExpandProperties.isDynamicExpand();
if (dynamicExpand) {
Set<String> needExpandField = getParam(jacksonExpandProperties.getDynamicExpandParameterName());
// 如果代码里设置不展开,动态展开也不生效
expand = needExpandField.contains(writeFieldPath) && params.isOpen();
} else {
expand = params.isOpen();
}
if (!expand) {
gen.writeObject(bindData);
return;
}
// 判断要写入的字段
String writeField = gen.getOutputContext().getCurrentName();
if (jacksonExpandProperties.getExpandStrategy() == ExpandStrategy.COVER) {
writeField = gen.getOutputContext().getCurrentName();
} else if (jacksonExpandProperties.getExpandStrategy() == ExpandStrategy.COPY) {
writeField = String.format(jacksonExpandProperties.getCopyStrategyFormat(), gen.getOutputContext().getCurrentName());
}
// 自定义要写入的优先级最高
if (StringUtils.hasText(params.getWriteField())) {
writeField = params.getWriteField();
}
// 设置理论上的响应类型,要不要使用取决于 ResponseHandler 要不要处理,比如只能写入数据对象存在的对象,默认是忽略存不存在
Class<?> writeClass = null;
if (params.getWriteField() != null && StringUtils.hasText(params.getWriteField())) {
Field field = ReflectionUtils.findField(gen.getCurrentValue().getClass(), params.getWriteField());
if (field != null) {
writeClass = field.getType();
}
}
// 关闭不存在字段扩展,被写入的字段类型找不到,不扩展
if | (!jacksonExpandProperties.isCanExpandToNotExistField() && writeClass == null) { |
gen.writeObject(bindData);
return;
}
// 翻译为非当前字段,先写入当前字段值再翻译
boolean currField = gen.getOutputContext().getCurrentName().equals(writeField);
if (!currField) {
gen.writeObject(bindData);
gen.writeFieldName(writeField);
}
if (bindData == null || loadService == null) {
gen.writeObject(bindData);
return;
}
// 获取缓存KEY
Object[] args = params.getRemoteParams();
int argsLength = args == null ? 0 : args.length;
String cacheKey = jacksonExpandProperties.getCachePrefix() + ":" + beanName + ":" + method + ":%s:" + paramsHandler.getCacheKey(bindData, args);
Object result = getCacheInfo(cacheKey);
if (result != null) {
LOG.info("{} Expand cache 命中: {}", beanName, result);
gen.writeObject(result);
return;
}
StampedLock lock = lockCache.get(cacheKey, new StampedLock(), Duration.ofSeconds(300));
// 写锁避免同一业务ID重复查询
long stamp = lock.writeLock();
Integer cacheTime = params.getCacheTime();
try {
// 多参数组装
Object[] objectParams = new Object[argsLength + 1];
objectParams[0] = paramsHandler.handleVal(bindData);
if(objectParams.length > 1){
System.arraycopy(args, 0, objectParams, 1, argsLength);
}
// 请求翻译结果
Object loadResult = ReflectUtil.invoke(loadService, method, objectParams);
if (loadResult != null) {
result = this.responseHandler.handle(this.beanName, method, loadResult, writeClass, objectParams);
cache.put(String.format(cacheKey, OK), result, Duration.ofSeconds(cacheTime));
} else {
LOG.error("【{}】 Expand失败,未找到:{}", beanName, bindData);
cache.put(String.format(cacheKey, FAIL), bindData, Duration.ofSeconds(cacheTime));
result = bindData;
}
} catch (Exception e) {
LOG.error("【{}】 Expand异常:", beanName, e);
cache.put(String.format(cacheKey, FAIL), bindData, Duration.ofSeconds(cacheTime));
result = bindData;
} finally {
lock.unlockWrite(stamp);
}
gen.writeObject(result);
}
/**
* 获取当前字段的path路径
*
* @param outputContext
* @return
*/
private String getFieldPath(JsonStreamContext outputContext) {
List<String> path = new ArrayList<>(4);
while (outputContext != null) {
String currentName = outputContext.getCurrentName();
if (StringUtils.hasText(currentName)) {
path.add(currentName);
}
outputContext = outputContext.getParent();
}
Collections.reverse(path);
return String.join(".", path);
}
/**
* 获取厍信息
*
* @param cacheKey 缓存的KEY
* @return
*/
private Object getCacheInfo(String cacheKey) {
Object result = cache.get(String.format(cacheKey, OK));
if (result == null) {
result = cache.get(String.format(cacheKey, FAIL));
}
return result;
}
@Override
public JsonSerializer<?> createContextual(SerializerProvider prov, BeanProperty property) throws JsonMappingException {
if (property != null) {
Expand load = property.getAnnotation(Expand.class);
if (load == null) {
throw new RuntimeException("未注解相关 @Expand 注解");
}
String bean = load.bean();
Class<? extends ParamsHandler> paramsHandlerClass = load.paramsHandler();
Class<? extends ResponseHandler> responseHandlerClass = load.responseHandler();
String method = load.method();
try {
ParamsHandler paramsHandler = paramsHandlerClass.getDeclaredConstructor().newInstance();
ResponseHandler responseHandler = responseHandlerClass.getDeclaredConstructor().newInstance();
int cacheTime = load.cacheTime();
// 额外参数处理
SerializerParam params = paramsHandler.handleAnnotation(property);
// 参数处理器没设置,且父注设置了,以父注解为主
if (params.getCacheTime() == null && cacheTime != -1) {
params.setCacheTime(cacheTime);
}
// 缓存时间未设置,取默认
if (params.getCacheTime() == null) {
params.setCacheTime(jacksonExpandProperties.getCacheTimeout());
}
if (params.isOpen() == null) {
params.setExpand(load.expand());
}
return new ExpandSerializer(bean, method, params, paramsHandler, responseHandler);
} catch (Exception e) {
LOG.error("@Expand error: ", e);
}
}
return prov.findNullValueSerializer(null);
}
/**
* 获取展开参数
*
* @param key
* @return
*/
private Set<String> getParam(String key) {
RequestAttributes attributes = RequestContextHolder.getRequestAttributes();
if (attributes == null) {
return Collections.emptySet();
}
ServletRequest request = ((ServletRequestAttributes) attributes).getRequest();
String[] parameterValues = request.getParameterValues(key);
if (parameterValues == null) {
return Collections.emptySet();
}
return Arrays.stream(parameterValues).flatMap(o -> Arrays.stream(o.split(",")))
.collect(Collectors.toSet());
}
}
| src/main/java/com/github/stupdit1t/jackson/expand/serializer/ExpandSerializer.java | stupdit1t-jackson-expand-boot-starter-b492b74 | [
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/handler/params/DefaultParamsHandler.java",
"retrieved_chunk": " if (expand != null) {\n if (StringUtils.hasText(expand.to())) {\n params.setWriteField(expand.to());\n }\n params.setExpand(expand.expand());\n }\n return params;\n }\n}",
"score": 34.29430111556448
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/handler/rsp/ResponseHandler.java",
"retrieved_chunk": "package com.github.stupdit1t.jackson.expand.handler.rsp;\n/**\n * 返回处理第三方响应\n */\npublic interface ResponseHandler {\n Object handle(String bean, String method, Object rsp, Class<?> writeClass, Object... params);\n}",
"score": 19.803149376168587
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/util/ReflectUtil.java",
"retrieved_chunk": " private static Class<?> unWrap(Class<?> clazz) {\n if (null == clazz || clazz.isPrimitive()) {\n return clazz;\n }\n Class<?> result = WRAPPER_PRIMITIVE_MAP.get(clazz);\n return (null == result) ? clazz : result;\n }\n}",
"score": 16.98634756139328
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/util/ReflectUtil.java",
"retrieved_chunk": " *\n * @param types1 类组1\n * @param types2 类组2\n * @return 是否相同、父类或接口\n */\n private static boolean isAllAssignableFrom(Class<?>[] types1, Class<?>[] types2) {\n if (ObjectUtils.isEmpty(types1) && ObjectUtils.isEmpty(types2)) {\n return true;\n }\n if (null == types1 || null == types2) {",
"score": 16.725667930003308
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/util/ReflectUtil.java",
"retrieved_chunk": " if (null == obj) {\n classes[i] = Object.class;\n } else {\n classes[i] = obj.getClass();\n }\n }\n return classes;\n }\n /**\n * 比较判断types1和types2两组类,如果types1中所有的类都与types2对应位置的类相同,或者是其父类或接口,则返回{@code true}",
"score": 15.384543391480292
}
] | java | (!jacksonExpandProperties.isCanExpandToNotExistField() && writeClass == null) { |
/*
* Copyright Strimzi authors.
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
*/
package io.strimzi.kafka.bridge.mqtt.mapper;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Responsible for handling all the topic mapping using named placeholders instead of regular expressions.
*
* @see MappingRule
* @see MqttKafkaMapper
* @see MqttKafkaRegexMapper
*/
public class MqttKafkaSimpleMapper extends MqttKafkaMapper {
// find any word inside a curly bracket. E.g. {something}, this is known as a placeholder.
private static final String MQTT_TOPIC_PLACEHOLDER_REGEX = "\\{\\w+\\}";
// identifies a single level wildcard character in the mqtt pattern. E.g. sensors/+/data
private static final String MQTT_TOPIC_SINGLE_LEVEL_WILDCARD_CHARACTER = "+";
// Regex expression used to replace the + in the mqtt pattern.
private static final String SINGLE_LEVEL_WILDCARD_REGEX = "[^/]+";
// identifies a multi level wildcard character in the mqtt pattern. E.g. sensors/#
private static final String MQTT_TOPIC_MULTI_LEVEL_WILDCARD_CHARACTER = "#";
// used to replace the # in the mqtt pattern.
public static final String WILDCARD_REGEX = "(?:\\/.*)?$";
/**
* Constructor.
*
* @param rules the list of mapping rules.
*/
public MqttKafkaSimpleMapper(List<MappingRule> rules) {
super(rules, Pattern.compile(MQTT_TOPIC_PLACEHOLDER_REGEX));
}
@Override
public MappingResult map(String mqttTopic) {
for (MappingRule rule : this.rules) {
Matcher matcher = this.patterns.get(this.rules.indexOf(rule)).matcher(mqttTopic);
if (matcher.matches()) {
HashMap<String, String> placeholders = new HashMap<>();
String mappedKafkaTopic = rule.getKafkaTopicTemplate();
String kafkaKey = rule.getKafkaKeyTemplate();
// find MQTT_TOPIC_PLACEHOLDER_REGEX in the kafkaTopicTemplate.
Matcher placeholderMatcher = this.placeholderPattern.matcher(rule.getKafkaTopicTemplate());
while (placeholderMatcher.find()) {
String placeholderKey = placeholderMatcher.group();
placeholders.put(placeholderKey, null);
}
// find MQTT_TOPIC_PLACEHOLDER_REGEX in the kafkaKey
if (kafkaKey != null) {
placeholderMatcher = this.placeholderPattern.matcher(kafkaKey);
while (placeholderMatcher.find()) {
String placeholderKey = placeholderMatcher.group();
placeholders.put(placeholderKey, null);
}
}
if (!placeholders.isEmpty()) {
Matcher mqttTopicMatcher = this.placeholderPattern.matcher(rule.getMqttTopicPattern());
// find the placeholders in the mqtt topic pattern and assign them a value.
while (mqttTopicMatcher.find()) {
String placeholderKey = mqttTopicMatcher.group();
String placeholderValue = matcher.group(removeBrackets(placeholderKey));
placeholders.put(placeholderKey, placeholderValue);
}
// build the Kafka topic using the placeholders.
for (Map.Entry<String, String> entry : placeholders.entrySet()) {
if (entry.getValue() != null) {
mappedKafkaTopic = mappedKafkaTopic.replace(entry.getKey(), entry.getValue());
kafkaKey = kafkaKey != null ? kafkaKey.replace(entry.getKey(), entry.getValue()) : null;
} else {
throw new IllegalArgumentException("The placeholder " + entry.getKey() + " was not found assigned any value.");
}
}
}
return new MappingResult(mappedKafkaTopic, kafkaKey);
}
}
return new MappingResult(MqttKafkaMapper.DEFAULT_KAFKA_TOPIC, null);
}
@Override
protected void buildOrCompilePatterns() {
// convert the mqtt patterns to a valid regex expression.
// the mqtt pattern can contain placeholders like {something}, + and #.
// if the mqtt topic contains a +, we replace it with @singleLevelWildcardRegex
// if the mqtt topic contains a #, we replace it with @multiLevelWildcardRegex
// if the mqtt topic contains a placeholder (pattern \{\w+\}), we replace it with @placeholderRegex
String[] mqttTopicPatternParts;
StringBuilder ruleRegex;
for (MappingRule rule : this.rules) {
mqttTopicPatternParts | = rule.getMqttTopicPattern().split(MQTT_TOPIC_SEPARATOR); |
ruleRegex = new StringBuilder();
for (String part : mqttTopicPatternParts) {
if (part.matches(MQTT_TOPIC_PLACEHOLDER_REGEX)) {
ruleRegex.append(buildNamedRegexExpression(part));
} else if (part.equals(MQTT_TOPIC_SINGLE_LEVEL_WILDCARD_CHARACTER)) {
ruleRegex.append(SINGLE_LEVEL_WILDCARD_REGEX);
} else if (part.equals(MQTT_TOPIC_MULTI_LEVEL_WILDCARD_CHARACTER)) {
if (ruleRegex.length() > 1) {
ruleRegex.deleteCharAt(ruleRegex.length() - 1);
}
ruleRegex.append(WILDCARD_REGEX);
} else {
ruleRegex.append(part);
}
ruleRegex.append(MQTT_TOPIC_SEPARATOR);
}
// remove the last slash
ruleRegex.deleteCharAt(ruleRegex.length() - 1);
// compile the regex expression for the rule.
patterns.add(Pattern.compile(ruleRegex.toString()));
}
}
/**
* Helper method for building a named regex expression.
* A named regex expression is a regex expression that contains a named capturing group.
* E.g. (?<groupName>regexExpression)
*
* @param placeholder represents a placeholder in the mqtt pattern.
* @return a named regex expression.
*/
private String buildNamedRegexExpression(String placeholder) {
String groupName = removeBrackets(placeholder);
return "(?<" + groupName + ">[^/]+)";
}
/**
* Helper method for removing the curly brackets from a placeholder.
*
* @param placeholder represents a placeholder in the pattern.
* @return a placeholder without the curly brackets.
*/
private String removeBrackets(String placeholder) {
return placeholder.replaceAll("\\{+|\\}+", "");
}
}
| src/main/java/io/strimzi/kafka/bridge/mqtt/mapper/MqttKafkaSimpleMapper.java | strimzi-strimzi-mqtt-bridge-cb0a4b8 | [
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/mapper/MqttKafkaRegexMapper.java",
"retrieved_chunk": " }\n }\n return new MappingResult(MqttKafkaMapper.DEFAULT_KAFKA_TOPIC, null);\n }\n @Override\n protected void buildOrCompilePatterns() {\n this.rules.forEach(rule-> this.patterns.add(Pattern.compile(rule.getMqttTopicPattern())));\n }\n /**\n * Checks if there are any pending placeholders in the Kafka topic or Kafka key template.",
"score": 46.16202284243906
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/mapper/MappingResult.java",
"retrieved_chunk": "/*\n * Copyright Strimzi authors.\n * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).\n */\npackage io.strimzi.kafka.bridge.mqtt.mapper;\n/**\n * Represents the result of a mapping operation.\n * It contains the mapped Kafka topic and the Kafka key.\n *\n * @param kafkaTopic the mapped Kafka topic.",
"score": 44.730523946493555
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/mapper/MappingRule.java",
"retrieved_chunk": " * Get the mqtt topic pattern.\n *\n * @return the mqtt topic pattern.\n */\n public String getMqttTopicPattern() {\n return mqttTopicPattern;\n }\n /**\n * Get the record key template.\n *",
"score": 44.657893148837566
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/mapper/MqttKafkaRegexMapper.java",
"retrieved_chunk": " }\n @Override\n public MappingResult map(String mqttTopic) {\n for (MappingRule rule : this.rules) {\n Matcher matcher = this.patterns.get(this.rules.indexOf(rule)).matcher(mqttTopic);\n if (matcher.matches()) {\n String mappedKafkaTopic = rule.getKafkaTopicTemplate();\n String kafkaKey = rule.getKafkaKeyTemplate();\n for (int i = 1; i < matcher.groupCount() + 1; i++) {\n mappedKafkaTopic = mappedKafkaTopic.replace(\"$\" + i, matcher.group(i));",
"score": 43.12458655732666
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/mapper/MqttKafkaRegexMapper.java",
"retrieved_chunk": " kafkaKey = kafkaKey != null ? kafkaKey.replace(\"$\" + i, matcher.group(i)) : null;\n }\n // check for pending placeholders replacement in the Kafka topic\n checkPlaceholder(mappedKafkaTopic);\n if (kafkaKey != null) {\n // check for pending placeholders replacement in the Kafka key.\n checkPlaceholder(kafkaKey);\n }\n // return the first match\n return new MappingResult(mappedKafkaTopic, kafkaKey);",
"score": 41.79966054375263
}
] | java | = rule.getMqttTopicPattern().split(MQTT_TOPIC_SEPARATOR); |
package com.solodroid.ads.sdkdemo.activity;
import static com.solodroid.ads.sdk.util.Constant.ADMOB;
import static com.solodroid.ads.sdk.util.Constant.AD_STATUS_ON;
import static com.solodroid.ads.sdk.util.Constant.APPLOVIN;
import static com.solodroid.ads.sdk.util.Constant.APPLOVIN_MAX;
import static com.solodroid.ads.sdk.util.Constant.GOOGLE_AD_MANAGER;
import static com.solodroid.ads.sdk.util.Constant.WORTISE;
import android.app.Application;
import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import com.solodroid.ads.sdk.format.AdNetwork;
import com.solodroid.ads.sdk.format.AppOpenAd;
import com.solodroid.ads.sdkdemo.BuildConfig;
import com.solodroid.ads.sdkdemo.R;
import com.solodroid.ads.sdkdemo.application.MyApplication;
import com.solodroid.ads.sdkdemo.callback.CallbackConfig;
import com.solodroid.ads.sdkdemo.data.Constant;
import com.solodroid.ads.sdkdemo.database.SharedPref;
import com.solodroid.ads.sdkdemo.rest.RestAdapter;
import java.util.Arrays;
import java.util.List;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;
@SuppressWarnings("ConstantConditions")
public class ActivitySplash extends AppCompatActivity {
private static final String TAG = "ActivitySplash";
Call<CallbackConfig> callbackConfigCall = null;
public static int DELAY_PROGRESS = 1500;
AdNetwork.Initialize adNetwork;
AppOpenAd.Builder appOpenAdBuilder;
SharedPref sharedPref;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_splash);
sharedPref = new SharedPref(this);
initAds();
if (Constant.AD_STATUS.equals(AD_STATUS_ON) && Constant.OPEN_ADS_ON_START) {
if (!Constant.FORCE_TO_SHOW_APP_OPEN_AD_ON_START) {
new Handler(Looper.getMainLooper()).postDelayed(() -> {
switch (Constant.AD_NETWORK) {
case ADMOB:
if (!Constant.ADMOB_APP_OPEN_AD_ID.equals("0")) {
((MyApplication) getApplication | ()).showAdIfAvailable(ActivitySplash.this, this::requestConfig); |
} else {
requestConfig();
}
break;
case GOOGLE_AD_MANAGER:
if (!Constant.GOOGLE_AD_MANAGER_APP_OPEN_AD_ID.equals("0")) {
((MyApplication) getApplication()).showAdIfAvailable(ActivitySplash.this, this::requestConfig);
} else {
requestConfig();
}
break;
case APPLOVIN:
case APPLOVIN_MAX:
if (!Constant.APPLOVIN_APP_OPEN_AP_ID.equals("0")) {
((MyApplication) getApplication()).showAdIfAvailable(ActivitySplash.this, this::requestConfig);
} else {
requestConfig();
}
break;
case WORTISE:
if (!Constant.WORTISE_APP_OPEN_AD_ID.equals("0")) {
((MyApplication) getApplication()).showAdIfAvailable(ActivitySplash.this, this::requestConfig);
} else {
requestConfig();
}
break;
default:
requestConfig();
break;
}
}, DELAY_PROGRESS);
} else {
requestConfig();
}
} else {
requestConfig();
}
}
private void requestConfig() {
requestAPI("https://raw.githubusercontent.com/solodroidev/content/uploads/json/android.json");
}
private void requestAPI(@SuppressWarnings("SameParameterValue") String url) {
if (url.startsWith("http://") || url.startsWith("https://")) {
if (url.contains("https://drive.google.com")) {
String driveUrl = url.replace("https://", "").replace("http://", "");
List<String> data = Arrays.asList(driveUrl.split("/"));
String googleDriveFileId = data.get(3);
callbackConfigCall = RestAdapter.createApi().getDriveJsonFileId(googleDriveFileId);
} else {
callbackConfigCall = RestAdapter.createApi().getJsonUrl(url);
}
} else {
callbackConfigCall = RestAdapter.createApi().getDriveJsonFileId(url);
}
callbackConfigCall.enqueue(new Callback<CallbackConfig>() {
public void onResponse(@NonNull Call<CallbackConfig> call, @NonNull Response<CallbackConfig> response) {
CallbackConfig resp = response.body();
if (resp != null) {
sharedPref.savePostList(resp.android);
loadOpenAds();
Log.d(TAG, "responses success");
} else {
loadOpenAds();
Log.d(TAG, "responses null");
}
}
public void onFailure(@NonNull Call<CallbackConfig> call, @NonNull Throwable th) {
Log.d(TAG, "responses failed: " + th.getMessage());
loadOpenAds();
}
});
}
private void initAds() {
adNetwork = new AdNetwork.Initialize(this)
.setAdStatus(Constant.AD_STATUS)
.setAdNetwork(Constant.AD_NETWORK)
.setBackupAdNetwork(Constant.BACKUP_AD_NETWORK)
.setAdMobAppId(null)
.setStartappAppId(Constant.STARTAPP_APP_ID)
.setUnityGameId(Constant.UNITY_GAME_ID)
.setAppLovinSdkKey(getResources().getString(R.string.applovin_sdk_key))
.setIronSourceAppKey(Constant.IRONSOURCE_APP_KEY)
.setWortiseAppId(Constant.WORTISE_APP_ID)
.setDebug(BuildConfig.DEBUG)
.build();
}
private void loadOpenAds() {
if (Constant.FORCE_TO_SHOW_APP_OPEN_AD_ON_START && Constant.OPEN_ADS_ON_START) {
appOpenAdBuilder = new AppOpenAd.Builder(this)
.setAdStatus(Constant.AD_STATUS)
.setAdNetwork(Constant.AD_NETWORK)
.setBackupAdNetwork(Constant.BACKUP_AD_NETWORK)
.setAdMobAppOpenId(Constant.ADMOB_APP_OPEN_AD_ID)
.setAdManagerAppOpenId(Constant.GOOGLE_AD_MANAGER_APP_OPEN_AD_ID)
.setApplovinAppOpenId(Constant.APPLOVIN_APP_OPEN_AP_ID)
.setWortiseAppOpenId(Constant.WORTISE_APP_OPEN_AD_ID)
.build(this::startMainActivity);
} else {
startMainActivity();
}
}
public void startMainActivity() {
new Handler().postDelayed(() -> {
Intent intent = new Intent(this, MainActivity.class);
startActivity(intent);
finish();
}, DELAY_PROGRESS);
}
}
| demo/src/main/java/com/solodroid/ads/sdkdemo/activity/ActivitySplash.java | solodroidx-multi-ads-sdk-4f23cdf | [
{
"filename": "demo/src/main/java/com/solodroid/ads/sdkdemo/application/MyApplication.java",
"retrieved_chunk": " if (Constant.OPEN_ADS_ON_START) {\n if (Constant.AD_STATUS.equals(AD_STATUS_ON)) {\n switch (Constant.AD_NETWORK) {\n case ADMOB:\n if (!Constant.ADMOB_APP_OPEN_AD_ID.equals(\"0\")) {\n appOpenAdMob.showAdIfAvailable(activity, Constant.ADMOB_APP_OPEN_AD_ID, onShowAdCompleteListener);\n Constant.isAppOpen = true;\n }\n break;\n case GOOGLE_AD_MANAGER:",
"score": 73.78430867155669
},
{
"filename": "demo/src/main/java/com/solodroid/ads/sdkdemo/application/MyApplication.java",
"retrieved_chunk": " public void onStart(@NonNull LifecycleOwner owner) {\n DefaultLifecycleObserver.super.onStart(owner);\n if (Constant.isAppOpen) {\n if (Constant.OPEN_ADS_ON_RESUME) {\n if (Constant.AD_STATUS.equals(AD_STATUS_ON)) {\n switch (Constant.AD_NETWORK) {\n case ADMOB:\n if (!Constant.ADMOB_APP_OPEN_AD_ID.equals(\"0\")) {\n if (!currentActivity.getIntent().hasExtra(\"unique_id\")) {\n appOpenAdMob.showAdIfAvailable(currentActivity, Constant.ADMOB_APP_OPEN_AD_ID);",
"score": 61.98214579761709
},
{
"filename": "demo/src/main/java/com/solodroid/ads/sdkdemo/application/MyApplication.java",
"retrieved_chunk": " };\n ActivityLifecycleCallbacks activityLifecycleCallbacks = new ActivityLifecycleCallbacks() {\n @Override\n public void onActivityCreated(@NonNull Activity activity, @Nullable Bundle savedInstanceState) {\n }\n @Override\n public void onActivityStarted(@NonNull Activity activity) {\n if (Constant.OPEN_ADS_ON_START) {\n if (Constant.AD_STATUS.equals(AD_STATUS_ON)) {\n switch (Constant.AD_NETWORK) {",
"score": 48.56237521088075
},
{
"filename": "demo/src/main/java/com/solodroid/ads/sdkdemo/application/MyApplication.java",
"retrieved_chunk": " case ADMOB:\n if (!Constant.ADMOB_APP_OPEN_AD_ID.equals(\"0\")) {\n if (!appOpenAdMob.isShowingAd) {\n currentActivity = activity;\n }\n }\n break;\n case GOOGLE_AD_MANAGER:\n if (!Constant.GOOGLE_AD_MANAGER_APP_OPEN_AD_ID.equals(\"0\")) {\n if (!appOpenAdManager.isShowingAd) {",
"score": 45.91009927748522
},
{
"filename": "demo/src/main/java/com/solodroid/ads/sdkdemo/activity/MainActivity.java",
"retrieved_chunk": " private void loadOpenAds() {\n if (Constant.OPEN_ADS_ON_RESUME) {\n appOpenAdBuilder = new AppOpenAd.Builder(this)\n .setAdStatus(Constant.AD_STATUS)\n .setAdNetwork(Constant.AD_NETWORK)\n .setBackupAdNetwork(Constant.BACKUP_AD_NETWORK)\n .setAdMobAppOpenId(Constant.ADMOB_APP_OPEN_AD_ID)\n .setAdManagerAppOpenId(Constant.GOOGLE_AD_MANAGER_APP_OPEN_AD_ID)\n .setApplovinAppOpenId(Constant.APPLOVIN_APP_OPEN_AP_ID)\n .setWortiseAppOpenId(Constant.WORTISE_APP_OPEN_AD_ID)",
"score": 44.224422660079135
}
] | java | ()).showAdIfAvailable(ActivitySplash.this, this::requestConfig); |
package com.github.stupdit1t.jackson.expand.serializer;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonStreamContext;
import com.fasterxml.jackson.databind.BeanProperty;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.ser.ContextualSerializer;
import com.github.stupdit1t.jackson.expand.annotation.Expand;
import com.github.stupdit1t.jackson.expand.cache.ExpandCache;
import com.github.stupdit1t.jackson.expand.cache.LocalCache;
import com.github.stupdit1t.jackson.expand.config.JacksonExpandProperties;
import com.github.stupdit1t.jackson.expand.domain.ExpandStrategy;
import com.github.stupdit1t.jackson.expand.domain.SerializerParam;
import com.github.stupdit1t.jackson.expand.handler.params.ParamsHandler;
import com.github.stupdit1t.jackson.expand.handler.rsp.ResponseHandler;
import com.github.stupdit1t.jackson.expand.util.ReflectUtil;
import com.github.stupdit1t.jackson.expand.util.SpringUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.ReflectionUtils;
import org.springframework.util.StringUtils;
import org.springframework.web.context.request.RequestAttributes;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
import javax.servlet.ServletRequest;
import java.io.IOException;
import java.lang.reflect.Field;
import java.time.Duration;
import java.util.*;
import java.util.concurrent.locks.StampedLock;
import java.util.stream.Collectors;
public class ExpandSerializer extends JsonSerializer<Object> implements ContextualSerializer {
private static final Logger LOG = LoggerFactory.getLogger(ExpandSerializer.class);
/**
* 成功数据
*/
public static final String OK = "OK";
/**
* 失败数据
*/
public static final String FAIL = "FAIL";
/**
* 缓存
*/
private static ExpandCache cache;
/**
* 配置
*/
private static JacksonExpandProperties jacksonExpandProperties;
/**
* 本地锁缓存,防止同时查询
*/
private static final LocalCache lockCache = new LocalCache();
/**
* 远程调用服务
*/
private Object loadService;
/**
* 方法
*/
private String method;
/**
* 注解参数处理
*/
private SerializerParam params;
/**
* 返回结果处理类
*/
private ParamsHandler paramsHandler;
/**
* 返回结果处理类
*/
private ResponseHandler responseHandler;
/**
* bean名称
*/
private String beanName;
public ExpandSerializer() {
super();
if (cache == null) {
synchronized (ExpandSerializer.class) {
if (cache == null) {
cache = SpringUtil.getBean(ExpandCache.class);
jacksonExpandProperties = SpringUtil.getBean(JacksonExpandProperties.class);
}
}
}
}
public ExpandSerializer(String beanName, String method, SerializerParam params, ParamsHandler paramsHandler, ResponseHandler otherResponseHandler) {
this();
this.loadService = SpringUtil.getBean(beanName);
this.method = method;
this.params = params;
this.responseHandler = otherResponseHandler;
this.paramsHandler = paramsHandler;
this.beanName = beanName;
}
@Override
public void serialize(Object bindData, JsonGenerator gen, SerializerProvider serializers) throws IOException {
String writeFieldPath = getFieldPath(gen.getOutputContext());
// 统一path替换
String dynamicExpandCommonPrefix = jacksonExpandProperties.getDynamicExpandCommonPrefix();
if (StringUtils.hasText(dynamicExpandCommonPrefix) && writeFieldPath.startsWith(dynamicExpandCommonPrefix)) {
writeFieldPath = writeFieldPath.substring(dynamicExpandCommonPrefix.length() + 1);
}
// 是否展开
boolean expand;
// 动态展开开启,判断是否展开
boolean dynamicExpand = jacksonExpandProperties.isDynamicExpand();
if (dynamicExpand) {
Set<String> needExpandField = getParam(jacksonExpandProperties.getDynamicExpandParameterName());
// 如果代码里设置不展开,动态展开也不生效
expand = needExpandField.contains(writeFieldPath) && params.isOpen();
} else {
expand = params.isOpen();
}
if (!expand) {
gen.writeObject(bindData);
return;
}
// 判断要写入的字段
String writeField = gen.getOutputContext().getCurrentName();
if (jacksonExpandProperties.getExpandStrategy() == ExpandStrategy.COVER) {
writeField = gen.getOutputContext().getCurrentName();
} else | if (jacksonExpandProperties.getExpandStrategy() == ExpandStrategy.COPY) { |
writeField = String.format(jacksonExpandProperties.getCopyStrategyFormat(), gen.getOutputContext().getCurrentName());
}
// 自定义要写入的优先级最高
if (StringUtils.hasText(params.getWriteField())) {
writeField = params.getWriteField();
}
// 设置理论上的响应类型,要不要使用取决于 ResponseHandler 要不要处理,比如只能写入数据对象存在的对象,默认是忽略存不存在
Class<?> writeClass = null;
if (params.getWriteField() != null && StringUtils.hasText(params.getWriteField())) {
Field field = ReflectionUtils.findField(gen.getCurrentValue().getClass(), params.getWriteField());
if (field != null) {
writeClass = field.getType();
}
}
// 关闭不存在字段扩展,被写入的字段类型找不到,不扩展
if (!jacksonExpandProperties.isCanExpandToNotExistField() && writeClass == null) {
gen.writeObject(bindData);
return;
}
// 翻译为非当前字段,先写入当前字段值再翻译
boolean currField = gen.getOutputContext().getCurrentName().equals(writeField);
if (!currField) {
gen.writeObject(bindData);
gen.writeFieldName(writeField);
}
if (bindData == null || loadService == null) {
gen.writeObject(bindData);
return;
}
// 获取缓存KEY
Object[] args = params.getRemoteParams();
int argsLength = args == null ? 0 : args.length;
String cacheKey = jacksonExpandProperties.getCachePrefix() + ":" + beanName + ":" + method + ":%s:" + paramsHandler.getCacheKey(bindData, args);
Object result = getCacheInfo(cacheKey);
if (result != null) {
LOG.info("{} Expand cache 命中: {}", beanName, result);
gen.writeObject(result);
return;
}
StampedLock lock = lockCache.get(cacheKey, new StampedLock(), Duration.ofSeconds(300));
// 写锁避免同一业务ID重复查询
long stamp = lock.writeLock();
Integer cacheTime = params.getCacheTime();
try {
// 多参数组装
Object[] objectParams = new Object[argsLength + 1];
objectParams[0] = paramsHandler.handleVal(bindData);
if(objectParams.length > 1){
System.arraycopy(args, 0, objectParams, 1, argsLength);
}
// 请求翻译结果
Object loadResult = ReflectUtil.invoke(loadService, method, objectParams);
if (loadResult != null) {
result = this.responseHandler.handle(this.beanName, method, loadResult, writeClass, objectParams);
cache.put(String.format(cacheKey, OK), result, Duration.ofSeconds(cacheTime));
} else {
LOG.error("【{}】 Expand失败,未找到:{}", beanName, bindData);
cache.put(String.format(cacheKey, FAIL), bindData, Duration.ofSeconds(cacheTime));
result = bindData;
}
} catch (Exception e) {
LOG.error("【{}】 Expand异常:", beanName, e);
cache.put(String.format(cacheKey, FAIL), bindData, Duration.ofSeconds(cacheTime));
result = bindData;
} finally {
lock.unlockWrite(stamp);
}
gen.writeObject(result);
}
/**
* 获取当前字段的path路径
*
* @param outputContext
* @return
*/
private String getFieldPath(JsonStreamContext outputContext) {
List<String> path = new ArrayList<>(4);
while (outputContext != null) {
String currentName = outputContext.getCurrentName();
if (StringUtils.hasText(currentName)) {
path.add(currentName);
}
outputContext = outputContext.getParent();
}
Collections.reverse(path);
return String.join(".", path);
}
/**
* 获取厍信息
*
* @param cacheKey 缓存的KEY
* @return
*/
private Object getCacheInfo(String cacheKey) {
Object result = cache.get(String.format(cacheKey, OK));
if (result == null) {
result = cache.get(String.format(cacheKey, FAIL));
}
return result;
}
@Override
public JsonSerializer<?> createContextual(SerializerProvider prov, BeanProperty property) throws JsonMappingException {
if (property != null) {
Expand load = property.getAnnotation(Expand.class);
if (load == null) {
throw new RuntimeException("未注解相关 @Expand 注解");
}
String bean = load.bean();
Class<? extends ParamsHandler> paramsHandlerClass = load.paramsHandler();
Class<? extends ResponseHandler> responseHandlerClass = load.responseHandler();
String method = load.method();
try {
ParamsHandler paramsHandler = paramsHandlerClass.getDeclaredConstructor().newInstance();
ResponseHandler responseHandler = responseHandlerClass.getDeclaredConstructor().newInstance();
int cacheTime = load.cacheTime();
// 额外参数处理
SerializerParam params = paramsHandler.handleAnnotation(property);
// 参数处理器没设置,且父注设置了,以父注解为主
if (params.getCacheTime() == null && cacheTime != -1) {
params.setCacheTime(cacheTime);
}
// 缓存时间未设置,取默认
if (params.getCacheTime() == null) {
params.setCacheTime(jacksonExpandProperties.getCacheTimeout());
}
if (params.isOpen() == null) {
params.setExpand(load.expand());
}
return new ExpandSerializer(bean, method, params, paramsHandler, responseHandler);
} catch (Exception e) {
LOG.error("@Expand error: ", e);
}
}
return prov.findNullValueSerializer(null);
}
/**
* 获取展开参数
*
* @param key
* @return
*/
private Set<String> getParam(String key) {
RequestAttributes attributes = RequestContextHolder.getRequestAttributes();
if (attributes == null) {
return Collections.emptySet();
}
ServletRequest request = ((ServletRequestAttributes) attributes).getRequest();
String[] parameterValues = request.getParameterValues(key);
if (parameterValues == null) {
return Collections.emptySet();
}
return Arrays.stream(parameterValues).flatMap(o -> Arrays.stream(o.split(",")))
.collect(Collectors.toSet());
}
}
| src/main/java/com/github/stupdit1t/jackson/expand/serializer/ExpandSerializer.java | stupdit1t-jackson-expand-boot-starter-b492b74 | [
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/config/JacksonExpandProperties.java",
"retrieved_chunk": " this.dynamicExpandParameterName = dynamicExpandParameterName;\n }\n public ExpandStrategy getExpandStrategy() {\n return expandStrategy;\n }\n public void setExpandStrategy(ExpandStrategy expandStrategy) {\n this.expandStrategy = expandStrategy;\n }\n public String getCopyStrategyFormat() {\n return copyStrategyFormat;",
"score": 17.940498976678352
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/config/JacksonExpandProperties.java",
"retrieved_chunk": " */\n private String dynamicExpandParameterName = \"expand\";\n /**\n * 动态展开 统一数据的Path前缀,比如前缀是 data.body. 如果配置 expand=userId, 相当于是expnad=data.body.userId, 默认无\n */\n private String dynamicExpandCommonPrefix;\n /**\n * 展开策略, 默认覆盖\n */\n private ExpandStrategy expandStrategy = ExpandStrategy.COVER;",
"score": 12.288188791219223
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/domain/ExpandStrategy.java",
"retrieved_chunk": "package com.github.stupdit1t.jackson.expand.domain;\n/**\n * 展开策略\n */\npublic enum ExpandStrategy {\n /**\n * 覆盖当前字段\n */\n COVER,\n /**",
"score": 12.223659472005032
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/util/ReflectUtil.java",
"retrieved_chunk": " type2 = types2[i];\n if (isBasicType(type1) && isBasicType(type2)) {\n // 原始类型和包装类型存在不一致情况\n if (unWrap(type1) != unWrap(type2)) {\n return false;\n }\n } else if (!type1.isAssignableFrom(type2)) {\n return false;\n }\n }",
"score": 12.179089271500724
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/config/JacksonExpandConfigure.java",
"retrieved_chunk": " }\n /**\n * jackson 配置\n *\n * @return\n */\n @Bean\n public JacksonExpandProperties jacksonExpandProperties() {\n return new JacksonExpandProperties();\n }",
"score": 12.011820233430601
}
] | java | if (jacksonExpandProperties.getExpandStrategy() == ExpandStrategy.COPY) { |
package com.solodroid.ads.sdkdemo.activity;
import android.os.Bundle;
import android.view.MenuItem;
import android.widget.Toast;
import androidx.appcompat.app.ActionBar;
import androidx.appcompat.app.AppCompatActivity;
import androidx.appcompat.widget.Toolbar;
import androidx.recyclerview.widget.DividerItemDecoration;
import androidx.recyclerview.widget.RecyclerView;
import androidx.recyclerview.widget.StaggeredGridLayoutManager;
import com.solodroid.ads.sdk.format.BannerAd;
import com.solodroid.ads.sdkdemo.R;
import com.solodroid.ads.sdkdemo.adapter.AdapterPost;
import com.solodroid.ads.sdkdemo.data.Constant;
import com.solodroid.ads.sdkdemo.database.SharedPref;
import com.solodroid.ads.sdkdemo.model.Post;
import java.util.ArrayList;
import java.util.List;
public class SecondActivity extends AppCompatActivity {
SharedPref sharedPref;
RecyclerView recyclerView;
AdapterPost adapterPost;
BannerAd.Builder bannerAd;
Toolbar toolbar;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
sharedPref = new SharedPref(this);
getAppTheme();
setContentView(R.layout.activity_second);
initView();
loadBannerAd();
initToolbar();
}
private void initView() {
recyclerView = findViewById(R.id.recyclerView);
recyclerView.setLayoutManager(new StaggeredGridLayoutManager(1, StaggeredGridLayoutManager.VERTICAL));
recyclerView.addItemDecoration(new DividerItemDecoration(this, DividerItemDecoration.VERTICAL));
adapterPost = new AdapterPost(this, new ArrayList<>());
recyclerView.setAdapter(adapterPost);
| displayData(sharedPref.getPostList()); |
}
private void initToolbar() {
toolbar = findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
final ActionBar actionBar = getSupportActionBar();
if (actionBar != null) {
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
getSupportActionBar().setHomeButtonEnabled(true);
getSupportActionBar().setTitle("Second Activity");
}
}
private void displayData(List<Post> posts) {
if (posts != null && posts.size() > 0) {
adapterPost.setListData(posts, posts.size());
adapterPost.setOnItemClickListener((view, obj, position) -> {
Toast.makeText(getApplicationContext(), "" + obj.name, Toast.LENGTH_SHORT).show();
});
}
}
@Override
public void onBackPressed() {
super.onBackPressed();
bannerAd.destroyAndDetachBanner();
}
private void loadBannerAd() {
bannerAd = new BannerAd.Builder(this)
.setAdStatus(Constant.AD_STATUS)
.setAdNetwork(Constant.AD_NETWORK)
.setBackupAdNetwork(Constant.BACKUP_AD_NETWORK)
.setAdMobBannerId(Constant.ADMOB_BANNER_ID)
.setGoogleAdManagerBannerId(Constant.GOOGLE_AD_MANAGER_BANNER_ID)
.setUnityBannerId(Constant.UNITY_BANNER_ID)
.setAppLovinBannerId(Constant.APPLOVIN_BANNER_ID)
.setAppLovinBannerZoneId(Constant.APPLOVIN_BANNER_ZONE_ID)
.setIronSourceBannerId(Constant.IRONSOURCE_BANNER_ID)
.setWortiseBannerId(Constant.WORTISE_BANNER_ID)
.setDarkTheme(false)
.build();
}
@Override
public boolean onOptionsItemSelected(MenuItem menuItem) {
if (menuItem.getItemId() == android.R.id.home) {
onBackPressed();
return true;
}
return super.onOptionsItemSelected(menuItem);
}
public void getAppTheme() {
if (sharedPref.getIsDarkTheme()) {
setTheme(R.style.AppDarkTheme);
} else {
setTheme(R.style.AppTheme);
}
}
}
| demo/src/main/java/com/solodroid/ads/sdkdemo/activity/SecondActivity.java | solodroidx-multi-ads-sdk-4f23cdf | [
{
"filename": "demo/src/main/java/com/solodroid/ads/sdkdemo/adapter/AdapterPost.java",
"retrieved_chunk": " this.onItemClickListener = mItemClickListener;\n }\n public AdapterPost(Context context, List<Post> posts) {\n this.posts = posts;\n this.context = context;\n this.sharedPref = new SharedPref(context);\n }\n public static class OriginalViewHolder extends RecyclerView.ViewHolder {\n public TextView name;\n public ImageView image;",
"score": 20.374058809915915
},
{
"filename": "demo/src/main/java/com/solodroid/ads/sdkdemo/activity/MainActivity.java",
"retrieved_chunk": " toolbar = findViewById(R.id.toolbar);\n setSupportActionBar(toolbar);\n bannerAdView = findViewById(R.id.banner_ad_view);\n bannerAdView.addView(View.inflate(this, R.layout.view_banner_ad, null));\n initAds();\n loadOpenAds();\n loadBannerAd();\n loadInterstitialAd();\n loadRewardedAd();\n nativeAdViewContainer = findViewById(R.id.native_ad);",
"score": 18.872481813595552
},
{
"filename": "demo/src/main/java/com/solodroid/ads/sdkdemo/activity/MainActivity.java",
"retrieved_chunk": " appOpenAdBuilder.show();\n }\n }\n }, 100);\n }\n };\n private void loadBannerAd() {\n bannerAd = new BannerAd.Builder(this)\n .setAdStatus(Constant.AD_STATUS)\n .setAdNetwork(Constant.AD_NETWORK)",
"score": 18.735381834768486
},
{
"filename": "demo/src/main/java/com/solodroid/ads/sdkdemo/activity/MainActivity.java",
"retrieved_chunk": " }\n private void switchAppTheme() {\n switchMaterial = findViewById(R.id.switch_theme);\n switchMaterial.setChecked(sharedPref.getIsDarkTheme());\n switchMaterial.setOnCheckedChangeListener((buttonView, isChecked) -> {\n sharedPref.setIsDarkTheme(isChecked);\n recreate();\n });\n }\n private void showAdChooser() {",
"score": 17.75158135834866
},
{
"filename": "demo/src/main/java/com/solodroid/ads/sdkdemo/activity/MainActivity.java",
"retrieved_chunk": " btnSelectAds = findViewById(R.id.btn_select_ads);\n btnSelectAds.setOnClickListener(v -> showAdChooser());\n btnNativeAdStyle = findViewById(R.id.btn_native_ad_style);\n btnNativeAdStyle.setOnClickListener(v -> changeNativeAdStyle());\n switchAppTheme();\n }\n private void initAds() {\n adNetwork = new AdNetwork.Initialize(this)\n .setAdStatus(Constant.AD_STATUS)\n .setAdNetwork(Constant.AD_NETWORK)",
"score": 17.66500092780124
}
] | java | displayData(sharedPref.getPostList()); |
package com.github.stupdit1t.jackson.expand.serializer;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonStreamContext;
import com.fasterxml.jackson.databind.BeanProperty;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.ser.ContextualSerializer;
import com.github.stupdit1t.jackson.expand.annotation.Expand;
import com.github.stupdit1t.jackson.expand.cache.ExpandCache;
import com.github.stupdit1t.jackson.expand.cache.LocalCache;
import com.github.stupdit1t.jackson.expand.config.JacksonExpandProperties;
import com.github.stupdit1t.jackson.expand.domain.ExpandStrategy;
import com.github.stupdit1t.jackson.expand.domain.SerializerParam;
import com.github.stupdit1t.jackson.expand.handler.params.ParamsHandler;
import com.github.stupdit1t.jackson.expand.handler.rsp.ResponseHandler;
import com.github.stupdit1t.jackson.expand.util.ReflectUtil;
import com.github.stupdit1t.jackson.expand.util.SpringUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.ReflectionUtils;
import org.springframework.util.StringUtils;
import org.springframework.web.context.request.RequestAttributes;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
import javax.servlet.ServletRequest;
import java.io.IOException;
import java.lang.reflect.Field;
import java.time.Duration;
import java.util.*;
import java.util.concurrent.locks.StampedLock;
import java.util.stream.Collectors;
public class ExpandSerializer extends JsonSerializer<Object> implements ContextualSerializer {
private static final Logger LOG = LoggerFactory.getLogger(ExpandSerializer.class);
/**
* 成功数据
*/
public static final String OK = "OK";
/**
* 失败数据
*/
public static final String FAIL = "FAIL";
/**
* 缓存
*/
private static ExpandCache cache;
/**
* 配置
*/
private static JacksonExpandProperties jacksonExpandProperties;
/**
* 本地锁缓存,防止同时查询
*/
private static final LocalCache lockCache = new LocalCache();
/**
* 远程调用服务
*/
private Object loadService;
/**
* 方法
*/
private String method;
/**
* 注解参数处理
*/
private SerializerParam params;
/**
* 返回结果处理类
*/
private ParamsHandler paramsHandler;
/**
* 返回结果处理类
*/
private ResponseHandler responseHandler;
/**
* bean名称
*/
private String beanName;
public ExpandSerializer() {
super();
if (cache == null) {
synchronized (ExpandSerializer.class) {
if (cache == null) {
cache = SpringUtil.getBean(ExpandCache.class);
jacksonExpandProperties = SpringUtil.getBean(JacksonExpandProperties.class);
}
}
}
}
public ExpandSerializer(String beanName, String method, SerializerParam params, ParamsHandler paramsHandler, ResponseHandler otherResponseHandler) {
this();
this.loadService = SpringUtil.getBean(beanName);
this.method = method;
this.params = params;
this.responseHandler = otherResponseHandler;
this.paramsHandler = paramsHandler;
this.beanName = beanName;
}
@Override
public void serialize(Object bindData, JsonGenerator gen, SerializerProvider serializers) throws IOException {
String writeFieldPath = getFieldPath(gen.getOutputContext());
// 统一path替换
String dynamicExpandCommonPrefix = jacksonExpandProperties.getDynamicExpandCommonPrefix();
if (StringUtils.hasText(dynamicExpandCommonPrefix) && writeFieldPath.startsWith(dynamicExpandCommonPrefix)) {
writeFieldPath = writeFieldPath.substring(dynamicExpandCommonPrefix.length() + 1);
}
// 是否展开
boolean expand;
// 动态展开开启,判断是否展开
boolean dynamicExpand = jacksonExpandProperties.isDynamicExpand();
if (dynamicExpand) {
Set<String> needExpandField = getParam(jacksonExpandProperties.getDynamicExpandParameterName());
// 如果代码里设置不展开,动态展开也不生效
expand = needExpandField.contains(writeFieldPath) && params.isOpen();
} else {
expand = params.isOpen();
}
if (!expand) {
gen.writeObject(bindData);
return;
}
// 判断要写入的字段
String writeField = gen.getOutputContext().getCurrentName();
if (jacksonExpandProperties.getExpandStrategy() == ExpandStrategy.COVER) {
writeField = gen.getOutputContext().getCurrentName();
} else if (jacksonExpandProperties.getExpandStrategy() == ExpandStrategy.COPY) {
writeField = String.format(jacksonExpandProperties.getCopyStrategyFormat(), gen.getOutputContext().getCurrentName());
}
// 自定义要写入的优先级最高
if (StringUtils.hasText(params.getWriteField())) {
writeField = params.getWriteField();
}
// 设置理论上的响应类型,要不要使用取决于 ResponseHandler 要不要处理,比如只能写入数据对象存在的对象,默认是忽略存不存在
Class<?> writeClass = null;
if (params.getWriteField() != null && StringUtils.hasText(params.getWriteField())) {
Field field = ReflectionUtils.findField(gen.getCurrentValue().getClass(), params.getWriteField());
if (field != null) {
writeClass = field.getType();
}
}
// 关闭不存在字段扩展,被写入的字段类型找不到,不扩展
if (!jacksonExpandProperties.isCanExpandToNotExistField() && writeClass == null) {
gen.writeObject(bindData);
return;
}
// 翻译为非当前字段,先写入当前字段值再翻译
boolean currField = gen.getOutputContext().getCurrentName().equals(writeField);
if (!currField) {
gen.writeObject(bindData);
gen.writeFieldName(writeField);
}
if (bindData == null || loadService == null) {
gen.writeObject(bindData);
return;
}
// 获取缓存KEY
Object[] args = params.getRemoteParams();
int argsLength = args == null ? 0 : args.length;
String cacheKey = jacksonExpandProperties.getCachePrefix() + ":" + beanName + ":" + method + ":%s:" + paramsHandler.getCacheKey(bindData, args);
Object result = getCacheInfo(cacheKey);
if (result != null) {
LOG.info("{} Expand cache 命中: {}", beanName, result);
gen.writeObject(result);
return;
}
StampedLock lock = lockCache.get(cacheKey, new StampedLock(), Duration.ofSeconds(300));
// 写锁避免同一业务ID重复查询
long stamp = lock.writeLock();
Integer cacheTime = params.getCacheTime();
try {
// 多参数组装
Object[] objectParams = new Object[argsLength + 1];
objectParams[0] = paramsHandler.handleVal(bindData);
if(objectParams.length > 1){
System.arraycopy(args, 0, objectParams, 1, argsLength);
}
// 请求翻译结果
Object loadResult = ReflectUtil.invoke(loadService, method, objectParams);
if (loadResult != null) {
result = this.responseHandler.handle(this.beanName, method, loadResult, writeClass, objectParams);
cache.put(String.format(cacheKey, OK), result, Duration.ofSeconds(cacheTime));
} else {
LOG.error("【{}】 Expand失败,未找到:{}", beanName, bindData);
cache.put(String.format(cacheKey, FAIL), bindData, Duration.ofSeconds(cacheTime));
result = bindData;
}
} catch (Exception e) {
LOG.error("【{}】 Expand异常:", beanName, e);
cache.put(String.format(cacheKey, FAIL), bindData, Duration.ofSeconds(cacheTime));
result = bindData;
} finally {
lock.unlockWrite(stamp);
}
gen.writeObject(result);
}
/**
* 获取当前字段的path路径
*
* @param outputContext
* @return
*/
private String getFieldPath(JsonStreamContext outputContext) {
List<String> path = new ArrayList<>(4);
while (outputContext != null) {
String currentName = outputContext.getCurrentName();
if (StringUtils.hasText(currentName)) {
path.add(currentName);
}
outputContext = outputContext.getParent();
}
Collections.reverse(path);
return String.join(".", path);
}
/**
* 获取厍信息
*
* @param cacheKey 缓存的KEY
* @return
*/
private Object getCacheInfo(String cacheKey) {
Object result = cache.get(String.format(cacheKey, OK));
if (result == null) {
result = cache.get(String.format(cacheKey, FAIL));
}
return result;
}
@Override
public JsonSerializer<?> createContextual(SerializerProvider prov, BeanProperty property) throws JsonMappingException {
if (property != null) {
Expand load = property.getAnnotation(Expand.class);
if (load == null) {
throw new RuntimeException("未注解相关 @Expand 注解");
}
String bean = load.bean();
Class<? extends ParamsHandler> paramsHandlerClass = load.paramsHandler();
Class<? extends ResponseHandler> responseHandlerClass = load.responseHandler();
String method = load.method();
try {
ParamsHandler paramsHandler = paramsHandlerClass.getDeclaredConstructor().newInstance();
ResponseHandler responseHandler = responseHandlerClass.getDeclaredConstructor().newInstance();
int cacheTime = load.cacheTime();
// 额外参数处理
SerializerParam params = paramsHandler.handleAnnotation(property);
// 参数处理器没设置,且父注设置了,以父注解为主
if (params.getCacheTime() == null && cacheTime != -1) {
params.setCacheTime(cacheTime);
}
// 缓存时间未设置,取默认
if (params.getCacheTime() == null) {
params. | setCacheTime(jacksonExpandProperties.getCacheTimeout()); |
}
if (params.isOpen() == null) {
params.setExpand(load.expand());
}
return new ExpandSerializer(bean, method, params, paramsHandler, responseHandler);
} catch (Exception e) {
LOG.error("@Expand error: ", e);
}
}
return prov.findNullValueSerializer(null);
}
/**
* 获取展开参数
*
* @param key
* @return
*/
private Set<String> getParam(String key) {
RequestAttributes attributes = RequestContextHolder.getRequestAttributes();
if (attributes == null) {
return Collections.emptySet();
}
ServletRequest request = ((ServletRequestAttributes) attributes).getRequest();
String[] parameterValues = request.getParameterValues(key);
if (parameterValues == null) {
return Collections.emptySet();
}
return Arrays.stream(parameterValues).flatMap(o -> Arrays.stream(o.split(",")))
.collect(Collectors.toSet());
}
}
| src/main/java/com/github/stupdit1t/jackson/expand/serializer/ExpandSerializer.java | stupdit1t-jackson-expand-boot-starter-b492b74 | [
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/handler/params/DefaultParamsHandler.java",
"retrieved_chunk": " if (expand != null) {\n if (StringUtils.hasText(expand.to())) {\n params.setWriteField(expand.to());\n }\n params.setExpand(expand.expand());\n }\n return params;\n }\n}",
"score": 27.746931444960097
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/domain/SerializerParam.java",
"retrieved_chunk": " public void setWriteField(String writeField) {\n this.writeField = writeField;\n }\n public Integer getCacheTime() {\n return cacheTime;\n }\n public void setCacheTime(Integer cacheTime) {\n this.cacheTime = cacheTime;\n }\n public Boolean isOpen() {",
"score": 26.461282176128456
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/handler/params/DefaultParamsHandler.java",
"retrieved_chunk": "public class DefaultParamsHandler implements ParamsHandler {\n @Override\n public Object handleVal(Object val) {\n return val;\n }\n @Override\n public SerializerParam handleAnnotation(BeanProperty property) {\n SerializerParam params = new SerializerParam();\n // 用户注解值处理\n Expand expand = property.getAnnotation(Expand.class);",
"score": 20.761936478296505
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/handler/rsp/DefaultResponseHandler.java",
"retrieved_chunk": " * @param rsp 当前返回值\n * @param toClass 要填充字段的类型\n * @param params 当前方法参数\n * @return\n */\n @Override\n public Object handle(String bean, String method, Object rsp, Class<?> toClass, Object... params) {\n return rsp;\n }\n}",
"score": 15.151975334482179
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/annotation/Expand.java",
"retrieved_chunk": " *\n * @return\n */\n int cacheTime() default -1;\n /**\n * 是否要展开\n *\n * @return\n */\n boolean expand() default true;",
"score": 13.0661912661331
}
] | java | setCacheTime(jacksonExpandProperties.getCacheTimeout()); |
package com.solodroid.ads.sdkdemo.activity;
import static com.solodroid.ads.sdk.util.Constant.ADMOB;
import static com.solodroid.ads.sdk.util.Constant.AD_STATUS_ON;
import static com.solodroid.ads.sdk.util.Constant.APPLOVIN;
import static com.solodroid.ads.sdk.util.Constant.APPLOVIN_MAX;
import static com.solodroid.ads.sdk.util.Constant.GOOGLE_AD_MANAGER;
import static com.solodroid.ads.sdk.util.Constant.WORTISE;
import android.app.Application;
import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import com.solodroid.ads.sdk.format.AdNetwork;
import com.solodroid.ads.sdk.format.AppOpenAd;
import com.solodroid.ads.sdkdemo.BuildConfig;
import com.solodroid.ads.sdkdemo.R;
import com.solodroid.ads.sdkdemo.application.MyApplication;
import com.solodroid.ads.sdkdemo.callback.CallbackConfig;
import com.solodroid.ads.sdkdemo.data.Constant;
import com.solodroid.ads.sdkdemo.database.SharedPref;
import com.solodroid.ads.sdkdemo.rest.RestAdapter;
import java.util.Arrays;
import java.util.List;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;
@SuppressWarnings("ConstantConditions")
public class ActivitySplash extends AppCompatActivity {
private static final String TAG = "ActivitySplash";
Call<CallbackConfig> callbackConfigCall = null;
public static int DELAY_PROGRESS = 1500;
AdNetwork.Initialize adNetwork;
AppOpenAd.Builder appOpenAdBuilder;
SharedPref sharedPref;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_splash);
sharedPref = new SharedPref(this);
initAds();
if (Constant.AD_STATUS.equals(AD_STATUS_ON) && Constant.OPEN_ADS_ON_START) {
if (!Constant.FORCE_TO_SHOW_APP_OPEN_AD_ON_START) {
new Handler(Looper.getMainLooper()).postDelayed(() -> {
switch (Constant.AD_NETWORK) {
case ADMOB:
if (!Constant.ADMOB_APP_OPEN_AD_ID.equals("0")) {
((MyApplication) getApplication()).showAdIfAvailable(ActivitySplash.this, this::requestConfig);
} else {
requestConfig();
}
break;
case GOOGLE_AD_MANAGER:
if (!Constant.GOOGLE_AD_MANAGER_APP_OPEN_AD_ID.equals("0")) {
((MyApplication) getApplication()).showAdIfAvailable(ActivitySplash.this, this::requestConfig);
} else {
requestConfig();
}
break;
case APPLOVIN:
case APPLOVIN_MAX:
if (!Constant.APPLOVIN_APP_OPEN_AP_ID.equals("0")) {
((MyApplication) getApplication()).showAdIfAvailable(ActivitySplash.this, this::requestConfig);
} else {
requestConfig();
}
break;
case WORTISE:
if (!Constant.WORTISE_APP_OPEN_AD_ID.equals("0")) {
((MyApplication) getApplication()).showAdIfAvailable(ActivitySplash.this, this::requestConfig);
} else {
requestConfig();
}
break;
default:
requestConfig();
break;
}
}, DELAY_PROGRESS);
} else {
requestConfig();
}
} else {
requestConfig();
}
}
private void requestConfig() {
requestAPI("https://raw.githubusercontent.com/solodroidev/content/uploads/json/android.json");
}
private void requestAPI(@SuppressWarnings("SameParameterValue") String url) {
if (url.startsWith("http://") || url.startsWith("https://")) {
if (url.contains("https://drive.google.com")) {
String driveUrl = url.replace("https://", "").replace("http://", "");
List<String> data = Arrays.asList(driveUrl.split("/"));
String googleDriveFileId = data.get(3);
callbackConfigCall | = RestAdapter.createApi().getDriveJsonFileId(googleDriveFileId); |
} else {
callbackConfigCall = RestAdapter.createApi().getJsonUrl(url);
}
} else {
callbackConfigCall = RestAdapter.createApi().getDriveJsonFileId(url);
}
callbackConfigCall.enqueue(new Callback<CallbackConfig>() {
public void onResponse(@NonNull Call<CallbackConfig> call, @NonNull Response<CallbackConfig> response) {
CallbackConfig resp = response.body();
if (resp != null) {
sharedPref.savePostList(resp.android);
loadOpenAds();
Log.d(TAG, "responses success");
} else {
loadOpenAds();
Log.d(TAG, "responses null");
}
}
public void onFailure(@NonNull Call<CallbackConfig> call, @NonNull Throwable th) {
Log.d(TAG, "responses failed: " + th.getMessage());
loadOpenAds();
}
});
}
private void initAds() {
adNetwork = new AdNetwork.Initialize(this)
.setAdStatus(Constant.AD_STATUS)
.setAdNetwork(Constant.AD_NETWORK)
.setBackupAdNetwork(Constant.BACKUP_AD_NETWORK)
.setAdMobAppId(null)
.setStartappAppId(Constant.STARTAPP_APP_ID)
.setUnityGameId(Constant.UNITY_GAME_ID)
.setAppLovinSdkKey(getResources().getString(R.string.applovin_sdk_key))
.setIronSourceAppKey(Constant.IRONSOURCE_APP_KEY)
.setWortiseAppId(Constant.WORTISE_APP_ID)
.setDebug(BuildConfig.DEBUG)
.build();
}
private void loadOpenAds() {
if (Constant.FORCE_TO_SHOW_APP_OPEN_AD_ON_START && Constant.OPEN_ADS_ON_START) {
appOpenAdBuilder = new AppOpenAd.Builder(this)
.setAdStatus(Constant.AD_STATUS)
.setAdNetwork(Constant.AD_NETWORK)
.setBackupAdNetwork(Constant.BACKUP_AD_NETWORK)
.setAdMobAppOpenId(Constant.ADMOB_APP_OPEN_AD_ID)
.setAdManagerAppOpenId(Constant.GOOGLE_AD_MANAGER_APP_OPEN_AD_ID)
.setApplovinAppOpenId(Constant.APPLOVIN_APP_OPEN_AP_ID)
.setWortiseAppOpenId(Constant.WORTISE_APP_OPEN_AD_ID)
.build(this::startMainActivity);
} else {
startMainActivity();
}
}
public void startMainActivity() {
new Handler().postDelayed(() -> {
Intent intent = new Intent(this, MainActivity.class);
startActivity(intent);
finish();
}, DELAY_PROGRESS);
}
}
| demo/src/main/java/com/solodroid/ads/sdkdemo/activity/ActivitySplash.java | solodroidx-multi-ads-sdk-4f23cdf | [
{
"filename": "demo/src/main/java/com/solodroid/ads/sdkdemo/rest/ApiInterface.java",
"retrieved_chunk": " @Headers({CACHE, AGENT})\n @GET\n Call<CallbackConfig> getJsonUrl(\n @Url String url\n );\n @Headers({CACHE, AGENT})\n @GET(\"uc?export=download\")\n Call<CallbackConfig> getDriveJsonFileId(\n @Query(\"id\") String id\n );",
"score": 57.40181763891534
},
{
"filename": "demo/src/main/java/com/solodroid/ads/sdkdemo/rest/RestAdapter.java",
"retrieved_chunk": " OkHttpClient okHttpClient = new OkHttpClient.Builder()\n .connectTimeout(5, TimeUnit.SECONDS)\n .writeTimeout(10, TimeUnit.SECONDS)\n .readTimeout(30, TimeUnit.SECONDS)\n .cache(null)\n .build();\n Retrofit retrofit = new Retrofit.Builder()\n .baseUrl(\"https://drive.google.com/\")\n .addConverterFactory(GsonConverterFactory.create())\n .client(okHttpClient)",
"score": 37.68392964982566
},
{
"filename": "demo/src/main/java/com/solodroid/ads/sdkdemo/rest/ApiInterface.java",
"retrieved_chunk": "package com.solodroid.ads.sdkdemo.rest;\nimport com.solodroid.ads.sdkdemo.callback.CallbackConfig;\nimport retrofit2.Call;\nimport retrofit2.http.GET;\nimport retrofit2.http.Headers;\nimport retrofit2.http.Query;\nimport retrofit2.http.Url;\npublic interface ApiInterface {\n String CACHE = \"Cache-Control: max-age=0\";\n String AGENT = \"Data-Agent: Solodroid\";",
"score": 34.49149524074026
},
{
"filename": "demo/src/main/java/com/solodroid/ads/sdkdemo/database/SharedPref.java",
"retrieved_chunk": " Gson gson = new Gson();\n String json = gson.toJson(posts);\n editor.putString(KEY_POSTS, json);\n editor.apply();\n }\n public List<Post> getPostList() {\n Gson gson = new Gson();\n String json = sharedPreferences.getString(KEY_POSTS, null);\n Type type = new TypeToken<ArrayList<Post>>() {\n }.getType();",
"score": 34.1194398365482
},
{
"filename": "demo/src/main/java/com/solodroid/ads/sdkdemo/activity/MainActivity.java",
"retrieved_chunk": "import static com.solodroid.ads.sdkdemo.data.Constant.STYLE_DEFAULT;\nimport static com.solodroid.ads.sdkdemo.data.Constant.STYLE_NEWS;\nimport static com.solodroid.ads.sdkdemo.data.Constant.STYLE_RADIO;\nimport static com.solodroid.ads.sdkdemo.data.Constant.STYLE_VIDEO_LARGE;\nimport static com.solodroid.ads.sdkdemo.data.Constant.STYLE_VIDEO_SMALL;\nimport android.content.Intent;\nimport android.os.Bundle;\nimport android.os.Handler;\nimport android.os.Looper;\nimport android.util.Log;",
"score": 27.816933632394196
}
] | java | = RestAdapter.createApi().getDriveJsonFileId(googleDriveFileId); |
package com.github.stupdit1t.jackson.expand.serializer;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonStreamContext;
import com.fasterxml.jackson.databind.BeanProperty;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.ser.ContextualSerializer;
import com.github.stupdit1t.jackson.expand.annotation.Expand;
import com.github.stupdit1t.jackson.expand.cache.ExpandCache;
import com.github.stupdit1t.jackson.expand.cache.LocalCache;
import com.github.stupdit1t.jackson.expand.config.JacksonExpandProperties;
import com.github.stupdit1t.jackson.expand.domain.ExpandStrategy;
import com.github.stupdit1t.jackson.expand.domain.SerializerParam;
import com.github.stupdit1t.jackson.expand.handler.params.ParamsHandler;
import com.github.stupdit1t.jackson.expand.handler.rsp.ResponseHandler;
import com.github.stupdit1t.jackson.expand.util.ReflectUtil;
import com.github.stupdit1t.jackson.expand.util.SpringUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.ReflectionUtils;
import org.springframework.util.StringUtils;
import org.springframework.web.context.request.RequestAttributes;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
import javax.servlet.ServletRequest;
import java.io.IOException;
import java.lang.reflect.Field;
import java.time.Duration;
import java.util.*;
import java.util.concurrent.locks.StampedLock;
import java.util.stream.Collectors;
public class ExpandSerializer extends JsonSerializer<Object> implements ContextualSerializer {
private static final Logger LOG = LoggerFactory.getLogger(ExpandSerializer.class);
/**
* 成功数据
*/
public static final String OK = "OK";
/**
* 失败数据
*/
public static final String FAIL = "FAIL";
/**
* 缓存
*/
private static ExpandCache cache;
/**
* 配置
*/
private static JacksonExpandProperties jacksonExpandProperties;
/**
* 本地锁缓存,防止同时查询
*/
private static final LocalCache lockCache = new LocalCache();
/**
* 远程调用服务
*/
private Object loadService;
/**
* 方法
*/
private String method;
/**
* 注解参数处理
*/
private SerializerParam params;
/**
* 返回结果处理类
*/
private ParamsHandler paramsHandler;
/**
* 返回结果处理类
*/
private ResponseHandler responseHandler;
/**
* bean名称
*/
private String beanName;
public ExpandSerializer() {
super();
if (cache == null) {
synchronized (ExpandSerializer.class) {
if (cache == null) {
cache = SpringUtil.getBean(ExpandCache.class);
jacksonExpandProperties = SpringUtil.getBean(JacksonExpandProperties.class);
}
}
}
}
public ExpandSerializer(String beanName, String method, SerializerParam params, ParamsHandler paramsHandler, ResponseHandler otherResponseHandler) {
this();
this.loadService = SpringUtil.getBean(beanName);
this.method = method;
this.params = params;
this.responseHandler = otherResponseHandler;
this.paramsHandler = paramsHandler;
this.beanName = beanName;
}
@Override
public void serialize(Object bindData, JsonGenerator gen, SerializerProvider serializers) throws IOException {
String writeFieldPath = getFieldPath(gen.getOutputContext());
// 统一path替换
String dynamicExpandCommonPrefix = jacksonExpandProperties.getDynamicExpandCommonPrefix();
if (StringUtils.hasText(dynamicExpandCommonPrefix) && writeFieldPath.startsWith(dynamicExpandCommonPrefix)) {
writeFieldPath = writeFieldPath.substring(dynamicExpandCommonPrefix.length() + 1);
}
// 是否展开
boolean expand;
// 动态展开开启,判断是否展开
boolean dynamicExpand = jacksonExpandProperties.isDynamicExpand();
if (dynamicExpand) {
Set<String> needExpandField = getParam(jacksonExpandProperties.getDynamicExpandParameterName());
// 如果代码里设置不展开,动态展开也不生效
expand = needExpandField.contains(writeFieldPath) && params.isOpen();
} else {
expand = params.isOpen();
}
if (!expand) {
gen.writeObject(bindData);
return;
}
// 判断要写入的字段
String writeField = gen.getOutputContext().getCurrentName();
if (jacksonExpandProperties.getExpandStrategy() == ExpandStrategy.COVER) {
writeField = gen.getOutputContext().getCurrentName();
} else if (jacksonExpandProperties.getExpandStrategy() == ExpandStrategy.COPY) {
writeField = String.format(jacksonExpandProperties.getCopyStrategyFormat(), gen.getOutputContext().getCurrentName());
}
// 自定义要写入的优先级最高
if (StringUtils.hasText(params.getWriteField())) {
writeField = params.getWriteField();
}
// 设置理论上的响应类型,要不要使用取决于 ResponseHandler 要不要处理,比如只能写入数据对象存在的对象,默认是忽略存不存在
Class<?> writeClass = null;
if (params.getWriteField() != null && StringUtils.hasText(params.getWriteField())) {
Field field = ReflectionUtils.findField(gen.getCurrentValue().getClass(), params.getWriteField());
if (field != null) {
writeClass = field.getType();
}
}
// 关闭不存在字段扩展,被写入的字段类型找不到,不扩展
if (!jacksonExpandProperties.isCanExpandToNotExistField() && writeClass == null) {
gen.writeObject(bindData);
return;
}
// 翻译为非当前字段,先写入当前字段值再翻译
boolean currField = gen.getOutputContext().getCurrentName().equals(writeField);
if (!currField) {
gen.writeObject(bindData);
gen.writeFieldName(writeField);
}
if (bindData == null || loadService == null) {
gen.writeObject(bindData);
return;
}
// 获取缓存KEY
Object[] args = params.getRemoteParams();
int argsLength = args == null ? 0 : args.length;
String cacheKey = jacksonExpandProperties.getCachePrefix() + ":" + beanName + ":" + method + ":%s:" + paramsHandler.getCacheKey(bindData, args);
Object result = getCacheInfo(cacheKey);
if (result != null) {
LOG.info("{} Expand cache 命中: {}", beanName, result);
gen.writeObject(result);
return;
}
StampedLock lock = lockCache.get(cacheKey, new StampedLock(), Duration.ofSeconds(300));
// 写锁避免同一业务ID重复查询
long stamp = lock.writeLock();
Integer cacheTime = params.getCacheTime();
try {
// 多参数组装
Object[] objectParams = new Object[argsLength + 1];
objectParams[0] = paramsHandler.handleVal(bindData);
if(objectParams.length > 1){
System.arraycopy(args, 0, objectParams, 1, argsLength);
}
// 请求翻译结果
Object loadResult = ReflectUtil.invoke(loadService, method, objectParams);
if (loadResult != null) {
result = this.responseHandler.handle(this.beanName, method, loadResult, writeClass, objectParams);
| cache.put(String.format(cacheKey, OK), result, Duration.ofSeconds(cacheTime)); |
} else {
LOG.error("【{}】 Expand失败,未找到:{}", beanName, bindData);
cache.put(String.format(cacheKey, FAIL), bindData, Duration.ofSeconds(cacheTime));
result = bindData;
}
} catch (Exception e) {
LOG.error("【{}】 Expand异常:", beanName, e);
cache.put(String.format(cacheKey, FAIL), bindData, Duration.ofSeconds(cacheTime));
result = bindData;
} finally {
lock.unlockWrite(stamp);
}
gen.writeObject(result);
}
/**
* 获取当前字段的path路径
*
* @param outputContext
* @return
*/
private String getFieldPath(JsonStreamContext outputContext) {
List<String> path = new ArrayList<>(4);
while (outputContext != null) {
String currentName = outputContext.getCurrentName();
if (StringUtils.hasText(currentName)) {
path.add(currentName);
}
outputContext = outputContext.getParent();
}
Collections.reverse(path);
return String.join(".", path);
}
/**
* 获取厍信息
*
* @param cacheKey 缓存的KEY
* @return
*/
private Object getCacheInfo(String cacheKey) {
Object result = cache.get(String.format(cacheKey, OK));
if (result == null) {
result = cache.get(String.format(cacheKey, FAIL));
}
return result;
}
@Override
public JsonSerializer<?> createContextual(SerializerProvider prov, BeanProperty property) throws JsonMappingException {
if (property != null) {
Expand load = property.getAnnotation(Expand.class);
if (load == null) {
throw new RuntimeException("未注解相关 @Expand 注解");
}
String bean = load.bean();
Class<? extends ParamsHandler> paramsHandlerClass = load.paramsHandler();
Class<? extends ResponseHandler> responseHandlerClass = load.responseHandler();
String method = load.method();
try {
ParamsHandler paramsHandler = paramsHandlerClass.getDeclaredConstructor().newInstance();
ResponseHandler responseHandler = responseHandlerClass.getDeclaredConstructor().newInstance();
int cacheTime = load.cacheTime();
// 额外参数处理
SerializerParam params = paramsHandler.handleAnnotation(property);
// 参数处理器没设置,且父注设置了,以父注解为主
if (params.getCacheTime() == null && cacheTime != -1) {
params.setCacheTime(cacheTime);
}
// 缓存时间未设置,取默认
if (params.getCacheTime() == null) {
params.setCacheTime(jacksonExpandProperties.getCacheTimeout());
}
if (params.isOpen() == null) {
params.setExpand(load.expand());
}
return new ExpandSerializer(bean, method, params, paramsHandler, responseHandler);
} catch (Exception e) {
LOG.error("@Expand error: ", e);
}
}
return prov.findNullValueSerializer(null);
}
/**
* 获取展开参数
*
* @param key
* @return
*/
private Set<String> getParam(String key) {
RequestAttributes attributes = RequestContextHolder.getRequestAttributes();
if (attributes == null) {
return Collections.emptySet();
}
ServletRequest request = ((ServletRequestAttributes) attributes).getRequest();
String[] parameterValues = request.getParameterValues(key);
if (parameterValues == null) {
return Collections.emptySet();
}
return Arrays.stream(parameterValues).flatMap(o -> Arrays.stream(o.split(",")))
.collect(Collectors.toSet());
}
}
| src/main/java/com/github/stupdit1t/jackson/expand/serializer/ExpandSerializer.java | stupdit1t-jackson-expand-boot-starter-b492b74 | [
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/cache/ExpandCache.java",
"retrieved_chunk": " key.add(String.valueOf(bindData));\n for (Object subVal : annotationVal) {\n key.add(String.valueOf(subVal));\n }\n String cacheKey = properties.getCachePrefix() + \":\" + beanName + \":\" + method + \":%s:\" + key.toString();\n delete(String.format(cacheKey, ExpandSerializer.OK));\n delete(String.format(cacheKey, ExpandSerializer.FAIL));\n }\n /**\n * 模糊匹配key",
"score": 24.766034079699747
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/util/ReflectUtil.java",
"retrieved_chunk": " *\n * @param obj\n * @param methodName\n * @param args\n * @return\n */\n public static Object invoke(Object obj, String methodName, Object[] args) throws InvocationTargetException, IllegalAccessException {\n String cacheKey = obj.getClass().getName() + methodName;\n final Method method = METHODS_CACHE.computeIfAbsent(cacheKey, (key) -> getMethod(obj.getClass(), methodName, args));\n if (null == method) {",
"score": 24.233626290529507
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/annotation/Expand.java",
"retrieved_chunk": " *\n * @return\n */\n int cacheTime() default -1;\n /**\n * 是否要展开\n *\n * @return\n */\n boolean expand() default true;",
"score": 19.359269895332865
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/util/ReflectUtil.java",
"retrieved_chunk": " private static Class<?> unWrap(Class<?> clazz) {\n if (null == clazz || clazz.isPrimitive()) {\n return clazz;\n }\n Class<?> result = WRAPPER_PRIMITIVE_MAP.get(clazz);\n return (null == result) ? clazz : result;\n }\n}",
"score": 18.54540174560119
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/handler/rsp/ResponseHandler.java",
"retrieved_chunk": "package com.github.stupdit1t.jackson.expand.handler.rsp;\n/**\n * 返回处理第三方响应\n */\npublic interface ResponseHandler {\n Object handle(String bean, String method, Object rsp, Class<?> writeClass, Object... params);\n}",
"score": 18.051795612945856
}
] | java | cache.put(String.format(cacheKey, OK), result, Duration.ofSeconds(cacheTime)); |
package com.solodroid.ads.sdkdemo.activity;
import android.os.Bundle;
import android.view.MenuItem;
import android.widget.Toast;
import androidx.appcompat.app.ActionBar;
import androidx.appcompat.app.AppCompatActivity;
import androidx.appcompat.widget.Toolbar;
import androidx.recyclerview.widget.DividerItemDecoration;
import androidx.recyclerview.widget.RecyclerView;
import androidx.recyclerview.widget.StaggeredGridLayoutManager;
import com.solodroid.ads.sdk.format.BannerAd;
import com.solodroid.ads.sdkdemo.R;
import com.solodroid.ads.sdkdemo.adapter.AdapterPost;
import com.solodroid.ads.sdkdemo.data.Constant;
import com.solodroid.ads.sdkdemo.database.SharedPref;
import com.solodroid.ads.sdkdemo.model.Post;
import java.util.ArrayList;
import java.util.List;
public class SecondActivity extends AppCompatActivity {
SharedPref sharedPref;
RecyclerView recyclerView;
AdapterPost adapterPost;
BannerAd.Builder bannerAd;
Toolbar toolbar;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
sharedPref = new SharedPref(this);
getAppTheme();
setContentView(R.layout.activity_second);
initView();
loadBannerAd();
initToolbar();
}
private void initView() {
recyclerView = findViewById(R.id.recyclerView);
recyclerView.setLayoutManager(new StaggeredGridLayoutManager(1, StaggeredGridLayoutManager.VERTICAL));
recyclerView.addItemDecoration(new DividerItemDecoration(this, DividerItemDecoration.VERTICAL));
adapterPost = new AdapterPost(this, new ArrayList<>());
recyclerView.setAdapter(adapterPost);
displayData(sharedPref.getPostList());
}
private void initToolbar() {
toolbar = findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
final ActionBar actionBar = getSupportActionBar();
if (actionBar != null) {
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
getSupportActionBar().setHomeButtonEnabled(true);
getSupportActionBar().setTitle("Second Activity");
}
}
private void displayData(List<Post> posts) {
if (posts != null && posts.size() > 0) {
adapterPost.setListData(posts, posts.size());
| adapterPost.setOnItemClickListener((view, obj, position) -> { |
Toast.makeText(getApplicationContext(), "" + obj.name, Toast.LENGTH_SHORT).show();
});
}
}
@Override
public void onBackPressed() {
super.onBackPressed();
bannerAd.destroyAndDetachBanner();
}
private void loadBannerAd() {
bannerAd = new BannerAd.Builder(this)
.setAdStatus(Constant.AD_STATUS)
.setAdNetwork(Constant.AD_NETWORK)
.setBackupAdNetwork(Constant.BACKUP_AD_NETWORK)
.setAdMobBannerId(Constant.ADMOB_BANNER_ID)
.setGoogleAdManagerBannerId(Constant.GOOGLE_AD_MANAGER_BANNER_ID)
.setUnityBannerId(Constant.UNITY_BANNER_ID)
.setAppLovinBannerId(Constant.APPLOVIN_BANNER_ID)
.setAppLovinBannerZoneId(Constant.APPLOVIN_BANNER_ZONE_ID)
.setIronSourceBannerId(Constant.IRONSOURCE_BANNER_ID)
.setWortiseBannerId(Constant.WORTISE_BANNER_ID)
.setDarkTheme(false)
.build();
}
@Override
public boolean onOptionsItemSelected(MenuItem menuItem) {
if (menuItem.getItemId() == android.R.id.home) {
onBackPressed();
return true;
}
return super.onOptionsItemSelected(menuItem);
}
public void getAppTheme() {
if (sharedPref.getIsDarkTheme()) {
setTheme(R.style.AppDarkTheme);
} else {
setTheme(R.style.AppTheme);
}
}
}
| demo/src/main/java/com/solodroid/ads/sdkdemo/activity/SecondActivity.java | solodroidx-multi-ads-sdk-4f23cdf | [
{
"filename": "demo/src/main/java/com/solodroid/ads/sdkdemo/adapter/AdapterPost.java",
"retrieved_chunk": " }\n }\n public void setListData(List<Post> posts, int totalPosts) {\n this.posts = posts;\n insertNativeAd(posts, totalPosts);\n notifyDataSetChanged();\n }\n private void insertNativeAd(List<Post> posts, int totalPosts) {\n switch (Constant.AD_NETWORK) {\n case ADMOB:",
"score": 68.07078311295719
},
{
"filename": "demo/src/main/java/com/solodroid/ads/sdkdemo/adapter/AdapterPost.java",
"retrieved_chunk": " Context context;\n private List<Post> posts;\n private OnItemClickListener onItemClickListener;\n private final int VIEW_ITEM = 1;\n private final int VIEW_AD = 2;\n SharedPref sharedPref;\n public interface OnItemClickListener {\n void onItemClick(View view, Post obj, int position);\n }\n public void setOnItemClickListener(final OnItemClickListener mItemClickListener) {",
"score": 57.04479616383341
},
{
"filename": "demo/src/main/java/com/solodroid/ads/sdkdemo/adapter/AdapterPost.java",
"retrieved_chunk": " @Override\n public int getItemCount() {\n return posts.size();\n }\n}",
"score": 51.27752702767756
},
{
"filename": "demo/src/main/java/com/solodroid/ads/sdkdemo/adapter/AdapterPost.java",
"retrieved_chunk": " maxNumberNativeAd = (totalPosts / Constant.NATIVE_AD_INTERVAL);\n } else {\n maxNumberNativeAd = 1;\n }\n int limitNativeAd = (maxNumberNativeAd * Constant.NATIVE_AD_INTERVAL) + Constant.NATIVE_AD_INDEX;\n if (posts.size() >= Constant.NATIVE_AD_INDEX) {\n for (int i = Constant.NATIVE_AD_INDEX; i < limitNativeAd; i += Constant.NATIVE_AD_INTERVAL) {\n posts.add(i, new Post());\n }\n }",
"score": 49.784402599799186
},
{
"filename": "demo/src/main/java/com/solodroid/ads/sdkdemo/adapter/AdapterPost.java",
"retrieved_chunk": " this.onItemClickListener = mItemClickListener;\n }\n public AdapterPost(Context context, List<Post> posts) {\n this.posts = posts;\n this.context = context;\n this.sharedPref = new SharedPref(context);\n }\n public static class OriginalViewHolder extends RecyclerView.ViewHolder {\n public TextView name;\n public ImageView image;",
"score": 46.47551172098336
}
] | java | adapterPost.setOnItemClickListener((view, obj, position) -> { |
package com.github.stupdit1t.jackson.expand.serializer;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonStreamContext;
import com.fasterxml.jackson.databind.BeanProperty;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.ser.ContextualSerializer;
import com.github.stupdit1t.jackson.expand.annotation.Expand;
import com.github.stupdit1t.jackson.expand.cache.ExpandCache;
import com.github.stupdit1t.jackson.expand.cache.LocalCache;
import com.github.stupdit1t.jackson.expand.config.JacksonExpandProperties;
import com.github.stupdit1t.jackson.expand.domain.ExpandStrategy;
import com.github.stupdit1t.jackson.expand.domain.SerializerParam;
import com.github.stupdit1t.jackson.expand.handler.params.ParamsHandler;
import com.github.stupdit1t.jackson.expand.handler.rsp.ResponseHandler;
import com.github.stupdit1t.jackson.expand.util.ReflectUtil;
import com.github.stupdit1t.jackson.expand.util.SpringUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.ReflectionUtils;
import org.springframework.util.StringUtils;
import org.springframework.web.context.request.RequestAttributes;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
import javax.servlet.ServletRequest;
import java.io.IOException;
import java.lang.reflect.Field;
import java.time.Duration;
import java.util.*;
import java.util.concurrent.locks.StampedLock;
import java.util.stream.Collectors;
public class ExpandSerializer extends JsonSerializer<Object> implements ContextualSerializer {
private static final Logger LOG = LoggerFactory.getLogger(ExpandSerializer.class);
/**
* 成功数据
*/
public static final String OK = "OK";
/**
* 失败数据
*/
public static final String FAIL = "FAIL";
/**
* 缓存
*/
private static ExpandCache cache;
/**
* 配置
*/
private static JacksonExpandProperties jacksonExpandProperties;
/**
* 本地锁缓存,防止同时查询
*/
private static final LocalCache lockCache = new LocalCache();
/**
* 远程调用服务
*/
private Object loadService;
/**
* 方法
*/
private String method;
/**
* 注解参数处理
*/
private SerializerParam params;
/**
* 返回结果处理类
*/
private ParamsHandler paramsHandler;
/**
* 返回结果处理类
*/
private ResponseHandler responseHandler;
/**
* bean名称
*/
private String beanName;
public ExpandSerializer() {
super();
if (cache == null) {
synchronized (ExpandSerializer.class) {
if (cache == null) {
cache = SpringUtil.getBean(ExpandCache.class);
jacksonExpandProperties = SpringUtil.getBean(JacksonExpandProperties.class);
}
}
}
}
public ExpandSerializer(String beanName, String method, SerializerParam params, ParamsHandler paramsHandler, ResponseHandler otherResponseHandler) {
this();
this.loadService = SpringUtil.getBean(beanName);
this.method = method;
this.params = params;
this.responseHandler = otherResponseHandler;
this.paramsHandler = paramsHandler;
this.beanName = beanName;
}
@Override
public void serialize(Object bindData, JsonGenerator gen, SerializerProvider serializers) throws IOException {
String writeFieldPath = getFieldPath(gen.getOutputContext());
// 统一path替换
String dynamicExpandCommonPrefix = jacksonExpandProperties.getDynamicExpandCommonPrefix();
if (StringUtils.hasText(dynamicExpandCommonPrefix) && writeFieldPath.startsWith(dynamicExpandCommonPrefix)) {
writeFieldPath = writeFieldPath.substring(dynamicExpandCommonPrefix.length() + 1);
}
// 是否展开
boolean expand;
// 动态展开开启,判断是否展开
boolean dynamicExpand = jacksonExpandProperties.isDynamicExpand();
if (dynamicExpand) {
Set<String> needExpandField = getParam(jacksonExpandProperties.getDynamicExpandParameterName());
// 如果代码里设置不展开,动态展开也不生效
expand = needExpandField.contains(writeFieldPath) && params.isOpen();
} else {
expand = params.isOpen();
}
if (!expand) {
gen.writeObject(bindData);
return;
}
// 判断要写入的字段
String writeField = gen.getOutputContext().getCurrentName();
if ( | jacksonExpandProperties.getExpandStrategy() == ExpandStrategy.COVER) { |
writeField = gen.getOutputContext().getCurrentName();
} else if (jacksonExpandProperties.getExpandStrategy() == ExpandStrategy.COPY) {
writeField = String.format(jacksonExpandProperties.getCopyStrategyFormat(), gen.getOutputContext().getCurrentName());
}
// 自定义要写入的优先级最高
if (StringUtils.hasText(params.getWriteField())) {
writeField = params.getWriteField();
}
// 设置理论上的响应类型,要不要使用取决于 ResponseHandler 要不要处理,比如只能写入数据对象存在的对象,默认是忽略存不存在
Class<?> writeClass = null;
if (params.getWriteField() != null && StringUtils.hasText(params.getWriteField())) {
Field field = ReflectionUtils.findField(gen.getCurrentValue().getClass(), params.getWriteField());
if (field != null) {
writeClass = field.getType();
}
}
// 关闭不存在字段扩展,被写入的字段类型找不到,不扩展
if (!jacksonExpandProperties.isCanExpandToNotExistField() && writeClass == null) {
gen.writeObject(bindData);
return;
}
// 翻译为非当前字段,先写入当前字段值再翻译
boolean currField = gen.getOutputContext().getCurrentName().equals(writeField);
if (!currField) {
gen.writeObject(bindData);
gen.writeFieldName(writeField);
}
if (bindData == null || loadService == null) {
gen.writeObject(bindData);
return;
}
// 获取缓存KEY
Object[] args = params.getRemoteParams();
int argsLength = args == null ? 0 : args.length;
String cacheKey = jacksonExpandProperties.getCachePrefix() + ":" + beanName + ":" + method + ":%s:" + paramsHandler.getCacheKey(bindData, args);
Object result = getCacheInfo(cacheKey);
if (result != null) {
LOG.info("{} Expand cache 命中: {}", beanName, result);
gen.writeObject(result);
return;
}
StampedLock lock = lockCache.get(cacheKey, new StampedLock(), Duration.ofSeconds(300));
// 写锁避免同一业务ID重复查询
long stamp = lock.writeLock();
Integer cacheTime = params.getCacheTime();
try {
// 多参数组装
Object[] objectParams = new Object[argsLength + 1];
objectParams[0] = paramsHandler.handleVal(bindData);
if(objectParams.length > 1){
System.arraycopy(args, 0, objectParams, 1, argsLength);
}
// 请求翻译结果
Object loadResult = ReflectUtil.invoke(loadService, method, objectParams);
if (loadResult != null) {
result = this.responseHandler.handle(this.beanName, method, loadResult, writeClass, objectParams);
cache.put(String.format(cacheKey, OK), result, Duration.ofSeconds(cacheTime));
} else {
LOG.error("【{}】 Expand失败,未找到:{}", beanName, bindData);
cache.put(String.format(cacheKey, FAIL), bindData, Duration.ofSeconds(cacheTime));
result = bindData;
}
} catch (Exception e) {
LOG.error("【{}】 Expand异常:", beanName, e);
cache.put(String.format(cacheKey, FAIL), bindData, Duration.ofSeconds(cacheTime));
result = bindData;
} finally {
lock.unlockWrite(stamp);
}
gen.writeObject(result);
}
/**
* 获取当前字段的path路径
*
* @param outputContext
* @return
*/
private String getFieldPath(JsonStreamContext outputContext) {
List<String> path = new ArrayList<>(4);
while (outputContext != null) {
String currentName = outputContext.getCurrentName();
if (StringUtils.hasText(currentName)) {
path.add(currentName);
}
outputContext = outputContext.getParent();
}
Collections.reverse(path);
return String.join(".", path);
}
/**
* 获取厍信息
*
* @param cacheKey 缓存的KEY
* @return
*/
private Object getCacheInfo(String cacheKey) {
Object result = cache.get(String.format(cacheKey, OK));
if (result == null) {
result = cache.get(String.format(cacheKey, FAIL));
}
return result;
}
@Override
public JsonSerializer<?> createContextual(SerializerProvider prov, BeanProperty property) throws JsonMappingException {
if (property != null) {
Expand load = property.getAnnotation(Expand.class);
if (load == null) {
throw new RuntimeException("未注解相关 @Expand 注解");
}
String bean = load.bean();
Class<? extends ParamsHandler> paramsHandlerClass = load.paramsHandler();
Class<? extends ResponseHandler> responseHandlerClass = load.responseHandler();
String method = load.method();
try {
ParamsHandler paramsHandler = paramsHandlerClass.getDeclaredConstructor().newInstance();
ResponseHandler responseHandler = responseHandlerClass.getDeclaredConstructor().newInstance();
int cacheTime = load.cacheTime();
// 额外参数处理
SerializerParam params = paramsHandler.handleAnnotation(property);
// 参数处理器没设置,且父注设置了,以父注解为主
if (params.getCacheTime() == null && cacheTime != -1) {
params.setCacheTime(cacheTime);
}
// 缓存时间未设置,取默认
if (params.getCacheTime() == null) {
params.setCacheTime(jacksonExpandProperties.getCacheTimeout());
}
if (params.isOpen() == null) {
params.setExpand(load.expand());
}
return new ExpandSerializer(bean, method, params, paramsHandler, responseHandler);
} catch (Exception e) {
LOG.error("@Expand error: ", e);
}
}
return prov.findNullValueSerializer(null);
}
/**
* 获取展开参数
*
* @param key
* @return
*/
private Set<String> getParam(String key) {
RequestAttributes attributes = RequestContextHolder.getRequestAttributes();
if (attributes == null) {
return Collections.emptySet();
}
ServletRequest request = ((ServletRequestAttributes) attributes).getRequest();
String[] parameterValues = request.getParameterValues(key);
if (parameterValues == null) {
return Collections.emptySet();
}
return Arrays.stream(parameterValues).flatMap(o -> Arrays.stream(o.split(",")))
.collect(Collectors.toSet());
}
}
| src/main/java/com/github/stupdit1t/jackson/expand/serializer/ExpandSerializer.java | stupdit1t-jackson-expand-boot-starter-b492b74 | [
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/handler/params/DefaultParamsHandler.java",
"retrieved_chunk": " if (expand != null) {\n if (StringUtils.hasText(expand.to())) {\n params.setWriteField(expand.to());\n }\n params.setExpand(expand.expand());\n }\n return params;\n }\n}",
"score": 12.968951064072595
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/domain/SerializerParam.java",
"retrieved_chunk": " public void setWriteField(String writeField) {\n this.writeField = writeField;\n }\n public Integer getCacheTime() {\n return cacheTime;\n }\n public void setCacheTime(Integer cacheTime) {\n this.cacheTime = cacheTime;\n }\n public Boolean isOpen() {",
"score": 10.01990827574129
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/domain/ExpandStrategy.java",
"retrieved_chunk": "package com.github.stupdit1t.jackson.expand.domain;\n/**\n * 展开策略\n */\npublic enum ExpandStrategy {\n /**\n * 覆盖当前字段\n */\n COVER,\n /**",
"score": 9.883270560231795
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/config/JacksonExpandProperties.java",
"retrieved_chunk": " this.dynamicExpandParameterName = dynamicExpandParameterName;\n }\n public ExpandStrategy getExpandStrategy() {\n return expandStrategy;\n }\n public void setExpandStrategy(ExpandStrategy expandStrategy) {\n this.expandStrategy = expandStrategy;\n }\n public String getCopyStrategyFormat() {\n return copyStrategyFormat;",
"score": 9.750314132901563
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/config/JacksonExpandProperties.java",
"retrieved_chunk": " */\n private String dynamicExpandParameterName = \"expand\";\n /**\n * 动态展开 统一数据的Path前缀,比如前缀是 data.body. 如果配置 expand=userId, 相当于是expnad=data.body.userId, 默认无\n */\n private String dynamicExpandCommonPrefix;\n /**\n * 展开策略, 默认覆盖\n */\n private ExpandStrategy expandStrategy = ExpandStrategy.COVER;",
"score": 9.735321928186261
}
] | java | jacksonExpandProperties.getExpandStrategy() == ExpandStrategy.COVER) { |
package com.solodroid.ads.sdk.format;
import static com.solodroid.ads.sdk.util.Constant.ADMOB;
import static com.solodroid.ads.sdk.util.Constant.AD_STATUS_ON;
import static com.solodroid.ads.sdk.util.Constant.APPLOVIN;
import static com.solodroid.ads.sdk.util.Constant.APPLOVIN_DISCOVERY;
import static com.solodroid.ads.sdk.util.Constant.APPLOVIN_MAX;
import static com.solodroid.ads.sdk.util.Constant.FACEBOOK;
import static com.solodroid.ads.sdk.util.Constant.FAN;
import static com.solodroid.ads.sdk.util.Constant.FAN_BIDDING_ADMOB;
import static com.solodroid.ads.sdk.util.Constant.FAN_BIDDING_AD_MANAGER;
import static com.solodroid.ads.sdk.util.Constant.FAN_BIDDING_APPLOVIN_MAX;
import static com.solodroid.ads.sdk.util.Constant.FAN_BIDDING_IRONSOURCE;
import static com.solodroid.ads.sdk.util.Constant.GOOGLE_AD_MANAGER;
import static com.solodroid.ads.sdk.util.Constant.IRONSOURCE;
import static com.solodroid.ads.sdk.util.Constant.MOPUB;
import static com.solodroid.ads.sdk.util.Constant.NONE;
import static com.solodroid.ads.sdk.util.Constant.STARTAPP;
import static com.solodroid.ads.sdk.util.Constant.UNITY;
import static com.solodroid.ads.sdk.util.Constant.UNITY_ADS_BANNER_HEIGHT_MEDIUM;
import static com.solodroid.ads.sdk.util.Constant.UNITY_ADS_BANNER_WIDTH_MEDIUM;
import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import android.widget.RelativeLayout;
import androidx.annotation.NonNull;
import com.applovin.adview.AppLovinAdView;
import com.applovin.mediation.MaxAd;
import com.applovin.mediation.MaxAdViewAdListener;
import com.applovin.mediation.MaxError;
import com.applovin.mediation.ads.MaxAdView;
import com.applovin.sdk.AppLovinAd;
import com.applovin.sdk.AppLovinAdLoadListener;
import com.applovin.sdk.AppLovinAdSize;
import com.applovin.sdk.AppLovinSdkUtils;
import com.facebook.ads.Ad;
import com.facebook.ads.AdSize;
import com.google.android.gms.ads.AdListener;
import com.google.android.gms.ads.AdRequest;
import com.google.android.gms.ads.AdView;
import com.google.android.gms.ads.LoadAdError;
import com.google.android.gms.ads.admanager.AdManagerAdView;
import com.ironsource.mediationsdk.ISBannerSize;
import com.ironsource.mediationsdk.IronSource;
import com.ironsource.mediationsdk.IronSourceBannerLayout;
import com.ironsource.mediationsdk.adunit.adapter.utility.AdInfo;
import com.ironsource.mediationsdk.logger.IronSourceError;
import com.ironsource.mediationsdk.sdk.LevelPlayBannerListener;
import com.solodroid.ads.sdk.R;
import com.solodroid.ads.sdk.helper.AppLovinCustomEventBanner;
import com.solodroid.ads.sdk.util.Tools;
import com.startapp.sdk.ads.banner.Banner;
import com.startapp.sdk.ads.banner.BannerListener;
import com.unity3d.services.banners.BannerErrorInfo;
import com.unity3d.services.banners.BannerView;
import com.unity3d.services.banners.UnityBannerSize;
public class MediumRectangleAd {
public static class Builder {
private static final String TAG = "AdNetwork";
private final Activity activity;
private AdView adView;
private AdManagerAdView adManagerAdView;
private com.facebook.ads.AdView fanAdView;
private AppLovinAdView appLovinAdView;
FrameLayout ironSourceBannerView;
private IronSourceBannerLayout ironSourceBannerLayout;
private String adStatus = "";
private String adNetwork = "";
private String backupAdNetwork = "";
private String adMobBannerId = "";
private String googleAdManagerBannerId = "";
private String fanBannerId = "";
private String unityBannerId = "";
private String appLovinBannerId = "";
private String appLovinBannerZoneId = "";
private String mopubBannerId = "";
private String ironSourceBannerId = "";
private int placementStatus = 1;
private boolean darkTheme = false;
private boolean legacyGDPR = false;
public Builder(Activity activity) {
this.activity = activity;
}
public Builder build() {
loadBannerAd();
return this;
}
public Builder setAdStatus(String adStatus) {
this.adStatus = adStatus;
return this;
}
public Builder setAdNetwork(String adNetwork) {
this.adNetwork = adNetwork;
return this;
}
public Builder setBackupAdNetwork(String backupAdNetwork) {
this.backupAdNetwork = backupAdNetwork;
return this;
}
public Builder setAdMobBannerId(String adMobBannerId) {
this.adMobBannerId = adMobBannerId;
return this;
}
public Builder setGoogleAdManagerBannerId(String googleAdManagerBannerId) {
this.googleAdManagerBannerId = googleAdManagerBannerId;
return this;
}
public Builder setFanBannerId(String fanBannerId) {
this.fanBannerId = fanBannerId;
return this;
}
public Builder setUnityBannerId(String unityBannerId) {
this.unityBannerId = unityBannerId;
return this;
}
public Builder setAppLovinBannerId(String appLovinBannerId) {
this.appLovinBannerId = appLovinBannerId;
return this;
}
public Builder setAppLovinBannerZoneId(String appLovinBannerZoneId) {
this.appLovinBannerZoneId = appLovinBannerZoneId;
return this;
}
public Builder setMopubBannerId(String mopubBannerId) {
this.mopubBannerId = mopubBannerId;
return this;
}
public Builder setIronSourceBannerId(String ironSourceBannerId) {
this.ironSourceBannerId = ironSourceBannerId;
return this;
}
public Builder setPlacementStatus(int placementStatus) {
this.placementStatus = placementStatus;
return this;
}
public Builder setDarkTheme(boolean darkTheme) {
this.darkTheme = darkTheme;
return this;
}
public Builder setLegacyGDPR(boolean legacyGDPR) {
this.legacyGDPR = legacyGDPR;
return this;
}
public void loadBannerAd() {
if (adStatus.equals(AD_STATUS_ON) && placementStatus != 0) {
switch (adNetwork) {
case ADMOB:
case FAN_BIDDING_ADMOB:
FrameLayout adContainerView = activity.findViewById(R.id.admob_banner_view_container);
adContainerView.post(() -> {
adView = new AdView(activity);
adView.setAdUnitId(adMobBannerId);
adContainerView.removeAllViews();
adContainerView.addView(adView);
adView.setAdSize | (Tools.getAdSizeMREC()); |
adView.loadAd(Tools.getAdRequest(activity, legacyGDPR));
adView.setAdListener(new AdListener() {
@Override
public void onAdLoaded() {
// Code to be executed when an ad finishes loading.
adContainerView.setVisibility(View.VISIBLE);
}
@Override
public void onAdFailedToLoad(@NonNull LoadAdError adError) {
// Code to be executed when an ad request fails.
adContainerView.setVisibility(View.GONE);
loadBackupBannerAd();
}
@Override
public void onAdOpened() {
// Code to be executed when an ad opens an overlay that
// covers the screen.
}
@Override
public void onAdClicked() {
// Code to be executed when the user clicks on an ad.
}
@Override
public void onAdClosed() {
// Code to be executed when the user is about to return
// to the app after tapping on an ad.
}
});
});
Log.d(TAG, adNetwork + " Banner Ad unit Id : " + adMobBannerId);
break;
case GOOGLE_AD_MANAGER:
case FAN_BIDDING_AD_MANAGER:
FrameLayout googleAdContainerView = activity.findViewById(R.id.google_ad_banner_view_container);
googleAdContainerView.post(() -> {
adManagerAdView = new AdManagerAdView(activity);
adManagerAdView.setAdUnitId(googleAdManagerBannerId);
googleAdContainerView.removeAllViews();
googleAdContainerView.addView(adManagerAdView);
adManagerAdView.setAdSize(Tools.getAdSizeMREC());
adManagerAdView.loadAd(Tools.getGoogleAdManagerRequest());
adManagerAdView.setAdListener(new AdListener() {
@Override
public void onAdClicked() {
super.onAdClicked();
}
@Override
public void onAdClosed() {
super.onAdClosed();
}
@Override
public void onAdFailedToLoad(@NonNull LoadAdError loadAdError) {
super.onAdFailedToLoad(loadAdError);
googleAdContainerView.setVisibility(View.GONE);
loadBackupBannerAd();
}
@Override
public void onAdImpression() {
super.onAdImpression();
}
@Override
public void onAdLoaded() {
super.onAdLoaded();
googleAdContainerView.setVisibility(View.VISIBLE);
}
@Override
public void onAdOpened() {
super.onAdOpened();
}
});
});
break;
case FAN:
case FACEBOOK:
fanAdView = new com.facebook.ads.AdView(activity, fanBannerId, AdSize.RECTANGLE_HEIGHT_250);
RelativeLayout fanAdViewContainer = activity.findViewById(R.id.fan_banner_view_container);
fanAdViewContainer.addView(fanAdView);
com.facebook.ads.AdListener adListener = new com.facebook.ads.AdListener() {
@Override
public void onError(Ad ad, com.facebook.ads.AdError adError) {
fanAdViewContainer.setVisibility(View.GONE);
loadBackupBannerAd();
Log.d(TAG, "Error load FAN : " + adError.getErrorMessage());
}
@Override
public void onAdLoaded(Ad ad) {
fanAdViewContainer.setVisibility(View.VISIBLE);
}
@Override
public void onAdClicked(Ad ad) {
}
@Override
public void onLoggingImpression(Ad ad) {
}
};
com.facebook.ads.AdView.AdViewLoadConfig loadAdConfig = fanAdView.buildLoadAdConfig().withAdListener(adListener).build();
fanAdView.loadAd(loadAdConfig);
break;
case STARTAPP:
RelativeLayout startAppAdView = activity.findViewById(R.id.startapp_banner_view_container);
Banner banner = new Banner(activity, new BannerListener() {
@Override
public void onReceiveAd(View banner) {
startAppAdView.setVisibility(View.VISIBLE);
}
@Override
public void onFailedToReceiveAd(View banner) {
startAppAdView.setVisibility(View.GONE);
loadBackupBannerAd();
Log.d(TAG, adNetwork + " failed load startapp banner ad : ");
}
@Override
public void onImpression(View view) {
}
@Override
public void onClick(View banner) {
}
});
startAppAdView.addView(banner);
break;
case UNITY:
RelativeLayout unityAdView = activity.findViewById(R.id.unity_banner_view_container);
BannerView bottomBanner = new BannerView(activity, unityBannerId, new UnityBannerSize(UNITY_ADS_BANNER_WIDTH_MEDIUM, UNITY_ADS_BANNER_HEIGHT_MEDIUM));
bottomBanner.setListener(new BannerView.IListener() {
@Override
public void onBannerLoaded(BannerView bannerView) {
unityAdView.setVisibility(View.VISIBLE);
Log.d("Unity_banner", "ready");
}
@Override
public void onBannerShown(BannerView bannerAdView) {
}
@Override
public void onBannerClick(BannerView bannerView) {
}
@Override
public void onBannerFailedToLoad(BannerView bannerView, BannerErrorInfo bannerErrorInfo) {
Log.d("SupportTest", "Banner Error" + bannerErrorInfo);
unityAdView.setVisibility(View.GONE);
loadBackupBannerAd();
}
@Override
public void onBannerLeftApplication(BannerView bannerView) {
}
});
unityAdView.addView(bottomBanner);
bottomBanner.load();
Log.d(TAG, adNetwork + " Banner Ad unit Id : " + unityBannerId);
break;
case APPLOVIN:
case APPLOVIN_MAX:
case FAN_BIDDING_APPLOVIN_MAX:
RelativeLayout appLovinAdView = activity.findViewById(R.id.applovin_banner_view_container);
MaxAdView maxAdView = new MaxAdView(appLovinBannerId, activity);
maxAdView.setListener(new MaxAdViewAdListener() {
@Override
public void onAdExpanded(MaxAd ad) {
}
@Override
public void onAdCollapsed(MaxAd ad) {
}
@Override
public void onAdLoaded(MaxAd ad) {
appLovinAdView.setVisibility(View.VISIBLE);
}
@Override
public void onAdDisplayed(MaxAd ad) {
}
@Override
public void onAdHidden(MaxAd ad) {
}
@Override
public void onAdClicked(MaxAd ad) {
}
@Override
public void onAdLoadFailed(String adUnitId, MaxError error) {
appLovinAdView.setVisibility(View.GONE);
loadBackupBannerAd();
}
@Override
public void onAdDisplayFailed(MaxAd ad, MaxError error) {
}
});
int width = ViewGroup.LayoutParams.MATCH_PARENT;
int heightPx = activity.getResources().getDimensionPixelSize(R.dimen.applovin_banner_height);
maxAdView.setLayoutParams(new FrameLayout.LayoutParams(width, heightPx));
if (darkTheme) {
maxAdView.setBackgroundColor(activity.getResources().getColor(R.color.color_native_background_dark));
} else {
maxAdView.setBackgroundColor(activity.getResources().getColor(R.color.color_native_background_light));
}
appLovinAdView.addView(maxAdView);
maxAdView.loadAd();
Log.d(TAG, adNetwork + " Banner Ad unit Id : " + appLovinBannerId);
break;
case APPLOVIN_DISCOVERY:
RelativeLayout appLovinDiscoveryAdView = activity.findViewById(R.id.applovin_discovery_banner_view_container);
AdRequest.Builder builder = new AdRequest.Builder();
Bundle bannerExtras = new Bundle();
bannerExtras.putString("zone_id", appLovinBannerZoneId);
builder.addCustomEventExtrasBundle(AppLovinCustomEventBanner.class, bannerExtras);
boolean isTablet2 = AppLovinSdkUtils.isTablet(activity);
AppLovinAdSize adSize = isTablet2 ? AppLovinAdSize.LEADER : AppLovinAdSize.BANNER;
this.appLovinAdView = new AppLovinAdView(adSize, activity);
this.appLovinAdView.setAdLoadListener(new AppLovinAdLoadListener() {
@Override
public void adReceived(AppLovinAd ad) {
appLovinDiscoveryAdView.setVisibility(View.VISIBLE);
}
@Override
public void failedToReceiveAd(int errorCode) {
appLovinDiscoveryAdView.setVisibility(View.GONE);
loadBackupBannerAd();
}
});
appLovinDiscoveryAdView.addView(this.appLovinAdView);
this.appLovinAdView.loadNextAd();
break;
case MOPUB:
//Mopub has been acquired by AppLovin
break;
case IRONSOURCE:
case FAN_BIDDING_IRONSOURCE:
ironSourceBannerView = activity.findViewById(R.id.ironsource_banner_view_container);
ISBannerSize size = ISBannerSize.RECTANGLE;
ironSourceBannerLayout = IronSource.createBanner(activity, size);
FrameLayout.LayoutParams layoutParams = new FrameLayout.LayoutParams(FrameLayout.LayoutParams.MATCH_PARENT, FrameLayout.LayoutParams.MATCH_PARENT);
ironSourceBannerView.addView(ironSourceBannerLayout, 0, layoutParams);
if (ironSourceBannerLayout != null) {
ironSourceBannerLayout.setLevelPlayBannerListener(new LevelPlayBannerListener() {
@Override
public void onAdLoaded(AdInfo adInfo) {
Log.d(TAG, "onBannerAdLoaded");
ironSourceBannerView.setVisibility(View.VISIBLE);
}
@Override
public void onAdLoadFailed(IronSourceError ironSourceError) {
Log.d(TAG, "onBannerAdLoadFailed" + " " + ironSourceError.getErrorMessage());
loadBackupBannerAd();
}
@Override
public void onAdClicked(AdInfo adInfo) {
Log.d(TAG, "onBannerAdClicked");
}
@Override
public void onAdLeftApplication(AdInfo adInfo) {
Log.d(TAG, "onBannerAdLeftApplication");
}
@Override
public void onAdScreenPresented(AdInfo adInfo) {
Log.d(TAG, "onBannerAdScreenPresented");
}
@Override
public void onAdScreenDismissed(AdInfo adInfo) {
Log.d(TAG, "onBannerAdScreenDismissed");
}
});
IronSource.loadBanner(ironSourceBannerLayout, ironSourceBannerId);
} else {
Log.d(TAG, "IronSource.createBanner returned null");
}
break;
case NONE:
//do nothing
break;
}
Log.d(TAG, "Banner Ad is enabled");
} else {
Log.d(TAG, "Banner Ad is disabled");
}
}
public void loadBackupBannerAd() {
if (adStatus.equals(AD_STATUS_ON) && placementStatus != 0) {
switch (backupAdNetwork) {
case ADMOB:
case FAN_BIDDING_ADMOB:
FrameLayout adContainerView = activity.findViewById(R.id.admob_banner_view_container);
adContainerView.post(() -> {
adView = new AdView(activity);
adView.setAdUnitId(adMobBannerId);
adContainerView.removeAllViews();
adContainerView.addView(adView);
adView.setAdSize(Tools.getAdSizeMREC());
adView.loadAd(Tools.getAdRequest(activity, legacyGDPR));
adView.setAdListener(new AdListener() {
@Override
public void onAdLoaded() {
// Code to be executed when an ad finishes loading.
adContainerView.setVisibility(View.VISIBLE);
}
@Override
public void onAdFailedToLoad(@NonNull LoadAdError adError) {
// Code to be executed when an ad request fails.
adContainerView.setVisibility(View.GONE);
}
@Override
public void onAdOpened() {
// Code to be executed when an ad opens an overlay that
// covers the screen.
}
@Override
public void onAdClicked() {
// Code to be executed when the user clicks on an ad.
}
@Override
public void onAdClosed() {
// Code to be executed when the user is about to return
// to the app after tapping on an ad.
}
});
});
Log.d(TAG, adNetwork + " Banner Ad unit Id : " + adMobBannerId);
break;
case GOOGLE_AD_MANAGER:
case FAN_BIDDING_AD_MANAGER:
FrameLayout googleAdContainerView = activity.findViewById(R.id.google_ad_banner_view_container);
googleAdContainerView.post(() -> {
adManagerAdView = new AdManagerAdView(activity);
adManagerAdView.setAdUnitId(googleAdManagerBannerId);
googleAdContainerView.removeAllViews();
googleAdContainerView.addView(adManagerAdView);
adManagerAdView.setAdSize(Tools.getAdSizeMREC());
adManagerAdView.loadAd(Tools.getGoogleAdManagerRequest());
adManagerAdView.setAdListener(new AdListener() {
@Override
public void onAdClicked() {
super.onAdClicked();
}
@Override
public void onAdClosed() {
super.onAdClosed();
}
@Override
public void onAdFailedToLoad(@NonNull LoadAdError loadAdError) {
super.onAdFailedToLoad(loadAdError);
googleAdContainerView.setVisibility(View.GONE);
}
@Override
public void onAdImpression() {
super.onAdImpression();
}
@Override
public void onAdLoaded() {
super.onAdLoaded();
googleAdContainerView.setVisibility(View.VISIBLE);
}
@Override
public void onAdOpened() {
super.onAdOpened();
}
});
});
break;
case FAN:
case FACEBOOK:
fanAdView = new com.facebook.ads.AdView(activity, fanBannerId, AdSize.RECTANGLE_HEIGHT_250);
RelativeLayout fanAdViewContainer = activity.findViewById(R.id.fan_banner_view_container);
fanAdViewContainer.addView(fanAdView);
com.facebook.ads.AdListener adListener = new com.facebook.ads.AdListener() {
@Override
public void onError(Ad ad, com.facebook.ads.AdError adError) {
fanAdViewContainer.setVisibility(View.GONE);
Log.d(TAG, "Error load FAN : " + adError.getErrorMessage());
}
@Override
public void onAdLoaded(Ad ad) {
fanAdViewContainer.setVisibility(View.VISIBLE);
}
@Override
public void onAdClicked(Ad ad) {
}
@Override
public void onLoggingImpression(Ad ad) {
}
};
com.facebook.ads.AdView.AdViewLoadConfig loadAdConfig = fanAdView.buildLoadAdConfig().withAdListener(adListener).build();
fanAdView.loadAd(loadAdConfig);
break;
case STARTAPP:
RelativeLayout startAppAdView = activity.findViewById(R.id.startapp_banner_view_container);
Banner banner = new Banner(activity, new BannerListener() {
@Override
public void onReceiveAd(View banner) {
startAppAdView.setVisibility(View.VISIBLE);
}
@Override
public void onFailedToReceiveAd(View banner) {
startAppAdView.setVisibility(View.GONE);
Log.d(TAG, adNetwork + " failed load startapp banner ad : ");
}
@Override
public void onImpression(View view) {
}
@Override
public void onClick(View banner) {
}
});
startAppAdView.addView(banner);
break;
case UNITY:
RelativeLayout unityAdView = activity.findViewById(R.id.unity_banner_view_container);
BannerView bottomBanner = new BannerView(activity, unityBannerId, new UnityBannerSize(UNITY_ADS_BANNER_WIDTH_MEDIUM, UNITY_ADS_BANNER_HEIGHT_MEDIUM));
bottomBanner.setListener(new BannerView.IListener() {
@Override
public void onBannerLoaded(BannerView bannerView) {
unityAdView.setVisibility(View.VISIBLE);
Log.d("Unity_banner", "ready");
}
@Override
public void onBannerShown(BannerView bannerAdView) {
}
@Override
public void onBannerClick(BannerView bannerView) {
}
@Override
public void onBannerFailedToLoad(BannerView bannerView, BannerErrorInfo bannerErrorInfo) {
Log.d("SupportTest", "Banner Error" + bannerErrorInfo);
unityAdView.setVisibility(View.GONE);
}
@Override
public void onBannerLeftApplication(BannerView bannerView) {
}
});
unityAdView.addView(bottomBanner);
bottomBanner.load();
Log.d(TAG, adNetwork + " Banner Ad unit Id : " + unityBannerId);
break;
case APPLOVIN:
case APPLOVIN_MAX:
case FAN_BIDDING_APPLOVIN_MAX:
RelativeLayout appLovinAdView = activity.findViewById(R.id.applovin_banner_view_container);
MaxAdView maxAdView = new MaxAdView(appLovinBannerId, activity);
maxAdView.setListener(new MaxAdViewAdListener() {
@Override
public void onAdExpanded(MaxAd ad) {
}
@Override
public void onAdCollapsed(MaxAd ad) {
}
@Override
public void onAdLoaded(MaxAd ad) {
appLovinAdView.setVisibility(View.VISIBLE);
}
@Override
public void onAdDisplayed(MaxAd ad) {
}
@Override
public void onAdHidden(MaxAd ad) {
}
@Override
public void onAdClicked(MaxAd ad) {
}
@Override
public void onAdLoadFailed(String adUnitId, MaxError error) {
appLovinAdView.setVisibility(View.GONE);
}
@Override
public void onAdDisplayFailed(MaxAd ad, MaxError error) {
}
});
int width = ViewGroup.LayoutParams.MATCH_PARENT;
int heightPx = activity.getResources().getDimensionPixelSize(R.dimen.applovin_banner_height);
maxAdView.setLayoutParams(new FrameLayout.LayoutParams(width, heightPx));
if (darkTheme) {
maxAdView.setBackgroundColor(activity.getResources().getColor(R.color.color_native_background_dark));
} else {
maxAdView.setBackgroundColor(activity.getResources().getColor(R.color.color_native_background_light));
}
appLovinAdView.addView(maxAdView);
maxAdView.loadAd();
Log.d(TAG, adNetwork + " Banner Ad unit Id : " + appLovinBannerId);
break;
case APPLOVIN_DISCOVERY:
RelativeLayout appLovinDiscoveryAdView = activity.findViewById(R.id.applovin_discovery_banner_view_container);
AdRequest.Builder builder = new AdRequest.Builder();
Bundle bannerExtras = new Bundle();
bannerExtras.putString("zone_id", appLovinBannerZoneId);
builder.addCustomEventExtrasBundle(AppLovinCustomEventBanner.class, bannerExtras);
boolean isTablet2 = AppLovinSdkUtils.isTablet(activity);
AppLovinAdSize adSize = isTablet2 ? AppLovinAdSize.LEADER : AppLovinAdSize.BANNER;
this.appLovinAdView = new AppLovinAdView(adSize, activity);
this.appLovinAdView.setAdLoadListener(new AppLovinAdLoadListener() {
@Override
public void adReceived(AppLovinAd ad) {
appLovinDiscoveryAdView.setVisibility(View.VISIBLE);
}
@Override
public void failedToReceiveAd(int errorCode) {
appLovinDiscoveryAdView.setVisibility(View.GONE);
}
});
appLovinDiscoveryAdView.addView(this.appLovinAdView);
this.appLovinAdView.loadNextAd();
break;
case MOPUB:
//Mopub has been acquired by AppLovin
break;
case IRONSOURCE:
case FAN_BIDDING_IRONSOURCE:
ironSourceBannerView = activity.findViewById(R.id.ironsource_banner_view_container);
ISBannerSize size = ISBannerSize.RECTANGLE;
ironSourceBannerLayout = IronSource.createBanner(activity, size);
FrameLayout.LayoutParams layoutParams = new FrameLayout.LayoutParams(FrameLayout.LayoutParams.MATCH_PARENT, FrameLayout.LayoutParams.MATCH_PARENT);
ironSourceBannerView.addView(ironSourceBannerLayout, 0, layoutParams);
if (ironSourceBannerLayout != null) {
ironSourceBannerLayout.setLevelPlayBannerListener(new LevelPlayBannerListener() {
@Override
public void onAdLoaded(AdInfo adInfo) {
Log.d(TAG, "onBannerAdLoaded");
ironSourceBannerView.setVisibility(View.VISIBLE);
}
@Override
public void onAdLoadFailed(IronSourceError ironSourceError) {
Log.d(TAG, "onBannerAdLoadFailed" + " " + ironSourceError.getErrorMessage());
}
@Override
public void onAdClicked(AdInfo adInfo) {
Log.d(TAG, "onBannerAdClicked");
}
@Override
public void onAdLeftApplication(AdInfo adInfo) {
Log.d(TAG, "onBannerAdLeftApplication");
}
@Override
public void onAdScreenPresented(AdInfo adInfo) {
Log.d(TAG, "onBannerAdScreenPresented");
}
@Override
public void onAdScreenDismissed(AdInfo adInfo) {
Log.d(TAG, "onBannerAdScreenDismissed");
}
});
IronSource.loadBanner(ironSourceBannerLayout, ironSourceBannerId);
} else {
Log.d(TAG, "IronSource.createBanner returned null");
}
break;
}
Log.d(TAG, "Banner Ad is enabled");
} else {
Log.d(TAG, "Banner Ad is disabled");
}
}
public void destroyAndDetachBanner() {
if (adStatus.equals(AD_STATUS_ON) && placementStatus != 0) {
if (adNetwork.equals(IRONSOURCE) || backupAdNetwork.equals(IRONSOURCE)) {
if (ironSourceBannerView != null) {
Log.d(TAG, "ironSource banner is not null, ready to destroy");
IronSource.destroyBanner(ironSourceBannerLayout);
ironSourceBannerView.removeView(ironSourceBannerLayout);
} else {
Log.d(TAG, "ironSource banner is null");
}
}
}
}
}
}
| library/src/main/java/com/solodroid/ads/sdk/format/MediumRectangleAd.java | solodroidx-multi-ads-sdk-4f23cdf | [
{
"filename": "library/src/main/java/com/solodroid/ads/sdk/format/BannerAd.java",
"retrieved_chunk": " adContainerView.post(() -> {\n adView = new AdView(activity);\n adView.setAdUnitId(adMobBannerId);\n adContainerView.removeAllViews();\n adContainerView.addView(adView);\n adView.setAdSize(Tools.getAdSize(activity));\n adView.loadAd(Tools.getAdRequest(activity, legacyGDPR));\n adView.setAdListener(new AdListener() {\n @Override\n public void onAdLoaded() {",
"score": 106.20279682048142
},
{
"filename": "library/src/main/java/com/solodroid/ads/sdk/format/BannerAd.java",
"retrieved_chunk": " }\n public void loadBackupBannerAd() {\n if (adStatus.equals(AD_STATUS_ON) && placementStatus != 0) {\n switch (backupAdNetwork) {\n case ADMOB:\n case FAN_BIDDING_ADMOB:\n FrameLayout adContainerView = activity.findViewById(R.id.admob_banner_view_container);\n adContainerView.post(() -> {\n adView = new AdView(activity);\n adView.setAdUnitId(adMobBannerId);",
"score": 103.15051931717178
},
{
"filename": "library/src/main/java/com/solodroid/ads/sdk/format/BannerAd.java",
"retrieved_chunk": " adContainerView.removeAllViews();\n adContainerView.addView(adView);\n adView.setAdSize(Tools.getAdSize(activity));\n adView.loadAd(Tools.getAdRequest(activity, legacyGDPR));\n adView.setAdListener(new AdListener() {\n @Override\n public void onAdLoaded() {\n // Code to be executed when an ad finishes loading.\n adContainerView.setVisibility(View.VISIBLE);\n }",
"score": 79.3140942139075
},
{
"filename": "library/src/main/java/com/solodroid/ads/sdk/format/BannerAd.java",
"retrieved_chunk": " public Builder setLegacyGDPR(boolean legacyGDPR) {\n this.legacyGDPR = legacyGDPR;\n return this;\n }\n public void loadBannerAd() {\n if (adStatus.equals(AD_STATUS_ON) && placementStatus != 0) {\n switch (adNetwork) {\n case ADMOB:\n case FAN_BIDDING_ADMOB:\n FrameLayout adContainerView = activity.findViewById(R.id.admob_banner_view_container);",
"score": 48.12168869948277
},
{
"filename": "library/src/main/java/com/solodroid/ads/sdk/format/NativeAd.java",
"retrieved_chunk": " adView = (NativeAdView) activity.getLayoutInflater().inflate(R.layout.gnt_wortise_medium_template_view, null);\n break;\n }\n populateNativeAdView(nativeAd, adView);\n wortiseNativeAd.removeAllViews();\n wortiseNativeAd.addView(adView);\n wortiseNativeAd.setVisibility(View.VISIBLE);\n nativeAdViewContainer.setVisibility(View.VISIBLE);\n Log.d(TAG, \"Wortise Native Ad loaded\");\n }",
"score": 35.4073352117242
}
] | java | (Tools.getAdSizeMREC()); |
package com.github.stupdit1t.jackson.expand.serializer;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonStreamContext;
import com.fasterxml.jackson.databind.BeanProperty;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.ser.ContextualSerializer;
import com.github.stupdit1t.jackson.expand.annotation.Expand;
import com.github.stupdit1t.jackson.expand.cache.ExpandCache;
import com.github.stupdit1t.jackson.expand.cache.LocalCache;
import com.github.stupdit1t.jackson.expand.config.JacksonExpandProperties;
import com.github.stupdit1t.jackson.expand.domain.ExpandStrategy;
import com.github.stupdit1t.jackson.expand.domain.SerializerParam;
import com.github.stupdit1t.jackson.expand.handler.params.ParamsHandler;
import com.github.stupdit1t.jackson.expand.handler.rsp.ResponseHandler;
import com.github.stupdit1t.jackson.expand.util.ReflectUtil;
import com.github.stupdit1t.jackson.expand.util.SpringUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.ReflectionUtils;
import org.springframework.util.StringUtils;
import org.springframework.web.context.request.RequestAttributes;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
import javax.servlet.ServletRequest;
import java.io.IOException;
import java.lang.reflect.Field;
import java.time.Duration;
import java.util.*;
import java.util.concurrent.locks.StampedLock;
import java.util.stream.Collectors;
public class ExpandSerializer extends JsonSerializer<Object> implements ContextualSerializer {
private static final Logger LOG = LoggerFactory.getLogger(ExpandSerializer.class);
/**
* 成功数据
*/
public static final String OK = "OK";
/**
* 失败数据
*/
public static final String FAIL = "FAIL";
/**
* 缓存
*/
private static ExpandCache cache;
/**
* 配置
*/
private static JacksonExpandProperties jacksonExpandProperties;
/**
* 本地锁缓存,防止同时查询
*/
private static final LocalCache lockCache = new LocalCache();
/**
* 远程调用服务
*/
private Object loadService;
/**
* 方法
*/
private String method;
/**
* 注解参数处理
*/
private SerializerParam params;
/**
* 返回结果处理类
*/
private ParamsHandler paramsHandler;
/**
* 返回结果处理类
*/
private ResponseHandler responseHandler;
/**
* bean名称
*/
private String beanName;
public ExpandSerializer() {
super();
if (cache == null) {
synchronized (ExpandSerializer.class) {
if (cache == null) {
cache = SpringUtil.getBean(ExpandCache.class);
jacksonExpandProperties = SpringUtil.getBean(JacksonExpandProperties.class);
}
}
}
}
public ExpandSerializer(String beanName, String method, SerializerParam params, ParamsHandler paramsHandler, ResponseHandler otherResponseHandler) {
this();
this.loadService = SpringUtil.getBean(beanName);
this.method = method;
this.params = params;
this.responseHandler = otherResponseHandler;
this.paramsHandler = paramsHandler;
this.beanName = beanName;
}
@Override
public void serialize(Object bindData, JsonGenerator gen, SerializerProvider serializers) throws IOException {
String writeFieldPath = getFieldPath(gen.getOutputContext());
// 统一path替换
String dynamicExpandCommonPrefix = jacksonExpandProperties.getDynamicExpandCommonPrefix();
if (StringUtils.hasText(dynamicExpandCommonPrefix) && writeFieldPath.startsWith(dynamicExpandCommonPrefix)) {
writeFieldPath = writeFieldPath.substring(dynamicExpandCommonPrefix.length() + 1);
}
// 是否展开
boolean expand;
// 动态展开开启,判断是否展开
boolean dynamicExpand = jacksonExpandProperties.isDynamicExpand();
if (dynamicExpand) {
Set<String> needExpandField = getParam(jacksonExpandProperties.getDynamicExpandParameterName());
// 如果代码里设置不展开,动态展开也不生效
expand = needExpandField.contains(writeFieldPath) && params.isOpen();
} else {
expand = params.isOpen();
}
if (!expand) {
gen.writeObject(bindData);
return;
}
// 判断要写入的字段
String writeField = gen.getOutputContext().getCurrentName();
if (jacksonExpandProperties.getExpandStrategy() == ExpandStrategy.COVER) {
writeField = gen.getOutputContext().getCurrentName();
} else if (jacksonExpandProperties.getExpandStrategy() == ExpandStrategy.COPY) {
writeField = String.format(jacksonExpandProperties.getCopyStrategyFormat(), gen.getOutputContext().getCurrentName());
}
// 自定义要写入的优先级最高
if (StringUtils.hasText(params.getWriteField())) {
writeField = params.getWriteField();
}
// 设置理论上的响应类型,要不要使用取决于 ResponseHandler 要不要处理,比如只能写入数据对象存在的对象,默认是忽略存不存在
Class<?> writeClass = null;
if (params.getWriteField() != null && StringUtils.hasText(params.getWriteField())) {
Field field = ReflectionUtils.findField(gen.getCurrentValue().getClass(), params.getWriteField());
if (field != null) {
writeClass = field.getType();
}
}
// 关闭不存在字段扩展,被写入的字段类型找不到,不扩展
if (!jacksonExpandProperties.isCanExpandToNotExistField() && writeClass == null) {
gen.writeObject(bindData);
return;
}
// 翻译为非当前字段,先写入当前字段值再翻译
boolean currField = gen.getOutputContext().getCurrentName().equals(writeField);
if (!currField) {
gen.writeObject(bindData);
gen.writeFieldName(writeField);
}
if (bindData == null || loadService == null) {
gen.writeObject(bindData);
return;
}
// 获取缓存KEY
Object[] args = params.getRemoteParams();
int argsLength = args == null ? 0 : args.length;
String cacheKey = jacksonExpandProperties.getCachePrefix() + ":" + beanName + ":" + method + ":%s:" + paramsHandler.getCacheKey(bindData, args);
Object result = getCacheInfo(cacheKey);
if (result != null) {
LOG.info("{} Expand cache 命中: {}", beanName, result);
gen.writeObject(result);
return;
}
StampedLock lock = lockCache.get(cacheKey, new StampedLock(), Duration.ofSeconds(300));
// 写锁避免同一业务ID重复查询
long stamp = lock.writeLock();
Integer cacheTime = params.getCacheTime();
try {
// 多参数组装
Object[] objectParams = new Object[argsLength + 1];
objectParams[0] = paramsHandler.handleVal(bindData);
if(objectParams.length > 1){
System.arraycopy(args, 0, objectParams, 1, argsLength);
}
// 请求翻译结果
Object loadResult = ReflectUtil.invoke(loadService, method, objectParams);
if (loadResult != null) {
result = this.responseHandler.handle(this.beanName, method, loadResult, writeClass, objectParams);
cache.put(String.format(cacheKey, OK), result, Duration.ofSeconds(cacheTime));
} else {
LOG.error("【{}】 Expand失败,未找到:{}", beanName, bindData);
cache.put(String.format(cacheKey, FAIL), bindData, Duration.ofSeconds(cacheTime));
result = bindData;
}
} catch (Exception e) {
LOG.error("【{}】 Expand异常:", beanName, e);
cache.put(String.format(cacheKey, FAIL), bindData, Duration.ofSeconds(cacheTime));
result = bindData;
} finally {
lock.unlockWrite(stamp);
}
gen.writeObject(result);
}
/**
* 获取当前字段的path路径
*
* @param outputContext
* @return
*/
private String getFieldPath(JsonStreamContext outputContext) {
List<String> path = new ArrayList<>(4);
while (outputContext != null) {
String currentName = outputContext.getCurrentName();
if (StringUtils.hasText(currentName)) {
path.add(currentName);
}
outputContext = outputContext.getParent();
}
Collections.reverse(path);
return String.join(".", path);
}
/**
* 获取厍信息
*
* @param cacheKey 缓存的KEY
* @return
*/
private Object getCacheInfo(String cacheKey) {
Object result = | cache.get(String.format(cacheKey, OK)); |
if (result == null) {
result = cache.get(String.format(cacheKey, FAIL));
}
return result;
}
@Override
public JsonSerializer<?> createContextual(SerializerProvider prov, BeanProperty property) throws JsonMappingException {
if (property != null) {
Expand load = property.getAnnotation(Expand.class);
if (load == null) {
throw new RuntimeException("未注解相关 @Expand 注解");
}
String bean = load.bean();
Class<? extends ParamsHandler> paramsHandlerClass = load.paramsHandler();
Class<? extends ResponseHandler> responseHandlerClass = load.responseHandler();
String method = load.method();
try {
ParamsHandler paramsHandler = paramsHandlerClass.getDeclaredConstructor().newInstance();
ResponseHandler responseHandler = responseHandlerClass.getDeclaredConstructor().newInstance();
int cacheTime = load.cacheTime();
// 额外参数处理
SerializerParam params = paramsHandler.handleAnnotation(property);
// 参数处理器没设置,且父注设置了,以父注解为主
if (params.getCacheTime() == null && cacheTime != -1) {
params.setCacheTime(cacheTime);
}
// 缓存时间未设置,取默认
if (params.getCacheTime() == null) {
params.setCacheTime(jacksonExpandProperties.getCacheTimeout());
}
if (params.isOpen() == null) {
params.setExpand(load.expand());
}
return new ExpandSerializer(bean, method, params, paramsHandler, responseHandler);
} catch (Exception e) {
LOG.error("@Expand error: ", e);
}
}
return prov.findNullValueSerializer(null);
}
/**
* 获取展开参数
*
* @param key
* @return
*/
private Set<String> getParam(String key) {
RequestAttributes attributes = RequestContextHolder.getRequestAttributes();
if (attributes == null) {
return Collections.emptySet();
}
ServletRequest request = ((ServletRequestAttributes) attributes).getRequest();
String[] parameterValues = request.getParameterValues(key);
if (parameterValues == null) {
return Collections.emptySet();
}
return Arrays.stream(parameterValues).flatMap(o -> Arrays.stream(o.split(",")))
.collect(Collectors.toSet());
}
}
| src/main/java/com/github/stupdit1t/jackson/expand/serializer/ExpandSerializer.java | stupdit1t-jackson-expand-boot-starter-b492b74 | [
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/cache/ExpandCache.java",
"retrieved_chunk": " key.add(String.valueOf(bindData));\n for (Object subVal : annotationVal) {\n key.add(String.valueOf(subVal));\n }\n String cacheKey = properties.getCachePrefix() + \":\" + beanName + \":\" + method + \":%s:\" + key.toString();\n delete(String.format(cacheKey, ExpandSerializer.OK));\n delete(String.format(cacheKey, ExpandSerializer.FAIL));\n }\n /**\n * 模糊匹配key",
"score": 24.451721750852048
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/cache/LocalCache.java",
"retrieved_chunk": " }\n public <T> T get(String cacheKey, T value, Duration timeout) {\n T val = (T) cacheMap.computeIfAbsent(cacheKey, (key) -> {\n scheduleExpiration(cacheKey, timeout.toMillis());\n return value;\n });\n return val;\n }\n @Override\n public Set<String> keys(String pattern) {",
"score": 18.194081190394343
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/util/ReflectUtil.java",
"retrieved_chunk": " *\n * @param obj\n * @param methodName\n * @param args\n * @return\n */\n public static Object invoke(Object obj, String methodName, Object[] args) throws InvocationTargetException, IllegalAccessException {\n String cacheKey = obj.getClass().getName() + methodName;\n final Method method = METHODS_CACHE.computeIfAbsent(cacheKey, (key) -> getMethod(obj.getClass(), methodName, args));\n if (null == method) {",
"score": 17.280994326885587
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/util/ReflectUtil.java",
"retrieved_chunk": " private static Class<?> unWrap(Class<?> clazz) {\n if (null == clazz || clazz.isPrimitive()) {\n return clazz;\n }\n Class<?> result = WRAPPER_PRIMITIVE_MAP.get(clazz);\n return (null == result) ? clazz : result;\n }\n}",
"score": 11.901504778740316
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/handler/rsp/DefaultResponseHandler.java",
"retrieved_chunk": " * @param rsp 当前返回值\n * @param toClass 要填充字段的类型\n * @param params 当前方法参数\n * @return\n */\n @Override\n public Object handle(String bean, String method, Object rsp, Class<?> toClass, Object... params) {\n return rsp;\n }\n}",
"score": 8.270539078772453
}
] | java | cache.get(String.format(cacheKey, OK)); |
package com.github.stupdit1t.jackson.expand.serializer;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonStreamContext;
import com.fasterxml.jackson.databind.BeanProperty;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.ser.ContextualSerializer;
import com.github.stupdit1t.jackson.expand.annotation.Expand;
import com.github.stupdit1t.jackson.expand.cache.ExpandCache;
import com.github.stupdit1t.jackson.expand.cache.LocalCache;
import com.github.stupdit1t.jackson.expand.config.JacksonExpandProperties;
import com.github.stupdit1t.jackson.expand.domain.ExpandStrategy;
import com.github.stupdit1t.jackson.expand.domain.SerializerParam;
import com.github.stupdit1t.jackson.expand.handler.params.ParamsHandler;
import com.github.stupdit1t.jackson.expand.handler.rsp.ResponseHandler;
import com.github.stupdit1t.jackson.expand.util.ReflectUtil;
import com.github.stupdit1t.jackson.expand.util.SpringUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.ReflectionUtils;
import org.springframework.util.StringUtils;
import org.springframework.web.context.request.RequestAttributes;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
import javax.servlet.ServletRequest;
import java.io.IOException;
import java.lang.reflect.Field;
import java.time.Duration;
import java.util.*;
import java.util.concurrent.locks.StampedLock;
import java.util.stream.Collectors;
public class ExpandSerializer extends JsonSerializer<Object> implements ContextualSerializer {
private static final Logger LOG = LoggerFactory.getLogger(ExpandSerializer.class);
/**
* 成功数据
*/
public static final String OK = "OK";
/**
* 失败数据
*/
public static final String FAIL = "FAIL";
/**
* 缓存
*/
private static ExpandCache cache;
/**
* 配置
*/
private static JacksonExpandProperties jacksonExpandProperties;
/**
* 本地锁缓存,防止同时查询
*/
private static final LocalCache lockCache = new LocalCache();
/**
* 远程调用服务
*/
private Object loadService;
/**
* 方法
*/
private String method;
/**
* 注解参数处理
*/
private SerializerParam params;
/**
* 返回结果处理类
*/
private ParamsHandler paramsHandler;
/**
* 返回结果处理类
*/
private ResponseHandler responseHandler;
/**
* bean名称
*/
private String beanName;
public ExpandSerializer() {
super();
if (cache == null) {
synchronized (ExpandSerializer.class) {
if (cache == null) {
cache = SpringUtil.getBean(ExpandCache.class);
jacksonExpandProperties = SpringUtil.getBean(JacksonExpandProperties.class);
}
}
}
}
public ExpandSerializer(String beanName, String method, SerializerParam params, ParamsHandler paramsHandler, ResponseHandler otherResponseHandler) {
this();
this.loadService = SpringUtil.getBean(beanName);
this.method = method;
this.params = params;
this.responseHandler = otherResponseHandler;
this.paramsHandler = paramsHandler;
this.beanName = beanName;
}
@Override
public void serialize(Object bindData, JsonGenerator gen, SerializerProvider serializers) throws IOException {
String writeFieldPath = getFieldPath(gen.getOutputContext());
// 统一path替换
String dynamicExpandCommonPrefix = jacksonExpandProperties.getDynamicExpandCommonPrefix();
if (StringUtils.hasText(dynamicExpandCommonPrefix) && writeFieldPath.startsWith(dynamicExpandCommonPrefix)) {
writeFieldPath = writeFieldPath.substring(dynamicExpandCommonPrefix.length() + 1);
}
// 是否展开
boolean expand;
// 动态展开开启,判断是否展开
boolean dynamicExpand = jacksonExpandProperties.isDynamicExpand();
if (dynamicExpand) {
Set<String> needExpandField = getParam(jacksonExpandProperties.getDynamicExpandParameterName());
// 如果代码里设置不展开,动态展开也不生效
expand = needExpandField.contains(writeFieldPath) && params.isOpen();
} else {
expand = params.isOpen();
}
if (!expand) {
gen.writeObject(bindData);
return;
}
// 判断要写入的字段
String writeField = gen.getOutputContext().getCurrentName();
if (jacksonExpandProperties.getExpandStrategy() == ExpandStrategy.COVER) {
writeField = gen.getOutputContext().getCurrentName();
} else if (jacksonExpandProperties.getExpandStrategy() == ExpandStrategy.COPY) {
writeField = String.format(jacksonExpandProperties.getCopyStrategyFormat(), gen.getOutputContext().getCurrentName());
}
// 自定义要写入的优先级最高
if (StringUtils.hasText(params.getWriteField())) {
writeField = params.getWriteField();
}
// 设置理论上的响应类型,要不要使用取决于 ResponseHandler 要不要处理,比如只能写入数据对象存在的对象,默认是忽略存不存在
Class<?> writeClass = null;
if (params.getWriteField() != null && StringUtils.hasText(params.getWriteField())) {
Field field = ReflectionUtils.findField(gen.getCurrentValue().getClass(), params.getWriteField());
if (field != null) {
writeClass = field.getType();
}
}
// 关闭不存在字段扩展,被写入的字段类型找不到,不扩展
if (!jacksonExpandProperties.isCanExpandToNotExistField() && writeClass == null) {
gen.writeObject(bindData);
return;
}
// 翻译为非当前字段,先写入当前字段值再翻译
boolean currField = gen.getOutputContext().getCurrentName().equals(writeField);
if (!currField) {
gen.writeObject(bindData);
gen.writeFieldName(writeField);
}
if (bindData == null || loadService == null) {
gen.writeObject(bindData);
return;
}
// 获取缓存KEY
Object[] args = params.getRemoteParams();
int argsLength = args == null ? 0 : args.length;
String cacheKey = jacksonExpandProperties.getCachePrefix() + ":" + beanName + ":" + method + ":%s:" + paramsHandler.getCacheKey(bindData, args);
Object result = getCacheInfo(cacheKey);
if (result != null) {
LOG.info("{} Expand cache 命中: {}", beanName, result);
gen.writeObject(result);
return;
}
StampedLock | lock = lockCache.get(cacheKey, new StampedLock(), Duration.ofSeconds(300)); |
// 写锁避免同一业务ID重复查询
long stamp = lock.writeLock();
Integer cacheTime = params.getCacheTime();
try {
// 多参数组装
Object[] objectParams = new Object[argsLength + 1];
objectParams[0] = paramsHandler.handleVal(bindData);
if(objectParams.length > 1){
System.arraycopy(args, 0, objectParams, 1, argsLength);
}
// 请求翻译结果
Object loadResult = ReflectUtil.invoke(loadService, method, objectParams);
if (loadResult != null) {
result = this.responseHandler.handle(this.beanName, method, loadResult, writeClass, objectParams);
cache.put(String.format(cacheKey, OK), result, Duration.ofSeconds(cacheTime));
} else {
LOG.error("【{}】 Expand失败,未找到:{}", beanName, bindData);
cache.put(String.format(cacheKey, FAIL), bindData, Duration.ofSeconds(cacheTime));
result = bindData;
}
} catch (Exception e) {
LOG.error("【{}】 Expand异常:", beanName, e);
cache.put(String.format(cacheKey, FAIL), bindData, Duration.ofSeconds(cacheTime));
result = bindData;
} finally {
lock.unlockWrite(stamp);
}
gen.writeObject(result);
}
/**
* 获取当前字段的path路径
*
* @param outputContext
* @return
*/
private String getFieldPath(JsonStreamContext outputContext) {
List<String> path = new ArrayList<>(4);
while (outputContext != null) {
String currentName = outputContext.getCurrentName();
if (StringUtils.hasText(currentName)) {
path.add(currentName);
}
outputContext = outputContext.getParent();
}
Collections.reverse(path);
return String.join(".", path);
}
/**
* 获取厍信息
*
* @param cacheKey 缓存的KEY
* @return
*/
private Object getCacheInfo(String cacheKey) {
Object result = cache.get(String.format(cacheKey, OK));
if (result == null) {
result = cache.get(String.format(cacheKey, FAIL));
}
return result;
}
@Override
public JsonSerializer<?> createContextual(SerializerProvider prov, BeanProperty property) throws JsonMappingException {
if (property != null) {
Expand load = property.getAnnotation(Expand.class);
if (load == null) {
throw new RuntimeException("未注解相关 @Expand 注解");
}
String bean = load.bean();
Class<? extends ParamsHandler> paramsHandlerClass = load.paramsHandler();
Class<? extends ResponseHandler> responseHandlerClass = load.responseHandler();
String method = load.method();
try {
ParamsHandler paramsHandler = paramsHandlerClass.getDeclaredConstructor().newInstance();
ResponseHandler responseHandler = responseHandlerClass.getDeclaredConstructor().newInstance();
int cacheTime = load.cacheTime();
// 额外参数处理
SerializerParam params = paramsHandler.handleAnnotation(property);
// 参数处理器没设置,且父注设置了,以父注解为主
if (params.getCacheTime() == null && cacheTime != -1) {
params.setCacheTime(cacheTime);
}
// 缓存时间未设置,取默认
if (params.getCacheTime() == null) {
params.setCacheTime(jacksonExpandProperties.getCacheTimeout());
}
if (params.isOpen() == null) {
params.setExpand(load.expand());
}
return new ExpandSerializer(bean, method, params, paramsHandler, responseHandler);
} catch (Exception e) {
LOG.error("@Expand error: ", e);
}
}
return prov.findNullValueSerializer(null);
}
/**
* 获取展开参数
*
* @param key
* @return
*/
private Set<String> getParam(String key) {
RequestAttributes attributes = RequestContextHolder.getRequestAttributes();
if (attributes == null) {
return Collections.emptySet();
}
ServletRequest request = ((ServletRequestAttributes) attributes).getRequest();
String[] parameterValues = request.getParameterValues(key);
if (parameterValues == null) {
return Collections.emptySet();
}
return Arrays.stream(parameterValues).flatMap(o -> Arrays.stream(o.split(",")))
.collect(Collectors.toSet());
}
}
| src/main/java/com/github/stupdit1t/jackson/expand/serializer/ExpandSerializer.java | stupdit1t-jackson-expand-boot-starter-b492b74 | [
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/util/ReflectUtil.java",
"retrieved_chunk": " private static Class<?> unWrap(Class<?> clazz) {\n if (null == clazz || clazz.isPrimitive()) {\n return clazz;\n }\n Class<?> result = WRAPPER_PRIMITIVE_MAP.get(clazz);\n return (null == result) ? clazz : result;\n }\n}",
"score": 36.55130525175843
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/util/ReflectUtil.java",
"retrieved_chunk": " *\n * @param obj\n * @param methodName\n * @param args\n * @return\n */\n public static Object invoke(Object obj, String methodName, Object[] args) throws InvocationTargetException, IllegalAccessException {\n String cacheKey = obj.getClass().getName() + methodName;\n final Method method = METHODS_CACHE.computeIfAbsent(cacheKey, (key) -> getMethod(obj.getClass(), methodName, args));\n if (null == method) {",
"score": 32.02790927361684
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/cache/ExpandCache.java",
"retrieved_chunk": " key.add(String.valueOf(bindData));\n for (Object subVal : annotationVal) {\n key.add(String.valueOf(subVal));\n }\n String cacheKey = properties.getCachePrefix() + \":\" + beanName + \":\" + method + \":%s:\" + key.toString();\n delete(String.format(cacheKey, ExpandSerializer.OK));\n delete(String.format(cacheKey, ExpandSerializer.FAIL));\n }\n /**\n * 模糊匹配key",
"score": 30.222859443111275
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/cache/LocalCache.java",
"retrieved_chunk": " }\n public <T> T get(String cacheKey, T value, Duration timeout) {\n T val = (T) cacheMap.computeIfAbsent(cacheKey, (key) -> {\n scheduleExpiration(cacheKey, timeout.toMillis());\n return value;\n });\n return val;\n }\n @Override\n public Set<String> keys(String pattern) {",
"score": 19.96529865689837
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/util/ReflectUtil.java",
"retrieved_chunk": " throw new UnsupportedOperationException(\"No such method: [\" + methodName + \"] from [\" + obj.getClass() + \"]\");\n }\n return method.invoke(obj, args);\n }\n /**\n * 获取反射方法\n *\n * @param beanClass\n * @param methodName\n * @param args",
"score": 18.365299403092358
}
] | java | lock = lockCache.get(cacheKey, new StampedLock(), Duration.ofSeconds(300)); |
package com.github.stupdit1t.jackson.expand.serializer;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonStreamContext;
import com.fasterxml.jackson.databind.BeanProperty;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.ser.ContextualSerializer;
import com.github.stupdit1t.jackson.expand.annotation.Expand;
import com.github.stupdit1t.jackson.expand.cache.ExpandCache;
import com.github.stupdit1t.jackson.expand.cache.LocalCache;
import com.github.stupdit1t.jackson.expand.config.JacksonExpandProperties;
import com.github.stupdit1t.jackson.expand.domain.ExpandStrategy;
import com.github.stupdit1t.jackson.expand.domain.SerializerParam;
import com.github.stupdit1t.jackson.expand.handler.params.ParamsHandler;
import com.github.stupdit1t.jackson.expand.handler.rsp.ResponseHandler;
import com.github.stupdit1t.jackson.expand.util.ReflectUtil;
import com.github.stupdit1t.jackson.expand.util.SpringUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.ReflectionUtils;
import org.springframework.util.StringUtils;
import org.springframework.web.context.request.RequestAttributes;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
import javax.servlet.ServletRequest;
import java.io.IOException;
import java.lang.reflect.Field;
import java.time.Duration;
import java.util.*;
import java.util.concurrent.locks.StampedLock;
import java.util.stream.Collectors;
public class ExpandSerializer extends JsonSerializer<Object> implements ContextualSerializer {
private static final Logger LOG = LoggerFactory.getLogger(ExpandSerializer.class);
/**
* 成功数据
*/
public static final String OK = "OK";
/**
* 失败数据
*/
public static final String FAIL = "FAIL";
/**
* 缓存
*/
private static ExpandCache cache;
/**
* 配置
*/
private static JacksonExpandProperties jacksonExpandProperties;
/**
* 本地锁缓存,防止同时查询
*/
private static final LocalCache lockCache = new LocalCache();
/**
* 远程调用服务
*/
private Object loadService;
/**
* 方法
*/
private String method;
/**
* 注解参数处理
*/
private SerializerParam params;
/**
* 返回结果处理类
*/
private ParamsHandler paramsHandler;
/**
* 返回结果处理类
*/
private ResponseHandler responseHandler;
/**
* bean名称
*/
private String beanName;
public ExpandSerializer() {
super();
if (cache == null) {
synchronized (ExpandSerializer.class) {
if (cache == null) {
cache = SpringUtil.getBean(ExpandCache.class);
jacksonExpandProperties = SpringUtil.getBean(JacksonExpandProperties.class);
}
}
}
}
public ExpandSerializer(String beanName, String method, SerializerParam params, ParamsHandler paramsHandler, ResponseHandler otherResponseHandler) {
this();
this.loadService = SpringUtil.getBean(beanName);
this.method = method;
this.params = params;
this.responseHandler = otherResponseHandler;
this.paramsHandler = paramsHandler;
this.beanName = beanName;
}
@Override
public void serialize(Object bindData, JsonGenerator gen, SerializerProvider serializers) throws IOException {
String writeFieldPath = getFieldPath(gen.getOutputContext());
// 统一path替换
String dynamicExpandCommonPrefix = jacksonExpandProperties.getDynamicExpandCommonPrefix();
if (StringUtils.hasText(dynamicExpandCommonPrefix) && writeFieldPath.startsWith(dynamicExpandCommonPrefix)) {
writeFieldPath = writeFieldPath.substring(dynamicExpandCommonPrefix.length() + 1);
}
// 是否展开
boolean expand;
// 动态展开开启,判断是否展开
boolean dynamicExpand = jacksonExpandProperties.isDynamicExpand();
if (dynamicExpand) {
Set<String> needExpandField = getParam(jacksonExpandProperties.getDynamicExpandParameterName());
// 如果代码里设置不展开,动态展开也不生效
expand = needExpandField.contains(writeFieldPath) && params.isOpen();
} else {
expand = params.isOpen();
}
if (!expand) {
gen.writeObject(bindData);
return;
}
// 判断要写入的字段
String writeField = gen.getOutputContext().getCurrentName();
if (jacksonExpandProperties.getExpandStrategy() == ExpandStrategy.COVER) {
writeField = gen.getOutputContext().getCurrentName();
} else if (jacksonExpandProperties.getExpandStrategy() == ExpandStrategy.COPY) {
writeField = String. | format(jacksonExpandProperties.getCopyStrategyFormat(), gen.getOutputContext().getCurrentName()); |
}
// 自定义要写入的优先级最高
if (StringUtils.hasText(params.getWriteField())) {
writeField = params.getWriteField();
}
// 设置理论上的响应类型,要不要使用取决于 ResponseHandler 要不要处理,比如只能写入数据对象存在的对象,默认是忽略存不存在
Class<?> writeClass = null;
if (params.getWriteField() != null && StringUtils.hasText(params.getWriteField())) {
Field field = ReflectionUtils.findField(gen.getCurrentValue().getClass(), params.getWriteField());
if (field != null) {
writeClass = field.getType();
}
}
// 关闭不存在字段扩展,被写入的字段类型找不到,不扩展
if (!jacksonExpandProperties.isCanExpandToNotExistField() && writeClass == null) {
gen.writeObject(bindData);
return;
}
// 翻译为非当前字段,先写入当前字段值再翻译
boolean currField = gen.getOutputContext().getCurrentName().equals(writeField);
if (!currField) {
gen.writeObject(bindData);
gen.writeFieldName(writeField);
}
if (bindData == null || loadService == null) {
gen.writeObject(bindData);
return;
}
// 获取缓存KEY
Object[] args = params.getRemoteParams();
int argsLength = args == null ? 0 : args.length;
String cacheKey = jacksonExpandProperties.getCachePrefix() + ":" + beanName + ":" + method + ":%s:" + paramsHandler.getCacheKey(bindData, args);
Object result = getCacheInfo(cacheKey);
if (result != null) {
LOG.info("{} Expand cache 命中: {}", beanName, result);
gen.writeObject(result);
return;
}
StampedLock lock = lockCache.get(cacheKey, new StampedLock(), Duration.ofSeconds(300));
// 写锁避免同一业务ID重复查询
long stamp = lock.writeLock();
Integer cacheTime = params.getCacheTime();
try {
// 多参数组装
Object[] objectParams = new Object[argsLength + 1];
objectParams[0] = paramsHandler.handleVal(bindData);
if(objectParams.length > 1){
System.arraycopy(args, 0, objectParams, 1, argsLength);
}
// 请求翻译结果
Object loadResult = ReflectUtil.invoke(loadService, method, objectParams);
if (loadResult != null) {
result = this.responseHandler.handle(this.beanName, method, loadResult, writeClass, objectParams);
cache.put(String.format(cacheKey, OK), result, Duration.ofSeconds(cacheTime));
} else {
LOG.error("【{}】 Expand失败,未找到:{}", beanName, bindData);
cache.put(String.format(cacheKey, FAIL), bindData, Duration.ofSeconds(cacheTime));
result = bindData;
}
} catch (Exception e) {
LOG.error("【{}】 Expand异常:", beanName, e);
cache.put(String.format(cacheKey, FAIL), bindData, Duration.ofSeconds(cacheTime));
result = bindData;
} finally {
lock.unlockWrite(stamp);
}
gen.writeObject(result);
}
/**
* 获取当前字段的path路径
*
* @param outputContext
* @return
*/
private String getFieldPath(JsonStreamContext outputContext) {
List<String> path = new ArrayList<>(4);
while (outputContext != null) {
String currentName = outputContext.getCurrentName();
if (StringUtils.hasText(currentName)) {
path.add(currentName);
}
outputContext = outputContext.getParent();
}
Collections.reverse(path);
return String.join(".", path);
}
/**
* 获取厍信息
*
* @param cacheKey 缓存的KEY
* @return
*/
private Object getCacheInfo(String cacheKey) {
Object result = cache.get(String.format(cacheKey, OK));
if (result == null) {
result = cache.get(String.format(cacheKey, FAIL));
}
return result;
}
@Override
public JsonSerializer<?> createContextual(SerializerProvider prov, BeanProperty property) throws JsonMappingException {
if (property != null) {
Expand load = property.getAnnotation(Expand.class);
if (load == null) {
throw new RuntimeException("未注解相关 @Expand 注解");
}
String bean = load.bean();
Class<? extends ParamsHandler> paramsHandlerClass = load.paramsHandler();
Class<? extends ResponseHandler> responseHandlerClass = load.responseHandler();
String method = load.method();
try {
ParamsHandler paramsHandler = paramsHandlerClass.getDeclaredConstructor().newInstance();
ResponseHandler responseHandler = responseHandlerClass.getDeclaredConstructor().newInstance();
int cacheTime = load.cacheTime();
// 额外参数处理
SerializerParam params = paramsHandler.handleAnnotation(property);
// 参数处理器没设置,且父注设置了,以父注解为主
if (params.getCacheTime() == null && cacheTime != -1) {
params.setCacheTime(cacheTime);
}
// 缓存时间未设置,取默认
if (params.getCacheTime() == null) {
params.setCacheTime(jacksonExpandProperties.getCacheTimeout());
}
if (params.isOpen() == null) {
params.setExpand(load.expand());
}
return new ExpandSerializer(bean, method, params, paramsHandler, responseHandler);
} catch (Exception e) {
LOG.error("@Expand error: ", e);
}
}
return prov.findNullValueSerializer(null);
}
/**
* 获取展开参数
*
* @param key
* @return
*/
private Set<String> getParam(String key) {
RequestAttributes attributes = RequestContextHolder.getRequestAttributes();
if (attributes == null) {
return Collections.emptySet();
}
ServletRequest request = ((ServletRequestAttributes) attributes).getRequest();
String[] parameterValues = request.getParameterValues(key);
if (parameterValues == null) {
return Collections.emptySet();
}
return Arrays.stream(parameterValues).flatMap(o -> Arrays.stream(o.split(",")))
.collect(Collectors.toSet());
}
}
| src/main/java/com/github/stupdit1t/jackson/expand/serializer/ExpandSerializer.java | stupdit1t-jackson-expand-boot-starter-b492b74 | [
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/config/JacksonExpandProperties.java",
"retrieved_chunk": " this.dynamicExpandParameterName = dynamicExpandParameterName;\n }\n public ExpandStrategy getExpandStrategy() {\n return expandStrategy;\n }\n public void setExpandStrategy(ExpandStrategy expandStrategy) {\n this.expandStrategy = expandStrategy;\n }\n public String getCopyStrategyFormat() {\n return copyStrategyFormat;",
"score": 22.354486967157044
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/config/JacksonExpandConfigure.java",
"retrieved_chunk": " }\n /**\n * jackson 配置\n *\n * @return\n */\n @Bean\n public JacksonExpandProperties jacksonExpandProperties() {\n return new JacksonExpandProperties();\n }",
"score": 17.2712782354109
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/domain/SerializerParam.java",
"retrieved_chunk": " public void setWriteField(String writeField) {\n this.writeField = writeField;\n }\n public Integer getCacheTime() {\n return cacheTime;\n }\n public void setCacheTime(Integer cacheTime) {\n this.cacheTime = cacheTime;\n }\n public Boolean isOpen() {",
"score": 16.40621740292572
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/domain/SerializerParam.java",
"retrieved_chunk": " * 写入字段\n */\n private String writeField;\n /**\n * 值缓存时间\n */\n private Integer cacheTime;\n /**\n * 是否展开\n */",
"score": 13.287430441593258
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/config/JacksonExpandProperties.java",
"retrieved_chunk": " */\n private String dynamicExpandParameterName = \"expand\";\n /**\n * 动态展开 统一数据的Path前缀,比如前缀是 data.body. 如果配置 expand=userId, 相当于是expnad=data.body.userId, 默认无\n */\n private String dynamicExpandCommonPrefix;\n /**\n * 展开策略, 默认覆盖\n */\n private ExpandStrategy expandStrategy = ExpandStrategy.COVER;",
"score": 11.462891458218719
}
] | java | format(jacksonExpandProperties.getCopyStrategyFormat(), gen.getOutputContext().getCurrentName()); |
package com.github.stupdit1t.jackson.expand.serializer;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonStreamContext;
import com.fasterxml.jackson.databind.BeanProperty;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.ser.ContextualSerializer;
import com.github.stupdit1t.jackson.expand.annotation.Expand;
import com.github.stupdit1t.jackson.expand.cache.ExpandCache;
import com.github.stupdit1t.jackson.expand.cache.LocalCache;
import com.github.stupdit1t.jackson.expand.config.JacksonExpandProperties;
import com.github.stupdit1t.jackson.expand.domain.ExpandStrategy;
import com.github.stupdit1t.jackson.expand.domain.SerializerParam;
import com.github.stupdit1t.jackson.expand.handler.params.ParamsHandler;
import com.github.stupdit1t.jackson.expand.handler.rsp.ResponseHandler;
import com.github.stupdit1t.jackson.expand.util.ReflectUtil;
import com.github.stupdit1t.jackson.expand.util.SpringUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.ReflectionUtils;
import org.springframework.util.StringUtils;
import org.springframework.web.context.request.RequestAttributes;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
import javax.servlet.ServletRequest;
import java.io.IOException;
import java.lang.reflect.Field;
import java.time.Duration;
import java.util.*;
import java.util.concurrent.locks.StampedLock;
import java.util.stream.Collectors;
public class ExpandSerializer extends JsonSerializer<Object> implements ContextualSerializer {
private static final Logger LOG = LoggerFactory.getLogger(ExpandSerializer.class);
/**
* 成功数据
*/
public static final String OK = "OK";
/**
* 失败数据
*/
public static final String FAIL = "FAIL";
/**
* 缓存
*/
private static ExpandCache cache;
/**
* 配置
*/
private static JacksonExpandProperties jacksonExpandProperties;
/**
* 本地锁缓存,防止同时查询
*/
private static final LocalCache lockCache = new LocalCache();
/**
* 远程调用服务
*/
private Object loadService;
/**
* 方法
*/
private String method;
/**
* 注解参数处理
*/
private SerializerParam params;
/**
* 返回结果处理类
*/
private ParamsHandler paramsHandler;
/**
* 返回结果处理类
*/
private ResponseHandler responseHandler;
/**
* bean名称
*/
private String beanName;
public ExpandSerializer() {
super();
if (cache == null) {
synchronized (ExpandSerializer.class) {
if (cache == null) {
cache = SpringUtil.getBean(ExpandCache.class);
jacksonExpandProperties = SpringUtil.getBean(JacksonExpandProperties.class);
}
}
}
}
public ExpandSerializer(String beanName, String method, SerializerParam params, ParamsHandler paramsHandler, ResponseHandler otherResponseHandler) {
this();
this.loadService = SpringUtil.getBean(beanName);
this.method = method;
this.params = params;
this.responseHandler = otherResponseHandler;
this.paramsHandler = paramsHandler;
this.beanName = beanName;
}
@Override
public void serialize(Object bindData, JsonGenerator gen, SerializerProvider serializers) throws IOException {
String writeFieldPath = getFieldPath(gen.getOutputContext());
// 统一path替换
String dynamicExpandCommonPrefix = jacksonExpandProperties.getDynamicExpandCommonPrefix();
if (StringUtils.hasText(dynamicExpandCommonPrefix) && writeFieldPath.startsWith(dynamicExpandCommonPrefix)) {
writeFieldPath = writeFieldPath.substring(dynamicExpandCommonPrefix.length() + 1);
}
// 是否展开
boolean expand;
// 动态展开开启,判断是否展开
boolean dynamicExpand = jacksonExpandProperties.isDynamicExpand();
if (dynamicExpand) {
Set<String> needExpandField = getParam | (jacksonExpandProperties.getDynamicExpandParameterName()); |
// 如果代码里设置不展开,动态展开也不生效
expand = needExpandField.contains(writeFieldPath) && params.isOpen();
} else {
expand = params.isOpen();
}
if (!expand) {
gen.writeObject(bindData);
return;
}
// 判断要写入的字段
String writeField = gen.getOutputContext().getCurrentName();
if (jacksonExpandProperties.getExpandStrategy() == ExpandStrategy.COVER) {
writeField = gen.getOutputContext().getCurrentName();
} else if (jacksonExpandProperties.getExpandStrategy() == ExpandStrategy.COPY) {
writeField = String.format(jacksonExpandProperties.getCopyStrategyFormat(), gen.getOutputContext().getCurrentName());
}
// 自定义要写入的优先级最高
if (StringUtils.hasText(params.getWriteField())) {
writeField = params.getWriteField();
}
// 设置理论上的响应类型,要不要使用取决于 ResponseHandler 要不要处理,比如只能写入数据对象存在的对象,默认是忽略存不存在
Class<?> writeClass = null;
if (params.getWriteField() != null && StringUtils.hasText(params.getWriteField())) {
Field field = ReflectionUtils.findField(gen.getCurrentValue().getClass(), params.getWriteField());
if (field != null) {
writeClass = field.getType();
}
}
// 关闭不存在字段扩展,被写入的字段类型找不到,不扩展
if (!jacksonExpandProperties.isCanExpandToNotExistField() && writeClass == null) {
gen.writeObject(bindData);
return;
}
// 翻译为非当前字段,先写入当前字段值再翻译
boolean currField = gen.getOutputContext().getCurrentName().equals(writeField);
if (!currField) {
gen.writeObject(bindData);
gen.writeFieldName(writeField);
}
if (bindData == null || loadService == null) {
gen.writeObject(bindData);
return;
}
// 获取缓存KEY
Object[] args = params.getRemoteParams();
int argsLength = args == null ? 0 : args.length;
String cacheKey = jacksonExpandProperties.getCachePrefix() + ":" + beanName + ":" + method + ":%s:" + paramsHandler.getCacheKey(bindData, args);
Object result = getCacheInfo(cacheKey);
if (result != null) {
LOG.info("{} Expand cache 命中: {}", beanName, result);
gen.writeObject(result);
return;
}
StampedLock lock = lockCache.get(cacheKey, new StampedLock(), Duration.ofSeconds(300));
// 写锁避免同一业务ID重复查询
long stamp = lock.writeLock();
Integer cacheTime = params.getCacheTime();
try {
// 多参数组装
Object[] objectParams = new Object[argsLength + 1];
objectParams[0] = paramsHandler.handleVal(bindData);
if(objectParams.length > 1){
System.arraycopy(args, 0, objectParams, 1, argsLength);
}
// 请求翻译结果
Object loadResult = ReflectUtil.invoke(loadService, method, objectParams);
if (loadResult != null) {
result = this.responseHandler.handle(this.beanName, method, loadResult, writeClass, objectParams);
cache.put(String.format(cacheKey, OK), result, Duration.ofSeconds(cacheTime));
} else {
LOG.error("【{}】 Expand失败,未找到:{}", beanName, bindData);
cache.put(String.format(cacheKey, FAIL), bindData, Duration.ofSeconds(cacheTime));
result = bindData;
}
} catch (Exception e) {
LOG.error("【{}】 Expand异常:", beanName, e);
cache.put(String.format(cacheKey, FAIL), bindData, Duration.ofSeconds(cacheTime));
result = bindData;
} finally {
lock.unlockWrite(stamp);
}
gen.writeObject(result);
}
/**
* 获取当前字段的path路径
*
* @param outputContext
* @return
*/
private String getFieldPath(JsonStreamContext outputContext) {
List<String> path = new ArrayList<>(4);
while (outputContext != null) {
String currentName = outputContext.getCurrentName();
if (StringUtils.hasText(currentName)) {
path.add(currentName);
}
outputContext = outputContext.getParent();
}
Collections.reverse(path);
return String.join(".", path);
}
/**
* 获取厍信息
*
* @param cacheKey 缓存的KEY
* @return
*/
private Object getCacheInfo(String cacheKey) {
Object result = cache.get(String.format(cacheKey, OK));
if (result == null) {
result = cache.get(String.format(cacheKey, FAIL));
}
return result;
}
@Override
public JsonSerializer<?> createContextual(SerializerProvider prov, BeanProperty property) throws JsonMappingException {
if (property != null) {
Expand load = property.getAnnotation(Expand.class);
if (load == null) {
throw new RuntimeException("未注解相关 @Expand 注解");
}
String bean = load.bean();
Class<? extends ParamsHandler> paramsHandlerClass = load.paramsHandler();
Class<? extends ResponseHandler> responseHandlerClass = load.responseHandler();
String method = load.method();
try {
ParamsHandler paramsHandler = paramsHandlerClass.getDeclaredConstructor().newInstance();
ResponseHandler responseHandler = responseHandlerClass.getDeclaredConstructor().newInstance();
int cacheTime = load.cacheTime();
// 额外参数处理
SerializerParam params = paramsHandler.handleAnnotation(property);
// 参数处理器没设置,且父注设置了,以父注解为主
if (params.getCacheTime() == null && cacheTime != -1) {
params.setCacheTime(cacheTime);
}
// 缓存时间未设置,取默认
if (params.getCacheTime() == null) {
params.setCacheTime(jacksonExpandProperties.getCacheTimeout());
}
if (params.isOpen() == null) {
params.setExpand(load.expand());
}
return new ExpandSerializer(bean, method, params, paramsHandler, responseHandler);
} catch (Exception e) {
LOG.error("@Expand error: ", e);
}
}
return prov.findNullValueSerializer(null);
}
/**
* 获取展开参数
*
* @param key
* @return
*/
private Set<String> getParam(String key) {
RequestAttributes attributes = RequestContextHolder.getRequestAttributes();
if (attributes == null) {
return Collections.emptySet();
}
ServletRequest request = ((ServletRequestAttributes) attributes).getRequest();
String[] parameterValues = request.getParameterValues(key);
if (parameterValues == null) {
return Collections.emptySet();
}
return Arrays.stream(parameterValues).flatMap(o -> Arrays.stream(o.split(",")))
.collect(Collectors.toSet());
}
}
| src/main/java/com/github/stupdit1t/jackson/expand/serializer/ExpandSerializer.java | stupdit1t-jackson-expand-boot-starter-b492b74 | [
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/config/JacksonExpandProperties.java",
"retrieved_chunk": " public boolean isDynamicExpand() {\n return dynamicExpand;\n }\n public void setDynamicExpand(boolean dynamicExpand) {\n this.dynamicExpand = dynamicExpand;\n }\n public String getDynamicExpandParameterName() {\n return dynamicExpandParameterName;\n }\n public void setDynamicExpandParameterName(String dynamicExpandParameterName) {",
"score": 26.13933911812105
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/config/JacksonExpandProperties.java",
"retrieved_chunk": " public String getDynamicExpandCommonPrefix() {\n return dynamicExpandCommonPrefix;\n }\n public void setDynamicExpandCommonPrefix(String dynamicExpandCommonPrefix) {\n this.dynamicExpandCommonPrefix = dynamicExpandCommonPrefix;\n }\n}",
"score": 20.598548422608943
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/handler/params/DefaultParamsHandler.java",
"retrieved_chunk": " if (expand != null) {\n if (StringUtils.hasText(expand.to())) {\n params.setWriteField(expand.to());\n }\n params.setExpand(expand.expand());\n }\n return params;\n }\n}",
"score": 14.548314907038291
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/config/JacksonExpandProperties.java",
"retrieved_chunk": " * 是否要动态展开,如果true。则通过接口url传参进行展开,默认不展开。\n * 如果代码里设置不展开,动态展开也不生效\n * <p>\n * 如传参 /api/user?expand=userId,father.id\n * <p>\n * 则会展开\n */\n private boolean dynamicExpand;\n /**\n * 动态展开参数名字, URL 接受的参数",
"score": 11.869345753464636
},
{
"filename": "src/main/java/com/github/stupdit1t/jackson/expand/annotation/Expand.java",
"retrieved_chunk": " *\n * @return\n */\n int cacheTime() default -1;\n /**\n * 是否要展开\n *\n * @return\n */\n boolean expand() default true;",
"score": 11.33475855723348
}
] | java | (jacksonExpandProperties.getDynamicExpandParameterName()); |
package com.tati.controller;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import com.tati.model.YoutubeData;
import com.tati.service.YoutubeService;
import com.tati.utils.YoutubeAPI;
import java.util.List;
@CrossOrigin("*")
@RestController
public class YoutubeAPIController {
@Autowired
private YoutubeService youtubeService;
@PostMapping("/api/data")
public YoutubeData add(@RequestParam(name= "url") String url){
if (url != null) {
YoutubeAPI youtube = new YoutubeAPI();
List<String> videoData = youtube.getVideoData(url);
YoutubeData dataHolder= new YoutubeData();
dataHolder.setVideoTitle(videoData.get(0));
dataHolder.setPublishedAt(videoData.get(1));
dataHolder.setViewCount(videoData.get(2));
dataHolder.setLikeCount(videoData.get(3));
dataHolder.setCommentCount(videoData.get(4));
dataHolder.setEngagementRate(videoData.get(5));
dataHolder.setChannelTitle(videoData.get(6));
dataHolder.setVideoPopularTitle(videoData.get(7));
| dataHolder.setVideoPopularLink(videoData.get(8)); |
youtubeService.save(dataHolder);
return dataHolder;
} else {
return null;
}
}
}
| src/main/java/com/tati/controller/YoutubeAPIController.java | tati2002med-dashboarder-spring-mvc-api-61d5a07 | [
{
"filename": "src/main/java/com/tati/utils/YoutubeAPI.java",
"retrieved_chunk": "\t\t\tString videoPopularTitle = items1.get(0).getSnippet().getTitle();\n\t\t\tString videoPopularId = items1.get(0).getId().getVideoId();\n\t\t\tString videoLink = \"https://www.youtube.com/watch?v=\" + videoPopularId;\n\t\t\t// Title\n\t\t\tString channelTitle = snippet.getChannelTitle();\n\t\t\t// Video Data ----------------------------------\n\t\t\t// Video Title\n\t\t\tString videoTitle = snippet.getTitle();\n\t\t\t// published At\n\t\t\tString publishedTime = snippet.getPublishedAt().toString();",
"score": 34.200142112002
},
{
"filename": "src/main/java/com/tati/utils/YoutubeAPI.java",
"retrieved_chunk": "\t\t\tvideosListByIdRequest.setKey(API_KEY);\n\t\t\tvideosListByIdRequest.setId(videoId);\n\t\t\tVideo video = videosListByIdRequest.execute().getItems().get(0);\n\t\t\t// Get the video data\n\t\t\tVideoSnippet snippet = video.getSnippet();\n\t\t\tVideoStatistics statistics = video.getStatistics();\n\t\t\t// Channel Data ----------------------------------\n\t\t\t// Id\n\t\t\tString channelID = snippet.getChannelId();\n\t\t\t// Call the search().list method to retrieve the most popular video in the",
"score": 22.683997841596753
},
{
"filename": "src/main/java/com/tati/model/YoutubeData.java",
"retrieved_chunk": "\tpublic void setVideoPopularTitle(String videoPopularTitle) {\n\t\tthis.videoPopularTitle = videoPopularTitle;\n\t}\n\tpublic String getVideoPopularLink() {\n\t\treturn videoPopularLink;\n\t}\n\tpublic void setVideoPopularLink(String videoPopularLink) {\n\t\tthis.videoPopularLink = videoPopularLink;\n\t}\n\t@Override",
"score": 7.461516181326436
},
{
"filename": "src/main/java/com/tati/model/YoutubeData.java",
"retrieved_chunk": "\t\treturn commentCount;\n\t}\n\tpublic void setCommentCount(String commentCount) {\n\t\tthis.commentCount = commentCount;\n\t}\n\tpublic String getEngagementRate() {\n\t\treturn engagementRate;\n\t}\n\tpublic void setEngagementRate(String engagementRate) {\n\t\tthis.engagementRate = engagementRate;",
"score": 7.346120580525285
},
{
"filename": "src/main/java/com/tati/model/YoutubeData.java",
"retrieved_chunk": "\tpublic void setViewCount(String viewCount) {\n\t\tthis.viewCount = viewCount;\n\t}\n\tpublic String getLikeCount() {\n\t\treturn likeCount;\n\t}\n\tpublic void setLikeCount(String likeCount) {\n\t\tthis.likeCount = likeCount;\n\t}\n\tpublic String getCommentCount() {",
"score": 7.23423990640328
}
] | java | dataHolder.setVideoPopularLink(videoData.get(8)); |
/*
* Copyright Strimzi authors.
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
*/
package io.strimzi.kafka.bridge.mqtt.core;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.handler.logging.LogLevel;
import io.netty.handler.logging.LoggingHandler;
import io.strimzi.kafka.bridge.mqtt.config.BridgeConfig;
import io.strimzi.kafka.bridge.mqtt.config.MqttConfig;
import io.strimzi.kafka.bridge.mqtt.kafka.KafkaBridgeProducer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Represents the MqttServer component.
*/
public class MqttServer {
private static final Logger logger = LoggerFactory.getLogger(MqttServer.class);
private final EventLoopGroup masterGroup;
private final EventLoopGroup workerGroup;
private final ServerBootstrap serverBootstrap;
private final MqttConfig mqttConfig;
private final KafkaBridgeProducer kafkaBridgeProducer;
private ChannelFuture channelFuture;
/**
* Constructor
*
* @param config MqttConfig instance with all configuration needed to run the server.
* @param masterGroup EventLoopGroup instance for handle incoming connections.
* @param workerGroup EventLoopGroup instance for processing I/O.
* @param option ChannelOption<Boolean> instance which allows to configure various channel options, such as SO_KEEPALIVE, SO_BACKLOG etc.
* @see BridgeConfig
* @see ChannelOption
*/
public MqttServer(BridgeConfig config, EventLoopGroup masterGroup, EventLoopGroup workerGroup, ChannelOption<Boolean> option) {
this.masterGroup = masterGroup;
this.workerGroup = workerGroup;
this.mqttConfig = config.getMqttConfig();
this.kafkaBridgeProducer = new KafkaBridgeProducer(config.getKafkaConfig());
this.serverBootstrap = new ServerBootstrap();
this.serverBootstrap.group(masterGroup, workerGroup)
.channel(NioServerSocketChannel.class)
.handler(new LoggingHandler(LogLevel.INFO))
.childHandler(new MqttServerInitializer(this.kafkaBridgeProducer))
.childOption(option, true);
}
/**
* Start the server.
*/
public void start() throws InterruptedException {
// bind the Netty server and wait synchronously
this.channelFuture = this.serverBootstrap.bind | (this.mqttConfig.getHost(), this.mqttConfig.getPort()).sync(); |
}
/**
* Stop the server.
*/
public void stop() throws InterruptedException {
logger.info("Shutting down Netty server...");
this.channelFuture.channel().close().sync();
this.channelFuture.channel().closeFuture().sync();
this.masterGroup.shutdownGracefully().sync();
this.workerGroup.shutdownGracefully().sync();
logger.info("Netty server shut down");
logger.info("Closing Kafka producers...");
this.kafkaBridgeProducer.close();
logger.info("Kafka producers closed");
}
}
| src/main/java/io/strimzi/kafka/bridge/mqtt/core/MqttServer.java | strimzi-strimzi-mqtt-bridge-cb0a4b8 | [
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/core/MqttServerInitializer.java",
"retrieved_chunk": "/**\n * This helper class help us add necessary Netty pipelines handlers. <br>\n * During the {@link #initChannel(SocketChannel)}, we use MqttDecoder() and MqttEncoder to decode and encode Mqtt messages respectively. <br>\n */\npublic class MqttServerInitializer extends ChannelInitializer<SocketChannel> {\n private final MqttServerHandler mqttServerHandler;\n public MqttServerInitializer(KafkaBridgeProducer kafkaBridgeProducer) {\n this.mqttServerHandler = new MqttServerHandler(kafkaBridgeProducer);\n }\n @Override",
"score": 30.52412664895244
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/config/MqttConfig.java",
"retrieved_chunk": " public int getPort() {\n return Integer.parseInt(this.config.getOrDefault(MqttConfig.MQTT_PORT, MqttConfig.DEFAULT_MQTT_PORT).toString());\n }\n /**\n * @return the MQTT server host\n */\n public String getHost() {\n return this.config.getOrDefault(MqttConfig.MQTT_HOST, MqttConfig.DEFAULT_MQTT_HOST).toString();\n }\n @Override",
"score": 30.426629058886988
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/config/BridgeConfig.java",
"retrieved_chunk": " \", mqttConfig=\" + this.mqttConfig +\n \", kafkaConfig=\" + this.kafkaConfig +\n \")\";\n }\n}",
"score": 27.988781856148478
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/config/BridgeConfig.java",
"retrieved_chunk": " }\n /**\n * @return the MQTT configuration properties\n */\n public MqttConfig getMqttConfig() {\n return this.mqttConfig;\n }\n /**\n * @return the bridge identification number\n */",
"score": 24.09188845497439
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/config/BridgeConfig.java",
"retrieved_chunk": " /**\n * Constructor\n *\n * @param config configuration parameters map\n * @param mqttConfig MQTT configuration properties\n * @param kafkaConfig Kafka configuration properties\n */\n public BridgeConfig(Map<String, Object> config, MqttConfig mqttConfig, KafkaConfig kafkaConfig) {\n super(config);\n this.mqttConfig = mqttConfig;",
"score": 22.20860764726067
}
] | java | (this.mqttConfig.getHost(), this.mqttConfig.getPort()).sync(); |
/*
* Copyright Strimzi authors.
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
*/
package io.strimzi.kafka.bridge.mqtt.config;
import java.util.Map;
import java.util.stream.Collectors;
/**
* Represents the bridge configuration properties
*
* @see MqttConfig
* @see KafkaConfig
*/
public class BridgeConfig extends AbstractConfig {
// Prefix for all the specific configuration parameters for the bridge
public static final String BRIDGE_CONFIG_PREFIX = "bridge.";
// Bridge identification number
public static final String BRIDGE_ID = BRIDGE_CONFIG_PREFIX + "id";
private final MqttConfig mqttConfig;
private final KafkaConfig kafkaConfig;
/**
* Constructor
*
* @param config configuration parameters map
* @param mqttConfig MQTT configuration properties
* @param kafkaConfig Kafka configuration properties
*/
public BridgeConfig(Map<String, Object> config, MqttConfig mqttConfig, KafkaConfig kafkaConfig) {
super(config);
this.mqttConfig = mqttConfig;
this.kafkaConfig = kafkaConfig;
}
/**
* Build a bridge configuration object from a map of configuration parameters
*
* @param map configuration parameters map
* @return a new instance of BridgeConfig
*/
public static BridgeConfig fromMap(Map<String, Object> map) {
final MqttConfig | mqttConfig = MqttConfig.fromMap(map); |
final KafkaConfig kafkaConfig = KafkaConfig.fromMap(map);
return new BridgeConfig(map.entrySet().stream()
.filter(entry -> entry.getKey().startsWith(BridgeConfig.BRIDGE_CONFIG_PREFIX))
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)), mqttConfig, kafkaConfig);
}
/**
* @return the Kafka configuration properties
*/
public KafkaConfig getKafkaConfig() {
return this.kafkaConfig;
}
/**
* @return the MQTT configuration properties
*/
public MqttConfig getMqttConfig() {
return this.mqttConfig;
}
/**
* @return the bridge identification number
*/
public String getBridgeID() {
return this.config.get(BridgeConfig.BRIDGE_ID) == null ? null : this.config.get(BridgeConfig.BRIDGE_ID).toString();
}
/**
* @return the bridge configuration properties
*/
@Override
public String toString() {
return "BridgeConfig(" +
"config=" + this.config +
", mqttConfig=" + this.mqttConfig +
", kafkaConfig=" + this.kafkaConfig +
")";
}
}
| src/main/java/io/strimzi/kafka/bridge/mqtt/config/BridgeConfig.java | strimzi-strimzi-mqtt-bridge-cb0a4b8 | [
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/config/KafkaProducerConfig.java",
"retrieved_chunk": " super(config);\n }\n /**\n * Build a Kafka producer configuration object from a map of configuration parameters\n *\n * @param map configuration parameters map\n * @return a new instance of KafkaProducerConfig\n */\n public static KafkaProducerConfig fromMap(Map<String, Object> map) {\n return new KafkaProducerConfig(map.entrySet().stream()",
"score": 76.76391637668831
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/config/MqttConfig.java",
"retrieved_chunk": " *\n * @param config configuration parameters map\n */\n public MqttConfig(Map<String, Object> config) {\n super(config);\n }\n /**\n * Build a MQTT configuration object from a map of configuration parameters\n *\n * @param map configuration parameters map",
"score": 74.03650810882706
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/config/KafkaConfig.java",
"retrieved_chunk": " *\n * @param config configuration parameters map\n * @param kafkaProducerConfig Kafka producer configuration properties\n */\n public KafkaConfig(Map<String, Object> config, KafkaProducerConfig kafkaProducerConfig) {\n super(config);\n this.kafkaProducerConfig = kafkaProducerConfig;\n }\n /**\n * Build a Kafka configuration object from a map of configuration parameters",
"score": 60.557263959456236
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/config/MqttConfig.java",
"retrieved_chunk": " * @return a new instance of MqttConfig\n */\n public static MqttConfig fromMap(Map<String, Object> map) {\n return new MqttConfig(map.entrySet().stream()\n .filter(entry -> entry.getKey().startsWith(MqttConfig.MQTT_CONFIG_PREFIX))\n .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)));\n }\n /**\n * @return the MQTT server port\n */",
"score": 49.48416402424278
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/config/KafkaConfig.java",
"retrieved_chunk": " *\n * @param config configuration parameters map\n * @return a new instance of KafkaConfig\n */\n public static KafkaConfig fromMap(Map<String, Object> config) {\n final KafkaProducerConfig kafkaProducerConfig = KafkaProducerConfig.fromMap(config);\n return new KafkaConfig(config.entrySet().stream()\n .filter((entry -> entry.getKey().startsWith(KafkaConfig.KAFKA_CONFIG_PREFIX) &&\n !entry.getKey().startsWith(KafkaProducerConfig.KAFKA_PRODUCER_CONFIG_PREFIX)))\n .collect(Collectors.toMap((e) -> e.getKey().substring(KAFKA_CONFIG_PREFIX.length()), Map.Entry::getValue)), kafkaProducerConfig);",
"score": 47.28202170295927
}
] | java | mqttConfig = MqttConfig.fromMap(map); |
/*
* Copyright Strimzi authors.
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
*/
package io.strimzi.kafka.bridge.mqtt.core;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.handler.logging.LogLevel;
import io.netty.handler.logging.LoggingHandler;
import io.strimzi.kafka.bridge.mqtt.config.BridgeConfig;
import io.strimzi.kafka.bridge.mqtt.config.MqttConfig;
import io.strimzi.kafka.bridge.mqtt.kafka.KafkaBridgeProducer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Represents the MqttServer component.
*/
public class MqttServer {
private static final Logger logger = LoggerFactory.getLogger(MqttServer.class);
private final EventLoopGroup masterGroup;
private final EventLoopGroup workerGroup;
private final ServerBootstrap serverBootstrap;
private final MqttConfig mqttConfig;
private final KafkaBridgeProducer kafkaBridgeProducer;
private ChannelFuture channelFuture;
/**
* Constructor
*
* @param config MqttConfig instance with all configuration needed to run the server.
* @param masterGroup EventLoopGroup instance for handle incoming connections.
* @param workerGroup EventLoopGroup instance for processing I/O.
* @param option ChannelOption<Boolean> instance which allows to configure various channel options, such as SO_KEEPALIVE, SO_BACKLOG etc.
* @see BridgeConfig
* @see ChannelOption
*/
public MqttServer(BridgeConfig config, EventLoopGroup masterGroup, EventLoopGroup workerGroup, ChannelOption<Boolean> option) {
this.masterGroup = masterGroup;
this.workerGroup = workerGroup;
this.mqttConfig = config.getMqttConfig();
this.kafkaBridgeProducer = new KafkaBridgeProducer | (config.getKafkaConfig()); |
this.serverBootstrap = new ServerBootstrap();
this.serverBootstrap.group(masterGroup, workerGroup)
.channel(NioServerSocketChannel.class)
.handler(new LoggingHandler(LogLevel.INFO))
.childHandler(new MqttServerInitializer(this.kafkaBridgeProducer))
.childOption(option, true);
}
/**
* Start the server.
*/
public void start() throws InterruptedException {
// bind the Netty server and wait synchronously
this.channelFuture = this.serverBootstrap.bind(this.mqttConfig.getHost(), this.mqttConfig.getPort()).sync();
}
/**
* Stop the server.
*/
public void stop() throws InterruptedException {
logger.info("Shutting down Netty server...");
this.channelFuture.channel().close().sync();
this.channelFuture.channel().closeFuture().sync();
this.masterGroup.shutdownGracefully().sync();
this.workerGroup.shutdownGracefully().sync();
logger.info("Netty server shut down");
logger.info("Closing Kafka producers...");
this.kafkaBridgeProducer.close();
logger.info("Kafka producers closed");
}
}
| src/main/java/io/strimzi/kafka/bridge/mqtt/core/MqttServer.java | strimzi-strimzi-mqtt-bridge-cb0a4b8 | [
{
"filename": "src/test/java/io/strimzi/kafka/bridge/mqtt/MqttBridgetIT.java",
"retrieved_chunk": " ));\n // prepare the mapping rules\n String mappingRulesPath = Objects.requireNonNull(MqttBridgetIT.class.getClassLoader().getResource(\"mapping-rules-regex.json\")).getPath();\n MappingRulesLoader.getInstance().init(mappingRulesPath);\n // start the MQTT bridge\n EventLoopGroup bossGroup = new NioEventLoopGroup();\n EventLoopGroup workerGroup = new NioEventLoopGroup();\n mqttBridge = new MqttServer(bridgeConfig, bossGroup, workerGroup, ChannelOption.SO_KEEPALIVE);\n mqttBridge.start();\n }",
"score": 50.81104624769955
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/Main.java",
"retrieved_chunk": " EventLoopGroup workerGroup = new NioEventLoopGroup();\n MqttServer mqttServer = new MqttServer(bridgeConfig, bossGroup, workerGroup, ChannelOption.SO_KEEPALIVE);\n CountDownLatch latch = new CountDownLatch(1);\n Runtime.getRuntime().addShutdownHook(new Thread(() -> {\n try {\n mqttServer.stop();\n } catch (Exception e) {\n logger.error(\"Error stopping the MQTT server: \", e);\n } finally {\n latch.countDown();",
"score": 50.67618779143445
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/Main.java",
"retrieved_chunk": "/*\n * Copyright Strimzi authors.\n * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).\n */\npackage io.strimzi.kafka.bridge.mqtt;\nimport io.netty.channel.ChannelOption;\nimport io.netty.channel.EventLoopGroup;\nimport io.netty.channel.nio.NioEventLoopGroup;\nimport io.strimzi.kafka.bridge.mqtt.config.BridgeConfig;\nimport io.strimzi.kafka.bridge.mqtt.config.ConfigRetriever;",
"score": 35.169004029199684
},
{
"filename": "src/test/java/io/strimzi/kafka/bridge/mqtt/MqttBridgetIT.java",
"retrieved_chunk": "/*\n * Copyright Strimzi authors.\n * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).\n */\npackage io.strimzi.kafka.bridge.mqtt;\nimport io.netty.channel.ChannelOption;\nimport io.netty.channel.EventLoopGroup;\nimport io.netty.channel.nio.NioEventLoopGroup;\nimport io.netty.handler.codec.mqtt.MqttQoS;\nimport io.strimzi.kafka.bridge.mqtt.config.BridgeConfig;",
"score": 33.46873228752629
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/config/BridgeConfig.java",
"retrieved_chunk": " /**\n * Constructor\n *\n * @param config configuration parameters map\n * @param mqttConfig MQTT configuration properties\n * @param kafkaConfig Kafka configuration properties\n */\n public BridgeConfig(Map<String, Object> config, MqttConfig mqttConfig, KafkaConfig kafkaConfig) {\n super(config);\n this.mqttConfig = mqttConfig;",
"score": 31.4950513671654
}
] | java | (config.getKafkaConfig()); |
/*
* Copyright Strimzi authors.
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
*/
package io.strimzi.kafka.bridge.mqtt.core;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.handler.logging.LogLevel;
import io.netty.handler.logging.LoggingHandler;
import io.strimzi.kafka.bridge.mqtt.config.BridgeConfig;
import io.strimzi.kafka.bridge.mqtt.config.MqttConfig;
import io.strimzi.kafka.bridge.mqtt.kafka.KafkaBridgeProducer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Represents the MqttServer component.
*/
public class MqttServer {
private static final Logger logger = LoggerFactory.getLogger(MqttServer.class);
private final EventLoopGroup masterGroup;
private final EventLoopGroup workerGroup;
private final ServerBootstrap serverBootstrap;
private final MqttConfig mqttConfig;
private final KafkaBridgeProducer kafkaBridgeProducer;
private ChannelFuture channelFuture;
/**
* Constructor
*
* @param config MqttConfig instance with all configuration needed to run the server.
* @param masterGroup EventLoopGroup instance for handle incoming connections.
* @param workerGroup EventLoopGroup instance for processing I/O.
* @param option ChannelOption<Boolean> instance which allows to configure various channel options, such as SO_KEEPALIVE, SO_BACKLOG etc.
* @see BridgeConfig
* @see ChannelOption
*/
public MqttServer(BridgeConfig config, EventLoopGroup masterGroup, EventLoopGroup workerGroup, ChannelOption<Boolean> option) {
this.masterGroup = masterGroup;
this.workerGroup = workerGroup;
this.mqttConfig = | config.getMqttConfig(); |
this.kafkaBridgeProducer = new KafkaBridgeProducer(config.getKafkaConfig());
this.serverBootstrap = new ServerBootstrap();
this.serverBootstrap.group(masterGroup, workerGroup)
.channel(NioServerSocketChannel.class)
.handler(new LoggingHandler(LogLevel.INFO))
.childHandler(new MqttServerInitializer(this.kafkaBridgeProducer))
.childOption(option, true);
}
/**
* Start the server.
*/
public void start() throws InterruptedException {
// bind the Netty server and wait synchronously
this.channelFuture = this.serverBootstrap.bind(this.mqttConfig.getHost(), this.mqttConfig.getPort()).sync();
}
/**
* Stop the server.
*/
public void stop() throws InterruptedException {
logger.info("Shutting down Netty server...");
this.channelFuture.channel().close().sync();
this.channelFuture.channel().closeFuture().sync();
this.masterGroup.shutdownGracefully().sync();
this.workerGroup.shutdownGracefully().sync();
logger.info("Netty server shut down");
logger.info("Closing Kafka producers...");
this.kafkaBridgeProducer.close();
logger.info("Kafka producers closed");
}
}
| src/main/java/io/strimzi/kafka/bridge/mqtt/core/MqttServer.java | strimzi-strimzi-mqtt-bridge-cb0a4b8 | [
{
"filename": "src/test/java/io/strimzi/kafka/bridge/mqtt/MqttBridgetIT.java",
"retrieved_chunk": " ));\n // prepare the mapping rules\n String mappingRulesPath = Objects.requireNonNull(MqttBridgetIT.class.getClassLoader().getResource(\"mapping-rules-regex.json\")).getPath();\n MappingRulesLoader.getInstance().init(mappingRulesPath);\n // start the MQTT bridge\n EventLoopGroup bossGroup = new NioEventLoopGroup();\n EventLoopGroup workerGroup = new NioEventLoopGroup();\n mqttBridge = new MqttServer(bridgeConfig, bossGroup, workerGroup, ChannelOption.SO_KEEPALIVE);\n mqttBridge.start();\n }",
"score": 61.16656575704266
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/Main.java",
"retrieved_chunk": " EventLoopGroup workerGroup = new NioEventLoopGroup();\n MqttServer mqttServer = new MqttServer(bridgeConfig, bossGroup, workerGroup, ChannelOption.SO_KEEPALIVE);\n CountDownLatch latch = new CountDownLatch(1);\n Runtime.getRuntime().addShutdownHook(new Thread(() -> {\n try {\n mqttServer.stop();\n } catch (Exception e) {\n logger.error(\"Error stopping the MQTT server: \", e);\n } finally {\n latch.countDown();",
"score": 59.61893795647622
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/Main.java",
"retrieved_chunk": "/*\n * Copyright Strimzi authors.\n * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).\n */\npackage io.strimzi.kafka.bridge.mqtt;\nimport io.netty.channel.ChannelOption;\nimport io.netty.channel.EventLoopGroup;\nimport io.netty.channel.nio.NioEventLoopGroup;\nimport io.strimzi.kafka.bridge.mqtt.config.BridgeConfig;\nimport io.strimzi.kafka.bridge.mqtt.config.ConfigRetriever;",
"score": 36.307362242024844
},
{
"filename": "src/test/java/io/strimzi/kafka/bridge/mqtt/MqttBridgetIT.java",
"retrieved_chunk": "/*\n * Copyright Strimzi authors.\n * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).\n */\npackage io.strimzi.kafka.bridge.mqtt;\nimport io.netty.channel.ChannelOption;\nimport io.netty.channel.EventLoopGroup;\nimport io.netty.channel.nio.NioEventLoopGroup;\nimport io.netty.handler.codec.mqtt.MqttQoS;\nimport io.strimzi.kafka.bridge.mqtt.config.BridgeConfig;",
"score": 35.28048581211247
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/config/BridgeConfig.java",
"retrieved_chunk": " /**\n * Constructor\n *\n * @param config configuration parameters map\n * @param mqttConfig MQTT configuration properties\n * @param kafkaConfig Kafka configuration properties\n */\n public BridgeConfig(Map<String, Object> config, MqttConfig mqttConfig, KafkaConfig kafkaConfig) {\n super(config);\n this.mqttConfig = mqttConfig;",
"score": 30.116706769673826
}
] | java | config.getMqttConfig(); |
/*
* Copyright Strimzi authors.
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
*/
package io.strimzi.kafka.bridge.mqtt;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.strimzi.kafka.bridge.mqtt.config.BridgeConfig;
import io.strimzi.kafka.bridge.mqtt.config.ConfigRetriever;
import io.strimzi.kafka.bridge.mqtt.core.MqttServer;
import io.strimzi.kafka.bridge.mqtt.mapper.MappingRulesLoader;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
public class Main {
private static final Logger logger = LoggerFactory.getLogger(Main.class);
private static final String CONFIG_FILE_OPTION = "config-file";
private static final String MAPPING_RULES_FILE_OPTION = "mapping-rules";
public static void main(String[] args) {
logger.info("Strimzi MQTT Bridge {} is starting", Main.class.getPackage().getImplementationVersion());
try {
//prepare the command line options
CommandLine cmd = new DefaultParser().parse(generateCommandLineOptions(), args);
//load the configuration file from the path specified in the command line
String configFilePath = getAbsoluteFilePath(cmd.getOptionValue(Main.CONFIG_FILE_OPTION));
String mappingRulesFile = getAbsoluteFilePath(cmd.getOptionValue(Main.MAPPING_RULES_FILE_OPTION));
Map<String, ?> configRetriever = configFilePath != null ? ConfigRetriever.getConfig(configFilePath) : ConfigRetriever.getConfigFromEnv();
BridgeConfig bridgeConfig = BridgeConfig.fromMap((Map<String, Object>) configRetriever);
logger.info("Bridge configuration {}", bridgeConfig);
//set the mapping rules file path
MappingRulesLoader.getInstance().init(mappingRulesFile);
//start the MQTT server
EventLoopGroup bossGroup = new NioEventLoopGroup();
EventLoopGroup workerGroup = new NioEventLoopGroup();
MqttServer mqttServer = new MqttServer(bridgeConfig, bossGroup, workerGroup, ChannelOption.SO_KEEPALIVE);
CountDownLatch latch = new CountDownLatch(1);
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
try {
| mqttServer.stop(); |
} catch (Exception e) {
logger.error("Error stopping the MQTT server: ", e);
} finally {
latch.countDown();
}
}));
// start the MQTT server
mqttServer.start();
latch.await();
} catch (Exception e) {
logger.error("Error starting the MQTT server: ", e);
System.exit(1);
}
System.exit(0);
}
/**
* Generate the command line options.
* The options are:
* --config-file: the path of the configuration file
* --mapping-rules: the path of the topic mapping rules file
* E.g.:
* <application> --config-file=/path/to/config/file --mapping-rules=/path/to/mapping/rules/file
* @return the command line options
*/
private static Options generateCommandLineOptions() {
Options options = new Options();
Option optionConfigFile = Option.builder()
.longOpt(Main.CONFIG_FILE_OPTION)
.hasArg(true)
.required()
.desc("The path to the configuration file")
.build();
options.addOption(optionConfigFile);
Option optionMappingRulesFile = Option.builder()
.longOpt(Main.MAPPING_RULES_FILE_OPTION)
.hasArg(true)
.required()
.desc("The path to the topic mapping rules file")
.build();
options.addOption(optionMappingRulesFile);
return options;
}
/**
* Get the absolute path of the file
*
* @param arg the path of the file
* @return the absolute path of the file
*/
private static String getAbsoluteFilePath(String arg) {
if (arg == null) {
return null;
}
return arg.startsWith(File.separator) ? arg : System.getProperty("user.dir") + File.separator + arg;
}
}
| src/main/java/io/strimzi/kafka/bridge/mqtt/Main.java | strimzi-strimzi-mqtt-bridge-cb0a4b8 | [
{
"filename": "src/test/java/io/strimzi/kafka/bridge/mqtt/MqttBridgetIT.java",
"retrieved_chunk": " ));\n // prepare the mapping rules\n String mappingRulesPath = Objects.requireNonNull(MqttBridgetIT.class.getClassLoader().getResource(\"mapping-rules-regex.json\")).getPath();\n MappingRulesLoader.getInstance().init(mappingRulesPath);\n // start the MQTT bridge\n EventLoopGroup bossGroup = new NioEventLoopGroup();\n EventLoopGroup workerGroup = new NioEventLoopGroup();\n mqttBridge = new MqttServer(bridgeConfig, bossGroup, workerGroup, ChannelOption.SO_KEEPALIVE);\n mqttBridge.start();\n }",
"score": 99.24190669665674
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/core/MqttServer.java",
"retrieved_chunk": " *\n * @param config MqttConfig instance with all configuration needed to run the server.\n * @param masterGroup EventLoopGroup instance for handle incoming connections.\n * @param workerGroup EventLoopGroup instance for processing I/O.\n * @param option ChannelOption<Boolean> instance which allows to configure various channel options, such as SO_KEEPALIVE, SO_BACKLOG etc.\n * @see BridgeConfig\n * @see ChannelOption\n */\n public MqttServer(BridgeConfig config, EventLoopGroup masterGroup, EventLoopGroup workerGroup, ChannelOption<Boolean> option) {\n this.masterGroup = masterGroup;",
"score": 40.877421022894566
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/core/MqttServer.java",
"retrieved_chunk": "public class MqttServer {\n private static final Logger logger = LoggerFactory.getLogger(MqttServer.class);\n private final EventLoopGroup masterGroup;\n private final EventLoopGroup workerGroup;\n private final ServerBootstrap serverBootstrap;\n private final MqttConfig mqttConfig;\n private final KafkaBridgeProducer kafkaBridgeProducer;\n private ChannelFuture channelFuture;\n /**\n * Constructor",
"score": 29.289813010766427
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/core/MqttServerHandler.java",
"retrieved_chunk": " try {\n MappingRulesLoader mappingRulesLoader = MappingRulesLoader.getInstance();\n List<MappingRule> rules = mappingRulesLoader.loadRules();\n this.mqttKafkaMapper = new MqttKafkaRegexMapper(rules);\n } catch (IOException e) {\n logger.error(\"Error reading mapping file: \", e);\n }\n this.kafkaBridgeProducer = kafkaBridgeProducer;\n }\n /**",
"score": 24.088833210845163
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/mapper/MappingRulesLoader.java",
"retrieved_chunk": " * Helper class to Load the rules from the configuration file\n */\npublic class MappingRulesLoader {\n private static final MappingRulesLoader INSTANCE = new MappingRulesLoader();\n // path of the topic mapping rule file\n private String mapperRuleFilePath;\n private boolean initialized = false;\n /**\n * Initialize the MappingRulesLoader\n *",
"score": 22.91968519598461
}
] | java | mqttServer.stop(); |
/*
* Copyright Strimzi authors.
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
*/
package io.strimzi.kafka.bridge.mqtt.core;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.handler.logging.LogLevel;
import io.netty.handler.logging.LoggingHandler;
import io.strimzi.kafka.bridge.mqtt.config.BridgeConfig;
import io.strimzi.kafka.bridge.mqtt.config.MqttConfig;
import io.strimzi.kafka.bridge.mqtt.kafka.KafkaBridgeProducer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Represents the MqttServer component.
*/
public class MqttServer {
private static final Logger logger = LoggerFactory.getLogger(MqttServer.class);
private final EventLoopGroup masterGroup;
private final EventLoopGroup workerGroup;
private final ServerBootstrap serverBootstrap;
private final MqttConfig mqttConfig;
private final KafkaBridgeProducer kafkaBridgeProducer;
private ChannelFuture channelFuture;
/**
* Constructor
*
* @param config MqttConfig instance with all configuration needed to run the server.
* @param masterGroup EventLoopGroup instance for handle incoming connections.
* @param workerGroup EventLoopGroup instance for processing I/O.
* @param option ChannelOption<Boolean> instance which allows to configure various channel options, such as SO_KEEPALIVE, SO_BACKLOG etc.
* @see BridgeConfig
* @see ChannelOption
*/
public MqttServer(BridgeConfig config, EventLoopGroup masterGroup, EventLoopGroup workerGroup, ChannelOption<Boolean> option) {
this.masterGroup = masterGroup;
this.workerGroup = workerGroup;
this.mqttConfig = config.getMqttConfig();
this.kafkaBridgeProducer = new KafkaBridgeProducer(config.getKafkaConfig());
this.serverBootstrap = new ServerBootstrap();
this.serverBootstrap.group(masterGroup, workerGroup)
.channel(NioServerSocketChannel.class)
.handler(new LoggingHandler(LogLevel.INFO))
.childHandler(new MqttServerInitializer(this.kafkaBridgeProducer))
.childOption(option, true);
}
/**
* Start the server.
*/
public void start() throws InterruptedException {
// bind the Netty server and wait synchronously
this.channelFuture = this.serverBootstrap.bind(this.mqttConfig.getHost() | , this.mqttConfig.getPort()).sync(); |
}
/**
* Stop the server.
*/
public void stop() throws InterruptedException {
logger.info("Shutting down Netty server...");
this.channelFuture.channel().close().sync();
this.channelFuture.channel().closeFuture().sync();
this.masterGroup.shutdownGracefully().sync();
this.workerGroup.shutdownGracefully().sync();
logger.info("Netty server shut down");
logger.info("Closing Kafka producers...");
this.kafkaBridgeProducer.close();
logger.info("Kafka producers closed");
}
}
| src/main/java/io/strimzi/kafka/bridge/mqtt/core/MqttServer.java | strimzi-strimzi-mqtt-bridge-cb0a4b8 | [
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/core/MqttServerInitializer.java",
"retrieved_chunk": "/**\n * This helper class help us add necessary Netty pipelines handlers. <br>\n * During the {@link #initChannel(SocketChannel)}, we use MqttDecoder() and MqttEncoder to decode and encode Mqtt messages respectively. <br>\n */\npublic class MqttServerInitializer extends ChannelInitializer<SocketChannel> {\n private final MqttServerHandler mqttServerHandler;\n public MqttServerInitializer(KafkaBridgeProducer kafkaBridgeProducer) {\n this.mqttServerHandler = new MqttServerHandler(kafkaBridgeProducer);\n }\n @Override",
"score": 30.52412664895244
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/config/MqttConfig.java",
"retrieved_chunk": " public int getPort() {\n return Integer.parseInt(this.config.getOrDefault(MqttConfig.MQTT_PORT, MqttConfig.DEFAULT_MQTT_PORT).toString());\n }\n /**\n * @return the MQTT server host\n */\n public String getHost() {\n return this.config.getOrDefault(MqttConfig.MQTT_HOST, MqttConfig.DEFAULT_MQTT_HOST).toString();\n }\n @Override",
"score": 30.426629058886988
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/config/BridgeConfig.java",
"retrieved_chunk": " \", mqttConfig=\" + this.mqttConfig +\n \", kafkaConfig=\" + this.kafkaConfig +\n \")\";\n }\n}",
"score": 27.988781856148478
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/config/BridgeConfig.java",
"retrieved_chunk": " }\n /**\n * @return the MQTT configuration properties\n */\n public MqttConfig getMqttConfig() {\n return this.mqttConfig;\n }\n /**\n * @return the bridge identification number\n */",
"score": 24.09188845497439
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/config/BridgeConfig.java",
"retrieved_chunk": " /**\n * Constructor\n *\n * @param config configuration parameters map\n * @param mqttConfig MQTT configuration properties\n * @param kafkaConfig Kafka configuration properties\n */\n public BridgeConfig(Map<String, Object> config, MqttConfig mqttConfig, KafkaConfig kafkaConfig) {\n super(config);\n this.mqttConfig = mqttConfig;",
"score": 22.20860764726067
}
] | java | , this.mqttConfig.getPort()).sync(); |
/*
* Copyright Strimzi authors.
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
*/
package io.strimzi.kafka.bridge.mqtt;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.strimzi.kafka.bridge.mqtt.config.BridgeConfig;
import io.strimzi.kafka.bridge.mqtt.config.ConfigRetriever;
import io.strimzi.kafka.bridge.mqtt.core.MqttServer;
import io.strimzi.kafka.bridge.mqtt.mapper.MappingRulesLoader;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
public class Main {
private static final Logger logger = LoggerFactory.getLogger(Main.class);
private static final String CONFIG_FILE_OPTION = "config-file";
private static final String MAPPING_RULES_FILE_OPTION = "mapping-rules";
public static void main(String[] args) {
logger.info("Strimzi MQTT Bridge {} is starting", Main.class.getPackage().getImplementationVersion());
try {
//prepare the command line options
CommandLine cmd = new DefaultParser().parse(generateCommandLineOptions(), args);
//load the configuration file from the path specified in the command line
String configFilePath = getAbsoluteFilePath(cmd.getOptionValue(Main.CONFIG_FILE_OPTION));
String mappingRulesFile = getAbsoluteFilePath(cmd.getOptionValue(Main.MAPPING_RULES_FILE_OPTION));
Map<String, ?> configRetriever = configFilePath != null ? ConfigRetriever.getConfig(configFilePath) : ConfigRetriever.getConfigFromEnv();
BridgeConfig bridgeConfig = BridgeConfig.fromMap((Map<String, Object>) configRetriever);
logger.info("Bridge configuration {}", bridgeConfig);
//set the mapping rules file path
| MappingRulesLoader.getInstance().init(mappingRulesFile); |
//start the MQTT server
EventLoopGroup bossGroup = new NioEventLoopGroup();
EventLoopGroup workerGroup = new NioEventLoopGroup();
MqttServer mqttServer = new MqttServer(bridgeConfig, bossGroup, workerGroup, ChannelOption.SO_KEEPALIVE);
CountDownLatch latch = new CountDownLatch(1);
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
try {
mqttServer.stop();
} catch (Exception e) {
logger.error("Error stopping the MQTT server: ", e);
} finally {
latch.countDown();
}
}));
// start the MQTT server
mqttServer.start();
latch.await();
} catch (Exception e) {
logger.error("Error starting the MQTT server: ", e);
System.exit(1);
}
System.exit(0);
}
/**
* Generate the command line options.
* The options are:
* --config-file: the path of the configuration file
* --mapping-rules: the path of the topic mapping rules file
* E.g.:
* <application> --config-file=/path/to/config/file --mapping-rules=/path/to/mapping/rules/file
* @return the command line options
*/
private static Options generateCommandLineOptions() {
Options options = new Options();
Option optionConfigFile = Option.builder()
.longOpt(Main.CONFIG_FILE_OPTION)
.hasArg(true)
.required()
.desc("The path to the configuration file")
.build();
options.addOption(optionConfigFile);
Option optionMappingRulesFile = Option.builder()
.longOpt(Main.MAPPING_RULES_FILE_OPTION)
.hasArg(true)
.required()
.desc("The path to the topic mapping rules file")
.build();
options.addOption(optionMappingRulesFile);
return options;
}
/**
* Get the absolute path of the file
*
* @param arg the path of the file
* @return the absolute path of the file
*/
private static String getAbsoluteFilePath(String arg) {
if (arg == null) {
return null;
}
return arg.startsWith(File.separator) ? arg : System.getProperty("user.dir") + File.separator + arg;
}
}
| src/main/java/io/strimzi/kafka/bridge/mqtt/Main.java | strimzi-strimzi-mqtt-bridge-cb0a4b8 | [
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/config/ConfigRetriever.java",
"retrieved_chunk": " *\n * @param path path to the properties file\n * @return configuration as key-value pairs\n * @throws IOException when not possible to get the properties file\n */\n public static Map<String, Object> getConfig(String path) throws IOException {\n return getConfig(path, System.getenv());\n }\n /**\n * Retrieve the bridge configuration from the environment variables",
"score": 46.03699148357694
},
{
"filename": "src/test/java/io/strimzi/kafka/bridge/mqtt/config/ConfigRetrieverTest.java",
"retrieved_chunk": " envs.put(BridgeConfig.BRIDGE_ID, \"my-bridge-env\");\n Map<String, Object> config = ConfigRetriever.getConfig(filePath, envs);\n BridgeConfig bridgeConfig = BridgeConfig.fromMap(config);\n assertThat(\"Bridge-ID should be 'my-bridge-env'\",\n bridgeConfig.getBridgeID(), is(\"my-bridge-env\"));\n }\n}",
"score": 45.068260535888896
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/config/ConfigRetriever.java",
"retrieved_chunk": " */\n public static Map<String, String> getConfigFromEnv() {\n return System.getenv();\n }\n /**\n * Retrieve the bridge configuration from the properties file provided as parameter\n * and adding the additional configuration parameter provided as well\n * If a parameter is defined in both properties file and additional configuration, the latter wins\n *\n * @param path path to the properties file",
"score": 44.93077435463033
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/config/ConfigRetriever.java",
"retrieved_chunk": " * @param additionalConfig additional configuration to add\n * @return configuration as key-value pairs\n * @throws IOException when not possible to get the properties file\n */\n public static Map<String, Object> getConfig(String path, Map<String, String> additionalConfig) throws IOException {\n Map<String, Object> configuration;\n try (InputStream is = new FileInputStream(path)) {\n Properties props = new Properties();\n props.load(is);\n configuration =",
"score": 42.93781602989241
},
{
"filename": "src/main/java/io/strimzi/kafka/bridge/mqtt/mapper/MappingRulesLoader.java",
"retrieved_chunk": " * Helper class to Load the rules from the configuration file\n */\npublic class MappingRulesLoader {\n private static final MappingRulesLoader INSTANCE = new MappingRulesLoader();\n // path of the topic mapping rule file\n private String mapperRuleFilePath;\n private boolean initialized = false;\n /**\n * Initialize the MappingRulesLoader\n *",
"score": 39.997794281857864
}
] | java | MappingRulesLoader.getInstance().init(mappingRulesFile); |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.selector;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.CCIndexRecord;
public class RecordSelector {
private static Logger LOGGER = LoggerFactory.getLogger(RecordSelector.class);
public static RecordSelector ACCEPT_ALL_RECORDS = new AcceptAllRecords();
@JsonProperty
Map<String, List<SelectorClause>> must = new HashMap<>();
@JsonProperty
Map<String, List<SelectorClause>> must_not = new HashMap<>();
@JsonProperty
Map<String, List<SelectorClause>> should = new HashMap<>();
public boolean select(CCIndexRecord record) {
for (Map.Entry<String, List<SelectorClause>> e : must_not.entrySet()) {
String val = getStringValue(e.getKey(), record);
if (val == null) {
LOGGER.warn("Value is null for '{}' in the must not clause", e.getKey());
continue;
}
for (SelectorClause clause : e.getValue()) {
if (clause.select(val)) {
return false;
}
}
}
for (Map.Entry<String, List<SelectorClause>> e : must.entrySet()) {
String val = getStringValue(e.getKey(), record);
if (val == null) {
LOGGER.warn("Value is null for '{}' in the must clause. Record not selected.",
e.getKey());
return false;
}
for (SelectorClause clause : e.getValue()) {
if (!clause.select(val)) {
return false;
}
}
}
if (should.size() == 0) {
return true;
}
for (Map.Entry<String, List<SelectorClause>> e : should.entrySet()) {
String val = getStringValue(e.getKey(), record);
if (val == null) {
LOGGER.warn("Value is null for '{}' in the should clause. Record not selected",
e.getKey());
continue;
}
for (SelectorClause clause : e.getValue()) {
if (clause.select(val)) {
return true;
}
}
}
return false;
}
private String getStringValue(String key, CCIndexRecord record) {
switch (key) {
case "mime_detected":
return record.getMimeDetected();
case "truncated":
return record.getTruncated();
case "mime":
return record.getMime();
case "status":
| return Integer.toString(record.getStatus()); |
case "url":
return record.getUrl();
case "host":
return record.getHost();
case "digest":
return record.getDigest();
default:
throw new IllegalArgumentException("Don't yet support key " + key);
}
}
private static class AcceptAllRecords extends RecordSelector {
@Override
public boolean select(CCIndexRecord record) {
return true;
}
}
}
| src/main/java/org/tallison/cc/index/selector/RecordSelector.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/CCIndexRecord.java",
"retrieved_chunk": " return CCIndexRecord.normalizeMime(mime);\n }\n public String getNormalizedMimeDetected() {\n return CCIndexRecord.normalizeMime(mimeDetected);\n }\n public Integer getStatus() {\n return status;\n }\n public void setStatus(int status) {\n this.status = status;",
"score": 33.422366081613546
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " return false;\n }\n //check for hit max\n //return false;\n Optional<CCIndexRecord> record = CCIndexRecord.parseRecord(json);\n if (record.isEmpty()) {\n //problem already logged\n return true;\n }\n CCIndexRecord r = record.get();",
"score": 31.693289976077406
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/FileFromCCWarcExtractor.java",
"retrieved_chunk": " String id = record.getUrl();\n try {\n parseWarc(id, record, warcRecordGZBytes);\n } catch (IOException e) {\n LOGGER.warn(\"problem parsing warc file\", e);\n }\n }\n private void fetchPayload(String id, CCIndexRecord ccIndexRecord, WarcRecord record)\n throws IOException {\n if (!((record instanceof WarcResponse) &&",
"score": 28.911331484893825
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java",
"retrieved_chunk": " //problem already logged\n return true;\n }\n CCIndexRecord r = record.get();\n if (!fetcherConfig.getRecordSelector().select(r)) {\n return true;\n }\n //if truncated, count appropriately and test for limits\n if (!StringUtils.isBlank(r.getTruncated())) {\n long truncated = counter.getTruncated().incrementAndGet();",
"score": 28.664220740936464
},
{
"filename": "src/main/java/org/tallison/cc/index/selector/AcceptAllRecords.java",
"retrieved_chunk": " public boolean select(CCIndexRecord record) {\n return true;\n }\n}",
"score": 27.503537365928917
}
] | java | return Integer.toString(record.getStatus()); |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.selector;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.CCIndexRecord;
public class RecordSelector {
private static Logger LOGGER = LoggerFactory.getLogger(RecordSelector.class);
public static RecordSelector ACCEPT_ALL_RECORDS = new AcceptAllRecords();
@JsonProperty
Map<String, List<SelectorClause>> must = new HashMap<>();
@JsonProperty
Map<String, List<SelectorClause>> must_not = new HashMap<>();
@JsonProperty
Map<String, List<SelectorClause>> should = new HashMap<>();
public boolean select(CCIndexRecord record) {
for (Map.Entry<String, List<SelectorClause>> e : must_not.entrySet()) {
String val = getStringValue(e.getKey(), record);
if (val == null) {
LOGGER.warn("Value is null for '{}' in the must not clause", e.getKey());
continue;
}
for (SelectorClause clause : e.getValue()) {
if (clause.select(val)) {
return false;
}
}
}
for (Map.Entry<String, List<SelectorClause>> e : must.entrySet()) {
String val = getStringValue(e.getKey(), record);
if (val == null) {
LOGGER.warn("Value is null for '{}' in the must clause. Record not selected.",
e.getKey());
return false;
}
for (SelectorClause clause : e.getValue()) {
if (!clause.select(val)) {
return false;
}
}
}
if (should.size() == 0) {
return true;
}
for (Map.Entry<String, List<SelectorClause>> e : should.entrySet()) {
String val = getStringValue(e.getKey(), record);
if (val == null) {
LOGGER.warn("Value is null for '{}' in the should clause. Record not selected",
e.getKey());
continue;
}
for (SelectorClause clause : e.getValue()) {
if (clause.select(val)) {
return true;
}
}
}
return false;
}
private String getStringValue(String key, CCIndexRecord record) {
switch (key) {
case "mime_detected":
return record.getMimeDetected();
case "truncated":
return | record.getTruncated(); |
case "mime":
return record.getMime();
case "status":
return Integer.toString(record.getStatus());
case "url":
return record.getUrl();
case "host":
return record.getHost();
case "digest":
return record.getDigest();
default:
throw new IllegalArgumentException("Don't yet support key " + key);
}
}
private static class AcceptAllRecords extends RecordSelector {
@Override
public boolean select(CCIndexRecord record) {
return true;
}
}
}
| src/main/java/org/tallison/cc/index/selector/RecordSelector.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " return false;\n }\n //check for hit max\n //return false;\n Optional<CCIndexRecord> record = CCIndexRecord.parseRecord(json);\n if (record.isEmpty()) {\n //problem already logged\n return true;\n }\n CCIndexRecord r = record.get();",
"score": 26.40208315557348
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java",
"retrieved_chunk": " LOGGER.info(\"processed: {}\", counter);\n }\n if (fetcherConfig.getMaxRecords() > -1 && totalRead >= fetcherConfig.getMaxRecords()) {\n LOGGER.info(\"hit max read\");\n return false;\n }\n //check for hit max\n //return false;\n Optional<CCIndexRecord> record = CCIndexRecord.parseRecord(json);\n if (record.isEmpty()) {",
"score": 22.56638520898196
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java",
"retrieved_chunk": " //problem already logged\n return true;\n }\n CCIndexRecord r = record.get();\n if (!fetcherConfig.getRecordSelector().select(r)) {\n return true;\n }\n //if truncated, count appropriately and test for limits\n if (!StringUtils.isBlank(r.getTruncated())) {\n long truncated = counter.getTruncated().incrementAndGet();",
"score": 22.264050958417812
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/FileFromCCWarcExtractor.java",
"retrieved_chunk": " String id = record.getUrl();\n try {\n parseWarc(id, record, warcRecordGZBytes);\n } catch (IOException e) {\n LOGGER.warn(\"problem parsing warc file\", e);\n }\n }\n private void fetchPayload(String id, CCIndexRecord ccIndexRecord, WarcRecord record)\n throws IOException {\n if (!((record instanceof WarcResponse) &&",
"score": 19.71898680792518
},
{
"filename": "src/main/java/org/tallison/cc/index/selector/AcceptAllRecords.java",
"retrieved_chunk": " public boolean select(CCIndexRecord record) {\n return true;\n }\n}",
"score": 18.61984259041492
}
] | java | record.getTruncated(); |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.nio.file.Files;
import java.nio.file.Paths;
/**
* This is a lighter class that doesn't rely on a database
* to extract files from CC and write a list of truncated urls.
*/
public class CCFetcherCli {
public static void main(String[] args) throws Exception {
String command = args[0];
if (command.equals("Fetch")) {
CCFileExtractor.main(new String[]{args[1]});
} else if (command.equals("FetchIndices")) {
CCIndexFetcher.main(new String[]{args[1]});
} else if (command.equals("CountMimes")) {
| CCMimeCounter.main(new String[]{ | args[1]});
} else if (Files.isRegularFile(Paths.get(command))) {
CCFileExtractor.main(new String[]{args[0]});
} else {
System.out.println("Must start with a command: Fetch, FetchIndices or CountMimes");
}
}
}
| src/main/java/org/tallison/cc/index/extractor/CCFetcherCli.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " private static final Logger LOGGER = LoggerFactory.getLogger(CCMimeCounter.class);\n public static void main(String[] args) throws Exception {\n ExtractorConfig fetcherConfig =\n new ObjectMapper().readValue(new File(args[0]), ExtractorConfig.class);\n execute(fetcherConfig);\n }\n private static void execute(ExtractorConfig fetcherConfig) throws IOException, TikaException {\n ArrayBlockingQueue<FetchEmitTuple> indexPathsList = new ArrayBlockingQueue<>(1000);\n //IndexPathsReader reads a file containing a list of cc-index.paths files\n //and writes the literal gz files (cc-index/collections/CC-MAIN-2023-06/indexes/cdx-00000.gz)",
"score": 48.14484502371386
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCIndexFetcher.java",
"retrieved_chunk": "import org.apache.tika.pipes.pipesiterator.PipesIterator;\n/**\n * This class fetches index files from aws to a local file share.\n * <p>\n * This pulls the index files either via https or s3\n */\npublic class CCIndexFetcher {\n private static final Logger LOGGER = LoggerFactory.getLogger(CCIndexFetcher.class);\n public static void main(String[] args) throws Exception {\n ExtractorConfig fetcherConfig =",
"score": 47.244740985177096
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractor.java",
"retrieved_chunk": "import org.apache.tika.utils.StringUtils;\n/**\n * This is a lighter class that doesn't rely on a database\n * to extract files from CC and log a list of truncated urls.\n */\npublic class CCFileExtractor {\n private static final Long INDEX_WORKER_ID = 1l;\n private static final Long INDEX_READER_ID = 2l;\n private static final Logger LOGGER = LoggerFactory.getLogger(CCFileExtractor.class);\n public static void main(String[] args) throws Exception {",
"score": 43.753415138382024
},
{
"filename": "src/main/java/org/tallison/cc/index/selector/MatchSelector.java",
"retrieved_chunk": " this.match = match;\n this.caseSensitive = caseSensitive == null ? DEFAULT_CASE_SENSITIVE : caseSensitive;\n }\n @Override\n public boolean select(String val) {\n if (caseSensitive) {\n if (match.equals(val)) {\n return true;\n }\n } else {",
"score": 29.247578804523286
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractor.java",
"retrieved_chunk": " if (future != null) {\n Long f = future.get();\n LOGGER.debug(\"completed {}\", f);\n if (f.equals(INDEX_WORKER_ID)) {\n finishedWorkers++;\n } else if (f.equals(INDEX_READER_ID)) {\n LOGGER.info(\"Index paths reader successfully completed\");\n }\n }\n }",
"score": 27.32270732138788
}
] | java | CCMimeCounter.main(new String[]{ |
package com.suimz.open.chatgptweb.java.core.exception;
import com.suimz.open.chatgptweb.java.bean.resp.R;
import org.springframework.http.ResponseEntity;
import org.springframework.http.converter.HttpMessageNotReadableException;
import org.springframework.validation.BindException;
import org.springframework.validation.FieldError;
import org.springframework.web.bind.MethodArgumentNotValidException;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.RestControllerAdvice;
import org.springframework.web.method.annotation.MethodArgumentTypeMismatchException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Handle Exceptions
*
* @author https://github.com/suimz
*/
@RestControllerAdvice
public class AdviceException {
@ExceptionHandler(BizException.class)
public ResponseEntity<R> map(BizException e) {
return ResponseEntity.status(e.getHttpStatus()).body(R.error(e.getMessage()));
}
@ExceptionHandler(UnauthorizedBizException.class)
public R map(UnauthorizedBizException e) {
return R.builder()
.status("Unauthorized")
.message(e.getMessage())
.build();
}
@ExceptionHandler(HttpMessageNotReadableException.class)
public R map(HttpMessageNotReadableException e) {
return | R.error("bad request"); |
}
@ExceptionHandler(BindException.class)
public R map(BindException e) {
List<FieldError> fieldErrors = e.getBindingResult().getFieldErrors();
Map<String, Object> error = this.getValidError(fieldErrors);
return R.error(error.get("errorMsg").toString());
}
@ExceptionHandler(MethodArgumentNotValidException.class)
public R map(MethodArgumentNotValidException e) {
List<FieldError> fieldErrors = e.getBindingResult().getFieldErrors();
Map<String, Object> error = this.getValidError(fieldErrors);
return R.error(error.get("errorMsg").toString());
}
private Map<String, Object> getValidError(List<FieldError> fieldErrors) {
Map<String, Object> map = new HashMap<String, Object>(16);
List<String> errorList = new ArrayList<String>();
StringBuffer errorMsg = new StringBuffer();
for (FieldError error : fieldErrors) {
errorList.add(error.getDefaultMessage());
errorMsg.append(error.getDefaultMessage());
// first
break;
}
map.put("errorList", errorList);
map.put("errorMsg", errorMsg);
return map;
}
@ExceptionHandler(MethodArgumentTypeMismatchException.class)
public R map(MethodArgumentTypeMismatchException e) {
return R.error(e.getMessage());
}
@ExceptionHandler(Exception.class)
public R map(Exception e) {
return R.error();
}
}
| src/main/java/com/suimz/open/chatgptweb/java/core/exception/AdviceException.java | suimz-chatgpt-web-java-37a4c63 | [
{
"filename": "src/main/java/com/suimz/open/chatgptweb/java/bean/resp/R.java",
"retrieved_chunk": " public static <T> R<T> ok(T data) {\n return R.ok(null, data);\n }\n public static <T> R<T> ok(String message, T data) {\n return (R<T>) R.builder().status(\"Success\").message(message).data(data).build();\n }\n public static R error() {\n return R.error(\"服务异常 | server exception\");\n }\n public static R error(String error) {",
"score": 40.8821475101427
},
{
"filename": "src/main/java/com/suimz/open/chatgptweb/java/bean/resp/R.java",
"retrieved_chunk": " return R.builder()\n .status(\"Fail\")\n .message(error)\n .build();\n }\n}",
"score": 40.82242926870663
},
{
"filename": "src/main/java/com/suimz/open/chatgptweb/java/bean/resp/R.java",
"retrieved_chunk": "public class R<T> {\n private String status;\n private String message;\n private T data;\n public static R ok() {\n return R.ok(null);\n }\n public static R ok(String message) {\n return R.ok(null, null);\n }",
"score": 33.27711841881835
},
{
"filename": "src/main/java/com/suimz/open/chatgptweb/java/util/SpringUtil.java",
"retrieved_chunk": " ipAddress = ipAddress.substring(0, ipAddress.indexOf(\",\"));\n }\n }\n } catch (Exception e) {\n log.error(e.getMessage(), e);\n }\n return ipAddress;\n }\n public static String getApplicationHomeAbsolutePath() {\n ApplicationHome home = new ApplicationHome(SpringUtil.class);",
"score": 24.29188858525153
},
{
"filename": "src/main/java/com/suimz/open/chatgptweb/java/controller/AppController.java",
"retrieved_chunk": " @Resource\n private ThreadPoolTaskExecutor asyncTaskExecutor;\n @Resource\n private OpenAiApiService openAiApiService;\n @Resource\n private OpenAiReverseService openAiReverseService;\n @PostMapping(\"/session\")\n public R<SessionResp> session() {\n return R.ok(\n SessionResp.builder()",
"score": 24.055177999506505
}
] | java | R.error("bad request"); |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.nio.file.Files;
import java.nio.file.Paths;
/**
* This is a lighter class that doesn't rely on a database
* to extract files from CC and write a list of truncated urls.
*/
public class CCFetcherCli {
public static void main(String[] args) throws Exception {
String command = args[0];
if (command.equals("Fetch")) {
| CCFileExtractor.main(new String[]{ | args[1]});
} else if (command.equals("FetchIndices")) {
CCIndexFetcher.main(new String[]{args[1]});
} else if (command.equals("CountMimes")) {
CCMimeCounter.main(new String[]{args[1]});
} else if (Files.isRegularFile(Paths.get(command))) {
CCFileExtractor.main(new String[]{args[0]});
} else {
System.out.println("Must start with a command: Fetch, FetchIndices or CountMimes");
}
}
}
| src/main/java/org/tallison/cc/index/extractor/CCFetcherCli.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractor.java",
"retrieved_chunk": "import org.apache.tika.utils.StringUtils;\n/**\n * This is a lighter class that doesn't rely on a database\n * to extract files from CC and log a list of truncated urls.\n */\npublic class CCFileExtractor {\n private static final Long INDEX_WORKER_ID = 1l;\n private static final Long INDEX_READER_ID = 2l;\n private static final Logger LOGGER = LoggerFactory.getLogger(CCFileExtractor.class);\n public static void main(String[] args) throws Exception {",
"score": 96.9767814771661
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCIndexFetcher.java",
"retrieved_chunk": "import org.apache.tika.pipes.pipesiterator.PipesIterator;\n/**\n * This class fetches index files from aws to a local file share.\n * <p>\n * This pulls the index files either via https or s3\n */\npublic class CCIndexFetcher {\n private static final Logger LOGGER = LoggerFactory.getLogger(CCIndexFetcher.class);\n public static void main(String[] args) throws Exception {\n ExtractorConfig fetcherConfig =",
"score": 49.009567712681296
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " private static final Logger LOGGER = LoggerFactory.getLogger(CCMimeCounter.class);\n public static void main(String[] args) throws Exception {\n ExtractorConfig fetcherConfig =\n new ObjectMapper().readValue(new File(args[0]), ExtractorConfig.class);\n execute(fetcherConfig);\n }\n private static void execute(ExtractorConfig fetcherConfig) throws IOException, TikaException {\n ArrayBlockingQueue<FetchEmitTuple> indexPathsList = new ArrayBlockingQueue<>(1000);\n //IndexPathsReader reads a file containing a list of cc-index.paths files\n //and writes the literal gz files (cc-index/collections/CC-MAIN-2023-06/indexes/cdx-00000.gz)",
"score": 48.391486242285474
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCIndexFetcher.java",
"retrieved_chunk": " new ObjectMapper().readValue(new File(args[0]), ExtractorConfig.class);\n execute(fetcherConfig);\n }\n private static void execute(ExtractorConfig fetcherConfig) throws Exception {\n ArrayBlockingQueue<FetchEmitTuple> indexPathsList = new ArrayBlockingQueue<>(1000);\n //IndexPathsReader reads a file containing a list of cc-index.paths files\n //and writes the literal gz files (cc-index/collections/CC-MAIN-2023-06/indexes/cdx-00000.gz)\n //to indexPathsList\n int totalThreads = fetcherConfig.getNumThreads() + 1;\n ExecutorService executorService = Executors.newFixedThreadPool(totalThreads);",
"score": 37.05720256292353
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractor.java",
"retrieved_chunk": " ExtractorConfig fetcherConfig =\n new ObjectMapper().readValue(new File(args[0]), ExtractorConfig.class);\n execute(fetcherConfig);\n }\n private static void execute(ExtractorConfig fetcherConfig) throws TikaException {\n ArrayBlockingQueue<FetchEmitTuple> indexPathsList = new ArrayBlockingQueue<>(1000);\n //IndexPathsReader reads a file containing a list of cc-index.paths files\n //and writes the literal gz files (cc-index/collections/CC-MAIN-2023-06/indexes/cdx-00000.gz)\n //to indexPathsList\n //IndexWorker reads a single index.gz file at a time and processes each record",
"score": 36.704804780924235
}
] | java | CCFileExtractor.main(new String[]{ |
package com.suimz.open.chatgptweb.java.core.exception;
import com.suimz.open.chatgptweb.java.bean.resp.R;
import org.springframework.http.ResponseEntity;
import org.springframework.http.converter.HttpMessageNotReadableException;
import org.springframework.validation.BindException;
import org.springframework.validation.FieldError;
import org.springframework.web.bind.MethodArgumentNotValidException;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.RestControllerAdvice;
import org.springframework.web.method.annotation.MethodArgumentTypeMismatchException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Handle Exceptions
*
* @author https://github.com/suimz
*/
@RestControllerAdvice
public class AdviceException {
@ExceptionHandler(BizException.class)
public ResponseEntity<R> map(BizException e) {
return ResponseEntity.status(e.getHttpStatus()).body(R.error(e.getMessage()));
}
@ExceptionHandler(UnauthorizedBizException.class)
public R map(UnauthorizedBizException e) {
return R.builder()
.status("Unauthorized")
.message(e.getMessage())
.build();
}
@ExceptionHandler(HttpMessageNotReadableException.class)
public R map(HttpMessageNotReadableException e) {
return R.error("bad request");
}
@ExceptionHandler(BindException.class)
public R map(BindException e) {
List<FieldError> fieldErrors = e.getBindingResult().getFieldErrors();
Map<String, Object> error = this.getValidError(fieldErrors);
| return R.error(error.get("errorMsg").toString()); |
}
@ExceptionHandler(MethodArgumentNotValidException.class)
public R map(MethodArgumentNotValidException e) {
List<FieldError> fieldErrors = e.getBindingResult().getFieldErrors();
Map<String, Object> error = this.getValidError(fieldErrors);
return R.error(error.get("errorMsg").toString());
}
private Map<String, Object> getValidError(List<FieldError> fieldErrors) {
Map<String, Object> map = new HashMap<String, Object>(16);
List<String> errorList = new ArrayList<String>();
StringBuffer errorMsg = new StringBuffer();
for (FieldError error : fieldErrors) {
errorList.add(error.getDefaultMessage());
errorMsg.append(error.getDefaultMessage());
// first
break;
}
map.put("errorList", errorList);
map.put("errorMsg", errorMsg);
return map;
}
@ExceptionHandler(MethodArgumentTypeMismatchException.class)
public R map(MethodArgumentTypeMismatchException e) {
return R.error(e.getMessage());
}
@ExceptionHandler(Exception.class)
public R map(Exception e) {
return R.error();
}
}
| src/main/java/com/suimz/open/chatgptweb/java/core/exception/AdviceException.java | suimz-chatgpt-web-java-37a4c63 | [
{
"filename": "src/main/java/com/suimz/open/chatgptweb/java/bean/resp/R.java",
"retrieved_chunk": " public static <T> R<T> ok(T data) {\n return R.ok(null, data);\n }\n public static <T> R<T> ok(String message, T data) {\n return (R<T>) R.builder().status(\"Success\").message(message).data(data).build();\n }\n public static R error() {\n return R.error(\"服务异常 | server exception\");\n }\n public static R error(String error) {",
"score": 46.41707495406912
},
{
"filename": "src/main/java/com/suimz/open/chatgptweb/java/bean/resp/R.java",
"retrieved_chunk": " return R.builder()\n .status(\"Fail\")\n .message(error)\n .build();\n }\n}",
"score": 38.34962444765884
},
{
"filename": "src/main/java/com/suimz/open/chatgptweb/java/bean/po/ReverseResponseBodyCallback.java",
"retrieved_chunk": " ResponseBody errorBody = response.errorBody();\n if (errorBody == null) {\n throw e;\n } else {\n OpenAiError error = mapper.readValue(\n errorBody.string(),\n OpenAiError.class\n );\n throw new OpenAiHttpException(error, e, e.code());\n }",
"score": 34.69814575688426
},
{
"filename": "src/main/java/com/suimz/open/chatgptweb/java/util/SpringUtil.java",
"retrieved_chunk": " ipAddress = ipAddress.substring(0, ipAddress.indexOf(\",\"));\n }\n }\n } catch (Exception e) {\n log.error(e.getMessage(), e);\n }\n return ipAddress;\n }\n public static String getApplicationHomeAbsolutePath() {\n ApplicationHome home = new ApplicationHome(SpringUtil.class);",
"score": 32.533821903874106
},
{
"filename": "src/main/java/com/suimz/open/chatgptweb/java/bean/resp/R.java",
"retrieved_chunk": "public class R<T> {\n private String status;\n private String message;\n private T data;\n public static R ok() {\n return R.ok(null);\n }\n public static R ok(String message) {\n return R.ok(null, null);\n }",
"score": 28.637614649829178
}
] | java | return R.error(error.get("errorMsg").toString()); |
package com.suimz.open.chatgptweb.java.core.exception;
import com.suimz.open.chatgptweb.java.bean.resp.R;
import org.springframework.http.ResponseEntity;
import org.springframework.http.converter.HttpMessageNotReadableException;
import org.springframework.validation.BindException;
import org.springframework.validation.FieldError;
import org.springframework.web.bind.MethodArgumentNotValidException;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.RestControllerAdvice;
import org.springframework.web.method.annotation.MethodArgumentTypeMismatchException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Handle Exceptions
*
* @author https://github.com/suimz
*/
@RestControllerAdvice
public class AdviceException {
@ExceptionHandler(BizException.class)
public ResponseEntity<R> map(BizException e) {
return ResponseEntity.status(e.getHttpStatus()).body(R.error(e.getMessage()));
}
@ExceptionHandler(UnauthorizedBizException.class)
public R map(UnauthorizedBizException e) {
return R.builder()
.status("Unauthorized")
.message(e.getMessage())
.build();
}
@ExceptionHandler(HttpMessageNotReadableException.class)
public R map(HttpMessageNotReadableException e) {
return R.error("bad request");
}
@ExceptionHandler(BindException.class)
public R map(BindException e) {
List<FieldError> fieldErrors = e.getBindingResult().getFieldErrors();
Map<String, Object> error = this.getValidError(fieldErrors);
return R.error(error.get("errorMsg").toString());
}
@ExceptionHandler(MethodArgumentNotValidException.class)
public R map(MethodArgumentNotValidException e) {
List<FieldError> fieldErrors = e.getBindingResult().getFieldErrors();
Map<String, Object> error = this.getValidError(fieldErrors);
return R.error(error.get("errorMsg").toString());
}
private Map<String, Object> getValidError(List<FieldError> fieldErrors) {
Map<String, Object> map = new HashMap<String, Object>(16);
List<String> errorList = new ArrayList<String>();
StringBuffer errorMsg = new StringBuffer();
for (FieldError error : fieldErrors) {
errorList.add(error.getDefaultMessage());
errorMsg.append(error.getDefaultMessage());
// first
break;
}
map.put("errorList", errorList);
map.put("errorMsg", errorMsg);
return map;
}
@ExceptionHandler(MethodArgumentTypeMismatchException.class)
public R map(MethodArgumentTypeMismatchException e) {
return R.error(e.getMessage());
}
@ExceptionHandler(Exception.class)
public R map(Exception e) {
return | R.error(); |
}
}
| src/main/java/com/suimz/open/chatgptweb/java/core/exception/AdviceException.java | suimz-chatgpt-web-java-37a4c63 | [
{
"filename": "src/main/java/com/suimz/open/chatgptweb/java/bean/resp/R.java",
"retrieved_chunk": " public static <T> R<T> ok(T data) {\n return R.ok(null, data);\n }\n public static <T> R<T> ok(String message, T data) {\n return (R<T>) R.builder().status(\"Success\").message(message).data(data).build();\n }\n public static R error() {\n return R.error(\"服务异常 | server exception\");\n }\n public static R error(String error) {",
"score": 37.374647158591344
},
{
"filename": "src/main/java/com/suimz/open/chatgptweb/java/util/SpringUtil.java",
"retrieved_chunk": " ipAddress = ipAddress.substring(0, ipAddress.indexOf(\",\"));\n }\n }\n } catch (Exception e) {\n log.error(e.getMessage(), e);\n }\n return ipAddress;\n }\n public static String getApplicationHomeAbsolutePath() {\n ApplicationHome home = new ApplicationHome(SpringUtil.class);",
"score": 36.49580432418233
},
{
"filename": "src/main/java/com/suimz/open/chatgptweb/java/bean/resp/R.java",
"retrieved_chunk": " return R.builder()\n .status(\"Fail\")\n .message(error)\n .build();\n }\n}",
"score": 30.74725245633529
},
{
"filename": "src/main/java/com/suimz/open/chatgptweb/java/service/OpenAiApiService.java",
"retrieved_chunk": " this.checkService();\n Double balance = null;\n try {\n if (StrUtil.isNotBlank(appProperties.getOpenaiSensitiveId())) {\n String authHeader = \"Bearer \" + appProperties.getOpenaiSensitiveId();\n balance = openAiExtApi.billing(authHeader).blockingGet().getTotal_available();\n }\n } catch (Exception e) {\n log.error(e.getMessage(), e);\n }",
"score": 29.300246411221597
},
{
"filename": "src/main/java/com/suimz/open/chatgptweb/java/bean/resp/R.java",
"retrieved_chunk": "public class R<T> {\n private String status;\n private String message;\n private T data;\n public static R ok() {\n return R.ok(null);\n }\n public static R ok(String message) {\n return R.ok(null, null);\n }",
"score": 29.01896823580111
}
] | java | R.error(); |
package com.suimz.open.chatgptweb.java.controller;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.StrUtil;
import com.suimz.open.chatgptweb.java.bean.constant.ApiRunMode;
import com.suimz.open.chatgptweb.java.bean.req.AuthVerifyReq;
import com.suimz.open.chatgptweb.java.bean.req.ChatProcessReq;
import com.suimz.open.chatgptweb.java.bean.resp.ConfigResp;
import com.suimz.open.chatgptweb.java.bean.resp.R;
import com.suimz.open.chatgptweb.java.bean.resp.SessionResp;
import com.suimz.open.chatgptweb.java.core.component.ratelimiter.ApiRateLimiter;
import com.suimz.open.chatgptweb.java.core.exception.ApiRequestErrorBizException;
import com.suimz.open.chatgptweb.java.core.exception.BizException;
import com.suimz.open.chatgptweb.java.core.properties.AppProperties;
import com.suimz.open.chatgptweb.java.service.OpenAiApiService;
import com.suimz.open.chatgptweb.java.service.OpenAiReverseService;
import com.suimz.open.chatgptweb.java.util.ObjUtil;
import com.theokanning.openai.OpenAiHttpException;
import javax.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.servlet.mvc.method.annotation.SseEmitter;
import java.io.IOException;
import java.net.SocketException;
import java.net.SocketTimeoutException;
/**
* Server API Controller
*
* @author https://github.com/suimz
*/
@RestController
@RequestMapping("/api")
@Slf4j
public class AppController {
@Resource
private AppProperties appProperties;
@Resource
private ThreadPoolTaskExecutor asyncTaskExecutor;
@Resource
private OpenAiApiService openAiApiService;
@Resource
private OpenAiReverseService openAiReverseService;
@PostMapping("/session")
public R<SessionResp> session() {
return R.ok(
SessionResp.builder()
.auth(StrUtil.isNotBlank(appProperties.getAuthSecretKey()))
.model(ApiRunMode.get(appProperties).getName())
.build()
);
}
@PostMapping("/config")
public R<ConfigResp> config() {
String socksProxy;
if (appProperties.getSocksProxy() != null && ObjectUtil.isAllNotEmpty(appProperties.getSocksProxy().getHost(), appProperties.getSocksProxy().getPort())) {
socksProxy = StrUtil.format("{}:{}", appProperties.getSocksProxy().getHost(), appProperties.getSocksProxy().getPort());
} else {
socksProxy = "-";
}
String httpProxy;
if (appProperties.getHttpProxy() != null && ObjectUtil.isAllNotEmpty(appProperties.getHttpProxy().getHost(), appProperties.getHttpProxy().getPort())) {
httpProxy = StrUtil.format("{}:{}", appProperties.getHttpProxy().getHost(), appProperties.getHttpProxy().getPort());
} else {
httpProxy = "-";
}
Double balance = null;
ApiRunMode apiRunMode = ApiRunMode.get(appProperties);
if (apiRunMode == ApiRunMode.API) {
balance = openAiApiService.queryBalance();
}
return R.ok(
ConfigResp.builder()
.apiModel(apiRunMode.getName())
.timeoutMs(appProperties.getApiTimeoutMs())
.httpsProxy(httpProxy)
.socksProxy(socksProxy)
.reverseProxy(ObjUtil.getNotBlankValSequential("-", appProperties.getOpenaiReverseApiProxyUrl()))
.balance(ObjUtil.getNotNullValSequential("-", balance))
.build()
);
}
@PostMapping("/verify")
public R<SessionResp> authVerify(@RequestBody @Validated AuthVerifyReq req) {
if (!StrUtil.equals(appProperties.getAuthSecretKey(), req.getToken())) {
throw new BizException("Secret key is invalid");
}
return | R.ok("Verify successfully"); |
}
@ApiRateLimiter
@PostMapping("/chat-process")
public SseEmitter chatProcess(@RequestBody @Validated ChatProcessReq req) {
SseEmitter sseEmitter = new SseEmitter(appProperties.getApiTimeoutMs());
asyncTaskExecutor.execute(() -> {
try {
switch (ApiRunMode.get(appProperties)) {
case API:
openAiApiService.streamChat(sseEmitter, req);
break;
case REVERSE:
openAiReverseService.streamChat(sseEmitter, req);
break;
}
} catch (Throwable e) {
log.error(e.getMessage(), e);
BizException thrEx;
if (e instanceof BizException) {
thrEx = (BizException) e;
} else if (e instanceof OpenAiHttpException) {
OpenAiHttpException exception = (OpenAiHttpException) e;
thrEx = new ApiRequestErrorBizException(exception.statusCode, exception.getMessage());
} else if (e.getCause() instanceof SocketTimeoutException) {
thrEx = new ApiRequestErrorBizException(0);
} else if (e.getCause() instanceof SocketException || e.getCause() instanceof IOException) {
thrEx = new ApiRequestErrorBizException(-1);
} else {
thrEx = new ApiRequestErrorBizException();
}
sseEmitter.completeWithError(thrEx);
}
});
return sseEmitter;
}
}
| src/main/java/com/suimz/open/chatgptweb/java/controller/AppController.java | suimz-chatgpt-web-java-37a4c63 | [
{
"filename": "src/main/java/com/suimz/open/chatgptweb/java/core/interceptor/AuthInterceptor.java",
"retrieved_chunk": " if (StrUtil.isNotBlank(appProperties.getAuthSecretKey())) {\n String token = getToken(request);\n if (!StrUtil.equals(appProperties.getAuthSecretKey(), token)) {\n throw new UnauthorizedBizException();\n }\n }\n return true;\n }\n public String getToken(HttpServletRequest request) {\n String bearer = request.getHeader(\"Authorization\");",
"score": 30.284942001229975
},
{
"filename": "src/main/java/com/suimz/open/chatgptweb/java/bean/resp/R.java",
"retrieved_chunk": " public static <T> R<T> ok(T data) {\n return R.ok(null, data);\n }\n public static <T> R<T> ok(String message, T data) {\n return (R<T>) R.builder().status(\"Success\").message(message).data(data).build();\n }\n public static R error() {\n return R.error(\"服务异常 | server exception\");\n }\n public static R error(String error) {",
"score": 21.15058984685041
},
{
"filename": "src/main/java/com/suimz/open/chatgptweb/java/bean/resp/R.java",
"retrieved_chunk": "public class R<T> {\n private String status;\n private String message;\n private T data;\n public static R ok() {\n return R.ok(null);\n }\n public static R ok(String message) {\n return R.ok(null, null);\n }",
"score": 20.707220443748326
},
{
"filename": "src/main/java/com/suimz/open/chatgptweb/java/bean/req/AuthVerifyReq.java",
"retrieved_chunk": " @NotBlank(message = \"Secret key is empty\")\n private String token;\n}",
"score": 17.38617419413256
},
{
"filename": "src/main/java/com/suimz/open/chatgptweb/java/service/OpenAiReverseService.java",
"retrieved_chunk": " }\n public void checkService() {\n if (StrUtil.isBlank(appProperties.getOpenaiAccessToken()) || StrUtil.isBlank(appProperties.getOpenaiReverseApiProxyUrl())) {\n throw new ReverseServiceNotInitializedBizException();\n }\n }\n @Override\n public void streamChat(SseEmitter sseEmitter, ChatProcessReq req) {\n this.checkService();\n String authHeader = \"Bearer \" + appProperties.getOpenaiAccessToken();",
"score": 16.183951704105713
}
] | java | R.ok("Verify successfully"); |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.io.IOException;
import java.util.Optional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.AbstractRecordProcessor;
import org.tallison.cc.index.CCIndexReaderCounter;
import org.tallison.cc.index.CCIndexRecord;
import org.apache.tika.exception.TikaConfigException;
import org.apache.tika.utils.StringUtils;
public class CCFileExtractorRecordProcessor extends AbstractRecordProcessor {
private static Logger LOGGER = LoggerFactory.getLogger(CCFileExtractorRecordProcessor.class);
private static Logger TRUNCATED_URLS_LOGGER = LoggerFactory.getLogger("truncated-urls");
private static Logger TRUNCATED_URLS_FULL_LOGGER =
LoggerFactory.getLogger("truncated-urls-full");
private final ExtractorConfig fetcherConfig;
private final CCIndexReaderCounter counter;
private final FileFromCCWarcExtractor fileFromCCWarcFetcher;
private long reportEvery = 100000;
public CCFileExtractorRecordProcessor(ExtractorConfig fetcherConfig, CCIndexReaderCounter counter)
throws TikaConfigException, IOException {
this.fetcherConfig = fetcherConfig;
this.counter = counter;
this.fileFromCCWarcFetcher = new FileFromCCWarcExtractor(fetcherConfig, counter);
//completely arbitrary
if (fetcherConfig.getNumThreads() > 10) {
reportEvery = 1000000;
}
}
@Override
public boolean process(String json) throws IOException, InterruptedException {
//System.out.println("JSON: " + json);
long totalRead = counter.getRecordsRead().incrementAndGet();
if (totalRead % reportEvery == 0) {
LOGGER.info("processed: {}", counter);
}
if (fetcherConfig.getMaxRecords() > -1 && totalRead >= fetcherConfig.getMaxRecords()) {
LOGGER.info("hit max read");
return false;
}
//check for hit max
//return false;
Optional<CCIndexRecord> record = CCIndexRecord.parseRecord(json);
if (record.isEmpty()) {
//problem already logged
return true;
}
CCIndexRecord r = record.get();
if (!fetcherConfig.getRecordSelector().select(r)) {
return true;
}
//if truncated, count appropriately and test for limits
if (!StringUtils.isBlank(r.getTruncated())) {
long truncated = counter.getTruncated().incrementAndGet();
if (fetcherConfig.getMaxFilesTruncated() > -1 &&
truncated >= fetcherConfig.getMaxFilesTruncated()) {
LOGGER.info("hit max truncated files");
return false;
}
}
if (fetcherConfig.isExtractTruncated() || StringUtils.isBlank(r.getTruncated())) {
long extracted = counter.getFilesExtracted().incrementAndGet();
if (fetcherConfig.getMaxFilesExtracted() > -1 &&
extracted >= fetcherConfig.getMaxFilesExtracted()) {
LOGGER.info("hit max extracted files");
return false;
}
if (fetcherConfig.isDryRun()) {
LOGGER.info("dry run, but would have extracted {}", r);
return true;
}
fetchBytes(r);
return true;
} else {
String url = r.getUrl();
TRUNCATED_URLS_LOGGER.info("", url);
//url,mime,mime_detected,warc_file,warc_offset,warc_length,truncated
TRUNCATED_URLS_FULL_LOGGER.info("", url,
r.getNormalizedMime(), r.getNormalizedMimeDetected(), r.getFilename(),
| r.getOffset(), r.getLength(), r.getTruncated()); |
return true;
}
}
private void fetchBytes(CCIndexRecord r) throws InterruptedException {
fileFromCCWarcFetcher.fetchToPath(r);
}
@Override
public void close() throws IOException {
}
}
| src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/extractor/FileFromCCWarcExtractor.java",
"retrieved_chunk": " //new ObjectArray ?\n //url,mime_detected,warc_file,warc_offset,warc_length,sha256,length,path\n EXTRACTED_LOGGER.info(\"\", ccIndexRecord.getUrl(),\n ccIndexRecord.getNormalizedMime(),\n ccIndexRecord.getNormalizedMimeDetected(),\n ccIndexRecord.getFilename(),\n ccIndexRecord.getOffset(), ccIndexRecord.getLength(),\n targetDigest, length,\n targetPath);\n }",
"score": 71.30582636323038
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " if (!fetcherConfig.getRecordSelector().select(r)) {\n return true;\n }\n increment(totalCounts, r.getNormalizedMimeDetected());\n if (!StringUtils.isBlank(r.getTruncated())) {\n long truncated = counter.getTruncated().incrementAndGet();\n if (fetcherConfig.getMaxFilesTruncated() > -1 &&\n truncated >= fetcherConfig.getMaxFilesTruncated()) {\n LOGGER.info(\"hit max truncated files\");\n return false;",
"score": 70.0701138041251
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/FileFromCCWarcExtractor.java",
"retrieved_chunk": " String targetPath) {\n if (extractTruncated) {\n EXTRACTED_ALL_LOGGER.info(\"\", ccIndexRecord.getUrl(),\n ccIndexRecord.getNormalizedMime(),\n ccIndexRecord.getNormalizedMimeDetected(),\n ccIndexRecord.getFilename(),\n ccIndexRecord.getOffset(), ccIndexRecord.getLength(),\n ccIndexRecord.getTruncated(), targetDigest, length,\n targetPath);\n } else {",
"score": 42.362218424042084
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " }\n increment(truncatedCounts, r.getNormalizedMimeDetected());\n return true;\n }\n return true;\n }\n private void increment(Map<String, MutableLong> m, String k) {\n MutableLong cnt = m.get(k);\n if (cnt == null) {\n cnt = new MutableLong(1);",
"score": 41.814978511889244
},
{
"filename": "src/test/java/org/tallison/cc/index/selector/IndexRecordSelectorTest.java",
"retrieved_chunk": " if (!indexRecord.getMime().equals(indexRecord.getMimeDetected())) {\n System.out.println(line);\n }\n if (!StringUtils.isBlank(indexRecord.getTruncated())) {\n }\n }\n line = r.readLine();\n }\n }\n }",
"score": 41.26033444652995
}
] | java | r.getOffset(), r.getLength(), r.getTruncated()); |
package com.suimz.open.chatgptweb.java.core.component.ratelimiter;
import com.suimz.open.chatgptweb.java.core.exception.ChatApiRequestTooManyBizException;
import com.suimz.open.chatgptweb.java.core.properties.AppProperties;
import com.suimz.open.chatgptweb.java.util.SpringUtil;
import io.github.resilience4j.ratelimiter.RateLimiter;
import io.github.resilience4j.ratelimiter.RateLimiterConfig;
import io.github.resilience4j.ratelimiter.RateLimiterRegistry;
import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Before;
import org.aspectj.lang.reflect.MethodSignature;
import org.springframework.stereotype.Component;
import java.lang.reflect.Method;
import java.time.Duration;
/**
* API request rate restriction - Handle
*
* @author https://github.com/suimz
*/
@Aspect
@Component
public class ApiRateLimiterAspect {
private RateLimiterRegistry rateLimiterRegistry = null;
public ApiRateLimiterAspect(AppProperties appProperties) {
Integer period = appProperties.getMaxRequestPerHour();
if (period != null && period > 0) {
this.rateLimiterRegistry = RateLimiterRegistry.of(
RateLimiterConfig.custom()
.limitForPeriod(period) // Maximum number of requests
.limitRefreshPeriod(Duration.ofHours(1)) // 1 hour
.timeoutDuration(Duration.ofMillis(1))
.build()
);
}
}
@Before("@annotation(apiRateLimiter)")
public void doBefore(JoinPoint point, ApiRateLimiter apiRateLimiter) {
if (this.rateLimiterRegistry == null) return;
RateLimiter rateLimiter = rateLimiterRegistry.rateLimiter(getCombineKey(point));
if (!rateLimiter.acquirePermission()) throw new ChatApiRequestTooManyBizException();
}
public String getCombineKey(JoinPoint point) {
| StringBuilder sb = new StringBuilder(SpringUtil.getClientIp()).append("-"); |
MethodSignature signature = (MethodSignature) point.getSignature();
Method method = signature.getMethod();
Class<?> targetClass = method.getDeclaringClass();
sb.append(targetClass.getName()).append("-").append(method.getName());
return sb.toString();
}
}
| src/main/java/com/suimz/open/chatgptweb/java/core/component/ratelimiter/ApiRateLimiterAspect.java | suimz-chatgpt-web-java-37a4c63 | [
{
"filename": "src/main/java/com/suimz/open/chatgptweb/java/bean/po/SsePushEventBuilder.java",
"retrieved_chunk": " }\n SsePushEventBuilder append(char ch) {\n if (this.sb == null) {\n this.sb = new StringBuilder();\n }\n this.sb.append(ch);\n return this;\n }\n @Override\n public Set<ResponseBodyEmitter.DataWithMediaType> build() {",
"score": 33.5477393809479
},
{
"filename": "src/main/java/com/suimz/open/chatgptweb/java/bean/po/SsePushEventBuilder.java",
"retrieved_chunk": " if (!StringUtils.hasLength(this.sb) && this.dataToSend.isEmpty()) {\n return Collections.emptySet();\n }\n saveAppendedText();\n return this.dataToSend;\n }\n private void saveAppendedText() {\n if (this.sb != null) {\n this.dataToSend.add(new ResponseBodyEmitter.DataWithMediaType(this.sb.toString(), TEXT_PLAIN));\n this.sb = null;",
"score": 21.52946943519177
},
{
"filename": "src/main/java/com/suimz/open/chatgptweb/java/bean/po/SsePushEventBuilder.java",
"retrieved_chunk": "public class SsePushEventBuilder implements SseEmitter.SseEventBuilder {\n private final Set<ResponseBodyEmitter.DataWithMediaType> dataToSend = new LinkedHashSet<>(1);\n @Nullable\n private StringBuilder sb;\n @Override\n public SseEmitter.SseEventBuilder id(String id) {\n return this;\n }\n @Override\n public SseEmitter.SseEventBuilder name(String name) {",
"score": 18.062202024862803
},
{
"filename": "src/main/java/com/suimz/open/chatgptweb/java/service/OpenAiApiService.java",
"retrieved_chunk": " }\n public void checkService() {\n if (StrUtil.isBlank(appProperties.getOpenaiApiKey())) {\n throw new ApiServiceNotInitializedBizException();\n }\n }\n @Override\n public void streamChat(SseEmitter sseEmitter, ChatProcessReq req) {\n this.checkService();\n LocalDateTime startTime = LocalDateTime.now();",
"score": 14.691165036117763
},
{
"filename": "src/main/java/com/suimz/open/chatgptweb/java/util/SpringUtil.java",
"retrieved_chunk": "public class SpringUtil implements ApplicationContextAware {\n private static ApplicationContext applicationContext;\n @Override\n public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {\n if (SpringUtil.applicationContext == null) {\n SpringUtil.applicationContext = applicationContext;\n }\n }\n public static ApplicationContext getApplicationContext() {\n return applicationContext;",
"score": 14.302490189905942
}
] | java | StringBuilder sb = new StringBuilder(SpringUtil.getClientIp()).append("-"); |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.selector;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.CCIndexRecord;
public class RecordSelector {
private static Logger LOGGER = LoggerFactory.getLogger(RecordSelector.class);
public static RecordSelector ACCEPT_ALL_RECORDS = new AcceptAllRecords();
@JsonProperty
Map<String, List<SelectorClause>> must = new HashMap<>();
@JsonProperty
Map<String, List<SelectorClause>> must_not = new HashMap<>();
@JsonProperty
Map<String, List<SelectorClause>> should = new HashMap<>();
public boolean select(CCIndexRecord record) {
for (Map.Entry<String, List<SelectorClause>> e : must_not.entrySet()) {
String val = getStringValue(e.getKey(), record);
if (val == null) {
LOGGER.warn("Value is null for '{}' in the must not clause", e.getKey());
continue;
}
for (SelectorClause clause : e.getValue()) {
if (clause.select(val)) {
return false;
}
}
}
for (Map.Entry<String, List<SelectorClause>> e : must.entrySet()) {
String val = getStringValue(e.getKey(), record);
if (val == null) {
LOGGER.warn("Value is null for '{}' in the must clause. Record not selected.",
e.getKey());
return false;
}
for (SelectorClause clause : e.getValue()) {
if ( | !clause.select(val)) { |
return false;
}
}
}
if (should.size() == 0) {
return true;
}
for (Map.Entry<String, List<SelectorClause>> e : should.entrySet()) {
String val = getStringValue(e.getKey(), record);
if (val == null) {
LOGGER.warn("Value is null for '{}' in the should clause. Record not selected",
e.getKey());
continue;
}
for (SelectorClause clause : e.getValue()) {
if (clause.select(val)) {
return true;
}
}
}
return false;
}
private String getStringValue(String key, CCIndexRecord record) {
switch (key) {
case "mime_detected":
return record.getMimeDetected();
case "truncated":
return record.getTruncated();
case "mime":
return record.getMime();
case "status":
return Integer.toString(record.getStatus());
case "url":
return record.getUrl();
case "host":
return record.getHost();
case "digest":
return record.getDigest();
default:
throw new IllegalArgumentException("Don't yet support key " + key);
}
}
private static class AcceptAllRecords extends RecordSelector {
@Override
public boolean select(CCIndexRecord record) {
return true;
}
}
}
| src/main/java/org/tallison/cc/index/selector/RecordSelector.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " report(\"non-truncated\", nonTruncated);\n }\n private static void calcNonTruncated(Map<String, Long> truncated, Map<String, Long> total,\n Map<String, Long> nonTruncated) {\n for (Map.Entry<String, Long> e : total.entrySet()) {\n Long val = e.getValue();\n Long t = truncated.getOrDefault(e.getKey(), 0l);\n val -= t;\n nonTruncated.put(e.getKey(), val);\n }",
"score": 62.72557082082902
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " } catch (IOException ex) {\n throw new RuntimeException(ex);\n }\n });\n }\n }\n }\n private static void update(Map<String, MutableLong> from, Map<String, Long> to) {\n for (Map.Entry<String, MutableLong> e : from.entrySet()) {\n Long cnt = to.get(e.getKey());",
"score": 39.597494857405344
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " }\n private static void report(String name, Map<String, Long> m) throws IOException {\n try (BufferedWriter writer = Files.newBufferedWriter(Paths.get(name + \".csv\"),\n StandardCharsets.UTF_8)) {\n try (CSVPrinter printer = new CSVPrinter(writer, CSVFormat.EXCEL)) {\n printer.printRecord(\"mime\", \"count\");\n m.entrySet().stream().sorted(Collections.reverseOrder(Map.Entry.comparingByValue()))\n .forEach(e -> {\n try {\n printer.printRecord(e.getKey(), e.getValue());",
"score": 35.6714148962502
},
{
"filename": "src/main/java/org/tallison/cc/index/selector/RegexSelector.java",
"retrieved_chunk": " public boolean select(String val) {\n Matcher m = pattern.matcher(val);\n if (m.find()) {\n return sampler.select(val);\n }\n return false;\n }\n}",
"score": 34.59793747018873
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " if (cnt == null) {\n cnt = 0l;\n }\n cnt += e.getValue().getValue();\n to.put(e.getKey(), cnt);\n }\n }\n private static class IndexWorker implements Callable<Long> {\n private final ArrayBlockingQueue<FetchEmitTuple> indexUrls;\n private final AbstractRecordProcessor recordProcessor;",
"score": 33.200612196954495
}
] | java | !clause.select(val)) { |
package com.suimz.open.chatgptweb.java.bean.po;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.theokanning.openai.OpenAiError;
import com.theokanning.openai.OpenAiHttpException;
import com.theokanning.openai.service.OpenAiService;
import com.theokanning.openai.service.SSEFormatException;
import io.reactivex.FlowableEmitter;
import okhttp3.ResponseBody;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.HttpException;
import retrofit2.Response;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
public class ReverseResponseBodyCallback implements Callback<ResponseBody> {
private static final ObjectMapper mapper = OpenAiService.defaultObjectMapper();
private FlowableEmitter<ReverseSSE> emitter;
private boolean emitDone;
public ReverseResponseBodyCallback(FlowableEmitter<ReverseSSE> emitter, boolean emitDone) {
this.emitter = emitter;
this.emitDone = emitDone;
}
@Override
public void onResponse(Call<ResponseBody> call, Response<ResponseBody> response) {
BufferedReader reader = null;
try {
if (!response.isSuccessful()) {
HttpException e = new HttpException(response);
ResponseBody errorBody = response.errorBody();
if (errorBody == null) {
throw e;
} else {
OpenAiError error = mapper.readValue(
errorBody.string(),
OpenAiError.class
);
throw new OpenAiHttpException(error, e, e.code());
}
}
InputStream in = response.body().byteStream();
reader = new BufferedReader(new InputStreamReader(in));
String line;
ReverseSSE sse = null;
while ((line = reader.readLine()) != null) {
if (line.startsWith("data:")) {
String data = line.substring(5).trim();
sse = new ReverseSSE(data);
} else if (line.equals("") && sse != null) {
if ( | sse.isDone()) { |
if (emitDone) {
emitter.onNext(sse);
}
break;
}
emitter.onNext(sse);
sse = null;
} else {
throw new SSEFormatException("Invalid sse format! " + line);
}
}
emitter.onComplete();
} catch (Throwable t) {
onFailure(call, t);
} finally {
if (reader != null) {
try {
reader.close();
} catch (IOException e) {
// do nothing
}
}
}
}
@Override
public void onFailure(Call<ResponseBody> call, Throwable t) {
emitter.onError(t);
}
} | src/main/java/com/suimz/open/chatgptweb/java/bean/po/ReverseResponseBodyCallback.java | suimz-chatgpt-web-java-37a4c63 | [
{
"filename": "src/main/java/com/suimz/open/chatgptweb/java/service/OpenAiReverseService.java",
"retrieved_chunk": " ObjectNode body = buildSendMsgBody(req);\n String sendMsgId = body.findValues(\"messages\").get(0).findValue(\"id\").asText();\n Flowable.<ReverseSSE>create(emitter -> reverseApi.conversation(appProperties.getOpenaiReverseApiProxyUrl(), body, authHeader).enqueue(new ReverseResponseBodyCallback(emitter, false)), BackpressureStrategy.BUFFER)\n .map(sse -> okHttpObjectMapper.readValue(sse.getData(), ReverseChatChunk.class))\n .blockingForEach(chunk -> {\n try {\n if (StrUtil.isNotBlank(chunk.getError())) {\n log.debug(chunk.getError());\n sseEmitter.completeWithError(new BizException(chunk.getError()));\n }",
"score": 35.331440723287955
},
{
"filename": "src/main/java/com/suimz/open/chatgptweb/java/bean/po/ReverseSSE.java",
"retrieved_chunk": "package com.suimz.open.chatgptweb.java.bean.po;\npublic class ReverseSSE {\n private static final String DONE_DATA = \"[DONE]\";\n private final String data;\n public ReverseSSE(String data){\n this.data = data;\n }\n public String getData(){\n return this.data;\n }",
"score": 31.608150716914835
},
{
"filename": "src/main/java/com/suimz/open/chatgptweb/java/bean/po/ReverseSSE.java",
"retrieved_chunk": " public byte[] toBytes(){\n return String.format(\"data: %s\\n\\n\", this.data).getBytes();\n }\n public boolean isDone(){\n return DONE_DATA.equalsIgnoreCase(this.data);\n }\n}",
"score": 24.852322427055462
},
{
"filename": "src/main/java/com/suimz/open/chatgptweb/java/bean/po/SsePushEventBuilder.java",
"retrieved_chunk": " @Override\n public SseEmitter.SseEventBuilder data(Object object) {\n return data(object, null);\n }\n @Override\n public SseEmitter.SseEventBuilder data(Object object, @Nullable MediaType mediaType) {\n saveAppendedText();\n this.dataToSend.add(new ResponseBodyEmitter.DataWithMediaType(object, mediaType));\n append('\\n');\n return this;",
"score": 24.412180070985247
},
{
"filename": "src/main/java/com/suimz/open/chatgptweb/java/bean/resp/R.java",
"retrieved_chunk": " public static <T> R<T> ok(T data) {\n return R.ok(null, data);\n }\n public static <T> R<T> ok(String message, T data) {\n return (R<T>) R.builder().status(\"Success\").message(message).data(data).build();\n }\n public static R error() {\n return R.error(\"服务异常 | server exception\");\n }\n public static R error(String error) {",
"score": 22.100230724912915
}
] | java | sse.isDone()) { |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.io.IOException;
import java.util.Optional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.AbstractRecordProcessor;
import org.tallison.cc.index.CCIndexReaderCounter;
import org.tallison.cc.index.CCIndexRecord;
import org.apache.tika.exception.TikaConfigException;
import org.apache.tika.utils.StringUtils;
public class CCFileExtractorRecordProcessor extends AbstractRecordProcessor {
private static Logger LOGGER = LoggerFactory.getLogger(CCFileExtractorRecordProcessor.class);
private static Logger TRUNCATED_URLS_LOGGER = LoggerFactory.getLogger("truncated-urls");
private static Logger TRUNCATED_URLS_FULL_LOGGER =
LoggerFactory.getLogger("truncated-urls-full");
private final ExtractorConfig fetcherConfig;
private final CCIndexReaderCounter counter;
private final FileFromCCWarcExtractor fileFromCCWarcFetcher;
private long reportEvery = 100000;
public CCFileExtractorRecordProcessor(ExtractorConfig fetcherConfig, CCIndexReaderCounter counter)
throws TikaConfigException, IOException {
this.fetcherConfig = fetcherConfig;
this.counter = counter;
this.fileFromCCWarcFetcher = new FileFromCCWarcExtractor(fetcherConfig, counter);
//completely arbitrary
if (fetcherConfig.getNumThreads() > 10) {
reportEvery = 1000000;
}
}
@Override
public boolean process(String json) throws IOException, InterruptedException {
//System.out.println("JSON: " + json);
long totalRead = counter.getRecordsRead().incrementAndGet();
if (totalRead % reportEvery == 0) {
LOGGER.info("processed: {}", counter);
}
if (fetcherConfig.getMaxRecords() > -1 && totalRead >= fetcherConfig.getMaxRecords()) {
LOGGER.info("hit max read");
return false;
}
//check for hit max
//return false;
Optional<CCIndexRecord> record = CCIndexRecord.parseRecord(json);
if (record.isEmpty()) {
//problem already logged
return true;
}
CCIndexRecord r = record.get();
if (!fetcherConfig.getRecordSelector().select(r)) {
return true;
}
//if truncated, count appropriately and test for limits
if (!StringUtils.isBlank(r.getTruncated())) {
long truncated = counter.getTruncated().incrementAndGet();
if (fetcherConfig.getMaxFilesTruncated() > -1 &&
truncated >= fetcherConfig.getMaxFilesTruncated()) {
LOGGER.info("hit max truncated files");
return false;
}
}
if (fetcherConfig.isExtractTruncated() || StringUtils.isBlank(r.getTruncated())) {
long extracted = counter.getFilesExtracted().incrementAndGet();
if (fetcherConfig.getMaxFilesExtracted() > -1 &&
extracted >= fetcherConfig.getMaxFilesExtracted()) {
LOGGER.info("hit max extracted files");
return false;
}
if (fetcherConfig.isDryRun()) {
LOGGER.info("dry run, but would have extracted {}", r);
return true;
}
fetchBytes(r);
return true;
} else {
String url = r.getUrl();
TRUNCATED_URLS_LOGGER.info("", url);
//url,mime,mime_detected,warc_file,warc_offset,warc_length,truncated
TRUNCATED_URLS_FULL_LOGGER.info("", url,
r.getNormalizedMime(), r.getNormalizedMimeDetected(), r.getFilename(),
r.getOffset( | ), r.getLength(), r.getTruncated()); |
return true;
}
}
private void fetchBytes(CCIndexRecord r) throws InterruptedException {
fileFromCCWarcFetcher.fetchToPath(r);
}
@Override
public void close() throws IOException {
}
}
| src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/extractor/FileFromCCWarcExtractor.java",
"retrieved_chunk": " //new ObjectArray ?\n //url,mime_detected,warc_file,warc_offset,warc_length,sha256,length,path\n EXTRACTED_LOGGER.info(\"\", ccIndexRecord.getUrl(),\n ccIndexRecord.getNormalizedMime(),\n ccIndexRecord.getNormalizedMimeDetected(),\n ccIndexRecord.getFilename(),\n ccIndexRecord.getOffset(), ccIndexRecord.getLength(),\n targetDigest, length,\n targetPath);\n }",
"score": 71.30582636323038
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " if (!fetcherConfig.getRecordSelector().select(r)) {\n return true;\n }\n increment(totalCounts, r.getNormalizedMimeDetected());\n if (!StringUtils.isBlank(r.getTruncated())) {\n long truncated = counter.getTruncated().incrementAndGet();\n if (fetcherConfig.getMaxFilesTruncated() > -1 &&\n truncated >= fetcherConfig.getMaxFilesTruncated()) {\n LOGGER.info(\"hit max truncated files\");\n return false;",
"score": 70.0701138041251
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/FileFromCCWarcExtractor.java",
"retrieved_chunk": " String targetPath) {\n if (extractTruncated) {\n EXTRACTED_ALL_LOGGER.info(\"\", ccIndexRecord.getUrl(),\n ccIndexRecord.getNormalizedMime(),\n ccIndexRecord.getNormalizedMimeDetected(),\n ccIndexRecord.getFilename(),\n ccIndexRecord.getOffset(), ccIndexRecord.getLength(),\n ccIndexRecord.getTruncated(), targetDigest, length,\n targetPath);\n } else {",
"score": 42.362218424042084
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " }\n increment(truncatedCounts, r.getNormalizedMimeDetected());\n return true;\n }\n return true;\n }\n private void increment(Map<String, MutableLong> m, String k) {\n MutableLong cnt = m.get(k);\n if (cnt == null) {\n cnt = new MutableLong(1);",
"score": 41.814978511889244
},
{
"filename": "src/test/java/org/tallison/cc/index/selector/IndexRecordSelectorTest.java",
"retrieved_chunk": " if (!indexRecord.getMime().equals(indexRecord.getMimeDetected())) {\n System.out.println(line);\n }\n if (!StringUtils.isBlank(indexRecord.getTruncated())) {\n }\n }\n line = r.readLine();\n }\n }\n }",
"score": 41.26033444652995
}
] | java | ), r.getLength(), r.getTruncated()); |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.io.IOException;
import java.util.Optional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.AbstractRecordProcessor;
import org.tallison.cc.index.CCIndexReaderCounter;
import org.tallison.cc.index.CCIndexRecord;
import org.apache.tika.exception.TikaConfigException;
import org.apache.tika.utils.StringUtils;
public class CCFileExtractorRecordProcessor extends AbstractRecordProcessor {
private static Logger LOGGER = LoggerFactory.getLogger(CCFileExtractorRecordProcessor.class);
private static Logger TRUNCATED_URLS_LOGGER = LoggerFactory.getLogger("truncated-urls");
private static Logger TRUNCATED_URLS_FULL_LOGGER =
LoggerFactory.getLogger("truncated-urls-full");
private final ExtractorConfig fetcherConfig;
private final CCIndexReaderCounter counter;
private final FileFromCCWarcExtractor fileFromCCWarcFetcher;
private long reportEvery = 100000;
public CCFileExtractorRecordProcessor(ExtractorConfig fetcherConfig, CCIndexReaderCounter counter)
throws TikaConfigException, IOException {
this.fetcherConfig = fetcherConfig;
this.counter = counter;
this.fileFromCCWarcFetcher = new FileFromCCWarcExtractor(fetcherConfig, counter);
//completely arbitrary
if (fetcherConfig.getNumThreads() > 10) {
reportEvery = 1000000;
}
}
@Override
public boolean process(String json) throws IOException, InterruptedException {
//System.out.println("JSON: " + json);
long totalRead = counter.getRecordsRead().incrementAndGet();
if (totalRead % reportEvery == 0) {
LOGGER.info("processed: {}", counter);
}
if (fetcherConfig.getMaxRecords() > -1 && totalRead >= fetcherConfig.getMaxRecords()) {
LOGGER.info("hit max read");
return false;
}
//check for hit max
//return false;
Optional<CCIndexRecord> record = CCIndexRecord.parseRecord(json);
if (record.isEmpty()) {
//problem already logged
return true;
}
CCIndexRecord r = record.get();
if (!fetcherConfig.getRecordSelector().select(r)) {
return true;
}
//if truncated, count appropriately and test for limits
if (!StringUtils.isBlank(r.getTruncated())) {
long truncated = counter.getTruncated().incrementAndGet();
if (fetcherConfig.getMaxFilesTruncated() > -1 &&
truncated >= fetcherConfig.getMaxFilesTruncated()) {
LOGGER.info("hit max truncated files");
return false;
}
}
if (fetcherConfig.isExtractTruncated() || StringUtils.isBlank(r.getTruncated())) {
long | extracted = counter.getFilesExtracted().incrementAndGet(); |
if (fetcherConfig.getMaxFilesExtracted() > -1 &&
extracted >= fetcherConfig.getMaxFilesExtracted()) {
LOGGER.info("hit max extracted files");
return false;
}
if (fetcherConfig.isDryRun()) {
LOGGER.info("dry run, but would have extracted {}", r);
return true;
}
fetchBytes(r);
return true;
} else {
String url = r.getUrl();
TRUNCATED_URLS_LOGGER.info("", url);
//url,mime,mime_detected,warc_file,warc_offset,warc_length,truncated
TRUNCATED_URLS_FULL_LOGGER.info("", url,
r.getNormalizedMime(), r.getNormalizedMimeDetected(), r.getFilename(),
r.getOffset(), r.getLength(), r.getTruncated());
return true;
}
}
private void fetchBytes(CCIndexRecord r) throws InterruptedException {
fileFromCCWarcFetcher.fetchToPath(r);
}
@Override
public void close() throws IOException {
}
}
| src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " if (!fetcherConfig.getRecordSelector().select(r)) {\n return true;\n }\n increment(totalCounts, r.getNormalizedMimeDetected());\n if (!StringUtils.isBlank(r.getTruncated())) {\n long truncated = counter.getTruncated().incrementAndGet();\n if (fetcherConfig.getMaxFilesTruncated() > -1 &&\n truncated >= fetcherConfig.getMaxFilesTruncated()) {\n LOGGER.info(\"hit max truncated files\");\n return false;",
"score": 108.17699813971075
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " this.counter = counter;\n }\n @Override\n public boolean process(String json) throws IOException, InterruptedException {\n long totalRead = counter.getRecordsRead().incrementAndGet();\n if (totalRead % 1000000 == 0) {\n LOGGER.info(\"processed: {}\", counter);\n }\n if (fetcherConfig.getMaxRecords() > -1 && totalRead >= fetcherConfig.getMaxRecords()) {\n LOGGER.info(\"hit max read\");",
"score": 55.45732402348927
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractor.java",
"retrieved_chunk": " //It fetches non truncated files and logs truncated files\n int totalThreads = fetcherConfig.getNumThreads() + 1;\n ExecutorService executorService = Executors.newFixedThreadPool(totalThreads);\n ExecutorCompletionService<Long> executorCompletionService =\n new ExecutorCompletionService<>(executorService);\n IndexIterator indexIterator = fetcherConfig.getIndexIterator();\n indexIterator.initialize(Collections.EMPTY_MAP);\n executorCompletionService.submit(new CallablePipesIterator(indexIterator, indexPathsList));\n CCIndexReaderCounter counter = new CCIndexReaderCounter();\n int finishedWorkers = 0;",
"score": 33.25064391656324
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/ExtractorConfig.java",
"retrieved_chunk": " private int numThreads = 2;\n //maximum records to read\n private long maxRecords = -1;\n //maximum files extracted from cc\n private long maxFilesExtracted = -1;\n //maximum files written to 'truncated' logger\n private long maxFilesTruncated = -1;\n private Path indexPathsFile;\n private String targetPathPattern = \"\";\n private boolean dryRun = false;",
"score": 30.70480356267062
},
{
"filename": "src/test/java/org/tallison/cc/index/selector/IndexRecordSelectorTest.java",
"retrieved_chunk": " if (!indexRecord.getMime().equals(indexRecord.getMimeDetected())) {\n System.out.println(line);\n }\n if (!StringUtils.isBlank(indexRecord.getTruncated())) {\n }\n }\n line = r.readLine();\n }\n }\n }",
"score": 25.699508592317503
}
] | java | extracted = counter.getFilesExtracted().incrementAndGet(); |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.io.IOException;
import java.util.Optional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.AbstractRecordProcessor;
import org.tallison.cc.index.CCIndexReaderCounter;
import org.tallison.cc.index.CCIndexRecord;
import org.apache.tika.exception.TikaConfigException;
import org.apache.tika.utils.StringUtils;
public class CCFileExtractorRecordProcessor extends AbstractRecordProcessor {
private static Logger LOGGER = LoggerFactory.getLogger(CCFileExtractorRecordProcessor.class);
private static Logger TRUNCATED_URLS_LOGGER = LoggerFactory.getLogger("truncated-urls");
private static Logger TRUNCATED_URLS_FULL_LOGGER =
LoggerFactory.getLogger("truncated-urls-full");
private final ExtractorConfig fetcherConfig;
private final CCIndexReaderCounter counter;
private final FileFromCCWarcExtractor fileFromCCWarcFetcher;
private long reportEvery = 100000;
public CCFileExtractorRecordProcessor(ExtractorConfig fetcherConfig, CCIndexReaderCounter counter)
throws TikaConfigException, IOException {
this.fetcherConfig = fetcherConfig;
this.counter = counter;
this.fileFromCCWarcFetcher = new FileFromCCWarcExtractor(fetcherConfig, counter);
//completely arbitrary
if (fetcherConfig.getNumThreads() > 10) {
reportEvery = 1000000;
}
}
@Override
public boolean process(String json) throws IOException, InterruptedException {
//System.out.println("JSON: " + json);
long totalRead = counter.getRecordsRead().incrementAndGet();
if (totalRead % reportEvery == 0) {
LOGGER.info("processed: {}", counter);
}
if (fetcherConfig.getMaxRecords() > -1 && totalRead >= fetcherConfig.getMaxRecords()) {
LOGGER.info("hit max read");
return false;
}
//check for hit max
//return false;
Optional<CCIndexRecord> record = CCIndexRecord.parseRecord(json);
if (record.isEmpty()) {
//problem already logged
return true;
}
CCIndexRecord r = record.get();
if (!fetcherConfig.getRecordSelector().select(r)) {
return true;
}
//if truncated, count appropriately and test for limits
if (!StringUtils.isBlank(r.getTruncated())) {
long truncated = counter.getTruncated().incrementAndGet();
if (fetcherConfig.getMaxFilesTruncated() > -1 &&
truncated >= fetcherConfig.getMaxFilesTruncated()) {
LOGGER.info("hit max truncated files");
return false;
}
}
if (fetcherConfig.isExtractTruncated() || StringUtils.isBlank(r.getTruncated())) {
long extracted = counter.getFilesExtracted().incrementAndGet();
if (fetcherConfig.getMaxFilesExtracted() > -1 &&
extracted >= fetcherConfig.getMaxFilesExtracted()) {
LOGGER.info("hit max extracted files");
return false;
}
if (fetcherConfig.isDryRun()) {
LOGGER.info("dry run, but would have extracted {}", r);
return true;
}
fetchBytes(r);
return true;
} else {
String | url = r.getUrl(); |
TRUNCATED_URLS_LOGGER.info("", url);
//url,mime,mime_detected,warc_file,warc_offset,warc_length,truncated
TRUNCATED_URLS_FULL_LOGGER.info("", url,
r.getNormalizedMime(), r.getNormalizedMimeDetected(), r.getFilename(),
r.getOffset(), r.getLength(), r.getTruncated());
return true;
}
}
private void fetchBytes(CCIndexRecord r) throws InterruptedException {
fileFromCCWarcFetcher.fetchToPath(r);
}
@Override
public void close() throws IOException {
}
}
| src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " if (!fetcherConfig.getRecordSelector().select(r)) {\n return true;\n }\n increment(totalCounts, r.getNormalizedMimeDetected());\n if (!StringUtils.isBlank(r.getTruncated())) {\n long truncated = counter.getTruncated().incrementAndGet();\n if (fetcherConfig.getMaxFilesTruncated() > -1 &&\n truncated >= fetcherConfig.getMaxFilesTruncated()) {\n LOGGER.info(\"hit max truncated files\");\n return false;",
"score": 34.635232272525926
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " }\n increment(truncatedCounts, r.getNormalizedMimeDetected());\n return true;\n }\n return true;\n }\n private void increment(Map<String, MutableLong> m, String k) {\n MutableLong cnt = m.get(k);\n if (cnt == null) {\n cnt = new MutableLong(1);",
"score": 23.809197408526327
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " return false;\n }\n //check for hit max\n //return false;\n Optional<CCIndexRecord> record = CCIndexRecord.parseRecord(json);\n if (record.isEmpty()) {\n //problem already logged\n return true;\n }\n CCIndexRecord r = record.get();",
"score": 21.80135578720396
},
{
"filename": "src/main/java/org/tallison/cc/index/AbstractRecordProcessor.java",
"retrieved_chunk": " return \"\";\n }\n return key.trim().replaceAll(\"[\\r\\n\\t]\", \" \");\n }\n protected int getThreadNumber() {\n return threadNumber;\n }\n String getExtension(String u) {\n if (u == null || u.length() == 0) {\n return null;",
"score": 17.122745747612882
},
{
"filename": "src/main/java/org/tallison/cc/index/CCIndexRecord.java",
"retrieved_chunk": " /**\n * @param url\n * @return \"\" if no tld could be extracted\n */\n public static String getTLD(String url) {\n if (url == null) {\n return \"\";\n }\n Matcher intMatcher = INT_PATTERN.matcher(\"\");\n try {",
"score": 15.891406423926117
}
] | java | url = r.getUrl(); |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.io.IOException;
import java.util.Optional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.AbstractRecordProcessor;
import org.tallison.cc.index.CCIndexReaderCounter;
import org.tallison.cc.index.CCIndexRecord;
import org.apache.tika.exception.TikaConfigException;
import org.apache.tika.utils.StringUtils;
public class CCFileExtractorRecordProcessor extends AbstractRecordProcessor {
private static Logger LOGGER = LoggerFactory.getLogger(CCFileExtractorRecordProcessor.class);
private static Logger TRUNCATED_URLS_LOGGER = LoggerFactory.getLogger("truncated-urls");
private static Logger TRUNCATED_URLS_FULL_LOGGER =
LoggerFactory.getLogger("truncated-urls-full");
private final ExtractorConfig fetcherConfig;
private final CCIndexReaderCounter counter;
private final FileFromCCWarcExtractor fileFromCCWarcFetcher;
private long reportEvery = 100000;
public CCFileExtractorRecordProcessor(ExtractorConfig fetcherConfig, CCIndexReaderCounter counter)
throws TikaConfigException, IOException {
this.fetcherConfig = fetcherConfig;
this.counter = counter;
this.fileFromCCWarcFetcher = new FileFromCCWarcExtractor(fetcherConfig, counter);
//completely arbitrary
| if (fetcherConfig.getNumThreads() > 10) { |
reportEvery = 1000000;
}
}
@Override
public boolean process(String json) throws IOException, InterruptedException {
//System.out.println("JSON: " + json);
long totalRead = counter.getRecordsRead().incrementAndGet();
if (totalRead % reportEvery == 0) {
LOGGER.info("processed: {}", counter);
}
if (fetcherConfig.getMaxRecords() > -1 && totalRead >= fetcherConfig.getMaxRecords()) {
LOGGER.info("hit max read");
return false;
}
//check for hit max
//return false;
Optional<CCIndexRecord> record = CCIndexRecord.parseRecord(json);
if (record.isEmpty()) {
//problem already logged
return true;
}
CCIndexRecord r = record.get();
if (!fetcherConfig.getRecordSelector().select(r)) {
return true;
}
//if truncated, count appropriately and test for limits
if (!StringUtils.isBlank(r.getTruncated())) {
long truncated = counter.getTruncated().incrementAndGet();
if (fetcherConfig.getMaxFilesTruncated() > -1 &&
truncated >= fetcherConfig.getMaxFilesTruncated()) {
LOGGER.info("hit max truncated files");
return false;
}
}
if (fetcherConfig.isExtractTruncated() || StringUtils.isBlank(r.getTruncated())) {
long extracted = counter.getFilesExtracted().incrementAndGet();
if (fetcherConfig.getMaxFilesExtracted() > -1 &&
extracted >= fetcherConfig.getMaxFilesExtracted()) {
LOGGER.info("hit max extracted files");
return false;
}
if (fetcherConfig.isDryRun()) {
LOGGER.info("dry run, but would have extracted {}", r);
return true;
}
fetchBytes(r);
return true;
} else {
String url = r.getUrl();
TRUNCATED_URLS_LOGGER.info("", url);
//url,mime,mime_detected,warc_file,warc_offset,warc_length,truncated
TRUNCATED_URLS_FULL_LOGGER.info("", url,
r.getNormalizedMime(), r.getNormalizedMimeDetected(), r.getFilename(),
r.getOffset(), r.getLength(), r.getTruncated());
return true;
}
}
private void fetchBytes(CCIndexRecord r) throws InterruptedException {
fileFromCCWarcFetcher.fetchToPath(r);
}
@Override
public void close() throws IOException {
}
}
| src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " return true;\n }\n }\n private static class DetectedMimeCounter extends AbstractRecordProcessor {\n private final ExtractorConfig fetcherConfig;\n private final CCIndexReaderCounter counter;\n private final Map<String, MutableLong> totalCounts = new HashMap<>();\n private final Map<String, MutableLong> truncatedCounts = new HashMap<>();\n public DetectedMimeCounter(ExtractorConfig fetcherConfig, CCIndexReaderCounter counter) {\n this.fetcherConfig = fetcherConfig;",
"score": 80.06612120718414
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " this.counter = counter;\n }\n @Override\n public boolean process(String json) throws IOException, InterruptedException {\n long totalRead = counter.getRecordsRead().incrementAndGet();\n if (totalRead % 1000000 == 0) {\n LOGGER.info(\"processed: {}\", counter);\n }\n if (fetcherConfig.getMaxRecords() > -1 && totalRead >= fetcherConfig.getMaxRecords()) {\n LOGGER.info(\"hit max read\");",
"score": 61.31381865571157
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " CCIndexReaderCounter counter = new CCIndexReaderCounter();\n int finishedWorkers = 0;\n List<DetectedMimeCounter> detectedMimeCounters = new ArrayList<>();\n try {\n for (int i = 0; i < fetcherConfig.getNumThreads(); i++) {\n DetectedMimeCounter processor = new DetectedMimeCounter(fetcherConfig, counter);\n detectedMimeCounters.add(processor);\n executorCompletionService.submit(\n new IndexWorker(fetcherConfig, indexPathsList, processor));\n }",
"score": 59.73173419120169
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractor.java",
"retrieved_chunk": " try {\n for (int i = 0; i < fetcherConfig.getNumThreads(); i++) {\n CCFileExtractorRecordProcessor processor =\n new CCFileExtractorRecordProcessor(fetcherConfig, counter);\n executorCompletionService.submit(\n new IndexWorker(fetcherConfig, indexPathsList, processor));\n }\n while (finishedWorkers < fetcherConfig.getNumThreads()) {\n //blocking\n Future<Long> future = executorCompletionService.take();",
"score": 55.63977566917559
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/FileFromCCWarcExtractor.java",
"retrieved_chunk": " LoggerFactory.getLogger(FileFromCCWarcExtractor.class);\n private static Logger EXTRACTED_LOGGER = LoggerFactory.getLogger(\"extracted-urls\");\n private static Logger EXTRACTED_ALL_LOGGER = LoggerFactory.getLogger(\"extracted-urls-all\");\n private final StreamEmitter emitter;\n private final TargetPathRewriter targetPathRewriter;\n private RangeFetcher fetcher;\n private final boolean extractTruncated;\n private Base32 base32 = new Base32();\n private final CCIndexReaderCounter ccIndexReaderCounter;\n public FileFromCCWarcExtractor(ExtractorConfig fetcherConfig,",
"score": 48.64245964734809
}
] | java | if (fetcherConfig.getNumThreads() > 10) { |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.io.IOException;
import java.util.Optional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.AbstractRecordProcessor;
import org.tallison.cc.index.CCIndexReaderCounter;
import org.tallison.cc.index.CCIndexRecord;
import org.apache.tika.exception.TikaConfigException;
import org.apache.tika.utils.StringUtils;
public class CCFileExtractorRecordProcessor extends AbstractRecordProcessor {
private static Logger LOGGER = LoggerFactory.getLogger(CCFileExtractorRecordProcessor.class);
private static Logger TRUNCATED_URLS_LOGGER = LoggerFactory.getLogger("truncated-urls");
private static Logger TRUNCATED_URLS_FULL_LOGGER =
LoggerFactory.getLogger("truncated-urls-full");
private final ExtractorConfig fetcherConfig;
private final CCIndexReaderCounter counter;
private final FileFromCCWarcExtractor fileFromCCWarcFetcher;
private long reportEvery = 100000;
public CCFileExtractorRecordProcessor(ExtractorConfig fetcherConfig, CCIndexReaderCounter counter)
throws TikaConfigException, IOException {
this.fetcherConfig = fetcherConfig;
this.counter = counter;
this.fileFromCCWarcFetcher = new FileFromCCWarcExtractor(fetcherConfig, counter);
//completely arbitrary
if (fetcherConfig.getNumThreads() > 10) {
reportEvery = 1000000;
}
}
@Override
public boolean process(String json) throws IOException, InterruptedException {
//System.out.println("JSON: " + json);
long totalRead = counter.getRecordsRead().incrementAndGet();
if (totalRead % reportEvery == 0) {
LOGGER.info("processed: {}", counter);
}
if (fetcherConfig.getMaxRecords() > -1 && totalRead >= fetcherConfig.getMaxRecords()) {
LOGGER.info("hit max read");
return false;
}
//check for hit max
//return false;
Optional<CCIndexRecord> record = CCIndexRecord.parseRecord(json);
if (record.isEmpty()) {
//problem already logged
return true;
}
CCIndexRecord r = record.get();
if (!fetcherConfig.getRecordSelector().select(r)) {
return true;
}
//if truncated, count appropriately and test for limits
if (!StringUtils.isBlank(r.getTruncated())) {
long truncated = counter.getTruncated().incrementAndGet();
if (fetcherConfig.getMaxFilesTruncated() > -1 &&
truncated >= fetcherConfig.getMaxFilesTruncated()) {
LOGGER.info("hit max truncated files");
return false;
}
}
if (fetcherConfig.isExtractTruncated() || StringUtils.isBlank(r.getTruncated())) {
long extracted = counter.getFilesExtracted().incrementAndGet();
if (fetcherConfig.getMaxFilesExtracted() > -1 &&
extracted >= fetcherConfig.getMaxFilesExtracted()) {
LOGGER.info("hit max extracted files");
return false;
}
if (fetcherConfig.isDryRun()) {
LOGGER.info("dry run, but would have extracted {}", r);
return true;
}
fetchBytes(r);
return true;
} else {
String url = r.getUrl();
TRUNCATED_URLS_LOGGER.info("", url);
//url,mime,mime_detected,warc_file,warc_offset,warc_length,truncated
TRUNCATED_URLS_FULL_LOGGER.info("", url,
r.getNormalizedMime(), r. | getNormalizedMimeDetected(), r.getFilename(),
r.getOffset(), r.getLength(), r.getTruncated()); |
return true;
}
}
private void fetchBytes(CCIndexRecord r) throws InterruptedException {
fileFromCCWarcFetcher.fetchToPath(r);
}
@Override
public void close() throws IOException {
}
}
| src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/extractor/FileFromCCWarcExtractor.java",
"retrieved_chunk": " //new ObjectArray ?\n //url,mime_detected,warc_file,warc_offset,warc_length,sha256,length,path\n EXTRACTED_LOGGER.info(\"\", ccIndexRecord.getUrl(),\n ccIndexRecord.getNormalizedMime(),\n ccIndexRecord.getNormalizedMimeDetected(),\n ccIndexRecord.getFilename(),\n ccIndexRecord.getOffset(), ccIndexRecord.getLength(),\n targetDigest, length,\n targetPath);\n }",
"score": 71.30582636323038
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " if (!fetcherConfig.getRecordSelector().select(r)) {\n return true;\n }\n increment(totalCounts, r.getNormalizedMimeDetected());\n if (!StringUtils.isBlank(r.getTruncated())) {\n long truncated = counter.getTruncated().incrementAndGet();\n if (fetcherConfig.getMaxFilesTruncated() > -1 &&\n truncated >= fetcherConfig.getMaxFilesTruncated()) {\n LOGGER.info(\"hit max truncated files\");\n return false;",
"score": 70.0701138041251
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/FileFromCCWarcExtractor.java",
"retrieved_chunk": " String targetPath) {\n if (extractTruncated) {\n EXTRACTED_ALL_LOGGER.info(\"\", ccIndexRecord.getUrl(),\n ccIndexRecord.getNormalizedMime(),\n ccIndexRecord.getNormalizedMimeDetected(),\n ccIndexRecord.getFilename(),\n ccIndexRecord.getOffset(), ccIndexRecord.getLength(),\n ccIndexRecord.getTruncated(), targetDigest, length,\n targetPath);\n } else {",
"score": 42.362218424042084
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " }\n increment(truncatedCounts, r.getNormalizedMimeDetected());\n return true;\n }\n return true;\n }\n private void increment(Map<String, MutableLong> m, String k) {\n MutableLong cnt = m.get(k);\n if (cnt == null) {\n cnt = new MutableLong(1);",
"score": 41.814978511889244
},
{
"filename": "src/test/java/org/tallison/cc/index/selector/IndexRecordSelectorTest.java",
"retrieved_chunk": " if (!indexRecord.getMime().equals(indexRecord.getMimeDetected())) {\n System.out.println(line);\n }\n if (!StringUtils.isBlank(indexRecord.getTruncated())) {\n }\n }\n line = r.readLine();\n }\n }\n }",
"score": 41.26033444652995
}
] | java | getNormalizedMimeDetected(), r.getFilename(),
r.getOffset(), r.getLength(), r.getTruncated()); |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.io.IOException;
import java.util.Optional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.AbstractRecordProcessor;
import org.tallison.cc.index.CCIndexReaderCounter;
import org.tallison.cc.index.CCIndexRecord;
import org.apache.tika.exception.TikaConfigException;
import org.apache.tika.utils.StringUtils;
public class CCFileExtractorRecordProcessor extends AbstractRecordProcessor {
private static Logger LOGGER = LoggerFactory.getLogger(CCFileExtractorRecordProcessor.class);
private static Logger TRUNCATED_URLS_LOGGER = LoggerFactory.getLogger("truncated-urls");
private static Logger TRUNCATED_URLS_FULL_LOGGER =
LoggerFactory.getLogger("truncated-urls-full");
private final ExtractorConfig fetcherConfig;
private final CCIndexReaderCounter counter;
private final FileFromCCWarcExtractor fileFromCCWarcFetcher;
private long reportEvery = 100000;
public CCFileExtractorRecordProcessor(ExtractorConfig fetcherConfig, CCIndexReaderCounter counter)
throws TikaConfigException, IOException {
this.fetcherConfig = fetcherConfig;
this.counter = counter;
this.fileFromCCWarcFetcher = new FileFromCCWarcExtractor(fetcherConfig, counter);
//completely arbitrary
if (fetcherConfig.getNumThreads() > 10) {
reportEvery = 1000000;
}
}
@Override
public boolean process(String json) throws IOException, InterruptedException {
//System.out.println("JSON: " + json);
long totalRead = counter.getRecordsRead().incrementAndGet();
if (totalRead % reportEvery == 0) {
LOGGER.info("processed: {}", counter);
}
if (fetcherConfig.getMaxRecords() > -1 && totalRead >= fetcherConfig.getMaxRecords()) {
LOGGER.info("hit max read");
return false;
}
//check for hit max
//return false;
Optional<CCIndexRecord> record = CCIndexRecord.parseRecord(json);
if (record.isEmpty()) {
//problem already logged
return true;
}
CCIndexRecord r = record.get();
if (!fetcherConfig.getRecordSelector().select(r)) {
return true;
}
//if truncated, count appropriately and test for limits
if (!StringUtils.isBlank(r.getTruncated())) {
long truncated = counter.getTruncated().incrementAndGet();
if | (fetcherConfig.getMaxFilesTruncated() > -1 &&
truncated >= fetcherConfig.getMaxFilesTruncated()) { |
LOGGER.info("hit max truncated files");
return false;
}
}
if (fetcherConfig.isExtractTruncated() || StringUtils.isBlank(r.getTruncated())) {
long extracted = counter.getFilesExtracted().incrementAndGet();
if (fetcherConfig.getMaxFilesExtracted() > -1 &&
extracted >= fetcherConfig.getMaxFilesExtracted()) {
LOGGER.info("hit max extracted files");
return false;
}
if (fetcherConfig.isDryRun()) {
LOGGER.info("dry run, but would have extracted {}", r);
return true;
}
fetchBytes(r);
return true;
} else {
String url = r.getUrl();
TRUNCATED_URLS_LOGGER.info("", url);
//url,mime,mime_detected,warc_file,warc_offset,warc_length,truncated
TRUNCATED_URLS_FULL_LOGGER.info("", url,
r.getNormalizedMime(), r.getNormalizedMimeDetected(), r.getFilename(),
r.getOffset(), r.getLength(), r.getTruncated());
return true;
}
}
private void fetchBytes(CCIndexRecord r) throws InterruptedException {
fileFromCCWarcFetcher.fetchToPath(r);
}
@Override
public void close() throws IOException {
}
}
| src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " if (!fetcherConfig.getRecordSelector().select(r)) {\n return true;\n }\n increment(totalCounts, r.getNormalizedMimeDetected());\n if (!StringUtils.isBlank(r.getTruncated())) {\n long truncated = counter.getTruncated().incrementAndGet();\n if (fetcherConfig.getMaxFilesTruncated() > -1 &&\n truncated >= fetcherConfig.getMaxFilesTruncated()) {\n LOGGER.info(\"hit max truncated files\");\n return false;",
"score": 104.0254146705685
},
{
"filename": "src/test/java/org/tallison/cc/index/selector/IndexRecordSelectorTest.java",
"retrieved_chunk": " if (!indexRecord.getMime().equals(indexRecord.getMimeDetected())) {\n System.out.println(line);\n }\n if (!StringUtils.isBlank(indexRecord.getTruncated())) {\n }\n }\n line = r.readLine();\n }\n }\n }",
"score": 37.915488940457955
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " return false;\n }\n //check for hit max\n //return false;\n Optional<CCIndexRecord> record = CCIndexRecord.parseRecord(json);\n if (record.isEmpty()) {\n //problem already logged\n return true;\n }\n CCIndexRecord r = record.get();",
"score": 33.06561307269749
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " this.counter = counter;\n }\n @Override\n public boolean process(String json) throws IOException, InterruptedException {\n long totalRead = counter.getRecordsRead().incrementAndGet();\n if (totalRead % 1000000 == 0) {\n LOGGER.info(\"processed: {}\", counter);\n }\n if (fetcherConfig.getMaxRecords() > -1 && totalRead >= fetcherConfig.getMaxRecords()) {\n LOGGER.info(\"hit max read\");",
"score": 30.619248423104043
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractor.java",
"retrieved_chunk": " //It fetches non truncated files and logs truncated files\n int totalThreads = fetcherConfig.getNumThreads() + 1;\n ExecutorService executorService = Executors.newFixedThreadPool(totalThreads);\n ExecutorCompletionService<Long> executorCompletionService =\n new ExecutorCompletionService<>(executorService);\n IndexIterator indexIterator = fetcherConfig.getIndexIterator();\n indexIterator.initialize(Collections.EMPTY_MAP);\n executorCompletionService.submit(new CallablePipesIterator(indexIterator, indexPathsList));\n CCIndexReaderCounter counter = new CCIndexReaderCounter();\n int finishedWorkers = 0;",
"score": 27.422055804554603
}
] | java | (fetcherConfig.getMaxFilesTruncated() > -1 &&
truncated >= fetcherConfig.getMaxFilesTruncated()) { |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.io.IOException;
import java.util.Optional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.AbstractRecordProcessor;
import org.tallison.cc.index.CCIndexReaderCounter;
import org.tallison.cc.index.CCIndexRecord;
import org.apache.tika.exception.TikaConfigException;
import org.apache.tika.utils.StringUtils;
public class CCFileExtractorRecordProcessor extends AbstractRecordProcessor {
private static Logger LOGGER = LoggerFactory.getLogger(CCFileExtractorRecordProcessor.class);
private static Logger TRUNCATED_URLS_LOGGER = LoggerFactory.getLogger("truncated-urls");
private static Logger TRUNCATED_URLS_FULL_LOGGER =
LoggerFactory.getLogger("truncated-urls-full");
private final ExtractorConfig fetcherConfig;
private final CCIndexReaderCounter counter;
private final FileFromCCWarcExtractor fileFromCCWarcFetcher;
private long reportEvery = 100000;
public CCFileExtractorRecordProcessor(ExtractorConfig fetcherConfig, CCIndexReaderCounter counter)
throws TikaConfigException, IOException {
this.fetcherConfig = fetcherConfig;
this.counter = counter;
this.fileFromCCWarcFetcher = new FileFromCCWarcExtractor(fetcherConfig, counter);
//completely arbitrary
if (fetcherConfig.getNumThreads() > 10) {
reportEvery = 1000000;
}
}
@Override
public boolean process(String json) throws IOException, InterruptedException {
//System.out.println("JSON: " + json);
long totalRead = counter.getRecordsRead().incrementAndGet();
if (totalRead % reportEvery == 0) {
LOGGER.info("processed: {}", counter);
}
if (fetcherConfig.getMaxRecords() > -1 && totalRead >= fetcherConfig.getMaxRecords()) {
LOGGER.info("hit max read");
return false;
}
//check for hit max
//return false;
Optional<CCIndexRecord> record = CCIndexRecord.parseRecord(json);
if (record.isEmpty()) {
//problem already logged
return true;
}
CCIndexRecord r = record.get();
if (!fetcherConfig.getRecordSelector().select(r)) {
return true;
}
//if truncated, count appropriately and test for limits
if (!StringUtils.isBlank(r.getTruncated())) {
long truncated = counter.getTruncated().incrementAndGet();
if (fetcherConfig.getMaxFilesTruncated() > -1 &&
truncated >= fetcherConfig.getMaxFilesTruncated()) {
LOGGER.info("hit max truncated files");
return false;
}
}
| if (fetcherConfig.isExtractTruncated() || StringUtils.isBlank(r.getTruncated())) { |
long extracted = counter.getFilesExtracted().incrementAndGet();
if (fetcherConfig.getMaxFilesExtracted() > -1 &&
extracted >= fetcherConfig.getMaxFilesExtracted()) {
LOGGER.info("hit max extracted files");
return false;
}
if (fetcherConfig.isDryRun()) {
LOGGER.info("dry run, but would have extracted {}", r);
return true;
}
fetchBytes(r);
return true;
} else {
String url = r.getUrl();
TRUNCATED_URLS_LOGGER.info("", url);
//url,mime,mime_detected,warc_file,warc_offset,warc_length,truncated
TRUNCATED_URLS_FULL_LOGGER.info("", url,
r.getNormalizedMime(), r.getNormalizedMimeDetected(), r.getFilename(),
r.getOffset(), r.getLength(), r.getTruncated());
return true;
}
}
private void fetchBytes(CCIndexRecord r) throws InterruptedException {
fileFromCCWarcFetcher.fetchToPath(r);
}
@Override
public void close() throws IOException {
}
}
| src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " if (!fetcherConfig.getRecordSelector().select(r)) {\n return true;\n }\n increment(totalCounts, r.getNormalizedMimeDetected());\n if (!StringUtils.isBlank(r.getTruncated())) {\n long truncated = counter.getTruncated().incrementAndGet();\n if (fetcherConfig.getMaxFilesTruncated() > -1 &&\n truncated >= fetcherConfig.getMaxFilesTruncated()) {\n LOGGER.info(\"hit max truncated files\");\n return false;",
"score": 123.50929361178642
},
{
"filename": "src/test/java/org/tallison/cc/index/selector/IndexRecordSelectorTest.java",
"retrieved_chunk": " if (!indexRecord.getMime().equals(indexRecord.getMimeDetected())) {\n System.out.println(line);\n }\n if (!StringUtils.isBlank(indexRecord.getTruncated())) {\n }\n }\n line = r.readLine();\n }\n }\n }",
"score": 46.637200325822285
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " this.counter = counter;\n }\n @Override\n public boolean process(String json) throws IOException, InterruptedException {\n long totalRead = counter.getRecordsRead().incrementAndGet();\n if (totalRead % 1000000 == 0) {\n LOGGER.info(\"processed: {}\", counter);\n }\n if (fetcherConfig.getMaxRecords() > -1 && totalRead >= fetcherConfig.getMaxRecords()) {\n LOGGER.info(\"hit max read\");",
"score": 45.17285325400844
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractor.java",
"retrieved_chunk": " //It fetches non truncated files and logs truncated files\n int totalThreads = fetcherConfig.getNumThreads() + 1;\n ExecutorService executorService = Executors.newFixedThreadPool(totalThreads);\n ExecutorCompletionService<Long> executorCompletionService =\n new ExecutorCompletionService<>(executorService);\n IndexIterator indexIterator = fetcherConfig.getIndexIterator();\n indexIterator.initialize(Collections.EMPTY_MAP);\n executorCompletionService.submit(new CallablePipesIterator(indexIterator, indexPathsList));\n CCIndexReaderCounter counter = new CCIndexReaderCounter();\n int finishedWorkers = 0;",
"score": 35.02224950440792
},
{
"filename": "src/main/java/org/tallison/cc/index/CCIndexRecord.java",
"retrieved_chunk": " }\n public String getCharset() {\n return charset;\n }\n public String getLanguages() {\n return languages;\n }\n public String getTruncated() {\n return truncated;\n }",
"score": 30.987669294937792
}
] | java | if (fetcherConfig.isExtractTruncated() || StringUtils.isBlank(r.getTruncated())) { |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.io.IOException;
import java.util.Optional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.AbstractRecordProcessor;
import org.tallison.cc.index.CCIndexReaderCounter;
import org.tallison.cc.index.CCIndexRecord;
import org.apache.tika.exception.TikaConfigException;
import org.apache.tika.utils.StringUtils;
public class CCFileExtractorRecordProcessor extends AbstractRecordProcessor {
private static Logger LOGGER = LoggerFactory.getLogger(CCFileExtractorRecordProcessor.class);
private static Logger TRUNCATED_URLS_LOGGER = LoggerFactory.getLogger("truncated-urls");
private static Logger TRUNCATED_URLS_FULL_LOGGER =
LoggerFactory.getLogger("truncated-urls-full");
private final ExtractorConfig fetcherConfig;
private final CCIndexReaderCounter counter;
private final FileFromCCWarcExtractor fileFromCCWarcFetcher;
private long reportEvery = 100000;
public CCFileExtractorRecordProcessor(ExtractorConfig fetcherConfig, CCIndexReaderCounter counter)
throws TikaConfigException, IOException {
this.fetcherConfig = fetcherConfig;
this.counter = counter;
this.fileFromCCWarcFetcher = new FileFromCCWarcExtractor(fetcherConfig, counter);
//completely arbitrary
if (fetcherConfig.getNumThreads() > 10) {
reportEvery = 1000000;
}
}
@Override
public boolean process(String json) throws IOException, InterruptedException {
//System.out.println("JSON: " + json);
long totalRead = counter.getRecordsRead().incrementAndGet();
if (totalRead % reportEvery == 0) {
LOGGER.info("processed: {}", counter);
}
if (fetcherConfig.getMaxRecords() > -1 && totalRead >= fetcherConfig.getMaxRecords()) {
LOGGER.info("hit max read");
return false;
}
//check for hit max
//return false;
Optional<CCIndexRecord> record = CCIndexRecord.parseRecord(json);
if (record.isEmpty()) {
//problem already logged
return true;
}
CCIndexRecord r = record.get();
if (!fetcherConfig.getRecordSelector().select(r)) {
return true;
}
//if truncated, count appropriately and test for limits
if (!StringUtils.isBlank(r.getTruncated())) {
long truncated = counter.getTruncated().incrementAndGet();
if (fetcherConfig.getMaxFilesTruncated() > -1 &&
truncated >= fetcherConfig.getMaxFilesTruncated()) {
LOGGER.info("hit max truncated files");
return false;
}
}
if (fetcherConfig.isExtractTruncated() || StringUtils.isBlank(r.getTruncated())) {
long extracted = counter.getFilesExtracted().incrementAndGet();
if (fetcherConfig.getMaxFilesExtracted() > -1 &&
extracted >= fetcherConfig.getMaxFilesExtracted()) {
LOGGER.info("hit max extracted files");
return false;
}
| if (fetcherConfig.isDryRun()) { |
LOGGER.info("dry run, but would have extracted {}", r);
return true;
}
fetchBytes(r);
return true;
} else {
String url = r.getUrl();
TRUNCATED_URLS_LOGGER.info("", url);
//url,mime,mime_detected,warc_file,warc_offset,warc_length,truncated
TRUNCATED_URLS_FULL_LOGGER.info("", url,
r.getNormalizedMime(), r.getNormalizedMimeDetected(), r.getFilename(),
r.getOffset(), r.getLength(), r.getTruncated());
return true;
}
}
private void fetchBytes(CCIndexRecord r) throws InterruptedException {
fileFromCCWarcFetcher.fetchToPath(r);
}
@Override
public void close() throws IOException {
}
}
| src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " if (!fetcherConfig.getRecordSelector().select(r)) {\n return true;\n }\n increment(totalCounts, r.getNormalizedMimeDetected());\n if (!StringUtils.isBlank(r.getTruncated())) {\n long truncated = counter.getTruncated().incrementAndGet();\n if (fetcherConfig.getMaxFilesTruncated() > -1 &&\n truncated >= fetcherConfig.getMaxFilesTruncated()) {\n LOGGER.info(\"hit max truncated files\");\n return false;",
"score": 71.0117492792795
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " this.counter = counter;\n }\n @Override\n public boolean process(String json) throws IOException, InterruptedException {\n long totalRead = counter.getRecordsRead().incrementAndGet();\n if (totalRead % 1000000 == 0) {\n LOGGER.info(\"processed: {}\", counter);\n }\n if (fetcherConfig.getMaxRecords() > -1 && totalRead >= fetcherConfig.getMaxRecords()) {\n LOGGER.info(\"hit max read\");",
"score": 47.225950626927705
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/ExtractorConfig.java",
"retrieved_chunk": " private int numThreads = 2;\n //maximum records to read\n private long maxRecords = -1;\n //maximum files extracted from cc\n private long maxFilesExtracted = -1;\n //maximum files written to 'truncated' logger\n private long maxFilesTruncated = -1;\n private Path indexPathsFile;\n private String targetPathPattern = \"\";\n private boolean dryRun = false;",
"score": 27.387845294288542
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/FileFromCCWarcExtractor.java",
"retrieved_chunk": " LoggerFactory.getLogger(FileFromCCWarcExtractor.class);\n private static Logger EXTRACTED_LOGGER = LoggerFactory.getLogger(\"extracted-urls\");\n private static Logger EXTRACTED_ALL_LOGGER = LoggerFactory.getLogger(\"extracted-urls-all\");\n private final StreamEmitter emitter;\n private final TargetPathRewriter targetPathRewriter;\n private RangeFetcher fetcher;\n private final boolean extractTruncated;\n private Base32 base32 = new Base32();\n private final CCIndexReaderCounter ccIndexReaderCounter;\n public FileFromCCWarcExtractor(ExtractorConfig fetcherConfig,",
"score": 25.45493438659075
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " return false;\n }\n //check for hit max\n //return false;\n Optional<CCIndexRecord> record = CCIndexRecord.parseRecord(json);\n if (record.isEmpty()) {\n //problem already logged\n return true;\n }\n CCIndexRecord r = record.get();",
"score": 23.228458111948616
}
] | java | if (fetcherConfig.isDryRun()) { |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.Locale;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.zip.GZIPInputStream;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.AbstractRecordProcessor;
import org.tallison.cc.index.CCIndexReaderCounter;
import org.tallison.cc.index.IndexIterator;
import org.apache.tika.exception.TikaConfigException;
import org.apache.tika.exception.TikaException;
import org.apache.tika.io.TikaInputStream;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.pipes.FetchEmitTuple;
import org.apache.tika.pipes.fetcher.Fetcher;
import org.apache.tika.pipes.pipesiterator.CallablePipesIterator;
import org.apache.tika.pipes.pipesiterator.PipesIterator;
import org.apache.tika.utils.StringUtils;
/**
* This is a lighter class that doesn't rely on a database
* to extract files from CC and log a list of truncated urls.
*/
public class CCFileExtractor {
private static final Long INDEX_WORKER_ID = 1l;
private static final Long INDEX_READER_ID = 2l;
private static final Logger LOGGER = LoggerFactory.getLogger(CCFileExtractor.class);
public static void main(String[] args) throws Exception {
ExtractorConfig fetcherConfig =
new ObjectMapper().readValue(new File(args[0]), ExtractorConfig.class);
execute(fetcherConfig);
}
private static void execute(ExtractorConfig fetcherConfig) throws TikaException {
ArrayBlockingQueue<FetchEmitTuple> indexPathsList = new ArrayBlockingQueue<>(1000);
//IndexPathsReader reads a file containing a list of cc-index.paths files
//and writes the literal gz files (cc-index/collections/CC-MAIN-2023-06/indexes/cdx-00000.gz)
//to indexPathsList
//IndexWorker reads a single index.gz file at a time and processes each record
//It fetches non truncated files and logs truncated files
int totalThreads = fetcherConfig.getNumThreads() + 1;
ExecutorService executorService = Executors.newFixedThreadPool(totalThreads);
ExecutorCompletionService<Long> executorCompletionService =
new ExecutorCompletionService<>(executorService);
IndexIterator indexIterator = fetcherConfig.getIndexIterator();
indexIterator.initialize(Collections.EMPTY_MAP);
executorCompletionService.submit(new CallablePipesIterator(indexIterator, indexPathsList));
CCIndexReaderCounter counter = new CCIndexReaderCounter();
int finishedWorkers = 0;
try {
for (int i = 0; i < fetcherConfig.getNumThreads(); i++) {
CCFileExtractorRecordProcessor processor =
new CCFileExtractorRecordProcessor(fetcherConfig, counter);
executorCompletionService.submit(
new IndexWorker(fetcherConfig, indexPathsList, processor));
}
while (finishedWorkers < fetcherConfig.getNumThreads()) {
//blocking
Future<Long> future = executorCompletionService.take();
if (future != null) {
Long f = future.get();
LOGGER.debug("completed {}", f);
if (f.equals(INDEX_WORKER_ID)) {
finishedWorkers++;
} else if (f.equals(INDEX_READER_ID)) {
LOGGER.info("Index paths reader successfully completed");
}
}
}
} catch (TikaConfigException | IOException e) {
LOGGER.error("main loop exception", e);
throw new RuntimeException(e);
} catch (ExecutionException e) {
LOGGER.error("main loop exception", e);
throw new RuntimeException(e);
} catch (InterruptedException e) {
LOGGER.warn("main loop interrupted exception", e);
throw new RuntimeException(e);
} finally {
executorService.shutdown();
executorService.shutdownNow();
}
}
private static class IndexWorker implements Callable<Long> {
private final ArrayBlockingQueue<FetchEmitTuple> indexUrls;
private final AbstractRecordProcessor recordProcessor;
private final Fetcher indexFetcher;
IndexWorker(ExtractorConfig fetcherConfig, ArrayBlockingQueue<FetchEmitTuple> indexUrls,
AbstractRecordProcessor recordProcessor) throws TikaException {
this.indexUrls = indexUrls;
this.recordProcessor = recordProcessor;
this | .indexFetcher = fetcherConfig.newIndexFetcher(); |
}
@Override
public Long call() throws Exception {
boolean shouldContinue = true;
while (shouldContinue) {
FetchEmitTuple indexUrl = indexUrls.poll(120, TimeUnit.MINUTES);
if (indexUrl == null) {
throw new TimeoutException("waited 120 minutes for a new record");
}
if (indexUrl == PipesIterator.COMPLETED_SEMAPHORE) {
recordProcessor.close();
//can hang forever
indexUrls.put(PipesIterator.COMPLETED_SEMAPHORE);
return INDEX_WORKER_ID;
}
LOGGER.trace(indexUrl.toString());
shouldContinue = processFile(indexUrl, recordProcessor);
}
return INDEX_WORKER_ID;
}
private boolean processFile(FetchEmitTuple fetchEmitTuple,
AbstractRecordProcessor recordProcessor)
throws InterruptedException {
long start = System.currentTimeMillis();
LOGGER.info("starting to fetch index gz: {}",
fetchEmitTuple.getFetchKey().getFetchKey());
try (TikaInputStream tis = (TikaInputStream) indexFetcher.fetch(
fetchEmitTuple.getFetchKey().getFetchKey(), new Metadata())) {
try (InputStream is = new BufferedInputStream(new GZIPInputStream(tis))) {
try (BufferedReader reader = new BufferedReader(
new InputStreamReader(is, StandardCharsets.UTF_8))) {
String line = reader.readLine();
int lines = 0;
long elapsed = System.currentTimeMillis() - start;
LOGGER.info("Finished fetching {} bytes in {} ms for index gz: {}",
String.format(Locale.US, "%,d", tis.getLength()),
String.format(Locale.US, "%,d", elapsed),
fetchEmitTuple.getFetchKey().getFetchKey());
while (line != null) {
LOGGER.trace("about to add a line");
if (StringUtils.isBlank(line)) {
line = reader.readLine();
continue;
}
try {
boolean shouldContinue = recordProcessor.process(line);
if (!shouldContinue) {
return shouldContinue;
}
} catch (IOException e) {
LOGGER.warn("bad json: " + line);
}
lines++;
line = reader.readLine();
}
}
}
} catch (TikaException | IOException e) {
LOGGER.error(
"failed while processing " + fetchEmitTuple.getFetchKey().getFetchKey(), e);
}
long elapsed = System.currentTimeMillis() - start;
LOGGER.info("finished processing index gz in ({}) ms: {}",
String.format(Locale.US, "%,d", elapsed),
fetchEmitTuple.getFetchKey().getFetchKey());
return true;
}
}
}
| src/main/java/org/tallison/cc/index/extractor/CCFileExtractor.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " private final Fetcher fetcher;\n IndexWorker(ExtractorConfig fetcherConfig, ArrayBlockingQueue<FetchEmitTuple> indexUrls,\n AbstractRecordProcessor recordProcessor) throws TikaException {\n this.indexUrls = indexUrls;\n this.recordProcessor = recordProcessor;\n this.fetcher = fetcherConfig.newFetcher();\n }\n @Override\n public Long call() throws Exception {\n boolean shouldContinue = true;",
"score": 123.92774528630147
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " if (cnt == null) {\n cnt = 0l;\n }\n cnt += e.getValue().getValue();\n to.put(e.getKey(), cnt);\n }\n }\n private static class IndexWorker implements Callable<Long> {\n private final ArrayBlockingQueue<FetchEmitTuple> indexUrls;\n private final AbstractRecordProcessor recordProcessor;",
"score": 101.68238851190553
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCIndexFetcher.java",
"retrieved_chunk": " private static class IndexFetcher implements Callable<Long> {\n private final ExtractorConfig fetcherConfig;\n private final ArrayBlockingQueue<FetchEmitTuple> indexPathsList;\n public IndexFetcher(ExtractorConfig fetcherConfig,\n ArrayBlockingQueue<FetchEmitTuple> indexPathsList) {\n this.fetcherConfig = fetcherConfig;\n this.indexPathsList = indexPathsList;\n }\n @Override\n public Long call() throws Exception {",
"score": 71.49207838350979
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " return true;\n }\n }\n private static class DetectedMimeCounter extends AbstractRecordProcessor {\n private final ExtractorConfig fetcherConfig;\n private final CCIndexReaderCounter counter;\n private final Map<String, MutableLong> totalCounts = new HashMap<>();\n private final Map<String, MutableLong> truncatedCounts = new HashMap<>();\n public DetectedMimeCounter(ExtractorConfig fetcherConfig, CCIndexReaderCounter counter) {\n this.fetcherConfig = fetcherConfig;",
"score": 48.11161832091789
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " }\n shouldContinue = processFile(indexUrl, recordProcessor);\n }\n return INDEX_WORKER_ID;\n }\n private boolean processFile(FetchEmitTuple fetchEmitTuple,\n AbstractRecordProcessor recordProcessor)\n throws InterruptedException {\n long start = System.currentTimeMillis();\n LOGGER.info(\"starting to fetch index gz: {}\",",
"score": 46.06627713868111
}
] | java | .indexFetcher = fetcherConfig.newIndexFetcher(); |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.Optional;
import java.util.zip.GZIPInputStream;
import org.apache.commons.codec.binary.Base32;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.IOUtils;
import org.netpreserve.jwarc.MediaType;
import org.netpreserve.jwarc.WarcPayload;
import org.netpreserve.jwarc.WarcReader;
import org.netpreserve.jwarc.WarcRecord;
import org.netpreserve.jwarc.WarcResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.CCIndexReaderCounter;
import org.tallison.cc.index.CCIndexRecord;
import org.tallison.cc.index.io.TargetPathRewriter;
import org.apache.tika.exception.TikaConfigException;
import org.apache.tika.exception.TikaException;
import org.apache.tika.io.TikaInputStream;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.pipes.FetchEmitTuple;
import org.apache.tika.pipes.emitter.EmitKey;
import org.apache.tika.pipes.emitter.StreamEmitter;
import org.apache.tika.pipes.fetcher.FetchKey;
import org.apache.tika.pipes.fetcher.RangeFetcher;
public class FileFromCCWarcExtractor {
private static Logger LOGGER =
LoggerFactory.getLogger(FileFromCCWarcExtractor.class);
private static Logger EXTRACTED_LOGGER = LoggerFactory.getLogger("extracted-urls");
private static Logger EXTRACTED_ALL_LOGGER = LoggerFactory.getLogger("extracted-urls-all");
private final StreamEmitter emitter;
private final TargetPathRewriter targetPathRewriter;
private RangeFetcher fetcher;
private final boolean extractTruncated;
private Base32 base32 = new Base32();
private final CCIndexReaderCounter ccIndexReaderCounter;
public FileFromCCWarcExtractor(ExtractorConfig fetcherConfig,
CCIndexReaderCounter ccIndexReaderCounter) throws TikaConfigException {
this.emitter = fetcherConfig.newEmitter();
this.fetcher = (RangeFetcher) fetcherConfig.newFetcher();
this.targetPathRewriter = fetcherConfig.getTargetPathRewriter();
this.extractTruncated = fetcherConfig.isExtractTruncated();
this.ccIndexReaderCounter = ccIndexReaderCounter;
}
public void fetchToPath(CCIndexRecord record) throws InterruptedException {
LOGGER.debug("going to fetch {} {}->{}", record.getFilename(), record.getOffset(),
record.getLength());
FetchEmitTuple t = new FetchEmitTuple(record.getFilename(),
new FetchKey("", record.getFilename(), record.getOffset(),
record.getOffset() + record.getLength() - 1), new EmitKey());
byte[] warcRecordGZBytes;
try {
warcRecordGZBytes = fetchWarcBytes(t);
} catch (TikaException | IOException e) {
LOGGER.warn("couldn't get bytes from cc's warc " + t, e);
return;
}
String id = record.getUrl();
try {
parseWarc(id, record, warcRecordGZBytes);
} catch (IOException e) {
LOGGER.warn("problem parsing warc file", e);
}
}
private void fetchPayload(String id, CCIndexRecord ccIndexRecord, WarcRecord record)
throws IOException {
if (!((record instanceof WarcResponse) &&
record.contentType().base().equals(MediaType.HTTP))) {
return;
}
Optional<WarcPayload> payload = ((WarcResponse) record).payload();
if (!payload.isPresent()) {
LOGGER.debug("no payload {}", id);
| ccIndexReaderCounter.getEmptyPayload().incrementAndGet(); |
return;
}
if (payload.get().body().size() == 0) {
LOGGER.debug("empty payload id={}", id);
ccIndexReaderCounter.getEmptyPayload().incrementAndGet();
return;
}
Path tmp = Files.createTempFile("ccfile-fetcher-", "");
try {
Files.copy(payload.get().body().stream(), tmp, StandardCopyOption.REPLACE_EXISTING);
String targetDigest = null;
String base32Sha1 = "";
try (InputStream is = Files.newInputStream(tmp)) {
base32Sha1 = base32.encodeAsString(DigestUtils.sha1(is));
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
if (!base32Sha1.equals(ccIndexRecord.getDigest())) {
LOGGER.warn("Bad digest for url={} ccindex={} sha1={}", id,
ccIndexRecord.getDigest(), base32Sha1);
}
//TODO: make digest and encoding configurable
try (InputStream is = Files.newInputStream(tmp)) {
targetDigest = DigestUtils.sha256Hex(is);
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
long length = -1;
try {
length = Files.size(tmp);
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
String targetPath = targetPathRewriter.rewrite(targetDigest);
Metadata metadata = new Metadata();
try (InputStream is = TikaInputStream.get(tmp, metadata)) {
emitter.emit(targetPath, is, new Metadata());
logSuccess(ccIndexRecord, targetDigest, length, targetPath);
} catch (IOException | TikaException e) {
LOGGER.warn("problem writing id={}", id, e);
}
} finally {
try {
Files.delete(tmp);
} catch (IOException e) {
LOGGER.warn("can't delete " + tmp.toAbsolutePath(), e);
}
}
}
private void logSuccess(CCIndexRecord ccIndexRecord, String targetDigest, long length,
String targetPath) {
if (extractTruncated) {
EXTRACTED_ALL_LOGGER.info("", ccIndexRecord.getUrl(),
ccIndexRecord.getNormalizedMime(),
ccIndexRecord.getNormalizedMimeDetected(),
ccIndexRecord.getFilename(),
ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
ccIndexRecord.getTruncated(), targetDigest, length,
targetPath);
} else {
//new ObjectArray ?
//url,mime_detected,warc_file,warc_offset,warc_length,sha256,length,path
EXTRACTED_LOGGER.info("", ccIndexRecord.getUrl(),
ccIndexRecord.getNormalizedMime(),
ccIndexRecord.getNormalizedMimeDetected(),
ccIndexRecord.getFilename(),
ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
targetDigest, length,
targetPath);
}
}
private void parseWarc(String id, CCIndexRecord ccIndexRecord, byte[] warcRecordGZBytes)
throws IOException {
//need to leave initial inputstream open while parsing warcrecord
//can't just parse record and return
try (InputStream is = new GZIPInputStream(new ByteArrayInputStream(warcRecordGZBytes))) {
try (WarcReader warcreader = new WarcReader(is)) {
//should be a single warc per file
//return the first
for (WarcRecord warcRecord : warcreader) {
fetchPayload(id, ccIndexRecord, warcRecord);
return;
}
}
}
}
private byte[] fetchWarcBytes(FetchEmitTuple t)
throws TikaException, InterruptedException, IOException {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
FetchKey k = t.getFetchKey();
try (InputStream is = fetcher.fetch(k.getFetchKey(), k.getRangeStart(), k.getRangeEnd(),
new Metadata())) {
IOUtils.copy(is, bos);
}
return bos.toByteArray();
}
}
| src/main/java/org/tallison/cc/index/extractor/FileFromCCWarcExtractor.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/test/java/org/tallison/cc/index/selector/IndexRecordSelectorTest.java",
"retrieved_chunk": " public void testIndexFile() throws Exception {\n Path p = Paths.get(\"/Users/allison/data/cc/index-work/cdx-00000.gz\");\n try (BufferedReader r = new BufferedReader(\n new InputStreamReader(new GZIPInputStream(Files.newInputStream(p)),\n StandardCharsets.UTF_8))) {\n String line = r.readLine();\n while (line != null) {\n Optional<CCIndexRecord> record = CCIndexRecord.parseRecord(line);\n if (record.isPresent()) {\n CCIndexRecord indexRecord = record.get();",
"score": 30.042683698651974
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " return false;\n }\n //check for hit max\n //return false;\n Optional<CCIndexRecord> record = CCIndexRecord.parseRecord(json);\n if (record.isEmpty()) {\n //problem already logged\n return true;\n }\n CCIndexRecord r = record.get();",
"score": 27.90966994309807
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java",
"retrieved_chunk": " LOGGER.info(\"processed: {}\", counter);\n }\n if (fetcherConfig.getMaxRecords() > -1 && totalRead >= fetcherConfig.getMaxRecords()) {\n LOGGER.info(\"hit max read\");\n return false;\n }\n //check for hit max\n //return false;\n Optional<CCIndexRecord> record = CCIndexRecord.parseRecord(json);\n if (record.isEmpty()) {",
"score": 26.63961946622377
},
{
"filename": "src/main/java/org/tallison/cc/index/selector/RecordSelector.java",
"retrieved_chunk": " return record.getHost();\n case \"digest\":\n return record.getDigest();\n default:\n throw new IllegalArgumentException(\"Don't yet support key \" + key);\n }\n }\n private static class AcceptAllRecords extends RecordSelector {\n @Override\n public boolean select(CCIndexRecord record) {",
"score": 21.757829285743636
},
{
"filename": "src/main/java/org/tallison/cc/index/CCIndexRecord.java",
"retrieved_chunk": " return \"\";\n }\n public static Optional<CCIndexRecord> parseRecord(String row) {\n int urlI = row.indexOf(' ');\n int dateI = row.indexOf(' ', urlI + 1);\n if (dateI < 0) {\n LOGGER.warn(\"bad record dateI < 0: {}\", row);\n return Optional.empty();\n }\n String json = row.substring(dateI + 1);",
"score": 21.617482902742566
}
] | java | ccIndexReaderCounter.getEmptyPayload().incrementAndGet(); |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.Locale;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.zip.GZIPInputStream;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.AbstractRecordProcessor;
import org.tallison.cc.index.CCIndexReaderCounter;
import org.tallison.cc.index.IndexIterator;
import org.apache.tika.exception.TikaConfigException;
import org.apache.tika.exception.TikaException;
import org.apache.tika.io.TikaInputStream;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.pipes.FetchEmitTuple;
import org.apache.tika.pipes.fetcher.Fetcher;
import org.apache.tika.pipes.pipesiterator.CallablePipesIterator;
import org.apache.tika.pipes.pipesiterator.PipesIterator;
import org.apache.tika.utils.StringUtils;
/**
* This is a lighter class that doesn't rely on a database
* to extract files from CC and log a list of truncated urls.
*/
public class CCFileExtractor {
private static final Long INDEX_WORKER_ID = 1l;
private static final Long INDEX_READER_ID = 2l;
private static final Logger LOGGER = LoggerFactory.getLogger(CCFileExtractor.class);
public static void main(String[] args) throws Exception {
ExtractorConfig fetcherConfig =
new ObjectMapper().readValue(new File(args[0]), ExtractorConfig.class);
execute(fetcherConfig);
}
private static void execute(ExtractorConfig fetcherConfig) throws TikaException {
ArrayBlockingQueue<FetchEmitTuple> indexPathsList = new ArrayBlockingQueue<>(1000);
//IndexPathsReader reads a file containing a list of cc-index.paths files
//and writes the literal gz files (cc-index/collections/CC-MAIN-2023-06/indexes/cdx-00000.gz)
//to indexPathsList
//IndexWorker reads a single index.gz file at a time and processes each record
//It fetches non truncated files and logs truncated files
int totalThreads = fetcherConfig.getNumThreads() + 1;
ExecutorService executorService = Executors.newFixedThreadPool(totalThreads);
ExecutorCompletionService<Long> executorCompletionService =
new ExecutorCompletionService<>(executorService);
IndexIterator indexIterator = fetcherConfig.getIndexIterator();
indexIterator.initialize(Collections.EMPTY_MAP);
executorCompletionService.submit(new CallablePipesIterator(indexIterator, indexPathsList));
CCIndexReaderCounter counter = new CCIndexReaderCounter();
int finishedWorkers = 0;
try {
for (int i = 0; i < fetcherConfig.getNumThreads(); i++) {
CCFileExtractorRecordProcessor processor =
new CCFileExtractorRecordProcessor(fetcherConfig, counter);
executorCompletionService.submit(
new IndexWorker(fetcherConfig, indexPathsList, processor));
}
while ( | finishedWorkers < fetcherConfig.getNumThreads()) { |
//blocking
Future<Long> future = executorCompletionService.take();
if (future != null) {
Long f = future.get();
LOGGER.debug("completed {}", f);
if (f.equals(INDEX_WORKER_ID)) {
finishedWorkers++;
} else if (f.equals(INDEX_READER_ID)) {
LOGGER.info("Index paths reader successfully completed");
}
}
}
} catch (TikaConfigException | IOException e) {
LOGGER.error("main loop exception", e);
throw new RuntimeException(e);
} catch (ExecutionException e) {
LOGGER.error("main loop exception", e);
throw new RuntimeException(e);
} catch (InterruptedException e) {
LOGGER.warn("main loop interrupted exception", e);
throw new RuntimeException(e);
} finally {
executorService.shutdown();
executorService.shutdownNow();
}
}
private static class IndexWorker implements Callable<Long> {
private final ArrayBlockingQueue<FetchEmitTuple> indexUrls;
private final AbstractRecordProcessor recordProcessor;
private final Fetcher indexFetcher;
IndexWorker(ExtractorConfig fetcherConfig, ArrayBlockingQueue<FetchEmitTuple> indexUrls,
AbstractRecordProcessor recordProcessor) throws TikaException {
this.indexUrls = indexUrls;
this.recordProcessor = recordProcessor;
this.indexFetcher = fetcherConfig.newIndexFetcher();
}
@Override
public Long call() throws Exception {
boolean shouldContinue = true;
while (shouldContinue) {
FetchEmitTuple indexUrl = indexUrls.poll(120, TimeUnit.MINUTES);
if (indexUrl == null) {
throw new TimeoutException("waited 120 minutes for a new record");
}
if (indexUrl == PipesIterator.COMPLETED_SEMAPHORE) {
recordProcessor.close();
//can hang forever
indexUrls.put(PipesIterator.COMPLETED_SEMAPHORE);
return INDEX_WORKER_ID;
}
LOGGER.trace(indexUrl.toString());
shouldContinue = processFile(indexUrl, recordProcessor);
}
return INDEX_WORKER_ID;
}
private boolean processFile(FetchEmitTuple fetchEmitTuple,
AbstractRecordProcessor recordProcessor)
throws InterruptedException {
long start = System.currentTimeMillis();
LOGGER.info("starting to fetch index gz: {}",
fetchEmitTuple.getFetchKey().getFetchKey());
try (TikaInputStream tis = (TikaInputStream) indexFetcher.fetch(
fetchEmitTuple.getFetchKey().getFetchKey(), new Metadata())) {
try (InputStream is = new BufferedInputStream(new GZIPInputStream(tis))) {
try (BufferedReader reader = new BufferedReader(
new InputStreamReader(is, StandardCharsets.UTF_8))) {
String line = reader.readLine();
int lines = 0;
long elapsed = System.currentTimeMillis() - start;
LOGGER.info("Finished fetching {} bytes in {} ms for index gz: {}",
String.format(Locale.US, "%,d", tis.getLength()),
String.format(Locale.US, "%,d", elapsed),
fetchEmitTuple.getFetchKey().getFetchKey());
while (line != null) {
LOGGER.trace("about to add a line");
if (StringUtils.isBlank(line)) {
line = reader.readLine();
continue;
}
try {
boolean shouldContinue = recordProcessor.process(line);
if (!shouldContinue) {
return shouldContinue;
}
} catch (IOException e) {
LOGGER.warn("bad json: " + line);
}
lines++;
line = reader.readLine();
}
}
}
} catch (TikaException | IOException e) {
LOGGER.error(
"failed while processing " + fetchEmitTuple.getFetchKey().getFetchKey(), e);
}
long elapsed = System.currentTimeMillis() - start;
LOGGER.info("finished processing index gz in ({}) ms: {}",
String.format(Locale.US, "%,d", elapsed),
fetchEmitTuple.getFetchKey().getFetchKey());
return true;
}
}
}
| src/main/java/org/tallison/cc/index/extractor/CCFileExtractor.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " CCIndexReaderCounter counter = new CCIndexReaderCounter();\n int finishedWorkers = 0;\n List<DetectedMimeCounter> detectedMimeCounters = new ArrayList<>();\n try {\n for (int i = 0; i < fetcherConfig.getNumThreads(); i++) {\n DetectedMimeCounter processor = new DetectedMimeCounter(fetcherConfig, counter);\n detectedMimeCounters.add(processor);\n executorCompletionService.submit(\n new IndexWorker(fetcherConfig, indexPathsList, processor));\n }",
"score": 101.44444728605997
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCIndexFetcher.java",
"retrieved_chunk": " ExecutorCompletionService<Long> executorCompletionService =\n new ExecutorCompletionService<>(executorService);\n IndexIterator indexIterator = fetcherConfig.getIndexIterator();\n indexIterator.initialize(Collections.EMPTY_MAP);\n executorCompletionService.submit(new CallablePipesIterator(indexIterator, indexPathsList));\n int finishedWorkers = 0;\n try {\n for (int i = 0; i < fetcherConfig.getNumThreads(); i++) {\n executorCompletionService.submit(new IndexFetcher(fetcherConfig, indexPathsList));\n }",
"score": 80.37684160846197
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " //to indexPathsList\n //IndexWorker reads a single index.gz file at a time and processes each record\n //It fetches non truncated files and logs truncated files\n int totalThreads = fetcherConfig.getNumThreads() + 1;\n ExecutorService executorService = Executors.newFixedThreadPool(totalThreads);\n ExecutorCompletionService<Long> executorCompletionService =\n new ExecutorCompletionService<>(executorService);\n IndexIterator indexIterator = fetcherConfig.getIndexIterator();\n indexIterator.initialize(Collections.EMPTY_MAP);\n executorCompletionService.submit(new CallablePipesIterator(indexIterator, indexPathsList));",
"score": 39.56424696673274
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java",
"retrieved_chunk": " LoggerFactory.getLogger(\"truncated-urls-full\");\n private final ExtractorConfig fetcherConfig;\n private final CCIndexReaderCounter counter;\n private final FileFromCCWarcExtractor fileFromCCWarcFetcher;\n private long reportEvery = 100000;\n public CCFileExtractorRecordProcessor(ExtractorConfig fetcherConfig, CCIndexReaderCounter counter)\n throws TikaConfigException, IOException {\n this.fetcherConfig = fetcherConfig;\n this.counter = counter;\n this.fileFromCCWarcFetcher = new FileFromCCWarcExtractor(fetcherConfig, counter);",
"score": 33.18556652764143
},
{
"filename": "src/main/java/org/tallison/cc/index/io/TargetPathRewriter.java",
"retrieved_chunk": " StringBuilder sb = new StringBuilder();\n int start = 0;\n for (int i : offsets) {\n sb.append(originalPath.substring(start, i));\n sb.append('/');\n start = i;\n }\n sb.append(originalPath);\n return sb.toString();\n }",
"score": 31.16898829026245
}
] | java | finishedWorkers < fetcherConfig.getNumThreads()) { |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collections;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.IndexIterator;
import org.apache.tika.exception.TikaException;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.pipes.FetchEmitTuple;
import org.apache.tika.pipes.emitter.StreamEmitter;
import org.apache.tika.pipes.fetcher.Fetcher;
import org.apache.tika.pipes.pipesiterator.CallablePipesIterator;
import org.apache.tika.pipes.pipesiterator.PipesIterator;
/**
* This class fetches index files from aws to a local file share.
* <p>
* This pulls the index files either via https or s3
*/
public class CCIndexFetcher {
private static final Logger LOGGER = LoggerFactory.getLogger(CCIndexFetcher.class);
public static void main(String[] args) throws Exception {
ExtractorConfig fetcherConfig =
new ObjectMapper().readValue(new File(args[0]), ExtractorConfig.class);
execute(fetcherConfig);
}
private static void execute(ExtractorConfig fetcherConfig) throws Exception {
ArrayBlockingQueue<FetchEmitTuple> indexPathsList = new ArrayBlockingQueue<>(1000);
//IndexPathsReader reads a file containing a list of cc-index.paths files
//and writes the literal gz files (cc-index/collections/CC-MAIN-2023-06/indexes/cdx-00000.gz)
//to indexPathsList
int totalThreads = fetcherConfig.getNumThreads() + 1;
ExecutorService executorService = Executors.newFixedThreadPool(totalThreads);
ExecutorCompletionService<Long> executorCompletionService =
new ExecutorCompletionService<>(executorService);
IndexIterator indexIterator = fetcherConfig.getIndexIterator();
indexIterator.initialize(Collections.EMPTY_MAP);
executorCompletionService.submit(new CallablePipesIterator(indexIterator, indexPathsList));
int finishedWorkers = 0;
try {
for (int i = 0; i < fetcherConfig.getNumThreads(); i++) {
executorCompletionService.submit(new IndexFetcher(fetcherConfig, indexPathsList));
}
while (finishedWorkers < totalThreads) {
//blocking
Future<Long> future = executorCompletionService.take();
if (future != null) {
Long f = future.get();
finishedWorkers++;
LOGGER.debug("completed {}: {}", f, finishedWorkers);
}
}
} catch (ExecutionException e) {
LOGGER.error("main loop exception", e);
throw new RuntimeException(e);
} catch (InterruptedException e) {
LOGGER.warn("main loop interrupted exception", e);
throw new RuntimeException(e);
} finally {
executorService.shutdown();
executorService.shutdownNow();
}
}
private static class IndexFetcher implements Callable<Long> {
private final ExtractorConfig fetcherConfig;
private final ArrayBlockingQueue<FetchEmitTuple> indexPathsList;
public IndexFetcher(ExtractorConfig fetcherConfig,
ArrayBlockingQueue<FetchEmitTuple> indexPathsList) {
this.fetcherConfig = fetcherConfig;
this.indexPathsList = indexPathsList;
}
@Override
public Long call() throws Exception {
Fetcher fetcher = fetcherConfig.newFetcher();
StreamEmitter | streamEmitter = fetcherConfig.newEmitter(); |
while (true) {
FetchEmitTuple t = indexPathsList.poll(120, TimeUnit.MINUTES);
if (t == null) {
throw new TimeoutException("waited 120 minutes for a new record");
}
if (t == PipesIterator.COMPLETED_SEMAPHORE) {
indexPathsList.put(PipesIterator.COMPLETED_SEMAPHORE);
LOGGER.info("Index fetcher finished");
return 1l;
}
fetch(t, fetcher, streamEmitter);
}
}
private void fetch(FetchEmitTuple t, Fetcher fetcher, StreamEmitter streamEmitter) {
LOGGER.info("about to download: " + t.getFetchKey().getFetchKey());
try (InputStream is = fetcher.fetch(t.getFetchKey().getFetchKey(), new Metadata())) {
streamEmitter.emit(t.getFetchKey().getFetchKey(), is, new Metadata());
LOGGER.info("successfully downloaded: " + t.getFetchKey().getFetchKey());
} catch (TikaException | IOException e) {
LOGGER.error("failed to copy " + t.getFetchKey().getFetchKey(), e);
}
}
}
}
| src/main/java/org/tallison/cc/index/extractor/CCIndexFetcher.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " private final Fetcher fetcher;\n IndexWorker(ExtractorConfig fetcherConfig, ArrayBlockingQueue<FetchEmitTuple> indexUrls,\n AbstractRecordProcessor recordProcessor) throws TikaException {\n this.indexUrls = indexUrls;\n this.recordProcessor = recordProcessor;\n this.fetcher = fetcherConfig.newFetcher();\n }\n @Override\n public Long call() throws Exception {\n boolean shouldContinue = true;",
"score": 57.573545028872864
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractor.java",
"retrieved_chunk": " ExtractorConfig fetcherConfig =\n new ObjectMapper().readValue(new File(args[0]), ExtractorConfig.class);\n execute(fetcherConfig);\n }\n private static void execute(ExtractorConfig fetcherConfig) throws TikaException {\n ArrayBlockingQueue<FetchEmitTuple> indexPathsList = new ArrayBlockingQueue<>(1000);\n //IndexPathsReader reads a file containing a list of cc-index.paths files\n //and writes the literal gz files (cc-index/collections/CC-MAIN-2023-06/indexes/cdx-00000.gz)\n //to indexPathsList\n //IndexWorker reads a single index.gz file at a time and processes each record",
"score": 38.27861525753548
},
{
"filename": "src/test/java/org/tallison/cc/index/FetcherConfigTest.java",
"retrieved_chunk": " assertEquals(BackoffHttpFetcher.class, fetcherConfig.newFetcher().getClass());\n assertEquals(FileSystemEmitter.class, fetcherConfig.newEmitter().getClass());\n }\n @Test\n public void testS3() throws Exception {\n Path p = Paths.get(getClass().getResource(\"/configs/basic-s3.json\").toURI());\n ExtractorConfig fetcherConfig = new ObjectMapper().readValue(p.toFile(), ExtractorConfig.class);\n //TODO -- add actual unit test that tests fetcher and emitter\n assertEquals(S3Fetcher.class, fetcherConfig.newFetcher().getClass());\n assertEquals(S3Emitter.class, fetcherConfig.newEmitter().getClass());",
"score": 37.899351354072294
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " private static final Logger LOGGER = LoggerFactory.getLogger(CCMimeCounter.class);\n public static void main(String[] args) throws Exception {\n ExtractorConfig fetcherConfig =\n new ObjectMapper().readValue(new File(args[0]), ExtractorConfig.class);\n execute(fetcherConfig);\n }\n private static void execute(ExtractorConfig fetcherConfig) throws IOException, TikaException {\n ArrayBlockingQueue<FetchEmitTuple> indexPathsList = new ArrayBlockingQueue<>(1000);\n //IndexPathsReader reads a file containing a list of cc-index.paths files\n //and writes the literal gz files (cc-index/collections/CC-MAIN-2023-06/indexes/cdx-00000.gz)",
"score": 37.33551032302138
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/FileFromCCWarcExtractor.java",
"retrieved_chunk": " CCIndexReaderCounter ccIndexReaderCounter) throws TikaConfigException {\n this.emitter = fetcherConfig.newEmitter();\n this.fetcher = (RangeFetcher) fetcherConfig.newFetcher();\n this.targetPathRewriter = fetcherConfig.getTargetPathRewriter();\n this.extractTruncated = fetcherConfig.isExtractTruncated();\n this.ccIndexReaderCounter = ccIndexReaderCounter;\n }\n public void fetchToPath(CCIndexRecord record) throws InterruptedException {\n LOGGER.debug(\"going to fetch {} {}->{}\", record.getFilename(), record.getOffset(),\n record.getLength());",
"score": 36.83994667628997
}
] | java | streamEmitter = fetcherConfig.newEmitter(); |
package com.tcoded.folialib.impl;
import com.tcoded.folialib.FoliaLib;
import com.tcoded.folialib.enums.EntityTaskResult;
import com.tcoded.folialib.util.TimeConverter;
import com.tcoded.folialib.wrapper.WrappedTask;
import com.tcoded.folialib.wrapper.task.WrappedFoliaTask;
import io.papermc.paper.threadedregions.scheduler.AsyncScheduler;
import io.papermc.paper.threadedregions.scheduler.GlobalRegionScheduler;
import org.bukkit.Chunk;
import org.bukkit.Location;
import org.bukkit.World;
import org.bukkit.entity.Entity;
import org.bukkit.entity.Player;
import org.bukkit.plugin.java.JavaPlugin;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
public class FoliaImplementation implements ServerImplementation {
private final JavaPlugin plugin;
private final GlobalRegionScheduler globalRegionScheduler;
private final AsyncScheduler asyncScheduler;
public FoliaImplementation(FoliaLib foliaLib) {
this.plugin = foliaLib.getPlugin();
this.globalRegionScheduler = plugin.getServer().getGlobalRegionScheduler();
this.asyncScheduler = plugin.getServer().getAsyncScheduler();
}
@Override
public CompletableFuture<Void> runNextTick(Runnable runnable) {
CompletableFuture<Void> future = new CompletableFuture<>();
this.globalRegionScheduler.execute(plugin, () -> {
runnable.run();
future.complete(null);
});
return future;
}
@Override
public CompletableFuture<Void> runAsync(Runnable runnable) {
CompletableFuture<Void> future = new CompletableFuture<>();
this.asyncScheduler.runNow(plugin, task -> {
runnable.run();
future.complete(null);
});
return future;
}
@Override
public WrappedTask runLater(Runnable runnable, long delay, TimeUnit unit) {
return new WrappedFoliaTask(
this.globalRegionScheduler.runDelayed(
plugin, task -> runnable. | run(), TimeConverter.toTicks(delay, unit)
)
); |
}
@Override
public WrappedTask runLaterAsync(Runnable runnable, long delay, TimeUnit unit) {
return new WrappedFoliaTask(
this.asyncScheduler.runDelayed(
plugin, task -> runnable.run(), delay, unit
)
);
}
@Override
public WrappedTask runTimer(Runnable runnable, long delay, long period, TimeUnit unit) {
return new WrappedFoliaTask(
this.globalRegionScheduler.runAtFixedRate(
plugin, task -> runnable.run(),
TimeConverter.toTicks(delay, unit),
TimeConverter.toTicks(period, unit)
)
);
}
@Override
public WrappedTask runTimerAsync(Runnable runnable, long delay, long period, TimeUnit unit) {
return new WrappedFoliaTask(
this.asyncScheduler.runAtFixedRate(
plugin, task -> runnable.run(),
delay, period, unit
)
);
}
@Override
public CompletableFuture<Void> runAtLocation(Location location, Runnable runnable) {
CompletableFuture<Void> future = new CompletableFuture<>();
this.plugin.getServer().getRegionScheduler().execute(plugin, location, () -> {
runnable.run();
future.complete(null);
});
return future;
}
@Override
public WrappedTask runAtLocationLater(Location location, Runnable runnable, long delay, TimeUnit unit) {
return new WrappedFoliaTask(
this.plugin.getServer().getRegionScheduler().runDelayed(
plugin, location, task -> runnable.run(),
TimeConverter.toTicks(delay, unit)
)
);
}
@Override
public WrappedTask runAtLocationTimer(Location location, Runnable runnable, long delay, long period, TimeUnit unit) {
return new WrappedFoliaTask(
this.plugin.getServer().getRegionScheduler().runAtFixedRate(
plugin, location, task -> runnable.run(),
TimeConverter.toTicks(delay, unit),
TimeConverter.toTicks(period, unit)
)
);
}
@Override
public CompletableFuture<EntityTaskResult> runAtEntity(Entity entity, Runnable runnable) {
CompletableFuture<EntityTaskResult> future = new CompletableFuture<>();
boolean success = entity.getScheduler().execute(this.plugin, () -> {
runnable.run();
future.complete(EntityTaskResult.SUCCESS);
}, null, 0);
if (!success) {
future.complete(EntityTaskResult.SCHEDULER_RETIRED);
}
return future;
}
@Override
public CompletableFuture<EntityTaskResult> runAtEntityWithFallback(Entity entity, Runnable runnable, Runnable fallback) {
CompletableFuture<EntityTaskResult> future = new CompletableFuture<>();
boolean success = entity.getScheduler().execute(this.plugin, () -> {
runnable.run();
future.complete(EntityTaskResult.SUCCESS);
}, () -> {
fallback.run();
future.complete(EntityTaskResult.ENTITY_RETIRED);
}, 0);
if (!success) {
future.complete(EntityTaskResult.SCHEDULER_RETIRED);
}
return future;
}
@Override
public WrappedTask runAtEntityLater(Entity entity, Runnable runnable, long delay, TimeUnit unit) {
return new WrappedFoliaTask(
entity.getScheduler().runDelayed(
plugin,
task -> runnable.run(),
null,
TimeConverter.toTicks(delay, unit)
)
);
}
@Override
public WrappedTask runAtEntityTimer(Entity entity, Runnable runnable, long delay, long period, TimeUnit unit) {
return new WrappedFoliaTask(
entity.getScheduler().runAtFixedRate(
plugin,
task -> runnable.run(),
null,
TimeConverter.toTicks(delay, unit),
TimeConverter.toTicks(period, unit)
)
);
}
@Override
public void cancelTask(WrappedTask task) {
task.cancel();
}
@Override
public void cancelAllTasks() {
this.globalRegionScheduler.cancelTasks(plugin);
this.asyncScheduler.cancelTasks(plugin);
}
@Override
public Player getPlayer(String name) {
// This is thread-safe in folia
return this.plugin.getServer().getPlayer(name);
}
@Override
public Player getPlayerExact(String name) {
// This is thread-safe in folia
return this.plugin.getServer().getPlayerExact(name);
}
@Override
public Player getPlayer(UUID uuid) {
// This is thread-safe in folia
return this.plugin.getServer().getPlayer(uuid);
}
@Override
public CompletableFuture<Boolean> teleportAsync(Player player, Location location) {
return player.teleportAsync(location);
}
}
| src/main/java/com/tcoded/folialib/impl/FoliaImplementation.java | TechnicallyCoded-FoliaLib-8f9f24f | [
{
"filename": "src/main/java/com/tcoded/folialib/impl/SpigotImplementation.java",
"retrieved_chunk": " return future;\n }\n @Override\n public WrappedTask runLater(Runnable runnable, long delay, TimeUnit unit) {\n return new WrappedBukkitTask(\n this.scheduler.runTaskLater(plugin, runnable, TimeConverter.toTicks(delay, unit))\n );\n }\n @Override\n public WrappedTask runLaterAsync(Runnable runnable, long delay, TimeUnit unit) {",
"score": 25.0693460691172
},
{
"filename": "src/main/java/com/tcoded/folialib/impl/SpigotImplementation.java",
"retrieved_chunk": " return new WrappedBukkitTask(\n this.scheduler.runTaskLaterAsynchronously(plugin, runnable, TimeConverter.toTicks(delay, unit))\n );\n }\n @Override\n public WrappedTask runTimer(Runnable runnable, long delay, long period, TimeUnit unit) {\n return new WrappedBukkitTask(\n this.scheduler.runTaskTimer(\n plugin, runnable,\n TimeConverter.toTicks(delay, unit),",
"score": 20.770527401232453
},
{
"filename": "src/main/java/com/tcoded/folialib/impl/SpigotImplementation.java",
"retrieved_chunk": " TimeConverter.toTicks(period, unit))\n );\n }\n @Override\n public WrappedTask runTimerAsync(Runnable runnable, long delay, long period, TimeUnit unit) {\n return new WrappedBukkitTask(\n this.scheduler.runTaskTimerAsynchronously(\n plugin, runnable,\n TimeConverter.toTicks(delay, unit),\n TimeConverter.toTicks(period, unit))",
"score": 20.617395344505805
},
{
"filename": "src/main/java/com/tcoded/folialib/impl/SpigotImplementation.java",
"retrieved_chunk": " }\n @Override\n public WrappedTask runAtLocationLater(Location location, Runnable runnable, long delay, TimeUnit unit) {\n return new WrappedBukkitTask(\n this.scheduler.runTaskLater(plugin, runnable, TimeConverter.toTicks(delay, unit))\n );\n }\n @Override\n public WrappedTask runAtLocationTimer(Location location, Runnable runnable, long delay, long period, TimeUnit unit) {\n return new WrappedBukkitTask(",
"score": 19.74387273309718
},
{
"filename": "src/main/java/com/tcoded/folialib/impl/SpigotImplementation.java",
"retrieved_chunk": " public WrappedTask runAtEntityLater(Entity entity, Runnable runnable, long delay, TimeUnit unit) {\n return new WrappedBukkitTask(\n this.scheduler.runTaskLater(plugin, runnable, TimeConverter.toTicks(delay, unit))\n );\n }\n @Override\n public WrappedTask runAtEntityTimer(Entity entity, Runnable runnable, long delay, long period, TimeUnit unit) {\n return new WrappedBukkitTask(\n this.scheduler.runTaskTimer(\n plugin, runnable,",
"score": 19.23340039244508
}
] | java | run(), TimeConverter.toTicks(delay, unit)
)
); |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.Optional;
import java.util.zip.GZIPInputStream;
import org.apache.commons.codec.binary.Base32;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.IOUtils;
import org.netpreserve.jwarc.MediaType;
import org.netpreserve.jwarc.WarcPayload;
import org.netpreserve.jwarc.WarcReader;
import org.netpreserve.jwarc.WarcRecord;
import org.netpreserve.jwarc.WarcResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.CCIndexReaderCounter;
import org.tallison.cc.index.CCIndexRecord;
import org.tallison.cc.index.io.TargetPathRewriter;
import org.apache.tika.exception.TikaConfigException;
import org.apache.tika.exception.TikaException;
import org.apache.tika.io.TikaInputStream;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.pipes.FetchEmitTuple;
import org.apache.tika.pipes.emitter.EmitKey;
import org.apache.tika.pipes.emitter.StreamEmitter;
import org.apache.tika.pipes.fetcher.FetchKey;
import org.apache.tika.pipes.fetcher.RangeFetcher;
public class FileFromCCWarcExtractor {
private static Logger LOGGER =
LoggerFactory.getLogger(FileFromCCWarcExtractor.class);
private static Logger EXTRACTED_LOGGER = LoggerFactory.getLogger("extracted-urls");
private static Logger EXTRACTED_ALL_LOGGER = LoggerFactory.getLogger("extracted-urls-all");
private final StreamEmitter emitter;
private final TargetPathRewriter targetPathRewriter;
private RangeFetcher fetcher;
private final boolean extractTruncated;
private Base32 base32 = new Base32();
private final CCIndexReaderCounter ccIndexReaderCounter;
public FileFromCCWarcExtractor(ExtractorConfig fetcherConfig,
CCIndexReaderCounter ccIndexReaderCounter) throws TikaConfigException {
this.emitter = fetcherConfig.newEmitter();
this.fetcher = (RangeFetcher) fetcherConfig.newFetcher();
this.targetPathRewriter = fetcherConfig.getTargetPathRewriter();
this.extractTruncated = fetcherConfig.isExtractTruncated();
this.ccIndexReaderCounter = ccIndexReaderCounter;
}
public void fetchToPath(CCIndexRecord record) throws InterruptedException {
LOGGER.debug("going to fetch {} {}->{}" | , record.getFilename(), record.getOffset(),
record.getLength()); |
FetchEmitTuple t = new FetchEmitTuple(record.getFilename(),
new FetchKey("", record.getFilename(), record.getOffset(),
record.getOffset() + record.getLength() - 1), new EmitKey());
byte[] warcRecordGZBytes;
try {
warcRecordGZBytes = fetchWarcBytes(t);
} catch (TikaException | IOException e) {
LOGGER.warn("couldn't get bytes from cc's warc " + t, e);
return;
}
String id = record.getUrl();
try {
parseWarc(id, record, warcRecordGZBytes);
} catch (IOException e) {
LOGGER.warn("problem parsing warc file", e);
}
}
private void fetchPayload(String id, CCIndexRecord ccIndexRecord, WarcRecord record)
throws IOException {
if (!((record instanceof WarcResponse) &&
record.contentType().base().equals(MediaType.HTTP))) {
return;
}
Optional<WarcPayload> payload = ((WarcResponse) record).payload();
if (!payload.isPresent()) {
LOGGER.debug("no payload {}", id);
ccIndexReaderCounter.getEmptyPayload().incrementAndGet();
return;
}
if (payload.get().body().size() == 0) {
LOGGER.debug("empty payload id={}", id);
ccIndexReaderCounter.getEmptyPayload().incrementAndGet();
return;
}
Path tmp = Files.createTempFile("ccfile-fetcher-", "");
try {
Files.copy(payload.get().body().stream(), tmp, StandardCopyOption.REPLACE_EXISTING);
String targetDigest = null;
String base32Sha1 = "";
try (InputStream is = Files.newInputStream(tmp)) {
base32Sha1 = base32.encodeAsString(DigestUtils.sha1(is));
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
if (!base32Sha1.equals(ccIndexRecord.getDigest())) {
LOGGER.warn("Bad digest for url={} ccindex={} sha1={}", id,
ccIndexRecord.getDigest(), base32Sha1);
}
//TODO: make digest and encoding configurable
try (InputStream is = Files.newInputStream(tmp)) {
targetDigest = DigestUtils.sha256Hex(is);
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
long length = -1;
try {
length = Files.size(tmp);
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
String targetPath = targetPathRewriter.rewrite(targetDigest);
Metadata metadata = new Metadata();
try (InputStream is = TikaInputStream.get(tmp, metadata)) {
emitter.emit(targetPath, is, new Metadata());
logSuccess(ccIndexRecord, targetDigest, length, targetPath);
} catch (IOException | TikaException e) {
LOGGER.warn("problem writing id={}", id, e);
}
} finally {
try {
Files.delete(tmp);
} catch (IOException e) {
LOGGER.warn("can't delete " + tmp.toAbsolutePath(), e);
}
}
}
private void logSuccess(CCIndexRecord ccIndexRecord, String targetDigest, long length,
String targetPath) {
if (extractTruncated) {
EXTRACTED_ALL_LOGGER.info("", ccIndexRecord.getUrl(),
ccIndexRecord.getNormalizedMime(),
ccIndexRecord.getNormalizedMimeDetected(),
ccIndexRecord.getFilename(),
ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
ccIndexRecord.getTruncated(), targetDigest, length,
targetPath);
} else {
//new ObjectArray ?
//url,mime_detected,warc_file,warc_offset,warc_length,sha256,length,path
EXTRACTED_LOGGER.info("", ccIndexRecord.getUrl(),
ccIndexRecord.getNormalizedMime(),
ccIndexRecord.getNormalizedMimeDetected(),
ccIndexRecord.getFilename(),
ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
targetDigest, length,
targetPath);
}
}
private void parseWarc(String id, CCIndexRecord ccIndexRecord, byte[] warcRecordGZBytes)
throws IOException {
//need to leave initial inputstream open while parsing warcrecord
//can't just parse record and return
try (InputStream is = new GZIPInputStream(new ByteArrayInputStream(warcRecordGZBytes))) {
try (WarcReader warcreader = new WarcReader(is)) {
//should be a single warc per file
//return the first
for (WarcRecord warcRecord : warcreader) {
fetchPayload(id, ccIndexRecord, warcRecord);
return;
}
}
}
}
private byte[] fetchWarcBytes(FetchEmitTuple t)
throws TikaException, InterruptedException, IOException {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
FetchKey k = t.getFetchKey();
try (InputStream is = fetcher.fetch(k.getFetchKey(), k.getRangeStart(), k.getRangeEnd(),
new Metadata())) {
IOUtils.copy(is, bos);
}
return bos.toByteArray();
}
}
| src/main/java/org/tallison/cc/index/extractor/FileFromCCWarcExtractor.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java",
"retrieved_chunk": " LOGGER.info(\"processed: {}\", counter);\n }\n if (fetcherConfig.getMaxRecords() > -1 && totalRead >= fetcherConfig.getMaxRecords()) {\n LOGGER.info(\"hit max read\");\n return false;\n }\n //check for hit max\n //return false;\n Optional<CCIndexRecord> record = CCIndexRecord.parseRecord(json);\n if (record.isEmpty()) {",
"score": 32.46592521587957
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " private final Fetcher fetcher;\n IndexWorker(ExtractorConfig fetcherConfig, ArrayBlockingQueue<FetchEmitTuple> indexUrls,\n AbstractRecordProcessor recordProcessor) throws TikaException {\n this.indexUrls = indexUrls;\n this.recordProcessor = recordProcessor;\n this.fetcher = fetcherConfig.newFetcher();\n }\n @Override\n public Long call() throws Exception {\n boolean shouldContinue = true;",
"score": 32.225817078153355
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCIndexFetcher.java",
"retrieved_chunk": " Fetcher fetcher = fetcherConfig.newFetcher();\n StreamEmitter streamEmitter = fetcherConfig.newEmitter();\n while (true) {\n FetchEmitTuple t = indexPathsList.poll(120, TimeUnit.MINUTES);\n if (t == null) {\n throw new TimeoutException(\"waited 120 minutes for a new record\");\n }\n if (t == PipesIterator.COMPLETED_SEMAPHORE) {\n indexPathsList.put(PipesIterator.COMPLETED_SEMAPHORE);\n LOGGER.info(\"Index fetcher finished\");",
"score": 31.931642236961352
},
{
"filename": "src/test/java/org/tallison/cc/index/FetcherConfigTest.java",
"retrieved_chunk": " assertEquals(BackoffHttpFetcher.class, fetcherConfig.newFetcher().getClass());\n assertEquals(FileSystemEmitter.class, fetcherConfig.newEmitter().getClass());\n }\n @Test\n public void testS3() throws Exception {\n Path p = Paths.get(getClass().getResource(\"/configs/basic-s3.json\").toURI());\n ExtractorConfig fetcherConfig = new ObjectMapper().readValue(p.toFile(), ExtractorConfig.class);\n //TODO -- add actual unit test that tests fetcher and emitter\n assertEquals(S3Fetcher.class, fetcherConfig.newFetcher().getClass());\n assertEquals(S3Emitter.class, fetcherConfig.newEmitter().getClass());",
"score": 30.625559901019013
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/ExtractorConfig.java",
"retrieved_chunk": " if (emitConfig == null) {\n emitConfig = new EmitConfig(DEFAULT_FS_DOCS_PATH);\n }\n return emitConfig.newEmitter();\n }\n public void setExtractTruncated(boolean extractTruncated) {\n this.extractTruncated = extractTruncated;\n }\n public boolean isExtractTruncated() {\n return extractTruncated;",
"score": 28.972378759332106
}
] | java | , record.getFilename(), record.getOffset(),
record.getLength()); |
package com.tcoded.folialib.impl;
import com.tcoded.folialib.FoliaLib;
import com.tcoded.folialib.enums.EntityTaskResult;
import com.tcoded.folialib.util.TimeConverter;
import com.tcoded.folialib.wrapper.WrappedTask;
import com.tcoded.folialib.wrapper.task.WrappedBukkitTask;
import org.bukkit.Location;
import org.bukkit.entity.Entity;
import org.bukkit.entity.Player;
import org.bukkit.plugin.java.JavaPlugin;
import org.bukkit.scheduler.BukkitScheduler;
import org.jetbrains.annotations.NotNull;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
public class SpigotImplementation implements ServerImplementation {
private final JavaPlugin plugin;
@SuppressWarnings("deprecation")
private final @NotNull BukkitScheduler scheduler;
public SpigotImplementation(FoliaLib foliaLib) {
this.plugin = foliaLib.getPlugin();
this.scheduler = plugin.getServer().getScheduler();
}
@Override
public CompletableFuture<Void> runNextTick(Runnable runnable) {
CompletableFuture<Void> future = new CompletableFuture<>();
this.scheduler.runTask(plugin, () -> {
runnable.run();
future.complete(null);
});
return future;
}
@Override
public CompletableFuture<Void> runAsync(Runnable runnable) {
CompletableFuture<Void> future = new CompletableFuture<>();
this.scheduler.runTaskAsynchronously(plugin, () -> {
runnable.run();
future.complete(null);
});
return future;
}
@Override
public WrappedTask runLater(Runnable runnable, long delay, TimeUnit unit) {
return new WrappedBukkitTask(
this.scheduler.runTaskLater(plugin, runnable, TimeConverter.toTicks(delay, unit))
);
}
@Override
public WrappedTask runLaterAsync(Runnable runnable, long delay, TimeUnit unit) {
return new WrappedBukkitTask(
this.scheduler.runTaskLaterAsynchronously(plugin, runnable, TimeConverter.toTicks(delay, unit))
);
}
@Override
public WrappedTask runTimer(Runnable runnable, long delay, long period, TimeUnit unit) {
return new WrappedBukkitTask(
this.scheduler.runTaskTimer(
plugin, runnable,
TimeConverter.toTicks(delay, unit),
| TimeConverter.toTicks(period, unit))
); |
}
@Override
public WrappedTask runTimerAsync(Runnable runnable, long delay, long period, TimeUnit unit) {
return new WrappedBukkitTask(
this.scheduler.runTaskTimerAsynchronously(
plugin, runnable,
TimeConverter.toTicks(delay, unit),
TimeConverter.toTicks(period, unit))
);
}
@Override
public CompletableFuture<Void> runAtLocation(Location location, Runnable runnable) {
CompletableFuture<Void> future = new CompletableFuture<>();
this.scheduler.runTask(plugin, () -> {
runnable.run();
future.complete(null);
});
return future;
}
@Override
public WrappedTask runAtLocationLater(Location location, Runnable runnable, long delay, TimeUnit unit) {
return new WrappedBukkitTask(
this.scheduler.runTaskLater(plugin, runnable, TimeConverter.toTicks(delay, unit))
);
}
@Override
public WrappedTask runAtLocationTimer(Location location, Runnable runnable, long delay, long period, TimeUnit unit) {
return new WrappedBukkitTask(
this.scheduler.runTaskTimer(
plugin, runnable,
TimeConverter.toTicks(delay, unit),
TimeConverter.toTicks(period, unit))
);
}
@Override
public CompletableFuture<EntityTaskResult> runAtEntity(Entity entity, Runnable runnable) {
CompletableFuture<EntityTaskResult> future = new CompletableFuture<>();
this.scheduler.runTask(plugin, () -> {
runnable.run();
future.complete(EntityTaskResult.SUCCESS);
});
return future;
}
@Override
public CompletableFuture<EntityTaskResult> runAtEntityWithFallback(Entity entity, Runnable runnable, Runnable fallback) {
CompletableFuture<EntityTaskResult> future = new CompletableFuture<>();
this.scheduler.runTask(plugin, () -> {
if (entity.isValid()) {
runnable.run();
future.complete(EntityTaskResult.SUCCESS);
} else {
fallback.run();
future.complete(EntityTaskResult.ENTITY_RETIRED);
}
});
return future;
}
@Override
public WrappedTask runAtEntityLater(Entity entity, Runnable runnable, long delay, TimeUnit unit) {
return new WrappedBukkitTask(
this.scheduler.runTaskLater(plugin, runnable, TimeConverter.toTicks(delay, unit))
);
}
@Override
public WrappedTask runAtEntityTimer(Entity entity, Runnable runnable, long delay, long period, TimeUnit unit) {
return new WrappedBukkitTask(
this.scheduler.runTaskTimer(
plugin, runnable,
TimeConverter.toTicks(delay, unit),
TimeConverter.toTicks(period, unit))
);
}
@Override
public void cancelTask(WrappedTask task) {
task.cancel();
}
@Override
public void cancelAllTasks() {
this.scheduler.cancelTasks(plugin);
}
@Override
public Player getPlayer(String name) {
// Already on the main thread
if (this.plugin.getServer().isPrimaryThread()) {
return this.plugin.getServer().getPlayer(name);
}
// Not on the main thread, we need to wait until the next tick
else {
try {
return this.scheduler.callSyncMethod(plugin, () -> this.plugin.getServer().getPlayer(name)).get();
} catch (InterruptedException | ExecutionException e) {
e.printStackTrace();
}
}
// Fallback to null
return null;
}
@Override
public Player getPlayerExact(String name) {
// Already on the main thread
if (this.plugin.getServer().isPrimaryThread()) {
return this.plugin.getServer().getPlayerExact(name);
}
// Not on the main thread, we need to wait until the next tick
else {
try {
return this.scheduler.callSyncMethod(plugin, () -> this.plugin.getServer().getPlayerExact(name)).get();
} catch (InterruptedException | ExecutionException e) {
e.printStackTrace();
}
}
// Fallback to null
return null;
}
@SuppressWarnings("DuplicatedCode")
@Override
public Player getPlayer(UUID uuid) {
// Already on the main thread
if (this.plugin.getServer().isPrimaryThread()) {
return this.plugin.getServer().getPlayer(uuid);
}
// Not on the main thread, we need to wait until the next tick
else {
try {
return this.scheduler.callSyncMethod(plugin, () -> this.plugin.getServer().getPlayer(uuid)).get();
} catch (InterruptedException | ExecutionException e) {
e.printStackTrace();
}
}
// Fallback to null
return null;
}
@Override
public CompletableFuture<Boolean> teleportAsync(Player player, Location location) {
CompletableFuture<Boolean> future = new CompletableFuture<>();
this.runAtEntity(player, () -> {
if (player.isValid() && player.isOnline()) {
player.teleport(location);
future.complete(true);
} else {
future.complete(false);
}
});
return future;
}
}
| src/main/java/com/tcoded/folialib/impl/SpigotImplementation.java | TechnicallyCoded-FoliaLib-8f9f24f | [
{
"filename": "src/main/java/com/tcoded/folialib/impl/FoliaImplementation.java",
"retrieved_chunk": " @Override\n public WrappedTask runTimer(Runnable runnable, long delay, long period, TimeUnit unit) {\n return new WrappedFoliaTask(\n this.globalRegionScheduler.runAtFixedRate(\n plugin, task -> runnable.run(),\n TimeConverter.toTicks(delay, unit),\n TimeConverter.toTicks(period, unit)\n )\n );\n }",
"score": 30.30378225861793
},
{
"filename": "src/main/java/com/tcoded/folialib/impl/FoliaImplementation.java",
"retrieved_chunk": " null,\n TimeConverter.toTicks(delay, unit)\n )\n );\n }\n @Override\n public WrappedTask runAtEntityTimer(Entity entity, Runnable runnable, long delay, long period, TimeUnit unit) {\n return new WrappedFoliaTask(\n entity.getScheduler().runAtFixedRate(\n plugin,",
"score": 23.075585732073215
},
{
"filename": "src/main/java/com/tcoded/folialib/impl/FoliaImplementation.java",
"retrieved_chunk": " return new WrappedFoliaTask(\n this.plugin.getServer().getRegionScheduler().runDelayed(\n plugin, location, task -> runnable.run(),\n TimeConverter.toTicks(delay, unit)\n )\n );\n }\n @Override\n public WrappedTask runAtLocationTimer(Location location, Runnable runnable, long delay, long period, TimeUnit unit) {\n return new WrappedFoliaTask(",
"score": 22.205192403764066
},
{
"filename": "src/main/java/com/tcoded/folialib/impl/FoliaImplementation.java",
"retrieved_chunk": " task -> runnable.run(),\n null,\n TimeConverter.toTicks(delay, unit),\n TimeConverter.toTicks(period, unit)\n )\n );\n }\n @Override\n public void cancelTask(WrappedTask task) {\n task.cancel();",
"score": 21.24822441945603
},
{
"filename": "src/main/java/com/tcoded/folialib/impl/FoliaImplementation.java",
"retrieved_chunk": " this.plugin.getServer().getRegionScheduler().runAtFixedRate(\n plugin, location, task -> runnable.run(),\n TimeConverter.toTicks(delay, unit),\n TimeConverter.toTicks(period, unit)\n )\n );\n }\n @Override\n public CompletableFuture<EntityTaskResult> runAtEntity(Entity entity, Runnable runnable) {\n CompletableFuture<EntityTaskResult> future = new CompletableFuture<>();",
"score": 20.531267350689138
}
] | java | TimeConverter.toTicks(period, unit))
); |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.Optional;
import java.util.zip.GZIPInputStream;
import org.apache.commons.codec.binary.Base32;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.IOUtils;
import org.netpreserve.jwarc.MediaType;
import org.netpreserve.jwarc.WarcPayload;
import org.netpreserve.jwarc.WarcReader;
import org.netpreserve.jwarc.WarcRecord;
import org.netpreserve.jwarc.WarcResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.CCIndexReaderCounter;
import org.tallison.cc.index.CCIndexRecord;
import org.tallison.cc.index.io.TargetPathRewriter;
import org.apache.tika.exception.TikaConfigException;
import org.apache.tika.exception.TikaException;
import org.apache.tika.io.TikaInputStream;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.pipes.FetchEmitTuple;
import org.apache.tika.pipes.emitter.EmitKey;
import org.apache.tika.pipes.emitter.StreamEmitter;
import org.apache.tika.pipes.fetcher.FetchKey;
import org.apache.tika.pipes.fetcher.RangeFetcher;
public class FileFromCCWarcExtractor {
private static Logger LOGGER =
LoggerFactory.getLogger(FileFromCCWarcExtractor.class);
private static Logger EXTRACTED_LOGGER = LoggerFactory.getLogger("extracted-urls");
private static Logger EXTRACTED_ALL_LOGGER = LoggerFactory.getLogger("extracted-urls-all");
private final StreamEmitter emitter;
private final TargetPathRewriter targetPathRewriter;
private RangeFetcher fetcher;
private final boolean extractTruncated;
private Base32 base32 = new Base32();
private final CCIndexReaderCounter ccIndexReaderCounter;
public FileFromCCWarcExtractor(ExtractorConfig fetcherConfig,
CCIndexReaderCounter ccIndexReaderCounter) throws TikaConfigException {
this.emitter = fetcherConfig.newEmitter();
this.fetcher = (RangeFetcher) fetcherConfig.newFetcher();
this.targetPathRewriter = fetcherConfig.getTargetPathRewriter();
this.extractTruncated = fetcherConfig.isExtractTruncated();
this.ccIndexReaderCounter = ccIndexReaderCounter;
}
public void fetchToPath(CCIndexRecord record) throws InterruptedException {
LOGGER | .debug("going to fetch { | } {}->{}", record.getFilename(), record.getOffset(),
record.getLength());
FetchEmitTuple t = new FetchEmitTuple(record.getFilename(),
new FetchKey("", record.getFilename(), record.getOffset(),
record.getOffset() + record.getLength() - 1), new EmitKey());
byte[] warcRecordGZBytes;
try {
warcRecordGZBytes = fetchWarcBytes(t);
} catch (TikaException | IOException e) {
LOGGER.warn("couldn't get bytes from cc's warc " + t, e);
return;
}
String id = record.getUrl();
try {
parseWarc(id, record, warcRecordGZBytes);
} catch (IOException e) {
LOGGER.warn("problem parsing warc file", e);
}
}
private void fetchPayload(String id, CCIndexRecord ccIndexRecord, WarcRecord record)
throws IOException {
if (!((record instanceof WarcResponse) &&
record.contentType().base().equals(MediaType.HTTP))) {
return;
}
Optional<WarcPayload> payload = ((WarcResponse) record).payload();
if (!payload.isPresent()) {
LOGGER.debug("no payload {}", id);
ccIndexReaderCounter.getEmptyPayload().incrementAndGet();
return;
}
if (payload.get().body().size() == 0) {
LOGGER.debug("empty payload id={}", id);
ccIndexReaderCounter.getEmptyPayload().incrementAndGet();
return;
}
Path tmp = Files.createTempFile("ccfile-fetcher-", "");
try {
Files.copy(payload.get().body().stream(), tmp, StandardCopyOption.REPLACE_EXISTING);
String targetDigest = null;
String base32Sha1 = "";
try (InputStream is = Files.newInputStream(tmp)) {
base32Sha1 = base32.encodeAsString(DigestUtils.sha1(is));
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
if (!base32Sha1.equals(ccIndexRecord.getDigest())) {
LOGGER.warn("Bad digest for url={} ccindex={} sha1={}", id,
ccIndexRecord.getDigest(), base32Sha1);
}
//TODO: make digest and encoding configurable
try (InputStream is = Files.newInputStream(tmp)) {
targetDigest = DigestUtils.sha256Hex(is);
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
long length = -1;
try {
length = Files.size(tmp);
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
String targetPath = targetPathRewriter.rewrite(targetDigest);
Metadata metadata = new Metadata();
try (InputStream is = TikaInputStream.get(tmp, metadata)) {
emitter.emit(targetPath, is, new Metadata());
logSuccess(ccIndexRecord, targetDigest, length, targetPath);
} catch (IOException | TikaException e) {
LOGGER.warn("problem writing id={}", id, e);
}
} finally {
try {
Files.delete(tmp);
} catch (IOException e) {
LOGGER.warn("can't delete " + tmp.toAbsolutePath(), e);
}
}
}
private void logSuccess(CCIndexRecord ccIndexRecord, String targetDigest, long length,
String targetPath) {
if (extractTruncated) {
EXTRACTED_ALL_LOGGER.info("", ccIndexRecord.getUrl(),
ccIndexRecord.getNormalizedMime(),
ccIndexRecord.getNormalizedMimeDetected(),
ccIndexRecord.getFilename(),
ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
ccIndexRecord.getTruncated(), targetDigest, length,
targetPath);
} else {
//new ObjectArray ?
//url,mime_detected,warc_file,warc_offset,warc_length,sha256,length,path
EXTRACTED_LOGGER.info("", ccIndexRecord.getUrl(),
ccIndexRecord.getNormalizedMime(),
ccIndexRecord.getNormalizedMimeDetected(),
ccIndexRecord.getFilename(),
ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
targetDigest, length,
targetPath);
}
}
private void parseWarc(String id, CCIndexRecord ccIndexRecord, byte[] warcRecordGZBytes)
throws IOException {
//need to leave initial inputstream open while parsing warcrecord
//can't just parse record and return
try (InputStream is = new GZIPInputStream(new ByteArrayInputStream(warcRecordGZBytes))) {
try (WarcReader warcreader = new WarcReader(is)) {
//should be a single warc per file
//return the first
for (WarcRecord warcRecord : warcreader) {
fetchPayload(id, ccIndexRecord, warcRecord);
return;
}
}
}
}
private byte[] fetchWarcBytes(FetchEmitTuple t)
throws TikaException, InterruptedException, IOException {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
FetchKey k = t.getFetchKey();
try (InputStream is = fetcher.fetch(k.getFetchKey(), k.getRangeStart(), k.getRangeEnd(),
new Metadata())) {
IOUtils.copy(is, bos);
}
return bos.toByteArray();
}
}
| src/main/java/org/tallison/cc/index/extractor/FileFromCCWarcExtractor.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java",
"retrieved_chunk": " LoggerFactory.getLogger(\"truncated-urls-full\");\n private final ExtractorConfig fetcherConfig;\n private final CCIndexReaderCounter counter;\n private final FileFromCCWarcExtractor fileFromCCWarcFetcher;\n private long reportEvery = 100000;\n public CCFileExtractorRecordProcessor(ExtractorConfig fetcherConfig, CCIndexReaderCounter counter)\n throws TikaConfigException, IOException {\n this.fetcherConfig = fetcherConfig;\n this.counter = counter;\n this.fileFromCCWarcFetcher = new FileFromCCWarcExtractor(fetcherConfig, counter);",
"score": 35.59231303101368
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " private final Fetcher fetcher;\n IndexWorker(ExtractorConfig fetcherConfig, ArrayBlockingQueue<FetchEmitTuple> indexUrls,\n AbstractRecordProcessor recordProcessor) throws TikaException {\n this.indexUrls = indexUrls;\n this.recordProcessor = recordProcessor;\n this.fetcher = fetcherConfig.newFetcher();\n }\n @Override\n public Long call() throws Exception {\n boolean shouldContinue = true;",
"score": 34.501605879639285
},
{
"filename": "src/test/java/org/tallison/cc/index/FetcherConfigTest.java",
"retrieved_chunk": " assertEquals(BackoffHttpFetcher.class, fetcherConfig.newFetcher().getClass());\n assertEquals(FileSystemEmitter.class, fetcherConfig.newEmitter().getClass());\n }\n @Test\n public void testS3() throws Exception {\n Path p = Paths.get(getClass().getResource(\"/configs/basic-s3.json\").toURI());\n ExtractorConfig fetcherConfig = new ObjectMapper().readValue(p.toFile(), ExtractorConfig.class);\n //TODO -- add actual unit test that tests fetcher and emitter\n assertEquals(S3Fetcher.class, fetcherConfig.newFetcher().getClass());\n assertEquals(S3Emitter.class, fetcherConfig.newEmitter().getClass());",
"score": 31.87378426501846
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/ExtractorConfig.java",
"retrieved_chunk": " if (emitConfig == null) {\n emitConfig = new EmitConfig(DEFAULT_FS_DOCS_PATH);\n }\n return emitConfig.newEmitter();\n }\n public void setExtractTruncated(boolean extractTruncated) {\n this.extractTruncated = extractTruncated;\n }\n public boolean isExtractTruncated() {\n return extractTruncated;",
"score": 28.972378759332106
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCIndexFetcher.java",
"retrieved_chunk": " private static class IndexFetcher implements Callable<Long> {\n private final ExtractorConfig fetcherConfig;\n private final ArrayBlockingQueue<FetchEmitTuple> indexPathsList;\n public IndexFetcher(ExtractorConfig fetcherConfig,\n ArrayBlockingQueue<FetchEmitTuple> indexPathsList) {\n this.fetcherConfig = fetcherConfig;\n this.indexPathsList = indexPathsList;\n }\n @Override\n public Long call() throws Exception {",
"score": 27.91433118897895
}
] | java | .debug("going to fetch { |
package com.tcoded.folialib.impl;
import com.tcoded.folialib.FoliaLib;
import com.tcoded.folialib.enums.EntityTaskResult;
import com.tcoded.folialib.util.TimeConverter;
import com.tcoded.folialib.wrapper.WrappedTask;
import com.tcoded.folialib.wrapper.task.WrappedFoliaTask;
import io.papermc.paper.threadedregions.scheduler.AsyncScheduler;
import io.papermc.paper.threadedregions.scheduler.GlobalRegionScheduler;
import org.bukkit.Chunk;
import org.bukkit.Location;
import org.bukkit.World;
import org.bukkit.entity.Entity;
import org.bukkit.entity.Player;
import org.bukkit.plugin.java.JavaPlugin;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
public class FoliaImplementation implements ServerImplementation {
private final JavaPlugin plugin;
private final GlobalRegionScheduler globalRegionScheduler;
private final AsyncScheduler asyncScheduler;
public FoliaImplementation(FoliaLib foliaLib) {
this.plugin = foliaLib.getPlugin();
this.globalRegionScheduler = plugin.getServer().getGlobalRegionScheduler();
this.asyncScheduler = plugin.getServer().getAsyncScheduler();
}
@Override
public CompletableFuture<Void> runNextTick(Runnable runnable) {
CompletableFuture<Void> future = new CompletableFuture<>();
this.globalRegionScheduler.execute(plugin, () -> {
runnable.run();
future.complete(null);
});
return future;
}
@Override
public CompletableFuture<Void> runAsync(Runnable runnable) {
CompletableFuture<Void> future = new CompletableFuture<>();
this.asyncScheduler.runNow(plugin, task -> {
runnable.run();
future.complete(null);
});
return future;
}
@Override
public WrappedTask runLater(Runnable runnable, long delay, TimeUnit unit) {
return new WrappedFoliaTask(
this.globalRegionScheduler.runDelayed(
plugin, task -> runnable.run(), TimeConverter.toTicks(delay, unit)
)
);
}
@Override
public WrappedTask runLaterAsync(Runnable runnable, long delay, TimeUnit unit) {
return new WrappedFoliaTask(
this.asyncScheduler.runDelayed(
plugin, task -> runnable.run(), delay, unit
)
);
}
@Override
public WrappedTask runTimer(Runnable runnable, long delay, long period, TimeUnit unit) {
return new WrappedFoliaTask(
this.globalRegionScheduler.runAtFixedRate(
plugin, task -> runnable.run(),
| TimeConverter.toTicks(delay, unit),
TimeConverter.toTicks(period, unit)
)
); |
}
@Override
public WrappedTask runTimerAsync(Runnable runnable, long delay, long period, TimeUnit unit) {
return new WrappedFoliaTask(
this.asyncScheduler.runAtFixedRate(
plugin, task -> runnable.run(),
delay, period, unit
)
);
}
@Override
public CompletableFuture<Void> runAtLocation(Location location, Runnable runnable) {
CompletableFuture<Void> future = new CompletableFuture<>();
this.plugin.getServer().getRegionScheduler().execute(plugin, location, () -> {
runnable.run();
future.complete(null);
});
return future;
}
@Override
public WrappedTask runAtLocationLater(Location location, Runnable runnable, long delay, TimeUnit unit) {
return new WrappedFoliaTask(
this.plugin.getServer().getRegionScheduler().runDelayed(
plugin, location, task -> runnable.run(),
TimeConverter.toTicks(delay, unit)
)
);
}
@Override
public WrappedTask runAtLocationTimer(Location location, Runnable runnable, long delay, long period, TimeUnit unit) {
return new WrappedFoliaTask(
this.plugin.getServer().getRegionScheduler().runAtFixedRate(
plugin, location, task -> runnable.run(),
TimeConverter.toTicks(delay, unit),
TimeConverter.toTicks(period, unit)
)
);
}
@Override
public CompletableFuture<EntityTaskResult> runAtEntity(Entity entity, Runnable runnable) {
CompletableFuture<EntityTaskResult> future = new CompletableFuture<>();
boolean success = entity.getScheduler().execute(this.plugin, () -> {
runnable.run();
future.complete(EntityTaskResult.SUCCESS);
}, null, 0);
if (!success) {
future.complete(EntityTaskResult.SCHEDULER_RETIRED);
}
return future;
}
@Override
public CompletableFuture<EntityTaskResult> runAtEntityWithFallback(Entity entity, Runnable runnable, Runnable fallback) {
CompletableFuture<EntityTaskResult> future = new CompletableFuture<>();
boolean success = entity.getScheduler().execute(this.plugin, () -> {
runnable.run();
future.complete(EntityTaskResult.SUCCESS);
}, () -> {
fallback.run();
future.complete(EntityTaskResult.ENTITY_RETIRED);
}, 0);
if (!success) {
future.complete(EntityTaskResult.SCHEDULER_RETIRED);
}
return future;
}
@Override
public WrappedTask runAtEntityLater(Entity entity, Runnable runnable, long delay, TimeUnit unit) {
return new WrappedFoliaTask(
entity.getScheduler().runDelayed(
plugin,
task -> runnable.run(),
null,
TimeConverter.toTicks(delay, unit)
)
);
}
@Override
public WrappedTask runAtEntityTimer(Entity entity, Runnable runnable, long delay, long period, TimeUnit unit) {
return new WrappedFoliaTask(
entity.getScheduler().runAtFixedRate(
plugin,
task -> runnable.run(),
null,
TimeConverter.toTicks(delay, unit),
TimeConverter.toTicks(period, unit)
)
);
}
@Override
public void cancelTask(WrappedTask task) {
task.cancel();
}
@Override
public void cancelAllTasks() {
this.globalRegionScheduler.cancelTasks(plugin);
this.asyncScheduler.cancelTasks(plugin);
}
@Override
public Player getPlayer(String name) {
// This is thread-safe in folia
return this.plugin.getServer().getPlayer(name);
}
@Override
public Player getPlayerExact(String name) {
// This is thread-safe in folia
return this.plugin.getServer().getPlayerExact(name);
}
@Override
public Player getPlayer(UUID uuid) {
// This is thread-safe in folia
return this.plugin.getServer().getPlayer(uuid);
}
@Override
public CompletableFuture<Boolean> teleportAsync(Player player, Location location) {
return player.teleportAsync(location);
}
}
| src/main/java/com/tcoded/folialib/impl/FoliaImplementation.java | TechnicallyCoded-FoliaLib-8f9f24f | [
{
"filename": "src/main/java/com/tcoded/folialib/impl/SpigotImplementation.java",
"retrieved_chunk": " TimeConverter.toTicks(period, unit))\n );\n }\n @Override\n public WrappedTask runTimerAsync(Runnable runnable, long delay, long period, TimeUnit unit) {\n return new WrappedBukkitTask(\n this.scheduler.runTaskTimerAsynchronously(\n plugin, runnable,\n TimeConverter.toTicks(delay, unit),\n TimeConverter.toTicks(period, unit))",
"score": 33.71250624965994
},
{
"filename": "src/main/java/com/tcoded/folialib/impl/SpigotImplementation.java",
"retrieved_chunk": " return new WrappedBukkitTask(\n this.scheduler.runTaskLaterAsynchronously(plugin, runnable, TimeConverter.toTicks(delay, unit))\n );\n }\n @Override\n public WrappedTask runTimer(Runnable runnable, long delay, long period, TimeUnit unit) {\n return new WrappedBukkitTask(\n this.scheduler.runTaskTimer(\n plugin, runnable,\n TimeConverter.toTicks(delay, unit),",
"score": 33.47499875240863
},
{
"filename": "src/main/java/com/tcoded/folialib/impl/SpigotImplementation.java",
"retrieved_chunk": " }\n @Override\n public WrappedTask runAtLocationLater(Location location, Runnable runnable, long delay, TimeUnit unit) {\n return new WrappedBukkitTask(\n this.scheduler.runTaskLater(plugin, runnable, TimeConverter.toTicks(delay, unit))\n );\n }\n @Override\n public WrappedTask runAtLocationTimer(Location location, Runnable runnable, long delay, long period, TimeUnit unit) {\n return new WrappedBukkitTask(",
"score": 27.71876287675105
},
{
"filename": "src/main/java/com/tcoded/folialib/impl/SpigotImplementation.java",
"retrieved_chunk": " public WrappedTask runAtEntityLater(Entity entity, Runnable runnable, long delay, TimeUnit unit) {\n return new WrappedBukkitTask(\n this.scheduler.runTaskLater(plugin, runnable, TimeConverter.toTicks(delay, unit))\n );\n }\n @Override\n public WrappedTask runAtEntityTimer(Entity entity, Runnable runnable, long delay, long period, TimeUnit unit) {\n return new WrappedBukkitTask(\n this.scheduler.runTaskTimer(\n plugin, runnable,",
"score": 26.893180776620525
},
{
"filename": "src/main/java/com/tcoded/folialib/impl/SpigotImplementation.java",
"retrieved_chunk": " return future;\n }\n @Override\n public WrappedTask runLater(Runnable runnable, long delay, TimeUnit unit) {\n return new WrappedBukkitTask(\n this.scheduler.runTaskLater(plugin, runnable, TimeConverter.toTicks(delay, unit))\n );\n }\n @Override\n public WrappedTask runLaterAsync(Runnable runnable, long delay, TimeUnit unit) {",
"score": 25.66445634698052
}
] | java | TimeConverter.toTicks(delay, unit),
TimeConverter.toTicks(period, unit)
)
); |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.Optional;
import java.util.zip.GZIPInputStream;
import org.apache.commons.codec.binary.Base32;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.IOUtils;
import org.netpreserve.jwarc.MediaType;
import org.netpreserve.jwarc.WarcPayload;
import org.netpreserve.jwarc.WarcReader;
import org.netpreserve.jwarc.WarcRecord;
import org.netpreserve.jwarc.WarcResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.CCIndexReaderCounter;
import org.tallison.cc.index.CCIndexRecord;
import org.tallison.cc.index.io.TargetPathRewriter;
import org.apache.tika.exception.TikaConfigException;
import org.apache.tika.exception.TikaException;
import org.apache.tika.io.TikaInputStream;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.pipes.FetchEmitTuple;
import org.apache.tika.pipes.emitter.EmitKey;
import org.apache.tika.pipes.emitter.StreamEmitter;
import org.apache.tika.pipes.fetcher.FetchKey;
import org.apache.tika.pipes.fetcher.RangeFetcher;
public class FileFromCCWarcExtractor {
private static Logger LOGGER =
LoggerFactory.getLogger(FileFromCCWarcExtractor.class);
private static Logger EXTRACTED_LOGGER = LoggerFactory.getLogger("extracted-urls");
private static Logger EXTRACTED_ALL_LOGGER = LoggerFactory.getLogger("extracted-urls-all");
private final StreamEmitter emitter;
private final TargetPathRewriter targetPathRewriter;
private RangeFetcher fetcher;
private final boolean extractTruncated;
private Base32 base32 = new Base32();
private final CCIndexReaderCounter ccIndexReaderCounter;
public FileFromCCWarcExtractor(ExtractorConfig fetcherConfig,
CCIndexReaderCounter ccIndexReaderCounter) throws TikaConfigException {
this.emitter = fetcherConfig.newEmitter();
this.fetcher = (RangeFetcher) fetcherConfig.newFetcher();
this.targetPathRewriter = fetcherConfig.getTargetPathRewriter();
this.extractTruncated = fetcherConfig.isExtractTruncated();
this.ccIndexReaderCounter = ccIndexReaderCounter;
}
public void fetchToPath(CCIndexRecord record) throws InterruptedException {
LOGGER.debug("going to fetch {} {}->{}", record.getFilename(), record.getOffset(),
| record.getLength()); |
FetchEmitTuple t = new FetchEmitTuple(record.getFilename(),
new FetchKey("", record.getFilename(), record.getOffset(),
record.getOffset() + record.getLength() - 1), new EmitKey());
byte[] warcRecordGZBytes;
try {
warcRecordGZBytes = fetchWarcBytes(t);
} catch (TikaException | IOException e) {
LOGGER.warn("couldn't get bytes from cc's warc " + t, e);
return;
}
String id = record.getUrl();
try {
parseWarc(id, record, warcRecordGZBytes);
} catch (IOException e) {
LOGGER.warn("problem parsing warc file", e);
}
}
private void fetchPayload(String id, CCIndexRecord ccIndexRecord, WarcRecord record)
throws IOException {
if (!((record instanceof WarcResponse) &&
record.contentType().base().equals(MediaType.HTTP))) {
return;
}
Optional<WarcPayload> payload = ((WarcResponse) record).payload();
if (!payload.isPresent()) {
LOGGER.debug("no payload {}", id);
ccIndexReaderCounter.getEmptyPayload().incrementAndGet();
return;
}
if (payload.get().body().size() == 0) {
LOGGER.debug("empty payload id={}", id);
ccIndexReaderCounter.getEmptyPayload().incrementAndGet();
return;
}
Path tmp = Files.createTempFile("ccfile-fetcher-", "");
try {
Files.copy(payload.get().body().stream(), tmp, StandardCopyOption.REPLACE_EXISTING);
String targetDigest = null;
String base32Sha1 = "";
try (InputStream is = Files.newInputStream(tmp)) {
base32Sha1 = base32.encodeAsString(DigestUtils.sha1(is));
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
if (!base32Sha1.equals(ccIndexRecord.getDigest())) {
LOGGER.warn("Bad digest for url={} ccindex={} sha1={}", id,
ccIndexRecord.getDigest(), base32Sha1);
}
//TODO: make digest and encoding configurable
try (InputStream is = Files.newInputStream(tmp)) {
targetDigest = DigestUtils.sha256Hex(is);
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
long length = -1;
try {
length = Files.size(tmp);
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
String targetPath = targetPathRewriter.rewrite(targetDigest);
Metadata metadata = new Metadata();
try (InputStream is = TikaInputStream.get(tmp, metadata)) {
emitter.emit(targetPath, is, new Metadata());
logSuccess(ccIndexRecord, targetDigest, length, targetPath);
} catch (IOException | TikaException e) {
LOGGER.warn("problem writing id={}", id, e);
}
} finally {
try {
Files.delete(tmp);
} catch (IOException e) {
LOGGER.warn("can't delete " + tmp.toAbsolutePath(), e);
}
}
}
private void logSuccess(CCIndexRecord ccIndexRecord, String targetDigest, long length,
String targetPath) {
if (extractTruncated) {
EXTRACTED_ALL_LOGGER.info("", ccIndexRecord.getUrl(),
ccIndexRecord.getNormalizedMime(),
ccIndexRecord.getNormalizedMimeDetected(),
ccIndexRecord.getFilename(),
ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
ccIndexRecord.getTruncated(), targetDigest, length,
targetPath);
} else {
//new ObjectArray ?
//url,mime_detected,warc_file,warc_offset,warc_length,sha256,length,path
EXTRACTED_LOGGER.info("", ccIndexRecord.getUrl(),
ccIndexRecord.getNormalizedMime(),
ccIndexRecord.getNormalizedMimeDetected(),
ccIndexRecord.getFilename(),
ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
targetDigest, length,
targetPath);
}
}
private void parseWarc(String id, CCIndexRecord ccIndexRecord, byte[] warcRecordGZBytes)
throws IOException {
//need to leave initial inputstream open while parsing warcrecord
//can't just parse record and return
try (InputStream is = new GZIPInputStream(new ByteArrayInputStream(warcRecordGZBytes))) {
try (WarcReader warcreader = new WarcReader(is)) {
//should be a single warc per file
//return the first
for (WarcRecord warcRecord : warcreader) {
fetchPayload(id, ccIndexRecord, warcRecord);
return;
}
}
}
}
private byte[] fetchWarcBytes(FetchEmitTuple t)
throws TikaException, InterruptedException, IOException {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
FetchKey k = t.getFetchKey();
try (InputStream is = fetcher.fetch(k.getFetchKey(), k.getRangeStart(), k.getRangeEnd(),
new Metadata())) {
IOUtils.copy(is, bos);
}
return bos.toByteArray();
}
}
| src/main/java/org/tallison/cc/index/extractor/FileFromCCWarcExtractor.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java",
"retrieved_chunk": " LoggerFactory.getLogger(\"truncated-urls-full\");\n private final ExtractorConfig fetcherConfig;\n private final CCIndexReaderCounter counter;\n private final FileFromCCWarcExtractor fileFromCCWarcFetcher;\n private long reportEvery = 100000;\n public CCFileExtractorRecordProcessor(ExtractorConfig fetcherConfig, CCIndexReaderCounter counter)\n throws TikaConfigException, IOException {\n this.fetcherConfig = fetcherConfig;\n this.counter = counter;\n this.fileFromCCWarcFetcher = new FileFromCCWarcExtractor(fetcherConfig, counter);",
"score": 35.59231303101368
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " private final Fetcher fetcher;\n IndexWorker(ExtractorConfig fetcherConfig, ArrayBlockingQueue<FetchEmitTuple> indexUrls,\n AbstractRecordProcessor recordProcessor) throws TikaException {\n this.indexUrls = indexUrls;\n this.recordProcessor = recordProcessor;\n this.fetcher = fetcherConfig.newFetcher();\n }\n @Override\n public Long call() throws Exception {\n boolean shouldContinue = true;",
"score": 34.501605879639285
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java",
"retrieved_chunk": " LOGGER.info(\"processed: {}\", counter);\n }\n if (fetcherConfig.getMaxRecords() > -1 && totalRead >= fetcherConfig.getMaxRecords()) {\n LOGGER.info(\"hit max read\");\n return false;\n }\n //check for hit max\n //return false;\n Optional<CCIndexRecord> record = CCIndexRecord.parseRecord(json);\n if (record.isEmpty()) {",
"score": 32.46592521587957
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCIndexFetcher.java",
"retrieved_chunk": " Fetcher fetcher = fetcherConfig.newFetcher();\n StreamEmitter streamEmitter = fetcherConfig.newEmitter();\n while (true) {\n FetchEmitTuple t = indexPathsList.poll(120, TimeUnit.MINUTES);\n if (t == null) {\n throw new TimeoutException(\"waited 120 minutes for a new record\");\n }\n if (t == PipesIterator.COMPLETED_SEMAPHORE) {\n indexPathsList.put(PipesIterator.COMPLETED_SEMAPHORE);\n LOGGER.info(\"Index fetcher finished\");",
"score": 31.931642236961352
},
{
"filename": "src/test/java/org/tallison/cc/index/FetcherConfigTest.java",
"retrieved_chunk": " assertEquals(BackoffHttpFetcher.class, fetcherConfig.newFetcher().getClass());\n assertEquals(FileSystemEmitter.class, fetcherConfig.newEmitter().getClass());\n }\n @Test\n public void testS3() throws Exception {\n Path p = Paths.get(getClass().getResource(\"/configs/basic-s3.json\").toURI());\n ExtractorConfig fetcherConfig = new ObjectMapper().readValue(p.toFile(), ExtractorConfig.class);\n //TODO -- add actual unit test that tests fetcher and emitter\n assertEquals(S3Fetcher.class, fetcherConfig.newFetcher().getClass());\n assertEquals(S3Emitter.class, fetcherConfig.newEmitter().getClass());",
"score": 31.87378426501846
}
] | java | record.getLength()); |
package com.tcoded.folialib.impl;
import com.tcoded.folialib.FoliaLib;
import com.tcoded.folialib.enums.EntityTaskResult;
import com.tcoded.folialib.util.TimeConverter;
import com.tcoded.folialib.wrapper.WrappedTask;
import com.tcoded.folialib.wrapper.task.WrappedBukkitTask;
import org.bukkit.Location;
import org.bukkit.entity.Entity;
import org.bukkit.entity.Player;
import org.bukkit.plugin.java.JavaPlugin;
import org.bukkit.scheduler.BukkitScheduler;
import org.jetbrains.annotations.NotNull;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
public class SpigotImplementation implements ServerImplementation {
private final JavaPlugin plugin;
@SuppressWarnings("deprecation")
private final @NotNull BukkitScheduler scheduler;
public SpigotImplementation(FoliaLib foliaLib) {
this.plugin = foliaLib.getPlugin();
this.scheduler = plugin.getServer().getScheduler();
}
@Override
public CompletableFuture<Void> runNextTick(Runnable runnable) {
CompletableFuture<Void> future = new CompletableFuture<>();
this.scheduler.runTask(plugin, () -> {
runnable.run();
future.complete(null);
});
return future;
}
@Override
public CompletableFuture<Void> runAsync(Runnable runnable) {
CompletableFuture<Void> future = new CompletableFuture<>();
this.scheduler.runTaskAsynchronously(plugin, () -> {
runnable.run();
future.complete(null);
});
return future;
}
@Override
public WrappedTask runLater(Runnable runnable, long delay, TimeUnit unit) {
return new WrappedBukkitTask(
this.scheduler.runTaskLater(plugin, runnable, TimeConverter.toTicks(delay, unit))
);
}
@Override
public WrappedTask runLaterAsync(Runnable runnable, long delay, TimeUnit unit) {
return new WrappedBukkitTask(
this.scheduler.runTaskLaterAsynchronously(plugin, runnable, TimeConverter.toTicks(delay, unit))
);
}
@Override
public WrappedTask runTimer(Runnable runnable, long delay, long period, TimeUnit unit) {
return new WrappedBukkitTask(
this.scheduler.runTaskTimer(
plugin, runnable,
| TimeConverter.toTicks(delay, unit),
TimeConverter.toTicks(period, unit))
); |
}
@Override
public WrappedTask runTimerAsync(Runnable runnable, long delay, long period, TimeUnit unit) {
return new WrappedBukkitTask(
this.scheduler.runTaskTimerAsynchronously(
plugin, runnable,
TimeConverter.toTicks(delay, unit),
TimeConverter.toTicks(period, unit))
);
}
@Override
public CompletableFuture<Void> runAtLocation(Location location, Runnable runnable) {
CompletableFuture<Void> future = new CompletableFuture<>();
this.scheduler.runTask(plugin, () -> {
runnable.run();
future.complete(null);
});
return future;
}
@Override
public WrappedTask runAtLocationLater(Location location, Runnable runnable, long delay, TimeUnit unit) {
return new WrappedBukkitTask(
this.scheduler.runTaskLater(plugin, runnable, TimeConverter.toTicks(delay, unit))
);
}
@Override
public WrappedTask runAtLocationTimer(Location location, Runnable runnable, long delay, long period, TimeUnit unit) {
return new WrappedBukkitTask(
this.scheduler.runTaskTimer(
plugin, runnable,
TimeConverter.toTicks(delay, unit),
TimeConverter.toTicks(period, unit))
);
}
@Override
public CompletableFuture<EntityTaskResult> runAtEntity(Entity entity, Runnable runnable) {
CompletableFuture<EntityTaskResult> future = new CompletableFuture<>();
this.scheduler.runTask(plugin, () -> {
runnable.run();
future.complete(EntityTaskResult.SUCCESS);
});
return future;
}
@Override
public CompletableFuture<EntityTaskResult> runAtEntityWithFallback(Entity entity, Runnable runnable, Runnable fallback) {
CompletableFuture<EntityTaskResult> future = new CompletableFuture<>();
this.scheduler.runTask(plugin, () -> {
if (entity.isValid()) {
runnable.run();
future.complete(EntityTaskResult.SUCCESS);
} else {
fallback.run();
future.complete(EntityTaskResult.ENTITY_RETIRED);
}
});
return future;
}
@Override
public WrappedTask runAtEntityLater(Entity entity, Runnable runnable, long delay, TimeUnit unit) {
return new WrappedBukkitTask(
this.scheduler.runTaskLater(plugin, runnable, TimeConverter.toTicks(delay, unit))
);
}
@Override
public WrappedTask runAtEntityTimer(Entity entity, Runnable runnable, long delay, long period, TimeUnit unit) {
return new WrappedBukkitTask(
this.scheduler.runTaskTimer(
plugin, runnable,
TimeConverter.toTicks(delay, unit),
TimeConverter.toTicks(period, unit))
);
}
@Override
public void cancelTask(WrappedTask task) {
task.cancel();
}
@Override
public void cancelAllTasks() {
this.scheduler.cancelTasks(plugin);
}
@Override
public Player getPlayer(String name) {
// Already on the main thread
if (this.plugin.getServer().isPrimaryThread()) {
return this.plugin.getServer().getPlayer(name);
}
// Not on the main thread, we need to wait until the next tick
else {
try {
return this.scheduler.callSyncMethod(plugin, () -> this.plugin.getServer().getPlayer(name)).get();
} catch (InterruptedException | ExecutionException e) {
e.printStackTrace();
}
}
// Fallback to null
return null;
}
@Override
public Player getPlayerExact(String name) {
// Already on the main thread
if (this.plugin.getServer().isPrimaryThread()) {
return this.plugin.getServer().getPlayerExact(name);
}
// Not on the main thread, we need to wait until the next tick
else {
try {
return this.scheduler.callSyncMethod(plugin, () -> this.plugin.getServer().getPlayerExact(name)).get();
} catch (InterruptedException | ExecutionException e) {
e.printStackTrace();
}
}
// Fallback to null
return null;
}
@SuppressWarnings("DuplicatedCode")
@Override
public Player getPlayer(UUID uuid) {
// Already on the main thread
if (this.plugin.getServer().isPrimaryThread()) {
return this.plugin.getServer().getPlayer(uuid);
}
// Not on the main thread, we need to wait until the next tick
else {
try {
return this.scheduler.callSyncMethod(plugin, () -> this.plugin.getServer().getPlayer(uuid)).get();
} catch (InterruptedException | ExecutionException e) {
e.printStackTrace();
}
}
// Fallback to null
return null;
}
@Override
public CompletableFuture<Boolean> teleportAsync(Player player, Location location) {
CompletableFuture<Boolean> future = new CompletableFuture<>();
this.runAtEntity(player, () -> {
if (player.isValid() && player.isOnline()) {
player.teleport(location);
future.complete(true);
} else {
future.complete(false);
}
});
return future;
}
}
| src/main/java/com/tcoded/folialib/impl/SpigotImplementation.java | TechnicallyCoded-FoliaLib-8f9f24f | [
{
"filename": "src/main/java/com/tcoded/folialib/impl/FoliaImplementation.java",
"retrieved_chunk": " @Override\n public WrappedTask runTimer(Runnable runnable, long delay, long period, TimeUnit unit) {\n return new WrappedFoliaTask(\n this.globalRegionScheduler.runAtFixedRate(\n plugin, task -> runnable.run(),\n TimeConverter.toTicks(delay, unit),\n TimeConverter.toTicks(period, unit)\n )\n );\n }",
"score": 30.30378225861793
},
{
"filename": "src/main/java/com/tcoded/folialib/impl/FoliaImplementation.java",
"retrieved_chunk": " null,\n TimeConverter.toTicks(delay, unit)\n )\n );\n }\n @Override\n public WrappedTask runAtEntityTimer(Entity entity, Runnable runnable, long delay, long period, TimeUnit unit) {\n return new WrappedFoliaTask(\n entity.getScheduler().runAtFixedRate(\n plugin,",
"score": 23.075585732073215
},
{
"filename": "src/main/java/com/tcoded/folialib/impl/FoliaImplementation.java",
"retrieved_chunk": " return new WrappedFoliaTask(\n this.plugin.getServer().getRegionScheduler().runDelayed(\n plugin, location, task -> runnable.run(),\n TimeConverter.toTicks(delay, unit)\n )\n );\n }\n @Override\n public WrappedTask runAtLocationTimer(Location location, Runnable runnable, long delay, long period, TimeUnit unit) {\n return new WrappedFoliaTask(",
"score": 22.205192403764066
},
{
"filename": "src/main/java/com/tcoded/folialib/impl/FoliaImplementation.java",
"retrieved_chunk": " task -> runnable.run(),\n null,\n TimeConverter.toTicks(delay, unit),\n TimeConverter.toTicks(period, unit)\n )\n );\n }\n @Override\n public void cancelTask(WrappedTask task) {\n task.cancel();",
"score": 21.24822441945603
},
{
"filename": "src/main/java/com/tcoded/folialib/impl/FoliaImplementation.java",
"retrieved_chunk": " this.plugin.getServer().getRegionScheduler().runAtFixedRate(\n plugin, location, task -> runnable.run(),\n TimeConverter.toTicks(delay, unit),\n TimeConverter.toTicks(period, unit)\n )\n );\n }\n @Override\n public CompletableFuture<EntityTaskResult> runAtEntity(Entity entity, Runnable runnable) {\n CompletableFuture<EntityTaskResult> future = new CompletableFuture<>();",
"score": 20.531267350689138
}
] | java | TimeConverter.toTicks(delay, unit),
TimeConverter.toTicks(period, unit))
); |
package com.tcoded.folialib.impl;
import com.tcoded.folialib.FoliaLib;
import com.tcoded.folialib.enums.EntityTaskResult;
import com.tcoded.folialib.util.TimeConverter;
import com.tcoded.folialib.wrapper.WrappedTask;
import com.tcoded.folialib.wrapper.task.WrappedBukkitTask;
import org.bukkit.Location;
import org.bukkit.entity.Entity;
import org.bukkit.entity.Player;
import org.bukkit.plugin.java.JavaPlugin;
import org.bukkit.scheduler.BukkitScheduler;
import org.jetbrains.annotations.NotNull;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
public class SpigotImplementation implements ServerImplementation {
private final JavaPlugin plugin;
@SuppressWarnings("deprecation")
private final @NotNull BukkitScheduler scheduler;
public SpigotImplementation(FoliaLib foliaLib) {
this.plugin = foliaLib.getPlugin();
this.scheduler = plugin.getServer().getScheduler();
}
@Override
public CompletableFuture<Void> runNextTick(Runnable runnable) {
CompletableFuture<Void> future = new CompletableFuture<>();
this.scheduler.runTask(plugin, () -> {
runnable.run();
future.complete(null);
});
return future;
}
@Override
public CompletableFuture<Void> runAsync(Runnable runnable) {
CompletableFuture<Void> future = new CompletableFuture<>();
this.scheduler.runTaskAsynchronously(plugin, () -> {
runnable.run();
future.complete(null);
});
return future;
}
@Override
public WrappedTask runLater(Runnable runnable, long delay, TimeUnit unit) {
return new WrappedBukkitTask(
this.scheduler. | runTaskLater(plugin, runnable, TimeConverter.toTicks(delay, unit))
); |
}
@Override
public WrappedTask runLaterAsync(Runnable runnable, long delay, TimeUnit unit) {
return new WrappedBukkitTask(
this.scheduler.runTaskLaterAsynchronously(plugin, runnable, TimeConverter.toTicks(delay, unit))
);
}
@Override
public WrappedTask runTimer(Runnable runnable, long delay, long period, TimeUnit unit) {
return new WrappedBukkitTask(
this.scheduler.runTaskTimer(
plugin, runnable,
TimeConverter.toTicks(delay, unit),
TimeConverter.toTicks(period, unit))
);
}
@Override
public WrappedTask runTimerAsync(Runnable runnable, long delay, long period, TimeUnit unit) {
return new WrappedBukkitTask(
this.scheduler.runTaskTimerAsynchronously(
plugin, runnable,
TimeConverter.toTicks(delay, unit),
TimeConverter.toTicks(period, unit))
);
}
@Override
public CompletableFuture<Void> runAtLocation(Location location, Runnable runnable) {
CompletableFuture<Void> future = new CompletableFuture<>();
this.scheduler.runTask(plugin, () -> {
runnable.run();
future.complete(null);
});
return future;
}
@Override
public WrappedTask runAtLocationLater(Location location, Runnable runnable, long delay, TimeUnit unit) {
return new WrappedBukkitTask(
this.scheduler.runTaskLater(plugin, runnable, TimeConverter.toTicks(delay, unit))
);
}
@Override
public WrappedTask runAtLocationTimer(Location location, Runnable runnable, long delay, long period, TimeUnit unit) {
return new WrappedBukkitTask(
this.scheduler.runTaskTimer(
plugin, runnable,
TimeConverter.toTicks(delay, unit),
TimeConverter.toTicks(period, unit))
);
}
@Override
public CompletableFuture<EntityTaskResult> runAtEntity(Entity entity, Runnable runnable) {
CompletableFuture<EntityTaskResult> future = new CompletableFuture<>();
this.scheduler.runTask(plugin, () -> {
runnable.run();
future.complete(EntityTaskResult.SUCCESS);
});
return future;
}
@Override
public CompletableFuture<EntityTaskResult> runAtEntityWithFallback(Entity entity, Runnable runnable, Runnable fallback) {
CompletableFuture<EntityTaskResult> future = new CompletableFuture<>();
this.scheduler.runTask(plugin, () -> {
if (entity.isValid()) {
runnable.run();
future.complete(EntityTaskResult.SUCCESS);
} else {
fallback.run();
future.complete(EntityTaskResult.ENTITY_RETIRED);
}
});
return future;
}
@Override
public WrappedTask runAtEntityLater(Entity entity, Runnable runnable, long delay, TimeUnit unit) {
return new WrappedBukkitTask(
this.scheduler.runTaskLater(plugin, runnable, TimeConverter.toTicks(delay, unit))
);
}
@Override
public WrappedTask runAtEntityTimer(Entity entity, Runnable runnable, long delay, long period, TimeUnit unit) {
return new WrappedBukkitTask(
this.scheduler.runTaskTimer(
plugin, runnable,
TimeConverter.toTicks(delay, unit),
TimeConverter.toTicks(period, unit))
);
}
@Override
public void cancelTask(WrappedTask task) {
task.cancel();
}
@Override
public void cancelAllTasks() {
this.scheduler.cancelTasks(plugin);
}
@Override
public Player getPlayer(String name) {
// Already on the main thread
if (this.plugin.getServer().isPrimaryThread()) {
return this.plugin.getServer().getPlayer(name);
}
// Not on the main thread, we need to wait until the next tick
else {
try {
return this.scheduler.callSyncMethod(plugin, () -> this.plugin.getServer().getPlayer(name)).get();
} catch (InterruptedException | ExecutionException e) {
e.printStackTrace();
}
}
// Fallback to null
return null;
}
@Override
public Player getPlayerExact(String name) {
// Already on the main thread
if (this.plugin.getServer().isPrimaryThread()) {
return this.plugin.getServer().getPlayerExact(name);
}
// Not on the main thread, we need to wait until the next tick
else {
try {
return this.scheduler.callSyncMethod(plugin, () -> this.plugin.getServer().getPlayerExact(name)).get();
} catch (InterruptedException | ExecutionException e) {
e.printStackTrace();
}
}
// Fallback to null
return null;
}
@SuppressWarnings("DuplicatedCode")
@Override
public Player getPlayer(UUID uuid) {
// Already on the main thread
if (this.plugin.getServer().isPrimaryThread()) {
return this.plugin.getServer().getPlayer(uuid);
}
// Not on the main thread, we need to wait until the next tick
else {
try {
return this.scheduler.callSyncMethod(plugin, () -> this.plugin.getServer().getPlayer(uuid)).get();
} catch (InterruptedException | ExecutionException e) {
e.printStackTrace();
}
}
// Fallback to null
return null;
}
@Override
public CompletableFuture<Boolean> teleportAsync(Player player, Location location) {
CompletableFuture<Boolean> future = new CompletableFuture<>();
this.runAtEntity(player, () -> {
if (player.isValid() && player.isOnline()) {
player.teleport(location);
future.complete(true);
} else {
future.complete(false);
}
});
return future;
}
}
| src/main/java/com/tcoded/folialib/impl/SpigotImplementation.java | TechnicallyCoded-FoliaLib-8f9f24f | [
{
"filename": "src/main/java/com/tcoded/folialib/impl/FoliaImplementation.java",
"retrieved_chunk": " future.complete(null);\n });\n return future;\n }\n @Override\n public WrappedTask runLater(Runnable runnable, long delay, TimeUnit unit) {\n return new WrappedFoliaTask(\n this.globalRegionScheduler.runDelayed(\n plugin, task -> runnable.run(), TimeConverter.toTicks(delay, unit)\n )",
"score": 27.759331504266086
},
{
"filename": "src/main/java/com/tcoded/folialib/impl/FoliaImplementation.java",
"retrieved_chunk": " public CompletableFuture<Void> runAtLocation(Location location, Runnable runnable) {\n CompletableFuture<Void> future = new CompletableFuture<>();\n this.plugin.getServer().getRegionScheduler().execute(plugin, location, () -> {\n runnable.run();\n future.complete(null);\n });\n return future;\n }\n @Override\n public WrappedTask runAtLocationLater(Location location, Runnable runnable, long delay, TimeUnit unit) {",
"score": 18.21757253642042
},
{
"filename": "src/main/java/com/tcoded/folialib/impl/FoliaImplementation.java",
"retrieved_chunk": " future.complete(EntityTaskResult.SCHEDULER_RETIRED);\n }\n return future;\n }\n @Override\n public WrappedTask runAtEntityLater(Entity entity, Runnable runnable, long delay, TimeUnit unit) {\n return new WrappedFoliaTask(\n entity.getScheduler().runDelayed(\n plugin,\n task -> runnable.run(),",
"score": 17.088636648418532
},
{
"filename": "src/main/java/com/tcoded/folialib/impl/FoliaImplementation.java",
"retrieved_chunk": " null,\n TimeConverter.toTicks(delay, unit)\n )\n );\n }\n @Override\n public WrappedTask runAtEntityTimer(Entity entity, Runnable runnable, long delay, long period, TimeUnit unit) {\n return new WrappedFoliaTask(\n entity.getScheduler().runAtFixedRate(\n plugin,",
"score": 16.892113261453336
},
{
"filename": "src/main/java/com/tcoded/folialib/impl/FoliaImplementation.java",
"retrieved_chunk": " runnable.run();\n future.complete(null);\n });\n return future;\n }\n @Override\n public CompletableFuture<Void> runAsync(Runnable runnable) {\n CompletableFuture<Void> future = new CompletableFuture<>();\n this.asyncScheduler.runNow(plugin, task -> {\n runnable.run();",
"score": 16.1093245271763
}
] | java | runTaskLater(plugin, runnable, TimeConverter.toTicks(delay, unit))
); |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.Optional;
import java.util.zip.GZIPInputStream;
import org.apache.commons.codec.binary.Base32;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.IOUtils;
import org.netpreserve.jwarc.MediaType;
import org.netpreserve.jwarc.WarcPayload;
import org.netpreserve.jwarc.WarcReader;
import org.netpreserve.jwarc.WarcRecord;
import org.netpreserve.jwarc.WarcResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.CCIndexReaderCounter;
import org.tallison.cc.index.CCIndexRecord;
import org.tallison.cc.index.io.TargetPathRewriter;
import org.apache.tika.exception.TikaConfigException;
import org.apache.tika.exception.TikaException;
import org.apache.tika.io.TikaInputStream;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.pipes.FetchEmitTuple;
import org.apache.tika.pipes.emitter.EmitKey;
import org.apache.tika.pipes.emitter.StreamEmitter;
import org.apache.tika.pipes.fetcher.FetchKey;
import org.apache.tika.pipes.fetcher.RangeFetcher;
public class FileFromCCWarcExtractor {
private static Logger LOGGER =
LoggerFactory.getLogger(FileFromCCWarcExtractor.class);
private static Logger EXTRACTED_LOGGER = LoggerFactory.getLogger("extracted-urls");
private static Logger EXTRACTED_ALL_LOGGER = LoggerFactory.getLogger("extracted-urls-all");
private final StreamEmitter emitter;
private final TargetPathRewriter targetPathRewriter;
private RangeFetcher fetcher;
private final boolean extractTruncated;
private Base32 base32 = new Base32();
private final CCIndexReaderCounter ccIndexReaderCounter;
public FileFromCCWarcExtractor(ExtractorConfig fetcherConfig,
CCIndexReaderCounter ccIndexReaderCounter) throws TikaConfigException {
this.emitter = fetcherConfig.newEmitter();
this.fetcher = (RangeFetcher) fetcherConfig.newFetcher();
this.targetPathRewriter = fetcherConfig.getTargetPathRewriter();
this.extractTruncated = fetcherConfig.isExtractTruncated();
this.ccIndexReaderCounter = ccIndexReaderCounter;
}
public void fetchToPath(CCIndexRecord record) throws InterruptedException {
LOGGER.debug("going to fetch {} {}->{}", record.getFilename(), record.getOffset(),
record.getLength());
FetchEmitTuple t = new FetchEmitTuple(record.getFilename(),
new FetchKey("", record.getFilename(), record.getOffset(),
| record.getOffset() + record.getLength() - 1), new EmitKey()); |
byte[] warcRecordGZBytes;
try {
warcRecordGZBytes = fetchWarcBytes(t);
} catch (TikaException | IOException e) {
LOGGER.warn("couldn't get bytes from cc's warc " + t, e);
return;
}
String id = record.getUrl();
try {
parseWarc(id, record, warcRecordGZBytes);
} catch (IOException e) {
LOGGER.warn("problem parsing warc file", e);
}
}
private void fetchPayload(String id, CCIndexRecord ccIndexRecord, WarcRecord record)
throws IOException {
if (!((record instanceof WarcResponse) &&
record.contentType().base().equals(MediaType.HTTP))) {
return;
}
Optional<WarcPayload> payload = ((WarcResponse) record).payload();
if (!payload.isPresent()) {
LOGGER.debug("no payload {}", id);
ccIndexReaderCounter.getEmptyPayload().incrementAndGet();
return;
}
if (payload.get().body().size() == 0) {
LOGGER.debug("empty payload id={}", id);
ccIndexReaderCounter.getEmptyPayload().incrementAndGet();
return;
}
Path tmp = Files.createTempFile("ccfile-fetcher-", "");
try {
Files.copy(payload.get().body().stream(), tmp, StandardCopyOption.REPLACE_EXISTING);
String targetDigest = null;
String base32Sha1 = "";
try (InputStream is = Files.newInputStream(tmp)) {
base32Sha1 = base32.encodeAsString(DigestUtils.sha1(is));
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
if (!base32Sha1.equals(ccIndexRecord.getDigest())) {
LOGGER.warn("Bad digest for url={} ccindex={} sha1={}", id,
ccIndexRecord.getDigest(), base32Sha1);
}
//TODO: make digest and encoding configurable
try (InputStream is = Files.newInputStream(tmp)) {
targetDigest = DigestUtils.sha256Hex(is);
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
long length = -1;
try {
length = Files.size(tmp);
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
String targetPath = targetPathRewriter.rewrite(targetDigest);
Metadata metadata = new Metadata();
try (InputStream is = TikaInputStream.get(tmp, metadata)) {
emitter.emit(targetPath, is, new Metadata());
logSuccess(ccIndexRecord, targetDigest, length, targetPath);
} catch (IOException | TikaException e) {
LOGGER.warn("problem writing id={}", id, e);
}
} finally {
try {
Files.delete(tmp);
} catch (IOException e) {
LOGGER.warn("can't delete " + tmp.toAbsolutePath(), e);
}
}
}
private void logSuccess(CCIndexRecord ccIndexRecord, String targetDigest, long length,
String targetPath) {
if (extractTruncated) {
EXTRACTED_ALL_LOGGER.info("", ccIndexRecord.getUrl(),
ccIndexRecord.getNormalizedMime(),
ccIndexRecord.getNormalizedMimeDetected(),
ccIndexRecord.getFilename(),
ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
ccIndexRecord.getTruncated(), targetDigest, length,
targetPath);
} else {
//new ObjectArray ?
//url,mime_detected,warc_file,warc_offset,warc_length,sha256,length,path
EXTRACTED_LOGGER.info("", ccIndexRecord.getUrl(),
ccIndexRecord.getNormalizedMime(),
ccIndexRecord.getNormalizedMimeDetected(),
ccIndexRecord.getFilename(),
ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
targetDigest, length,
targetPath);
}
}
private void parseWarc(String id, CCIndexRecord ccIndexRecord, byte[] warcRecordGZBytes)
throws IOException {
//need to leave initial inputstream open while parsing warcrecord
//can't just parse record and return
try (InputStream is = new GZIPInputStream(new ByteArrayInputStream(warcRecordGZBytes))) {
try (WarcReader warcreader = new WarcReader(is)) {
//should be a single warc per file
//return the first
for (WarcRecord warcRecord : warcreader) {
fetchPayload(id, ccIndexRecord, warcRecord);
return;
}
}
}
}
private byte[] fetchWarcBytes(FetchEmitTuple t)
throws TikaException, InterruptedException, IOException {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
FetchKey k = t.getFetchKey();
try (InputStream is = fetcher.fetch(k.getFetchKey(), k.getRangeStart(), k.getRangeEnd(),
new Metadata())) {
IOUtils.copy(is, bos);
}
return bos.toByteArray();
}
}
| src/main/java/org/tallison/cc/index/extractor/FileFromCCWarcExtractor.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/selector/RecordSelector.java",
"retrieved_chunk": " return record.getHost();\n case \"digest\":\n return record.getDigest();\n default:\n throw new IllegalArgumentException(\"Don't yet support key \" + key);\n }\n }\n private static class AcceptAllRecords extends RecordSelector {\n @Override\n public boolean select(CCIndexRecord record) {",
"score": 46.772268573596925
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java",
"retrieved_chunk": " LOGGER.info(\"processed: {}\", counter);\n }\n if (fetcherConfig.getMaxRecords() > -1 && totalRead >= fetcherConfig.getMaxRecords()) {\n LOGGER.info(\"hit max read\");\n return false;\n }\n //check for hit max\n //return false;\n Optional<CCIndexRecord> record = CCIndexRecord.parseRecord(json);\n if (record.isEmpty()) {",
"score": 46.51377701327363
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java",
"retrieved_chunk": " String url = r.getUrl();\n TRUNCATED_URLS_LOGGER.info(\"\", url);\n //url,mime,mime_detected,warc_file,warc_offset,warc_length,truncated\n TRUNCATED_URLS_FULL_LOGGER.info(\"\", url,\n r.getNormalizedMime(), r.getNormalizedMimeDetected(), r.getFilename(),\n r.getOffset(), r.getLength(), r.getTruncated());\n return true;\n }\n }\n private void fetchBytes(CCIndexRecord r) throws InterruptedException {",
"score": 44.773592980835105
},
{
"filename": "src/test/java/org/tallison/cc/index/selector/IndexRecordSelectorTest.java",
"retrieved_chunk": " public void testIndexFile() throws Exception {\n Path p = Paths.get(\"/Users/allison/data/cc/index-work/cdx-00000.gz\");\n try (BufferedReader r = new BufferedReader(\n new InputStreamReader(new GZIPInputStream(Files.newInputStream(p)),\n StandardCharsets.UTF_8))) {\n String line = r.readLine();\n while (line != null) {\n Optional<CCIndexRecord> record = CCIndexRecord.parseRecord(line);\n if (record.isPresent()) {\n CCIndexRecord indexRecord = record.get();",
"score": 43.63909877712323
},
{
"filename": "src/main/java/org/tallison/cc/index/selector/RecordSelector.java",
"retrieved_chunk": " return record.getMimeDetected();\n case \"truncated\":\n return record.getTruncated();\n case \"mime\":\n return record.getMime();\n case \"status\":\n return Integer.toString(record.getStatus());\n case \"url\":\n return record.getUrl();\n case \"host\":",
"score": 42.877837892921825
}
] | java | record.getOffset() + record.getLength() - 1), new EmitKey()); |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.Optional;
import java.util.zip.GZIPInputStream;
import org.apache.commons.codec.binary.Base32;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.IOUtils;
import org.netpreserve.jwarc.MediaType;
import org.netpreserve.jwarc.WarcPayload;
import org.netpreserve.jwarc.WarcReader;
import org.netpreserve.jwarc.WarcRecord;
import org.netpreserve.jwarc.WarcResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.CCIndexReaderCounter;
import org.tallison.cc.index.CCIndexRecord;
import org.tallison.cc.index.io.TargetPathRewriter;
import org.apache.tika.exception.TikaConfigException;
import org.apache.tika.exception.TikaException;
import org.apache.tika.io.TikaInputStream;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.pipes.FetchEmitTuple;
import org.apache.tika.pipes.emitter.EmitKey;
import org.apache.tika.pipes.emitter.StreamEmitter;
import org.apache.tika.pipes.fetcher.FetchKey;
import org.apache.tika.pipes.fetcher.RangeFetcher;
public class FileFromCCWarcExtractor {
private static Logger LOGGER =
LoggerFactory.getLogger(FileFromCCWarcExtractor.class);
private static Logger EXTRACTED_LOGGER = LoggerFactory.getLogger("extracted-urls");
private static Logger EXTRACTED_ALL_LOGGER = LoggerFactory.getLogger("extracted-urls-all");
private final StreamEmitter emitter;
private final TargetPathRewriter targetPathRewriter;
private RangeFetcher fetcher;
private final boolean extractTruncated;
private Base32 base32 = new Base32();
private final CCIndexReaderCounter ccIndexReaderCounter;
public FileFromCCWarcExtractor(ExtractorConfig fetcherConfig,
CCIndexReaderCounter ccIndexReaderCounter) throws TikaConfigException {
this.emitter = fetcherConfig.newEmitter();
this.fetcher = (RangeFetcher) fetcherConfig.newFetcher();
this.targetPathRewriter = fetcherConfig.getTargetPathRewriter();
this.extractTruncated = fetcherConfig.isExtractTruncated();
this.ccIndexReaderCounter = ccIndexReaderCounter;
}
public void fetchToPath(CCIndexRecord record) throws InterruptedException {
LOGGER.debug("going to fetch {} {}->{}", record.getFilename(), record.getOffset(),
record.getLength());
FetchEmitTuple t = new FetchEmitTuple(record.getFilename(),
new FetchKey("", record.getFilename(), record.getOffset(),
record.getOffset() + record.getLength() - 1), new EmitKey());
byte[] warcRecordGZBytes;
try {
warcRecordGZBytes = fetchWarcBytes(t);
} catch (TikaException | IOException e) {
LOGGER.warn("couldn't get bytes from cc's warc " + t, e);
return;
}
String id = record.getUrl();
try {
parseWarc(id, record, warcRecordGZBytes);
} catch (IOException e) {
LOGGER.warn("problem parsing warc file", e);
}
}
private void fetchPayload(String id, CCIndexRecord ccIndexRecord, WarcRecord record)
throws IOException {
if (!((record instanceof WarcResponse) &&
record.contentType().base().equals(MediaType.HTTP))) {
return;
}
Optional<WarcPayload> payload = ((WarcResponse) record).payload();
if (!payload.isPresent()) {
LOGGER.debug("no payload {}", id);
ccIndexReaderCounter.getEmptyPayload().incrementAndGet();
return;
}
if (payload.get().body().size() == 0) {
LOGGER.debug("empty payload id={}", id);
ccIndexReaderCounter.getEmptyPayload().incrementAndGet();
return;
}
Path tmp = Files.createTempFile("ccfile-fetcher-", "");
try {
Files.copy(payload.get().body().stream(), tmp, StandardCopyOption.REPLACE_EXISTING);
String targetDigest = null;
String base32Sha1 = "";
try (InputStream is = Files.newInputStream(tmp)) {
base32Sha1 = base32.encodeAsString(DigestUtils.sha1(is));
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
if (!base32Sha1 | .equals(ccIndexRecord.getDigest())) { |
LOGGER.warn("Bad digest for url={} ccindex={} sha1={}", id,
ccIndexRecord.getDigest(), base32Sha1);
}
//TODO: make digest and encoding configurable
try (InputStream is = Files.newInputStream(tmp)) {
targetDigest = DigestUtils.sha256Hex(is);
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
long length = -1;
try {
length = Files.size(tmp);
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
String targetPath = targetPathRewriter.rewrite(targetDigest);
Metadata metadata = new Metadata();
try (InputStream is = TikaInputStream.get(tmp, metadata)) {
emitter.emit(targetPath, is, new Metadata());
logSuccess(ccIndexRecord, targetDigest, length, targetPath);
} catch (IOException | TikaException e) {
LOGGER.warn("problem writing id={}", id, e);
}
} finally {
try {
Files.delete(tmp);
} catch (IOException e) {
LOGGER.warn("can't delete " + tmp.toAbsolutePath(), e);
}
}
}
private void logSuccess(CCIndexRecord ccIndexRecord, String targetDigest, long length,
String targetPath) {
if (extractTruncated) {
EXTRACTED_ALL_LOGGER.info("", ccIndexRecord.getUrl(),
ccIndexRecord.getNormalizedMime(),
ccIndexRecord.getNormalizedMimeDetected(),
ccIndexRecord.getFilename(),
ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
ccIndexRecord.getTruncated(), targetDigest, length,
targetPath);
} else {
//new ObjectArray ?
//url,mime_detected,warc_file,warc_offset,warc_length,sha256,length,path
EXTRACTED_LOGGER.info("", ccIndexRecord.getUrl(),
ccIndexRecord.getNormalizedMime(),
ccIndexRecord.getNormalizedMimeDetected(),
ccIndexRecord.getFilename(),
ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
targetDigest, length,
targetPath);
}
}
private void parseWarc(String id, CCIndexRecord ccIndexRecord, byte[] warcRecordGZBytes)
throws IOException {
//need to leave initial inputstream open while parsing warcrecord
//can't just parse record and return
try (InputStream is = new GZIPInputStream(new ByteArrayInputStream(warcRecordGZBytes))) {
try (WarcReader warcreader = new WarcReader(is)) {
//should be a single warc per file
//return the first
for (WarcRecord warcRecord : warcreader) {
fetchPayload(id, ccIndexRecord, warcRecord);
return;
}
}
}
}
private byte[] fetchWarcBytes(FetchEmitTuple t)
throws TikaException, InterruptedException, IOException {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
FetchKey k = t.getFetchKey();
try (InputStream is = fetcher.fetch(k.getFetchKey(), k.getRangeStart(), k.getRangeEnd(),
new Metadata())) {
IOUtils.copy(is, bos);
}
return bos.toByteArray();
}
}
| src/main/java/org/tallison/cc/index/extractor/FileFromCCWarcExtractor.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/io/BackoffHttpFetcher.java",
"retrieved_chunk": " private InputStream fetchWithBackOff(FetchKey fetchKey, Metadata metadata) throws IOException {\n int tries = 0;\n while (tries < throttleSeconds.length) {\n try {\n return _fetch(fetchKey, metadata);\n } catch (IOException e) {\n if (e.getMessage() == null) {\n throw e;\n }\n Matcher m = Pattern.compile(\"bad status code: (\\\\d+)\").matcher(e.getMessage());",
"score": 19.183217304116436
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractor.java",
"retrieved_chunk": " return shouldContinue;\n }\n } catch (IOException e) {\n LOGGER.warn(\"bad json: \" + line);\n }\n lines++;\n line = reader.readLine();\n }\n }\n }",
"score": 18.08084521696998
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " boolean shouldContinue = recordProcessor.process(line);\n if (!shouldContinue) {\n return shouldContinue;\n }\n } catch (IOException e) {\n LOGGER.warn(\"bad json: \" + line);\n }\n lines++;\n line = reader.readLine();\n }",
"score": 17.38748873258613
},
{
"filename": "src/main/java/org/tallison/cc/index/IndexIterator.java",
"retrieved_chunk": " throws IOException, TikaException {\n try (InputStream is = fetcher.fetch(path, new Metadata())) {\n try (BufferedReader reader = getReader(is, path)) {\n String line = reader.readLine();\n while (line != null) {\n if (line.startsWith(\"#\") || !line.endsWith(\".gz\")) {\n //skip comments and paths for index files that do not end in .gz\n line = reader.readLine();\n continue;\n }",
"score": 17.00497915985262
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractor.java",
"retrieved_chunk": " } catch (TikaConfigException | IOException e) {\n LOGGER.error(\"main loop exception\", e);\n throw new RuntimeException(e);\n } catch (ExecutionException e) {\n LOGGER.error(\"main loop exception\", e);\n throw new RuntimeException(e);\n } catch (InterruptedException e) {\n LOGGER.warn(\"main loop interrupted exception\", e);\n throw new RuntimeException(e);\n } finally {",
"score": 16.721295267265006
}
] | java | .equals(ccIndexRecord.getDigest())) { |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.Optional;
import java.util.zip.GZIPInputStream;
import org.apache.commons.codec.binary.Base32;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.IOUtils;
import org.netpreserve.jwarc.MediaType;
import org.netpreserve.jwarc.WarcPayload;
import org.netpreserve.jwarc.WarcReader;
import org.netpreserve.jwarc.WarcRecord;
import org.netpreserve.jwarc.WarcResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.CCIndexReaderCounter;
import org.tallison.cc.index.CCIndexRecord;
import org.tallison.cc.index.io.TargetPathRewriter;
import org.apache.tika.exception.TikaConfigException;
import org.apache.tika.exception.TikaException;
import org.apache.tika.io.TikaInputStream;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.pipes.FetchEmitTuple;
import org.apache.tika.pipes.emitter.EmitKey;
import org.apache.tika.pipes.emitter.StreamEmitter;
import org.apache.tika.pipes.fetcher.FetchKey;
import org.apache.tika.pipes.fetcher.RangeFetcher;
public class FileFromCCWarcExtractor {
private static Logger LOGGER =
LoggerFactory.getLogger(FileFromCCWarcExtractor.class);
private static Logger EXTRACTED_LOGGER = LoggerFactory.getLogger("extracted-urls");
private static Logger EXTRACTED_ALL_LOGGER = LoggerFactory.getLogger("extracted-urls-all");
private final StreamEmitter emitter;
private final TargetPathRewriter targetPathRewriter;
private RangeFetcher fetcher;
private final boolean extractTruncated;
private Base32 base32 = new Base32();
private final CCIndexReaderCounter ccIndexReaderCounter;
public FileFromCCWarcExtractor(ExtractorConfig fetcherConfig,
CCIndexReaderCounter ccIndexReaderCounter) throws TikaConfigException {
this.emitter = fetcherConfig.newEmitter();
this.fetcher = (RangeFetcher) fetcherConfig.newFetcher();
this.targetPathRewriter = fetcherConfig.getTargetPathRewriter();
this.extractTruncated = fetcherConfig.isExtractTruncated();
this.ccIndexReaderCounter = ccIndexReaderCounter;
}
public void fetchToPath(CCIndexRecord record) throws InterruptedException {
LOGGER.debug("going to fetch {} {}->{}", record.getFilename(), record.getOffset(),
record.getLength());
FetchEmitTuple t = new FetchEmitTuple(record.getFilename(),
new FetchKey("", record.getFilename(), record.getOffset(),
record | .getOffset() + record.getLength() - 1), new EmitKey()); |
byte[] warcRecordGZBytes;
try {
warcRecordGZBytes = fetchWarcBytes(t);
} catch (TikaException | IOException e) {
LOGGER.warn("couldn't get bytes from cc's warc " + t, e);
return;
}
String id = record.getUrl();
try {
parseWarc(id, record, warcRecordGZBytes);
} catch (IOException e) {
LOGGER.warn("problem parsing warc file", e);
}
}
private void fetchPayload(String id, CCIndexRecord ccIndexRecord, WarcRecord record)
throws IOException {
if (!((record instanceof WarcResponse) &&
record.contentType().base().equals(MediaType.HTTP))) {
return;
}
Optional<WarcPayload> payload = ((WarcResponse) record).payload();
if (!payload.isPresent()) {
LOGGER.debug("no payload {}", id);
ccIndexReaderCounter.getEmptyPayload().incrementAndGet();
return;
}
if (payload.get().body().size() == 0) {
LOGGER.debug("empty payload id={}", id);
ccIndexReaderCounter.getEmptyPayload().incrementAndGet();
return;
}
Path tmp = Files.createTempFile("ccfile-fetcher-", "");
try {
Files.copy(payload.get().body().stream(), tmp, StandardCopyOption.REPLACE_EXISTING);
String targetDigest = null;
String base32Sha1 = "";
try (InputStream is = Files.newInputStream(tmp)) {
base32Sha1 = base32.encodeAsString(DigestUtils.sha1(is));
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
if (!base32Sha1.equals(ccIndexRecord.getDigest())) {
LOGGER.warn("Bad digest for url={} ccindex={} sha1={}", id,
ccIndexRecord.getDigest(), base32Sha1);
}
//TODO: make digest and encoding configurable
try (InputStream is = Files.newInputStream(tmp)) {
targetDigest = DigestUtils.sha256Hex(is);
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
long length = -1;
try {
length = Files.size(tmp);
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
String targetPath = targetPathRewriter.rewrite(targetDigest);
Metadata metadata = new Metadata();
try (InputStream is = TikaInputStream.get(tmp, metadata)) {
emitter.emit(targetPath, is, new Metadata());
logSuccess(ccIndexRecord, targetDigest, length, targetPath);
} catch (IOException | TikaException e) {
LOGGER.warn("problem writing id={}", id, e);
}
} finally {
try {
Files.delete(tmp);
} catch (IOException e) {
LOGGER.warn("can't delete " + tmp.toAbsolutePath(), e);
}
}
}
private void logSuccess(CCIndexRecord ccIndexRecord, String targetDigest, long length,
String targetPath) {
if (extractTruncated) {
EXTRACTED_ALL_LOGGER.info("", ccIndexRecord.getUrl(),
ccIndexRecord.getNormalizedMime(),
ccIndexRecord.getNormalizedMimeDetected(),
ccIndexRecord.getFilename(),
ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
ccIndexRecord.getTruncated(), targetDigest, length,
targetPath);
} else {
//new ObjectArray ?
//url,mime_detected,warc_file,warc_offset,warc_length,sha256,length,path
EXTRACTED_LOGGER.info("", ccIndexRecord.getUrl(),
ccIndexRecord.getNormalizedMime(),
ccIndexRecord.getNormalizedMimeDetected(),
ccIndexRecord.getFilename(),
ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
targetDigest, length,
targetPath);
}
}
private void parseWarc(String id, CCIndexRecord ccIndexRecord, byte[] warcRecordGZBytes)
throws IOException {
//need to leave initial inputstream open while parsing warcrecord
//can't just parse record and return
try (InputStream is = new GZIPInputStream(new ByteArrayInputStream(warcRecordGZBytes))) {
try (WarcReader warcreader = new WarcReader(is)) {
//should be a single warc per file
//return the first
for (WarcRecord warcRecord : warcreader) {
fetchPayload(id, ccIndexRecord, warcRecord);
return;
}
}
}
}
private byte[] fetchWarcBytes(FetchEmitTuple t)
throws TikaException, InterruptedException, IOException {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
FetchKey k = t.getFetchKey();
try (InputStream is = fetcher.fetch(k.getFetchKey(), k.getRangeStart(), k.getRangeEnd(),
new Metadata())) {
IOUtils.copy(is, bos);
}
return bos.toByteArray();
}
}
| src/main/java/org/tallison/cc/index/extractor/FileFromCCWarcExtractor.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/selector/RecordSelector.java",
"retrieved_chunk": " return record.getHost();\n case \"digest\":\n return record.getDigest();\n default:\n throw new IllegalArgumentException(\"Don't yet support key \" + key);\n }\n }\n private static class AcceptAllRecords extends RecordSelector {\n @Override\n public boolean select(CCIndexRecord record) {",
"score": 46.772268573596925
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java",
"retrieved_chunk": " String url = r.getUrl();\n TRUNCATED_URLS_LOGGER.info(\"\", url);\n //url,mime,mime_detected,warc_file,warc_offset,warc_length,truncated\n TRUNCATED_URLS_FULL_LOGGER.info(\"\", url,\n r.getNormalizedMime(), r.getNormalizedMimeDetected(), r.getFilename(),\n r.getOffset(), r.getLength(), r.getTruncated());\n return true;\n }\n }\n private void fetchBytes(CCIndexRecord r) throws InterruptedException {",
"score": 44.773592980835105
},
{
"filename": "src/test/java/org/tallison/cc/index/selector/IndexRecordSelectorTest.java",
"retrieved_chunk": " public void testIndexFile() throws Exception {\n Path p = Paths.get(\"/Users/allison/data/cc/index-work/cdx-00000.gz\");\n try (BufferedReader r = new BufferedReader(\n new InputStreamReader(new GZIPInputStream(Files.newInputStream(p)),\n StandardCharsets.UTF_8))) {\n String line = r.readLine();\n while (line != null) {\n Optional<CCIndexRecord> record = CCIndexRecord.parseRecord(line);\n if (record.isPresent()) {\n CCIndexRecord indexRecord = record.get();",
"score": 43.63909877712323
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java",
"retrieved_chunk": " LOGGER.info(\"processed: {}\", counter);\n }\n if (fetcherConfig.getMaxRecords() > -1 && totalRead >= fetcherConfig.getMaxRecords()) {\n LOGGER.info(\"hit max read\");\n return false;\n }\n //check for hit max\n //return false;\n Optional<CCIndexRecord> record = CCIndexRecord.parseRecord(json);\n if (record.isEmpty()) {",
"score": 43.400548742744014
},
{
"filename": "src/main/java/org/tallison/cc/index/selector/RecordSelector.java",
"retrieved_chunk": " return record.getMimeDetected();\n case \"truncated\":\n return record.getTruncated();\n case \"mime\":\n return record.getMime();\n case \"status\":\n return Integer.toString(record.getStatus());\n case \"url\":\n return record.getUrl();\n case \"host\":",
"score": 42.877837892921825
}
] | java | .getOffset() + record.getLength() - 1), new EmitKey()); |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.Optional;
import java.util.zip.GZIPInputStream;
import org.apache.commons.codec.binary.Base32;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.IOUtils;
import org.netpreserve.jwarc.MediaType;
import org.netpreserve.jwarc.WarcPayload;
import org.netpreserve.jwarc.WarcReader;
import org.netpreserve.jwarc.WarcRecord;
import org.netpreserve.jwarc.WarcResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.CCIndexReaderCounter;
import org.tallison.cc.index.CCIndexRecord;
import org.tallison.cc.index.io.TargetPathRewriter;
import org.apache.tika.exception.TikaConfigException;
import org.apache.tika.exception.TikaException;
import org.apache.tika.io.TikaInputStream;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.pipes.FetchEmitTuple;
import org.apache.tika.pipes.emitter.EmitKey;
import org.apache.tika.pipes.emitter.StreamEmitter;
import org.apache.tika.pipes.fetcher.FetchKey;
import org.apache.tika.pipes.fetcher.RangeFetcher;
public class FileFromCCWarcExtractor {
private static Logger LOGGER =
LoggerFactory.getLogger(FileFromCCWarcExtractor.class);
private static Logger EXTRACTED_LOGGER = LoggerFactory.getLogger("extracted-urls");
private static Logger EXTRACTED_ALL_LOGGER = LoggerFactory.getLogger("extracted-urls-all");
private final StreamEmitter emitter;
private final TargetPathRewriter targetPathRewriter;
private RangeFetcher fetcher;
private final boolean extractTruncated;
private Base32 base32 = new Base32();
private final CCIndexReaderCounter ccIndexReaderCounter;
public FileFromCCWarcExtractor(ExtractorConfig fetcherConfig,
CCIndexReaderCounter ccIndexReaderCounter) throws TikaConfigException {
this.emitter = fetcherConfig.newEmitter();
this.fetcher = (RangeFetcher) fetcherConfig.newFetcher();
this.targetPathRewriter = fetcherConfig.getTargetPathRewriter();
this.extractTruncated = fetcherConfig.isExtractTruncated();
this.ccIndexReaderCounter = ccIndexReaderCounter;
}
public void fetchToPath(CCIndexRecord record) throws InterruptedException {
LOGGER.debug("going to fetch {} {}->{}", record.getFilename(), record.getOffset(),
record.getLength());
FetchEmitTuple t = new FetchEmitTuple(record.getFilename(),
new FetchKey("", record.getFilename(), record.getOffset(),
record.getOffset() + record.getLength() - 1), new EmitKey());
byte[] warcRecordGZBytes;
try {
warcRecordGZBytes = fetchWarcBytes(t);
} catch (TikaException | IOException e) {
LOGGER.warn("couldn't get bytes from cc's warc " + t, e);
return;
}
String id = record.getUrl();
try {
parseWarc(id, record, warcRecordGZBytes);
} catch (IOException e) {
LOGGER.warn("problem parsing warc file", e);
}
}
private void fetchPayload(String id, CCIndexRecord ccIndexRecord, WarcRecord record)
throws IOException {
if (!((record instanceof WarcResponse) &&
record.contentType().base().equals(MediaType.HTTP))) {
return;
}
Optional<WarcPayload> payload = ((WarcResponse) record).payload();
if (!payload.isPresent()) {
LOGGER.debug("no payload {}", id);
ccIndexReaderCounter.getEmptyPayload().incrementAndGet();
return;
}
if (payload.get().body().size() == 0) {
LOGGER.debug("empty payload id={}", id);
ccIndexReaderCounter.getEmptyPayload().incrementAndGet();
return;
}
Path tmp = Files.createTempFile("ccfile-fetcher-", "");
try {
Files.copy(payload.get().body().stream(), tmp, StandardCopyOption.REPLACE_EXISTING);
String targetDigest = null;
String base32Sha1 = "";
try (InputStream is = Files.newInputStream(tmp)) {
base32Sha1 = base32.encodeAsString(DigestUtils.sha1(is));
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
if (!base32Sha1.equals(ccIndexRecord.getDigest())) {
LOGGER.warn("Bad digest for url={} ccindex={} sha1={}", id,
ccIndexRecord.getDigest(), base32Sha1);
}
//TODO: make digest and encoding configurable
try (InputStream is = Files.newInputStream(tmp)) {
targetDigest = DigestUtils.sha256Hex(is);
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
long length = -1;
try {
length = Files.size(tmp);
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
String targetPath = targetPathRewriter.rewrite(targetDigest);
Metadata metadata = new Metadata();
try (InputStream is = TikaInputStream.get(tmp, metadata)) {
emitter.emit(targetPath, is, new Metadata());
logSuccess(ccIndexRecord, targetDigest, length, targetPath);
} catch (IOException | TikaException e) {
LOGGER.warn("problem writing id={}", id, e);
}
} finally {
try {
Files.delete(tmp);
} catch (IOException e) {
LOGGER.warn("can't delete " + tmp.toAbsolutePath(), e);
}
}
}
private void logSuccess(CCIndexRecord ccIndexRecord, String targetDigest, long length,
String targetPath) {
if (extractTruncated) {
EXTRACTED_ALL_LOGGER.info("", ccIndexRecord.getUrl(),
ccIndexRecord.getNormalizedMime(),
ccIndexRecord.getNormalizedMimeDetected(),
ccIndexRecord.getFilename(),
| ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
ccIndexRecord.getTruncated(), targetDigest, length,
targetPath); |
} else {
//new ObjectArray ?
//url,mime_detected,warc_file,warc_offset,warc_length,sha256,length,path
EXTRACTED_LOGGER.info("", ccIndexRecord.getUrl(),
ccIndexRecord.getNormalizedMime(),
ccIndexRecord.getNormalizedMimeDetected(),
ccIndexRecord.getFilename(),
ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
targetDigest, length,
targetPath);
}
}
private void parseWarc(String id, CCIndexRecord ccIndexRecord, byte[] warcRecordGZBytes)
throws IOException {
//need to leave initial inputstream open while parsing warcrecord
//can't just parse record and return
try (InputStream is = new GZIPInputStream(new ByteArrayInputStream(warcRecordGZBytes))) {
try (WarcReader warcreader = new WarcReader(is)) {
//should be a single warc per file
//return the first
for (WarcRecord warcRecord : warcreader) {
fetchPayload(id, ccIndexRecord, warcRecord);
return;
}
}
}
}
private byte[] fetchWarcBytes(FetchEmitTuple t)
throws TikaException, InterruptedException, IOException {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
FetchKey k = t.getFetchKey();
try (InputStream is = fetcher.fetch(k.getFetchKey(), k.getRangeStart(), k.getRangeEnd(),
new Metadata())) {
IOUtils.copy(is, bos);
}
return bos.toByteArray();
}
}
| src/main/java/org/tallison/cc/index/extractor/FileFromCCWarcExtractor.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java",
"retrieved_chunk": " String url = r.getUrl();\n TRUNCATED_URLS_LOGGER.info(\"\", url);\n //url,mime,mime_detected,warc_file,warc_offset,warc_length,truncated\n TRUNCATED_URLS_FULL_LOGGER.info(\"\", url,\n r.getNormalizedMime(), r.getNormalizedMimeDetected(), r.getFilename(),\n r.getOffset(), r.getLength(), r.getTruncated());\n return true;\n }\n }\n private void fetchBytes(CCIndexRecord r) throws InterruptedException {",
"score": 39.738381940911005
},
{
"filename": "src/main/java/org/tallison/cc/index/CCIndexRecord.java",
"retrieved_chunk": " public void setLength(long length) {\n this.length = length;\n }\n public Integer getOffset() {\n return offset;\n }\n public void setOffset(int offset) {\n this.offset = offset;\n }\n public String getFilename() {",
"score": 29.75122825319761
},
{
"filename": "src/main/java/org/tallison/cc/index/CCIndexRecord.java",
"retrieved_chunk": " }\n public String getDigest() {\n return digest;\n }\n public void setDigest(String digest) {\n this.digest = digest;\n }\n public Long getLength() {\n return length;\n }",
"score": 18.069619352838995
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " if (!fetcherConfig.getRecordSelector().select(r)) {\n return true;\n }\n increment(totalCounts, r.getNormalizedMimeDetected());\n if (!StringUtils.isBlank(r.getTruncated())) {\n long truncated = counter.getTruncated().incrementAndGet();\n if (fetcherConfig.getMaxFilesTruncated() > -1 &&\n truncated >= fetcherConfig.getMaxFilesTruncated()) {\n LOGGER.info(\"hit max truncated files\");\n return false;",
"score": 15.318731813688501
},
{
"filename": "src/main/java/org/tallison/cc/index/CCIndexRecord.java",
"retrieved_chunk": " return \"bytes=\" + offset + \"-\" + (offset + length - 1);\n }\n @Override\n public String toString() {\n return \"CCIndexRecord{\" + \"url='\" + url + '\\'' + \", mime='\" + mime + '\\'' +\n \", mimeDetected='\" + mimeDetected + '\\'' + \", status=\" + status + \", digest='\" +\n digest + '\\'' + \", length=\" + length + \", offset=\" + offset + \", filename='\" +\n filename + '\\'' + \", charset='\" + charset + '\\'' + \", languages='\" + languages +\n '\\'' + \", truncated='\" + truncated + '\\'' + \", redirect='\" + redirect + '\\'' + '}';\n }",
"score": 14.869806914180984
}
] | java | ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
ccIndexRecord.getTruncated(), targetDigest, length,
targetPath); |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.zip.GZIPInputStream;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;
import org.apache.commons.lang3.mutable.MutableLong;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.AbstractRecordProcessor;
import org.tallison.cc.index.CCIndexReaderCounter;
import org.tallison.cc.index.CCIndexRecord;
import org.tallison.cc.index.IndexIterator;
import org.apache.tika.exception.TikaConfigException;
import org.apache.tika.exception.TikaException;
import org.apache.tika.io.TikaInputStream;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.pipes.FetchEmitTuple;
import org.apache.tika.pipes.fetcher.Fetcher;
import org.apache.tika.pipes.pipesiterator.CallablePipesIterator;
import org.apache.tika.pipes.pipesiterator.PipesIterator;
import org.apache.tika.utils.StringUtils;
/**
* This counts mime_detected. Use a regular file selector to include
* only urls that had a 200, e.g.
*/
public class CCMimeCounter {
private static final Long INDEX_WORKER_ID = 1l;
private static final Long INDEX_READER_ID = 2l;
private static final Logger LOGGER = LoggerFactory.getLogger(CCMimeCounter.class);
public static void main(String[] args) throws Exception {
ExtractorConfig fetcherConfig =
new ObjectMapper().readValue(new File(args[0]), ExtractorConfig.class);
execute(fetcherConfig);
}
private static void execute(ExtractorConfig fetcherConfig) throws IOException, TikaException {
ArrayBlockingQueue<FetchEmitTuple> indexPathsList = new ArrayBlockingQueue<>(1000);
//IndexPathsReader reads a file containing a list of cc-index.paths files
//and writes the literal gz files (cc-index/collections/CC-MAIN-2023-06/indexes/cdx-00000.gz)
//to indexPathsList
//IndexWorker reads a single index.gz file at a time and processes each record
//It fetches non truncated files and logs truncated files
int totalThreads = fetcherConfig.getNumThreads() + 1;
ExecutorService executorService = Executors.newFixedThreadPool(totalThreads);
ExecutorCompletionService<Long> executorCompletionService =
new ExecutorCompletionService<>(executorService);
IndexIterator indexIterator = fetcherConfig.getIndexIterator();
indexIterator.initialize(Collections.EMPTY_MAP);
executorCompletionService.submit(new CallablePipesIterator(indexIterator, indexPathsList));
CCIndexReaderCounter counter = new CCIndexReaderCounter();
int finishedWorkers = 0;
List<DetectedMimeCounter> detectedMimeCounters = new ArrayList<>();
try {
for (int i = 0; i < fetcherConfig.getNumThreads(); i++) {
DetectedMimeCounter processor = new DetectedMimeCounter(fetcherConfig, counter);
detectedMimeCounters.add(processor);
executorCompletionService.submit(
new IndexWorker(fetcherConfig, indexPathsList, processor));
}
while (finishedWorkers < totalThreads) {
//blocking
Future<Long> future = executorCompletionService.take();
if (future != null) {
Long f = future.get();
LOGGER.debug("completed {}", f);
if (f.equals(INDEX_WORKER_ID)) {
finishedWorkers++;
} else if (f.equals(INDEX_READER_ID)) {
LOGGER.info("Index paths reader successfully completed");
}
}
}
} catch (TikaConfigException e) {
LOGGER.error("main loop exception", e);
throw new RuntimeException(e);
} catch (ExecutionException e) {
LOGGER.error("main loop exception", e);
throw new RuntimeException(e);
} catch (InterruptedException e) {
LOGGER.warn("main loop interrupted exception", e);
throw new RuntimeException(e);
} finally {
executorService.shutdown();
executorService.shutdownNow();
}
summarize(detectedMimeCounters);
}
private static void summarize(List<DetectedMimeCounter> detectedMimeCounters)
throws IOException {
Map<String, Long> total = new HashMap<>();
Map<String, Long> truncated = new HashMap<>();
Map<String, Long> nonTruncated = new HashMap<>();
for (DetectedMimeCounter c : detectedMimeCounters) {
update(c.totalCounts, total);
update(c.truncatedCounts, truncated);
}
calcNonTruncated(truncated, total, nonTruncated);
report("total", total);
report("truncated", truncated);
report("non-truncated", nonTruncated);
}
private static void calcNonTruncated(Map<String, Long> truncated, Map<String, Long> total,
Map<String, Long> nonTruncated) {
for (Map.Entry<String, Long> e : total.entrySet()) {
Long val = e.getValue();
Long t = truncated.getOrDefault(e.getKey(), 0l);
val -= t;
nonTruncated.put(e.getKey(), val);
}
}
private static void report(String name, Map<String, Long> m) throws IOException {
try (BufferedWriter writer = Files.newBufferedWriter(Paths.get(name + ".csv"),
StandardCharsets.UTF_8)) {
try (CSVPrinter printer = new CSVPrinter(writer, CSVFormat.EXCEL)) {
printer.printRecord("mime", "count");
m.entrySet().stream().sorted(Collections.reverseOrder(Map.Entry.comparingByValue()))
.forEach(e -> {
try {
printer.printRecord(e.getKey(), e.getValue());
} catch (IOException ex) {
throw new RuntimeException(ex);
}
});
}
}
}
private static void update(Map<String, MutableLong> from, Map<String, Long> to) {
for (Map.Entry<String, MutableLong> e : from.entrySet()) {
Long cnt = to.get(e.getKey());
if (cnt == null) {
cnt = 0l;
}
cnt += e.getValue().getValue();
to.put(e.getKey(), cnt);
}
}
private static class IndexWorker implements Callable<Long> {
private final ArrayBlockingQueue<FetchEmitTuple> indexUrls;
private final AbstractRecordProcessor recordProcessor;
private final Fetcher fetcher;
IndexWorker(ExtractorConfig fetcherConfig, ArrayBlockingQueue<FetchEmitTuple> indexUrls,
AbstractRecordProcessor recordProcessor) throws TikaException {
this.indexUrls = indexUrls;
this.recordProcessor = recordProcessor;
this.fetcher = fetcherConfig.newFetcher();
}
@Override
public Long call() throws Exception {
boolean shouldContinue = true;
while (shouldContinue) {
FetchEmitTuple indexUrl = indexUrls.poll(120, TimeUnit.MINUTES);
if (indexUrl == null) {
throw new TimeoutException("waited 120 minutes for a new record");
}
if (indexUrl == PipesIterator.COMPLETED_SEMAPHORE) {
recordProcessor.close();
//can hang forever
indexUrls.put(PipesIterator.COMPLETED_SEMAPHORE);
return INDEX_WORKER_ID;
}
shouldContinue = processFile(indexUrl, recordProcessor);
}
return INDEX_WORKER_ID;
}
private boolean processFile(FetchEmitTuple fetchEmitTuple,
AbstractRecordProcessor recordProcessor)
throws InterruptedException {
long start = System.currentTimeMillis();
LOGGER.info("starting to fetch index gz: {}",
fetchEmitTuple.getFetchKey().getFetchKey());
try (TikaInputStream tis = (TikaInputStream) fetcher.fetch(
fetchEmitTuple.getFetchKey().getFetchKey(), new Metadata())) {
try (InputStream is = new BufferedInputStream(new GZIPInputStream(tis))) {
try (BufferedReader reader = new BufferedReader(
new InputStreamReader(is, StandardCharsets.UTF_8))) {
String line = reader.readLine();
int lines = 0;
long elapsed = System.currentTimeMillis() - start;
LOGGER.info("Finished fetching {} bytes in {} ms for index gz: {}",
String.format(Locale.US, "%,d", tis.getLength()),
String.format(Locale.US, "%,d", elapsed),
fetchEmitTuple.getFetchKey().getFetchKey());
while (line != null) {
LOGGER.trace("about to add a line");
if (StringUtils.isBlank(line)) {
line = reader.readLine();
continue;
}
try {
boolean shouldContinue = recordProcessor.process(line);
if (!shouldContinue) {
return shouldContinue;
}
} catch (IOException e) {
LOGGER.warn("bad json: " + line);
}
lines++;
line = reader.readLine();
}
}
}
} catch (TikaException | IOException e) {
LOGGER.error(
"failed while processing " + fetchEmitTuple.getFetchKey().getFetchKey(), e);
}
long elapsed = System.currentTimeMillis() - start;
LOGGER.info("finished processing index gz in ({}) ms: {}",
String.format(Locale.US, "%,d", elapsed),
fetchEmitTuple.getFetchKey().getFetchKey());
return true;
}
}
private static class DetectedMimeCounter extends AbstractRecordProcessor {
private final ExtractorConfig fetcherConfig;
private final CCIndexReaderCounter counter;
private final Map<String, MutableLong> totalCounts = new HashMap<>();
private final Map<String, MutableLong> truncatedCounts = new HashMap<>();
public DetectedMimeCounter(ExtractorConfig fetcherConfig, CCIndexReaderCounter counter) {
this.fetcherConfig = fetcherConfig;
this.counter = counter;
}
@Override
public boolean process(String json) throws IOException, InterruptedException {
long totalRead = counter.getRecordsRead().incrementAndGet();
if (totalRead % 1000000 == 0) {
LOGGER.info("processed: {}", counter);
}
if (fetcherConfig.getMaxRecords() > -1 && totalRead >= fetcherConfig.getMaxRecords()) {
LOGGER.info("hit max read");
return false;
}
//check for hit max
//return false;
Optional<CCIndexRecord> record = CCIndexRecord.parseRecord(json);
if (record.isEmpty()) {
//problem already logged
return true;
}
CCIndexRecord r = record.get();
if (!fetcherConfig.getRecordSelector().select(r)) {
return true;
}
increment( | totalCounts, r.getNormalizedMimeDetected()); |
if (!StringUtils.isBlank(r.getTruncated())) {
long truncated = counter.getTruncated().incrementAndGet();
if (fetcherConfig.getMaxFilesTruncated() > -1 &&
truncated >= fetcherConfig.getMaxFilesTruncated()) {
LOGGER.info("hit max truncated files");
return false;
}
increment(truncatedCounts, r.getNormalizedMimeDetected());
return true;
}
return true;
}
private void increment(Map<String, MutableLong> m, String k) {
MutableLong cnt = m.get(k);
if (cnt == null) {
cnt = new MutableLong(1);
m.put(k, cnt);
return;
} else {
cnt.increment();
}
}
@Override
public void close() throws IOException {
}
}
}
| src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java",
"retrieved_chunk": " //problem already logged\n return true;\n }\n CCIndexRecord r = record.get();\n if (!fetcherConfig.getRecordSelector().select(r)) {\n return true;\n }\n //if truncated, count appropriately and test for limits\n if (!StringUtils.isBlank(r.getTruncated())) {\n long truncated = counter.getTruncated().incrementAndGet();",
"score": 64.11348142372344
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java",
"retrieved_chunk": " String url = r.getUrl();\n TRUNCATED_URLS_LOGGER.info(\"\", url);\n //url,mime,mime_detected,warc_file,warc_offset,warc_length,truncated\n TRUNCATED_URLS_FULL_LOGGER.info(\"\", url,\n r.getNormalizedMime(), r.getNormalizedMimeDetected(), r.getFilename(),\n r.getOffset(), r.getLength(), r.getTruncated());\n return true;\n }\n }\n private void fetchBytes(CCIndexRecord r) throws InterruptedException {",
"score": 32.0877663552642
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java",
"retrieved_chunk": " LOGGER.info(\"hit max extracted files\");\n return false;\n }\n if (fetcherConfig.isDryRun()) {\n LOGGER.info(\"dry run, but would have extracted {}\", r);\n return true;\n }\n fetchBytes(r);\n return true;\n } else {",
"score": 29.66191762846441
},
{
"filename": "src/test/java/org/tallison/cc/index/selector/IndexRecordSelectorTest.java",
"retrieved_chunk": " public void testIndexFile() throws Exception {\n Path p = Paths.get(\"/Users/allison/data/cc/index-work/cdx-00000.gz\");\n try (BufferedReader r = new BufferedReader(\n new InputStreamReader(new GZIPInputStream(Files.newInputStream(p)),\n StandardCharsets.UTF_8))) {\n String line = r.readLine();\n while (line != null) {\n Optional<CCIndexRecord> record = CCIndexRecord.parseRecord(line);\n if (record.isPresent()) {\n CCIndexRecord indexRecord = record.get();",
"score": 27.524907419316662
},
{
"filename": "src/main/java/org/tallison/cc/index/selector/AcceptAllRecords.java",
"retrieved_chunk": " public boolean select(CCIndexRecord record) {\n return true;\n }\n}",
"score": 24.51181790679686
}
] | java | totalCounts, r.getNormalizedMimeDetected()); |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.Optional;
import java.util.zip.GZIPInputStream;
import org.apache.commons.codec.binary.Base32;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.IOUtils;
import org.netpreserve.jwarc.MediaType;
import org.netpreserve.jwarc.WarcPayload;
import org.netpreserve.jwarc.WarcReader;
import org.netpreserve.jwarc.WarcRecord;
import org.netpreserve.jwarc.WarcResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.CCIndexReaderCounter;
import org.tallison.cc.index.CCIndexRecord;
import org.tallison.cc.index.io.TargetPathRewriter;
import org.apache.tika.exception.TikaConfigException;
import org.apache.tika.exception.TikaException;
import org.apache.tika.io.TikaInputStream;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.pipes.FetchEmitTuple;
import org.apache.tika.pipes.emitter.EmitKey;
import org.apache.tika.pipes.emitter.StreamEmitter;
import org.apache.tika.pipes.fetcher.FetchKey;
import org.apache.tika.pipes.fetcher.RangeFetcher;
public class FileFromCCWarcExtractor {
private static Logger LOGGER =
LoggerFactory.getLogger(FileFromCCWarcExtractor.class);
private static Logger EXTRACTED_LOGGER = LoggerFactory.getLogger("extracted-urls");
private static Logger EXTRACTED_ALL_LOGGER = LoggerFactory.getLogger("extracted-urls-all");
private final StreamEmitter emitter;
private final TargetPathRewriter targetPathRewriter;
private RangeFetcher fetcher;
private final boolean extractTruncated;
private Base32 base32 = new Base32();
private final CCIndexReaderCounter ccIndexReaderCounter;
public FileFromCCWarcExtractor(ExtractorConfig fetcherConfig,
CCIndexReaderCounter ccIndexReaderCounter) throws TikaConfigException {
this.emitter = fetcherConfig.newEmitter();
this.fetcher = (RangeFetcher) fetcherConfig.newFetcher();
this.targetPathRewriter = fetcherConfig.getTargetPathRewriter();
this.extractTruncated = fetcherConfig.isExtractTruncated();
this.ccIndexReaderCounter = ccIndexReaderCounter;
}
public void fetchToPath(CCIndexRecord record) throws InterruptedException {
LOGGER.debug("going to fetch {} {}->{}", record.getFilename(), record.getOffset(),
record.getLength());
FetchEmitTuple t = new FetchEmitTuple(record.getFilename(),
new FetchKey("", record.getFilename(), record.getOffset(),
record.getOffset() + record.getLength() - 1), new EmitKey());
byte[] warcRecordGZBytes;
try {
warcRecordGZBytes = fetchWarcBytes(t);
} catch (TikaException | IOException e) {
LOGGER.warn("couldn't get bytes from cc's warc " + t, e);
return;
}
String id = record.getUrl();
try {
parseWarc(id, record, warcRecordGZBytes);
} catch (IOException e) {
LOGGER.warn("problem parsing warc file", e);
}
}
private void fetchPayload(String id, CCIndexRecord ccIndexRecord, WarcRecord record)
throws IOException {
if (!((record instanceof WarcResponse) &&
record.contentType().base().equals(MediaType.HTTP))) {
return;
}
Optional<WarcPayload> payload = ((WarcResponse) record).payload();
if (!payload.isPresent()) {
LOGGER.debug("no payload {}", id);
ccIndexReaderCounter.getEmptyPayload().incrementAndGet();
return;
}
if (payload.get().body().size() == 0) {
LOGGER.debug("empty payload id={}", id);
ccIndexReaderCounter.getEmptyPayload().incrementAndGet();
return;
}
Path tmp = Files.createTempFile("ccfile-fetcher-", "");
try {
Files.copy(payload.get().body().stream(), tmp, StandardCopyOption.REPLACE_EXISTING);
String targetDigest = null;
String base32Sha1 = "";
try (InputStream is = Files.newInputStream(tmp)) {
base32Sha1 = base32.encodeAsString(DigestUtils.sha1(is));
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
if (!base32Sha1.equals(ccIndexRecord.getDigest())) {
LOGGER.warn("Bad digest for url={} ccindex={} sha1={}", id,
ccIndexRecord.getDigest(), base32Sha1);
}
//TODO: make digest and encoding configurable
try (InputStream is = Files.newInputStream(tmp)) {
targetDigest = DigestUtils.sha256Hex(is);
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
long length = -1;
try {
length = Files.size(tmp);
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
String targetPath = targetPathRewriter.rewrite(targetDigest);
Metadata metadata = new Metadata();
try (InputStream is = TikaInputStream.get(tmp, metadata)) {
emitter.emit(targetPath, is, new Metadata());
logSuccess(ccIndexRecord, targetDigest, length, targetPath);
} catch (IOException | TikaException e) {
LOGGER.warn("problem writing id={}", id, e);
}
} finally {
try {
Files.delete(tmp);
} catch (IOException e) {
LOGGER.warn("can't delete " + tmp.toAbsolutePath(), e);
}
}
}
private void logSuccess(CCIndexRecord ccIndexRecord, String targetDigest, long length,
String targetPath) {
if (extractTruncated) {
EXTRACTED_ALL_LOGGER.info("", ccIndexRecord.getUrl(),
ccIndexRecord.getNormalizedMime(),
ccIndexRecord.getNormalizedMimeDetected(),
ccIndexRecord.getFilename(),
ccIndexRecord | .getOffset(), ccIndexRecord.getLength(),
ccIndexRecord.getTruncated(), targetDigest, length,
targetPath); |
} else {
//new ObjectArray ?
//url,mime_detected,warc_file,warc_offset,warc_length,sha256,length,path
EXTRACTED_LOGGER.info("", ccIndexRecord.getUrl(),
ccIndexRecord.getNormalizedMime(),
ccIndexRecord.getNormalizedMimeDetected(),
ccIndexRecord.getFilename(),
ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
targetDigest, length,
targetPath);
}
}
private void parseWarc(String id, CCIndexRecord ccIndexRecord, byte[] warcRecordGZBytes)
throws IOException {
//need to leave initial inputstream open while parsing warcrecord
//can't just parse record and return
try (InputStream is = new GZIPInputStream(new ByteArrayInputStream(warcRecordGZBytes))) {
try (WarcReader warcreader = new WarcReader(is)) {
//should be a single warc per file
//return the first
for (WarcRecord warcRecord : warcreader) {
fetchPayload(id, ccIndexRecord, warcRecord);
return;
}
}
}
}
private byte[] fetchWarcBytes(FetchEmitTuple t)
throws TikaException, InterruptedException, IOException {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
FetchKey k = t.getFetchKey();
try (InputStream is = fetcher.fetch(k.getFetchKey(), k.getRangeStart(), k.getRangeEnd(),
new Metadata())) {
IOUtils.copy(is, bos);
}
return bos.toByteArray();
}
}
| src/main/java/org/tallison/cc/index/extractor/FileFromCCWarcExtractor.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java",
"retrieved_chunk": " String url = r.getUrl();\n TRUNCATED_URLS_LOGGER.info(\"\", url);\n //url,mime,mime_detected,warc_file,warc_offset,warc_length,truncated\n TRUNCATED_URLS_FULL_LOGGER.info(\"\", url,\n r.getNormalizedMime(), r.getNormalizedMimeDetected(), r.getFilename(),\n r.getOffset(), r.getLength(), r.getTruncated());\n return true;\n }\n }\n private void fetchBytes(CCIndexRecord r) throws InterruptedException {",
"score": 33.72493599678933
},
{
"filename": "src/main/java/org/tallison/cc/index/CCIndexRecord.java",
"retrieved_chunk": " public void setLength(long length) {\n this.length = length;\n }\n public Integer getOffset() {\n return offset;\n }\n public void setOffset(int offset) {\n this.offset = offset;\n }\n public String getFilename() {",
"score": 17.853680781956086
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " if (!fetcherConfig.getRecordSelector().select(r)) {\n return true;\n }\n increment(totalCounts, r.getNormalizedMimeDetected());\n if (!StringUtils.isBlank(r.getTruncated())) {\n long truncated = counter.getTruncated().incrementAndGet();\n if (fetcherConfig.getMaxFilesTruncated() > -1 &&\n truncated >= fetcherConfig.getMaxFilesTruncated()) {\n LOGGER.info(\"hit max truncated files\");\n return false;",
"score": 13.046932773942334
},
{
"filename": "src/main/java/org/tallison/cc/index/CCIndexRecord.java",
"retrieved_chunk": " }\n public String getDigest() {\n return digest;\n }\n public void setDigest(String digest) {\n this.digest = digest;\n }\n public Long getLength() {\n return length;\n }",
"score": 10.623195490800265
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/ExtractorConfig.java",
"retrieved_chunk": " if (emitConfig == null) {\n emitConfig = new EmitConfig(DEFAULT_FS_DOCS_PATH);\n }\n return emitConfig.newEmitter();\n }\n public void setExtractTruncated(boolean extractTruncated) {\n this.extractTruncated = extractTruncated;\n }\n public boolean isExtractTruncated() {\n return extractTruncated;",
"score": 9.95600169067999
}
] | java | .getOffset(), ccIndexRecord.getLength(),
ccIndexRecord.getTruncated(), targetDigest, length,
targetPath); |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.Optional;
import java.util.zip.GZIPInputStream;
import org.apache.commons.codec.binary.Base32;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.IOUtils;
import org.netpreserve.jwarc.MediaType;
import org.netpreserve.jwarc.WarcPayload;
import org.netpreserve.jwarc.WarcReader;
import org.netpreserve.jwarc.WarcRecord;
import org.netpreserve.jwarc.WarcResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.CCIndexReaderCounter;
import org.tallison.cc.index.CCIndexRecord;
import org.tallison.cc.index.io.TargetPathRewriter;
import org.apache.tika.exception.TikaConfigException;
import org.apache.tika.exception.TikaException;
import org.apache.tika.io.TikaInputStream;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.pipes.FetchEmitTuple;
import org.apache.tika.pipes.emitter.EmitKey;
import org.apache.tika.pipes.emitter.StreamEmitter;
import org.apache.tika.pipes.fetcher.FetchKey;
import org.apache.tika.pipes.fetcher.RangeFetcher;
public class FileFromCCWarcExtractor {
private static Logger LOGGER =
LoggerFactory.getLogger(FileFromCCWarcExtractor.class);
private static Logger EXTRACTED_LOGGER = LoggerFactory.getLogger("extracted-urls");
private static Logger EXTRACTED_ALL_LOGGER = LoggerFactory.getLogger("extracted-urls-all");
private final StreamEmitter emitter;
private final TargetPathRewriter targetPathRewriter;
private RangeFetcher fetcher;
private final boolean extractTruncated;
private Base32 base32 = new Base32();
private final CCIndexReaderCounter ccIndexReaderCounter;
public FileFromCCWarcExtractor(ExtractorConfig fetcherConfig,
CCIndexReaderCounter ccIndexReaderCounter) throws TikaConfigException {
this.emitter = fetcherConfig.newEmitter();
this.fetcher = (RangeFetcher) fetcherConfig.newFetcher();
this.targetPathRewriter = fetcherConfig.getTargetPathRewriter();
this.extractTruncated = fetcherConfig.isExtractTruncated();
this.ccIndexReaderCounter = ccIndexReaderCounter;
}
public void fetchToPath(CCIndexRecord record) throws InterruptedException {
LOGGER.debug("going to fetch {} {}->{}", record.getFilename(), record.getOffset(),
record.getLength());
FetchEmitTuple t = new FetchEmitTuple(record.getFilename(),
new FetchKey("", record.getFilename(), record.getOffset(),
record.getOffset() + record.getLength() - 1), new EmitKey());
byte[] warcRecordGZBytes;
try {
warcRecordGZBytes = fetchWarcBytes(t);
} catch (TikaException | IOException e) {
LOGGER.warn("couldn't get bytes from cc's warc " + t, e);
return;
}
String id = record.getUrl();
try {
parseWarc(id, record, warcRecordGZBytes);
} catch (IOException e) {
LOGGER.warn("problem parsing warc file", e);
}
}
private void fetchPayload(String id, CCIndexRecord ccIndexRecord, WarcRecord record)
throws IOException {
if (!((record instanceof WarcResponse) &&
record.contentType().base().equals(MediaType.HTTP))) {
return;
}
Optional<WarcPayload> payload = ((WarcResponse) record).payload();
if (!payload.isPresent()) {
LOGGER.debug("no payload {}", id);
ccIndexReaderCounter.getEmptyPayload().incrementAndGet();
return;
}
if (payload.get().body().size() == 0) {
LOGGER.debug("empty payload id={}", id);
ccIndexReaderCounter.getEmptyPayload().incrementAndGet();
return;
}
Path tmp = Files.createTempFile("ccfile-fetcher-", "");
try {
Files.copy(payload.get().body().stream(), tmp, StandardCopyOption.REPLACE_EXISTING);
String targetDigest = null;
String base32Sha1 = "";
try (InputStream is = Files.newInputStream(tmp)) {
base32Sha1 = base32.encodeAsString(DigestUtils.sha1(is));
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
if (!base32Sha1.equals(ccIndexRecord.getDigest())) {
LOGGER.warn("Bad digest for url={} ccindex={} sha1={}", id,
ccIndexRecord.getDigest(), base32Sha1);
}
//TODO: make digest and encoding configurable
try (InputStream is = Files.newInputStream(tmp)) {
targetDigest = DigestUtils.sha256Hex(is);
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
long length = -1;
try {
length = Files.size(tmp);
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
String targetPath = targetPathRewriter.rewrite(targetDigest);
Metadata metadata = new Metadata();
try (InputStream is = TikaInputStream.get(tmp, metadata)) {
emitter.emit(targetPath, is, new Metadata());
logSuccess(ccIndexRecord, targetDigest, length, targetPath);
} catch (IOException | TikaException e) {
LOGGER.warn("problem writing id={}", id, e);
}
} finally {
try {
Files.delete(tmp);
} catch (IOException e) {
LOGGER.warn("can't delete " + tmp.toAbsolutePath(), e);
}
}
}
private void logSuccess(CCIndexRecord ccIndexRecord, String targetDigest, long length,
String targetPath) {
if (extractTruncated) {
EXTRACTED_ALL_LOGGER.info("", ccIndexRecord.getUrl(),
ccIndexRecord.getNormalizedMime(),
ccIndexRecord.getNormalizedMimeDetected(),
ccIndexRecord.getFilename(),
ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
ccIndexRecord.getTruncated(), targetDigest, length,
targetPath);
} else {
//new ObjectArray ?
//url,mime_detected,warc_file,warc_offset,warc_length,sha256,length,path
EXTRACTED_LOGGER.info("", ccIndexRecord.getUrl(),
ccIndexRecord.getNormalizedMime(),
ccIndexRecord.getNormalizedMimeDetected(),
ccIndexRecord.getFilename(),
| ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
targetDigest, length,
targetPath); |
}
}
private void parseWarc(String id, CCIndexRecord ccIndexRecord, byte[] warcRecordGZBytes)
throws IOException {
//need to leave initial inputstream open while parsing warcrecord
//can't just parse record and return
try (InputStream is = new GZIPInputStream(new ByteArrayInputStream(warcRecordGZBytes))) {
try (WarcReader warcreader = new WarcReader(is)) {
//should be a single warc per file
//return the first
for (WarcRecord warcRecord : warcreader) {
fetchPayload(id, ccIndexRecord, warcRecord);
return;
}
}
}
}
private byte[] fetchWarcBytes(FetchEmitTuple t)
throws TikaException, InterruptedException, IOException {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
FetchKey k = t.getFetchKey();
try (InputStream is = fetcher.fetch(k.getFetchKey(), k.getRangeStart(), k.getRangeEnd(),
new Metadata())) {
IOUtils.copy(is, bos);
}
return bos.toByteArray();
}
}
| src/main/java/org/tallison/cc/index/extractor/FileFromCCWarcExtractor.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java",
"retrieved_chunk": " String url = r.getUrl();\n TRUNCATED_URLS_LOGGER.info(\"\", url);\n //url,mime,mime_detected,warc_file,warc_offset,warc_length,truncated\n TRUNCATED_URLS_FULL_LOGGER.info(\"\", url,\n r.getNormalizedMime(), r.getNormalizedMimeDetected(), r.getFilename(),\n r.getOffset(), r.getLength(), r.getTruncated());\n return true;\n }\n }\n private void fetchBytes(CCIndexRecord r) throws InterruptedException {",
"score": 55.913774584638226
},
{
"filename": "src/main/java/org/tallison/cc/index/CCIndexRecord.java",
"retrieved_chunk": " public void setLength(long length) {\n this.length = length;\n }\n public Integer getOffset() {\n return offset;\n }\n public void setOffset(int offset) {\n this.offset = offset;\n }\n public String getFilename() {",
"score": 23.08381163019273
},
{
"filename": "src/main/java/org/tallison/cc/index/CCIndexRecord.java",
"retrieved_chunk": " return \"bytes=\" + offset + \"-\" + (offset + length - 1);\n }\n @Override\n public String toString() {\n return \"CCIndexRecord{\" + \"url='\" + url + '\\'' + \", mime='\" + mime + '\\'' +\n \", mimeDetected='\" + mimeDetected + '\\'' + \", status=\" + status + \", digest='\" +\n digest + '\\'' + \", length=\" + length + \", offset=\" + offset + \", filename='\" +\n filename + '\\'' + \", charset='\" + charset + '\\'' + \", languages='\" + languages +\n '\\'' + \", truncated='\" + truncated + '\\'' + \", redirect='\" + redirect + '\\'' + '}';\n }",
"score": 16.12587422219068
},
{
"filename": "src/main/java/org/tallison/cc/index/CCIndexRecord.java",
"retrieved_chunk": " URI uri = new URI(url);\n String host = uri.getHost();\n if (host == null) {\n return \"\";\n }\n int i = host.lastIndexOf(\".\");\n String tld = \"\";\n if (i > -1 && i + 1 < host.length()) {\n tld = host.substring(i + 1);\n } else {",
"score": 14.693289320754415
},
{
"filename": "src/main/java/org/tallison/cc/index/CCIndexRecord.java",
"retrieved_chunk": " }\n public String getDigest() {\n return digest;\n }\n public void setDigest(String digest) {\n this.digest = digest;\n }\n public Long getLength() {\n return length;\n }",
"score": 13.874139740130452
}
] | java | ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
targetDigest, length,
targetPath); |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.Optional;
import java.util.zip.GZIPInputStream;
import org.apache.commons.codec.binary.Base32;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.IOUtils;
import org.netpreserve.jwarc.MediaType;
import org.netpreserve.jwarc.WarcPayload;
import org.netpreserve.jwarc.WarcReader;
import org.netpreserve.jwarc.WarcRecord;
import org.netpreserve.jwarc.WarcResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.CCIndexReaderCounter;
import org.tallison.cc.index.CCIndexRecord;
import org.tallison.cc.index.io.TargetPathRewriter;
import org.apache.tika.exception.TikaConfigException;
import org.apache.tika.exception.TikaException;
import org.apache.tika.io.TikaInputStream;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.pipes.FetchEmitTuple;
import org.apache.tika.pipes.emitter.EmitKey;
import org.apache.tika.pipes.emitter.StreamEmitter;
import org.apache.tika.pipes.fetcher.FetchKey;
import org.apache.tika.pipes.fetcher.RangeFetcher;
public class FileFromCCWarcExtractor {
private static Logger LOGGER =
LoggerFactory.getLogger(FileFromCCWarcExtractor.class);
private static Logger EXTRACTED_LOGGER = LoggerFactory.getLogger("extracted-urls");
private static Logger EXTRACTED_ALL_LOGGER = LoggerFactory.getLogger("extracted-urls-all");
private final StreamEmitter emitter;
private final TargetPathRewriter targetPathRewriter;
private RangeFetcher fetcher;
private final boolean extractTruncated;
private Base32 base32 = new Base32();
private final CCIndexReaderCounter ccIndexReaderCounter;
public FileFromCCWarcExtractor(ExtractorConfig fetcherConfig,
CCIndexReaderCounter ccIndexReaderCounter) throws TikaConfigException {
this.emitter = fetcherConfig.newEmitter();
this.fetcher = (RangeFetcher) fetcherConfig.newFetcher();
this.targetPathRewriter = fetcherConfig.getTargetPathRewriter();
this.extractTruncated = fetcherConfig.isExtractTruncated();
this.ccIndexReaderCounter = ccIndexReaderCounter;
}
public void fetchToPath(CCIndexRecord record) throws InterruptedException {
LOGGER.debug("going to fetch {} {}->{}", record.getFilename(), record.getOffset(),
record.getLength());
FetchEmitTuple t = new FetchEmitTuple(record.getFilename(),
new FetchKey("", record.getFilename(), record.getOffset(),
record.getOffset() + record.getLength() - 1), new EmitKey());
byte[] warcRecordGZBytes;
try {
warcRecordGZBytes = fetchWarcBytes(t);
} catch (TikaException | IOException e) {
LOGGER.warn("couldn't get bytes from cc's warc " + t, e);
return;
}
String id = record.getUrl();
try {
parseWarc(id, record, warcRecordGZBytes);
} catch (IOException e) {
LOGGER.warn("problem parsing warc file", e);
}
}
private void fetchPayload(String id, CCIndexRecord ccIndexRecord, WarcRecord record)
throws IOException {
if (!((record instanceof WarcResponse) &&
record.contentType().base().equals(MediaType.HTTP))) {
return;
}
Optional<WarcPayload> payload = ((WarcResponse) record).payload();
if (!payload.isPresent()) {
LOGGER.debug("no payload {}", id);
ccIndexReaderCounter.getEmptyPayload().incrementAndGet();
return;
}
if (payload.get().body().size() == 0) {
LOGGER.debug("empty payload id={}", id);
ccIndexReaderCounter.getEmptyPayload().incrementAndGet();
return;
}
Path tmp = Files.createTempFile("ccfile-fetcher-", "");
try {
Files.copy(payload.get().body().stream(), tmp, StandardCopyOption.REPLACE_EXISTING);
String targetDigest = null;
String base32Sha1 = "";
try (InputStream is = Files.newInputStream(tmp)) {
base32Sha1 = base32.encodeAsString(DigestUtils.sha1(is));
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
if (!base32Sha1.equals(ccIndexRecord.getDigest())) {
LOGGER.warn("Bad digest for url={} ccindex={} sha1={}", id,
ccIndexRecord.getDigest(), base32Sha1);
}
//TODO: make digest and encoding configurable
try (InputStream is = Files.newInputStream(tmp)) {
targetDigest = DigestUtils.sha256Hex(is);
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
long length = -1;
try {
length = Files.size(tmp);
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
String | targetPath = targetPathRewriter.rewrite(targetDigest); |
Metadata metadata = new Metadata();
try (InputStream is = TikaInputStream.get(tmp, metadata)) {
emitter.emit(targetPath, is, new Metadata());
logSuccess(ccIndexRecord, targetDigest, length, targetPath);
} catch (IOException | TikaException e) {
LOGGER.warn("problem writing id={}", id, e);
}
} finally {
try {
Files.delete(tmp);
} catch (IOException e) {
LOGGER.warn("can't delete " + tmp.toAbsolutePath(), e);
}
}
}
private void logSuccess(CCIndexRecord ccIndexRecord, String targetDigest, long length,
String targetPath) {
if (extractTruncated) {
EXTRACTED_ALL_LOGGER.info("", ccIndexRecord.getUrl(),
ccIndexRecord.getNormalizedMime(),
ccIndexRecord.getNormalizedMimeDetected(),
ccIndexRecord.getFilename(),
ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
ccIndexRecord.getTruncated(), targetDigest, length,
targetPath);
} else {
//new ObjectArray ?
//url,mime_detected,warc_file,warc_offset,warc_length,sha256,length,path
EXTRACTED_LOGGER.info("", ccIndexRecord.getUrl(),
ccIndexRecord.getNormalizedMime(),
ccIndexRecord.getNormalizedMimeDetected(),
ccIndexRecord.getFilename(),
ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
targetDigest, length,
targetPath);
}
}
private void parseWarc(String id, CCIndexRecord ccIndexRecord, byte[] warcRecordGZBytes)
throws IOException {
//need to leave initial inputstream open while parsing warcrecord
//can't just parse record and return
try (InputStream is = new GZIPInputStream(new ByteArrayInputStream(warcRecordGZBytes))) {
try (WarcReader warcreader = new WarcReader(is)) {
//should be a single warc per file
//return the first
for (WarcRecord warcRecord : warcreader) {
fetchPayload(id, ccIndexRecord, warcRecord);
return;
}
}
}
}
private byte[] fetchWarcBytes(FetchEmitTuple t)
throws TikaException, InterruptedException, IOException {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
FetchKey k = t.getFetchKey();
try (InputStream is = fetcher.fetch(k.getFetchKey(), k.getRangeStart(), k.getRangeEnd(),
new Metadata())) {
IOUtils.copy(is, bos);
}
return bos.toByteArray();
}
}
| src/main/java/org/tallison/cc/index/extractor/FileFromCCWarcExtractor.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/io/BackoffHttpFetcher.java",
"retrieved_chunk": " private InputStream fetchWithBackOff(FetchKey fetchKey, Metadata metadata) throws IOException {\n int tries = 0;\n while (tries < throttleSeconds.length) {\n try {\n return _fetch(fetchKey, metadata);\n } catch (IOException e) {\n if (e.getMessage() == null) {\n throw e;\n }\n Matcher m = Pattern.compile(\"bad status code: (\\\\d+)\").matcher(e.getMessage());",
"score": 20.37387473401319
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractor.java",
"retrieved_chunk": " return shouldContinue;\n }\n } catch (IOException e) {\n LOGGER.warn(\"bad json: \" + line);\n }\n lines++;\n line = reader.readLine();\n }\n }\n }",
"score": 18.08084521696998
},
{
"filename": "src/test/java/org/tallison/cc/index/io/TargetPathRewriterTest.java",
"retrieved_chunk": " targetPathRewriter = new TargetPathRewriter(pat);\n assertEquals(\"ab/cd/e/abcdefgh\", targetPathRewriter.rewrite(txt));\n pat = \"xx/xx//xx\";\n targetPathRewriter = new TargetPathRewriter(pat);\n assertEquals(\"ab/cd//abcdefgh\", targetPathRewriter.rewrite(txt));\n }\n}",
"score": 16.972971005948878
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractor.java",
"retrieved_chunk": " } catch (TikaConfigException | IOException e) {\n LOGGER.error(\"main loop exception\", e);\n throw new RuntimeException(e);\n } catch (ExecutionException e) {\n LOGGER.error(\"main loop exception\", e);\n throw new RuntimeException(e);\n } catch (InterruptedException e) {\n LOGGER.warn(\"main loop interrupted exception\", e);\n throw new RuntimeException(e);\n } finally {",
"score": 16.721295267265006
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " boolean shouldContinue = recordProcessor.process(line);\n if (!shouldContinue) {\n return shouldContinue;\n }\n } catch (IOException e) {\n LOGGER.warn(\"bad json: \" + line);\n }\n lines++;\n line = reader.readLine();\n }",
"score": 16.34862074861257
}
] | java | targetPath = targetPathRewriter.rewrite(targetDigest); |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.Optional;
import java.util.zip.GZIPInputStream;
import org.apache.commons.codec.binary.Base32;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.IOUtils;
import org.netpreserve.jwarc.MediaType;
import org.netpreserve.jwarc.WarcPayload;
import org.netpreserve.jwarc.WarcReader;
import org.netpreserve.jwarc.WarcRecord;
import org.netpreserve.jwarc.WarcResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.CCIndexReaderCounter;
import org.tallison.cc.index.CCIndexRecord;
import org.tallison.cc.index.io.TargetPathRewriter;
import org.apache.tika.exception.TikaConfigException;
import org.apache.tika.exception.TikaException;
import org.apache.tika.io.TikaInputStream;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.pipes.FetchEmitTuple;
import org.apache.tika.pipes.emitter.EmitKey;
import org.apache.tika.pipes.emitter.StreamEmitter;
import org.apache.tika.pipes.fetcher.FetchKey;
import org.apache.tika.pipes.fetcher.RangeFetcher;
public class FileFromCCWarcExtractor {
private static Logger LOGGER =
LoggerFactory.getLogger(FileFromCCWarcExtractor.class);
private static Logger EXTRACTED_LOGGER = LoggerFactory.getLogger("extracted-urls");
private static Logger EXTRACTED_ALL_LOGGER = LoggerFactory.getLogger("extracted-urls-all");
private final StreamEmitter emitter;
private final TargetPathRewriter targetPathRewriter;
private RangeFetcher fetcher;
private final boolean extractTruncated;
private Base32 base32 = new Base32();
private final CCIndexReaderCounter ccIndexReaderCounter;
public FileFromCCWarcExtractor(ExtractorConfig fetcherConfig,
CCIndexReaderCounter ccIndexReaderCounter) throws TikaConfigException {
this.emitter = fetcherConfig.newEmitter();
this.fetcher = ( | RangeFetcher) fetcherConfig.newFetcher(); |
this.targetPathRewriter = fetcherConfig.getTargetPathRewriter();
this.extractTruncated = fetcherConfig.isExtractTruncated();
this.ccIndexReaderCounter = ccIndexReaderCounter;
}
public void fetchToPath(CCIndexRecord record) throws InterruptedException {
LOGGER.debug("going to fetch {} {}->{}", record.getFilename(), record.getOffset(),
record.getLength());
FetchEmitTuple t = new FetchEmitTuple(record.getFilename(),
new FetchKey("", record.getFilename(), record.getOffset(),
record.getOffset() + record.getLength() - 1), new EmitKey());
byte[] warcRecordGZBytes;
try {
warcRecordGZBytes = fetchWarcBytes(t);
} catch (TikaException | IOException e) {
LOGGER.warn("couldn't get bytes from cc's warc " + t, e);
return;
}
String id = record.getUrl();
try {
parseWarc(id, record, warcRecordGZBytes);
} catch (IOException e) {
LOGGER.warn("problem parsing warc file", e);
}
}
private void fetchPayload(String id, CCIndexRecord ccIndexRecord, WarcRecord record)
throws IOException {
if (!((record instanceof WarcResponse) &&
record.contentType().base().equals(MediaType.HTTP))) {
return;
}
Optional<WarcPayload> payload = ((WarcResponse) record).payload();
if (!payload.isPresent()) {
LOGGER.debug("no payload {}", id);
ccIndexReaderCounter.getEmptyPayload().incrementAndGet();
return;
}
if (payload.get().body().size() == 0) {
LOGGER.debug("empty payload id={}", id);
ccIndexReaderCounter.getEmptyPayload().incrementAndGet();
return;
}
Path tmp = Files.createTempFile("ccfile-fetcher-", "");
try {
Files.copy(payload.get().body().stream(), tmp, StandardCopyOption.REPLACE_EXISTING);
String targetDigest = null;
String base32Sha1 = "";
try (InputStream is = Files.newInputStream(tmp)) {
base32Sha1 = base32.encodeAsString(DigestUtils.sha1(is));
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
if (!base32Sha1.equals(ccIndexRecord.getDigest())) {
LOGGER.warn("Bad digest for url={} ccindex={} sha1={}", id,
ccIndexRecord.getDigest(), base32Sha1);
}
//TODO: make digest and encoding configurable
try (InputStream is = Files.newInputStream(tmp)) {
targetDigest = DigestUtils.sha256Hex(is);
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
long length = -1;
try {
length = Files.size(tmp);
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
String targetPath = targetPathRewriter.rewrite(targetDigest);
Metadata metadata = new Metadata();
try (InputStream is = TikaInputStream.get(tmp, metadata)) {
emitter.emit(targetPath, is, new Metadata());
logSuccess(ccIndexRecord, targetDigest, length, targetPath);
} catch (IOException | TikaException e) {
LOGGER.warn("problem writing id={}", id, e);
}
} finally {
try {
Files.delete(tmp);
} catch (IOException e) {
LOGGER.warn("can't delete " + tmp.toAbsolutePath(), e);
}
}
}
private void logSuccess(CCIndexRecord ccIndexRecord, String targetDigest, long length,
String targetPath) {
if (extractTruncated) {
EXTRACTED_ALL_LOGGER.info("", ccIndexRecord.getUrl(),
ccIndexRecord.getNormalizedMime(),
ccIndexRecord.getNormalizedMimeDetected(),
ccIndexRecord.getFilename(),
ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
ccIndexRecord.getTruncated(), targetDigest, length,
targetPath);
} else {
//new ObjectArray ?
//url,mime_detected,warc_file,warc_offset,warc_length,sha256,length,path
EXTRACTED_LOGGER.info("", ccIndexRecord.getUrl(),
ccIndexRecord.getNormalizedMime(),
ccIndexRecord.getNormalizedMimeDetected(),
ccIndexRecord.getFilename(),
ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
targetDigest, length,
targetPath);
}
}
private void parseWarc(String id, CCIndexRecord ccIndexRecord, byte[] warcRecordGZBytes)
throws IOException {
//need to leave initial inputstream open while parsing warcrecord
//can't just parse record and return
try (InputStream is = new GZIPInputStream(new ByteArrayInputStream(warcRecordGZBytes))) {
try (WarcReader warcreader = new WarcReader(is)) {
//should be a single warc per file
//return the first
for (WarcRecord warcRecord : warcreader) {
fetchPayload(id, ccIndexRecord, warcRecord);
return;
}
}
}
}
private byte[] fetchWarcBytes(FetchEmitTuple t)
throws TikaException, InterruptedException, IOException {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
FetchKey k = t.getFetchKey();
try (InputStream is = fetcher.fetch(k.getFetchKey(), k.getRangeStart(), k.getRangeEnd(),
new Metadata())) {
IOUtils.copy(is, bos);
}
return bos.toByteArray();
}
}
| src/main/java/org/tallison/cc/index/extractor/FileFromCCWarcExtractor.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java",
"retrieved_chunk": " LoggerFactory.getLogger(\"truncated-urls-full\");\n private final ExtractorConfig fetcherConfig;\n private final CCIndexReaderCounter counter;\n private final FileFromCCWarcExtractor fileFromCCWarcFetcher;\n private long reportEvery = 100000;\n public CCFileExtractorRecordProcessor(ExtractorConfig fetcherConfig, CCIndexReaderCounter counter)\n throws TikaConfigException, IOException {\n this.fetcherConfig = fetcherConfig;\n this.counter = counter;\n this.fileFromCCWarcFetcher = new FileFromCCWarcExtractor(fetcherConfig, counter);",
"score": 64.28036277006427
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " return true;\n }\n }\n private static class DetectedMimeCounter extends AbstractRecordProcessor {\n private final ExtractorConfig fetcherConfig;\n private final CCIndexReaderCounter counter;\n private final Map<String, MutableLong> totalCounts = new HashMap<>();\n private final Map<String, MutableLong> truncatedCounts = new HashMap<>();\n public DetectedMimeCounter(ExtractorConfig fetcherConfig, CCIndexReaderCounter counter) {\n this.fetcherConfig = fetcherConfig;",
"score": 53.35908891468829
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " private final Fetcher fetcher;\n IndexWorker(ExtractorConfig fetcherConfig, ArrayBlockingQueue<FetchEmitTuple> indexUrls,\n AbstractRecordProcessor recordProcessor) throws TikaException {\n this.indexUrls = indexUrls;\n this.recordProcessor = recordProcessor;\n this.fetcher = fetcherConfig.newFetcher();\n }\n @Override\n public Long call() throws Exception {\n boolean shouldContinue = true;",
"score": 46.62547338894822
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCIndexFetcher.java",
"retrieved_chunk": " private static class IndexFetcher implements Callable<Long> {\n private final ExtractorConfig fetcherConfig;\n private final ArrayBlockingQueue<FetchEmitTuple> indexPathsList;\n public IndexFetcher(ExtractorConfig fetcherConfig,\n ArrayBlockingQueue<FetchEmitTuple> indexPathsList) {\n this.fetcherConfig = fetcherConfig;\n this.indexPathsList = indexPathsList;\n }\n @Override\n public Long call() throws Exception {",
"score": 43.255196163198704
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractor.java",
"retrieved_chunk": " executorService.shutdown();\n executorService.shutdownNow();\n }\n }\n private static class IndexWorker implements Callable<Long> {\n private final ArrayBlockingQueue<FetchEmitTuple> indexUrls;\n private final AbstractRecordProcessor recordProcessor;\n private final Fetcher indexFetcher;\n IndexWorker(ExtractorConfig fetcherConfig, ArrayBlockingQueue<FetchEmitTuple> indexUrls,\n AbstractRecordProcessor recordProcessor) throws TikaException {",
"score": 35.946356195694705
}
] | java | RangeFetcher) fetcherConfig.newFetcher(); |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.Optional;
import java.util.zip.GZIPInputStream;
import org.apache.commons.codec.binary.Base32;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.IOUtils;
import org.netpreserve.jwarc.MediaType;
import org.netpreserve.jwarc.WarcPayload;
import org.netpreserve.jwarc.WarcReader;
import org.netpreserve.jwarc.WarcRecord;
import org.netpreserve.jwarc.WarcResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.CCIndexReaderCounter;
import org.tallison.cc.index.CCIndexRecord;
import org.tallison.cc.index.io.TargetPathRewriter;
import org.apache.tika.exception.TikaConfigException;
import org.apache.tika.exception.TikaException;
import org.apache.tika.io.TikaInputStream;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.pipes.FetchEmitTuple;
import org.apache.tika.pipes.emitter.EmitKey;
import org.apache.tika.pipes.emitter.StreamEmitter;
import org.apache.tika.pipes.fetcher.FetchKey;
import org.apache.tika.pipes.fetcher.RangeFetcher;
public class FileFromCCWarcExtractor {
private static Logger LOGGER =
LoggerFactory.getLogger(FileFromCCWarcExtractor.class);
private static Logger EXTRACTED_LOGGER = LoggerFactory.getLogger("extracted-urls");
private static Logger EXTRACTED_ALL_LOGGER = LoggerFactory.getLogger("extracted-urls-all");
private final StreamEmitter emitter;
private final TargetPathRewriter targetPathRewriter;
private RangeFetcher fetcher;
private final boolean extractTruncated;
private Base32 base32 = new Base32();
private final CCIndexReaderCounter ccIndexReaderCounter;
public FileFromCCWarcExtractor(ExtractorConfig fetcherConfig,
CCIndexReaderCounter ccIndexReaderCounter) throws TikaConfigException {
this.emitter = fetcherConfig.newEmitter();
this.fetcher = (RangeFetcher) fetcherConfig.newFetcher();
this.targetPathRewriter = fetcherConfig.getTargetPathRewriter();
this.extractTruncated = fetcherConfig.isExtractTruncated();
this.ccIndexReaderCounter = ccIndexReaderCounter;
}
public void fetchToPath(CCIndexRecord record) throws InterruptedException {
LOGGER.debug("going to fetch {} {}->{}", record.getFilename(), record.getOffset(),
record.getLength());
FetchEmitTuple t = new FetchEmitTuple(record.getFilename(),
new FetchKey("", record.getFilename(), record.getOffset(),
record.getOffset() + record.getLength() - 1), new EmitKey());
byte[] warcRecordGZBytes;
try {
warcRecordGZBytes = fetchWarcBytes(t);
} catch (TikaException | IOException e) {
LOGGER.warn("couldn't get bytes from cc's warc " + t, e);
return;
}
String id = record.getUrl();
try {
parseWarc(id, record, warcRecordGZBytes);
} catch (IOException e) {
LOGGER.warn("problem parsing warc file", e);
}
}
private void fetchPayload(String id, CCIndexRecord ccIndexRecord, WarcRecord record)
throws IOException {
if (!((record instanceof WarcResponse) &&
record.contentType().base().equals(MediaType.HTTP))) {
return;
}
Optional<WarcPayload> payload = ((WarcResponse) record).payload();
if (!payload.isPresent()) {
LOGGER.debug("no payload {}", id);
ccIndexReaderCounter.getEmptyPayload().incrementAndGet();
return;
}
if (payload.get().body().size() == 0) {
LOGGER.debug("empty payload id={}", id);
ccIndexReaderCounter.getEmptyPayload().incrementAndGet();
return;
}
Path tmp = Files.createTempFile("ccfile-fetcher-", "");
try {
Files.copy(payload.get().body().stream(), tmp, StandardCopyOption.REPLACE_EXISTING);
String targetDigest = null;
String base32Sha1 = "";
try (InputStream is = Files.newInputStream(tmp)) {
base32Sha1 = base32.encodeAsString(DigestUtils.sha1(is));
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
if (!base32Sha1.equals(ccIndexRecord.getDigest())) {
LOGGER.warn("Bad digest for url={} ccindex={} sha1={}", id,
ccIndexRecord.getDigest(), base32Sha1);
}
//TODO: make digest and encoding configurable
try (InputStream is = Files.newInputStream(tmp)) {
targetDigest = DigestUtils.sha256Hex(is);
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
long length = -1;
try {
length = Files.size(tmp);
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
String targetPath = targetPathRewriter.rewrite(targetDigest);
Metadata metadata = new Metadata();
try (InputStream is = TikaInputStream.get(tmp, metadata)) {
emitter.emit(targetPath, is, new Metadata());
logSuccess(ccIndexRecord, targetDigest, length, targetPath);
} catch (IOException | TikaException e) {
LOGGER.warn("problem writing id={}", id, e);
}
} finally {
try {
Files.delete(tmp);
} catch (IOException e) {
LOGGER.warn("can't delete " + tmp.toAbsolutePath(), e);
}
}
}
private void logSuccess(CCIndexRecord ccIndexRecord, String targetDigest, long length,
String targetPath) {
if (extractTruncated) {
EXTRACTED_ALL_LOGGER.info("", ccIndexRecord.getUrl(),
ccIndexRecord.getNormalizedMime(),
ccIndexRecord.getNormalizedMimeDetected(),
ccIndexRecord.getFilename(),
ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
ccIndexRecord.getTruncated(), targetDigest, length,
targetPath);
} else {
//new ObjectArray ?
//url,mime_detected,warc_file,warc_offset,warc_length,sha256,length,path
EXTRACTED_LOGGER.info("", ccIndexRecord.getUrl(),
ccIndexRecord.getNormalizedMime(),
ccIndexRecord.getNormalizedMimeDetected(),
| ccIndexRecord.getFilename(),
ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
targetDigest, length,
targetPath); |
}
}
private void parseWarc(String id, CCIndexRecord ccIndexRecord, byte[] warcRecordGZBytes)
throws IOException {
//need to leave initial inputstream open while parsing warcrecord
//can't just parse record and return
try (InputStream is = new GZIPInputStream(new ByteArrayInputStream(warcRecordGZBytes))) {
try (WarcReader warcreader = new WarcReader(is)) {
//should be a single warc per file
//return the first
for (WarcRecord warcRecord : warcreader) {
fetchPayload(id, ccIndexRecord, warcRecord);
return;
}
}
}
}
private byte[] fetchWarcBytes(FetchEmitTuple t)
throws TikaException, InterruptedException, IOException {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
FetchKey k = t.getFetchKey();
try (InputStream is = fetcher.fetch(k.getFetchKey(), k.getRangeStart(), k.getRangeEnd(),
new Metadata())) {
IOUtils.copy(is, bos);
}
return bos.toByteArray();
}
}
| src/main/java/org/tallison/cc/index/extractor/FileFromCCWarcExtractor.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java",
"retrieved_chunk": " String url = r.getUrl();\n TRUNCATED_URLS_LOGGER.info(\"\", url);\n //url,mime,mime_detected,warc_file,warc_offset,warc_length,truncated\n TRUNCATED_URLS_FULL_LOGGER.info(\"\", url,\n r.getNormalizedMime(), r.getNormalizedMimeDetected(), r.getFilename(),\n r.getOffset(), r.getLength(), r.getTruncated());\n return true;\n }\n }\n private void fetchBytes(CCIndexRecord r) throws InterruptedException {",
"score": 55.913774584638226
},
{
"filename": "src/main/java/org/tallison/cc/index/CCIndexRecord.java",
"retrieved_chunk": " public void setLength(long length) {\n this.length = length;\n }\n public Integer getOffset() {\n return offset;\n }\n public void setOffset(int offset) {\n this.offset = offset;\n }\n public String getFilename() {",
"score": 23.08381163019273
},
{
"filename": "src/main/java/org/tallison/cc/index/CCIndexRecord.java",
"retrieved_chunk": " return \"bytes=\" + offset + \"-\" + (offset + length - 1);\n }\n @Override\n public String toString() {\n return \"CCIndexRecord{\" + \"url='\" + url + '\\'' + \", mime='\" + mime + '\\'' +\n \", mimeDetected='\" + mimeDetected + '\\'' + \", status=\" + status + \", digest='\" +\n digest + '\\'' + \", length=\" + length + \", offset=\" + offset + \", filename='\" +\n filename + '\\'' + \", charset='\" + charset + '\\'' + \", languages='\" + languages +\n '\\'' + \", truncated='\" + truncated + '\\'' + \", redirect='\" + redirect + '\\'' + '}';\n }",
"score": 16.12587422219068
},
{
"filename": "src/main/java/org/tallison/cc/index/CCIndexRecord.java",
"retrieved_chunk": " URI uri = new URI(url);\n String host = uri.getHost();\n if (host == null) {\n return \"\";\n }\n int i = host.lastIndexOf(\".\");\n String tld = \"\";\n if (i > -1 && i + 1 < host.length()) {\n tld = host.substring(i + 1);\n } else {",
"score": 14.693289320754415
},
{
"filename": "src/main/java/org/tallison/cc/index/CCIndexRecord.java",
"retrieved_chunk": " }\n public String getDigest() {\n return digest;\n }\n public void setDigest(String digest) {\n this.digest = digest;\n }\n public Long getLength() {\n return length;\n }",
"score": 13.874139740130452
}
] | java | ccIndexRecord.getFilename(),
ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
targetDigest, length,
targetPath); |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.Optional;
import java.util.zip.GZIPInputStream;
import org.apache.commons.codec.binary.Base32;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.IOUtils;
import org.netpreserve.jwarc.MediaType;
import org.netpreserve.jwarc.WarcPayload;
import org.netpreserve.jwarc.WarcReader;
import org.netpreserve.jwarc.WarcRecord;
import org.netpreserve.jwarc.WarcResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.CCIndexReaderCounter;
import org.tallison.cc.index.CCIndexRecord;
import org.tallison.cc.index.io.TargetPathRewriter;
import org.apache.tika.exception.TikaConfigException;
import org.apache.tika.exception.TikaException;
import org.apache.tika.io.TikaInputStream;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.pipes.FetchEmitTuple;
import org.apache.tika.pipes.emitter.EmitKey;
import org.apache.tika.pipes.emitter.StreamEmitter;
import org.apache.tika.pipes.fetcher.FetchKey;
import org.apache.tika.pipes.fetcher.RangeFetcher;
public class FileFromCCWarcExtractor {
private static Logger LOGGER =
LoggerFactory.getLogger(FileFromCCWarcExtractor.class);
private static Logger EXTRACTED_LOGGER = LoggerFactory.getLogger("extracted-urls");
private static Logger EXTRACTED_ALL_LOGGER = LoggerFactory.getLogger("extracted-urls-all");
private final StreamEmitter emitter;
private final TargetPathRewriter targetPathRewriter;
private RangeFetcher fetcher;
private final boolean extractTruncated;
private Base32 base32 = new Base32();
private final CCIndexReaderCounter ccIndexReaderCounter;
public FileFromCCWarcExtractor(ExtractorConfig fetcherConfig,
CCIndexReaderCounter ccIndexReaderCounter) throws TikaConfigException {
this.emitter = fetcherConfig.newEmitter();
this.fetcher = (RangeFetcher) fetcherConfig.newFetcher();
this | .targetPathRewriter = fetcherConfig.getTargetPathRewriter(); |
this.extractTruncated = fetcherConfig.isExtractTruncated();
this.ccIndexReaderCounter = ccIndexReaderCounter;
}
public void fetchToPath(CCIndexRecord record) throws InterruptedException {
LOGGER.debug("going to fetch {} {}->{}", record.getFilename(), record.getOffset(),
record.getLength());
FetchEmitTuple t = new FetchEmitTuple(record.getFilename(),
new FetchKey("", record.getFilename(), record.getOffset(),
record.getOffset() + record.getLength() - 1), new EmitKey());
byte[] warcRecordGZBytes;
try {
warcRecordGZBytes = fetchWarcBytes(t);
} catch (TikaException | IOException e) {
LOGGER.warn("couldn't get bytes from cc's warc " + t, e);
return;
}
String id = record.getUrl();
try {
parseWarc(id, record, warcRecordGZBytes);
} catch (IOException e) {
LOGGER.warn("problem parsing warc file", e);
}
}
private void fetchPayload(String id, CCIndexRecord ccIndexRecord, WarcRecord record)
throws IOException {
if (!((record instanceof WarcResponse) &&
record.contentType().base().equals(MediaType.HTTP))) {
return;
}
Optional<WarcPayload> payload = ((WarcResponse) record).payload();
if (!payload.isPresent()) {
LOGGER.debug("no payload {}", id);
ccIndexReaderCounter.getEmptyPayload().incrementAndGet();
return;
}
if (payload.get().body().size() == 0) {
LOGGER.debug("empty payload id={}", id);
ccIndexReaderCounter.getEmptyPayload().incrementAndGet();
return;
}
Path tmp = Files.createTempFile("ccfile-fetcher-", "");
try {
Files.copy(payload.get().body().stream(), tmp, StandardCopyOption.REPLACE_EXISTING);
String targetDigest = null;
String base32Sha1 = "";
try (InputStream is = Files.newInputStream(tmp)) {
base32Sha1 = base32.encodeAsString(DigestUtils.sha1(is));
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
if (!base32Sha1.equals(ccIndexRecord.getDigest())) {
LOGGER.warn("Bad digest for url={} ccindex={} sha1={}", id,
ccIndexRecord.getDigest(), base32Sha1);
}
//TODO: make digest and encoding configurable
try (InputStream is = Files.newInputStream(tmp)) {
targetDigest = DigestUtils.sha256Hex(is);
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
long length = -1;
try {
length = Files.size(tmp);
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
String targetPath = targetPathRewriter.rewrite(targetDigest);
Metadata metadata = new Metadata();
try (InputStream is = TikaInputStream.get(tmp, metadata)) {
emitter.emit(targetPath, is, new Metadata());
logSuccess(ccIndexRecord, targetDigest, length, targetPath);
} catch (IOException | TikaException e) {
LOGGER.warn("problem writing id={}", id, e);
}
} finally {
try {
Files.delete(tmp);
} catch (IOException e) {
LOGGER.warn("can't delete " + tmp.toAbsolutePath(), e);
}
}
}
private void logSuccess(CCIndexRecord ccIndexRecord, String targetDigest, long length,
String targetPath) {
if (extractTruncated) {
EXTRACTED_ALL_LOGGER.info("", ccIndexRecord.getUrl(),
ccIndexRecord.getNormalizedMime(),
ccIndexRecord.getNormalizedMimeDetected(),
ccIndexRecord.getFilename(),
ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
ccIndexRecord.getTruncated(), targetDigest, length,
targetPath);
} else {
//new ObjectArray ?
//url,mime_detected,warc_file,warc_offset,warc_length,sha256,length,path
EXTRACTED_LOGGER.info("", ccIndexRecord.getUrl(),
ccIndexRecord.getNormalizedMime(),
ccIndexRecord.getNormalizedMimeDetected(),
ccIndexRecord.getFilename(),
ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
targetDigest, length,
targetPath);
}
}
private void parseWarc(String id, CCIndexRecord ccIndexRecord, byte[] warcRecordGZBytes)
throws IOException {
//need to leave initial inputstream open while parsing warcrecord
//can't just parse record and return
try (InputStream is = new GZIPInputStream(new ByteArrayInputStream(warcRecordGZBytes))) {
try (WarcReader warcreader = new WarcReader(is)) {
//should be a single warc per file
//return the first
for (WarcRecord warcRecord : warcreader) {
fetchPayload(id, ccIndexRecord, warcRecord);
return;
}
}
}
}
private byte[] fetchWarcBytes(FetchEmitTuple t)
throws TikaException, InterruptedException, IOException {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
FetchKey k = t.getFetchKey();
try (InputStream is = fetcher.fetch(k.getFetchKey(), k.getRangeStart(), k.getRangeEnd(),
new Metadata())) {
IOUtils.copy(is, bos);
}
return bos.toByteArray();
}
}
| src/main/java/org/tallison/cc/index/extractor/FileFromCCWarcExtractor.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java",
"retrieved_chunk": " LoggerFactory.getLogger(\"truncated-urls-full\");\n private final ExtractorConfig fetcherConfig;\n private final CCIndexReaderCounter counter;\n private final FileFromCCWarcExtractor fileFromCCWarcFetcher;\n private long reportEvery = 100000;\n public CCFileExtractorRecordProcessor(ExtractorConfig fetcherConfig, CCIndexReaderCounter counter)\n throws TikaConfigException, IOException {\n this.fetcherConfig = fetcherConfig;\n this.counter = counter;\n this.fileFromCCWarcFetcher = new FileFromCCWarcExtractor(fetcherConfig, counter);",
"score": 63.648399386584735
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " return true;\n }\n }\n private static class DetectedMimeCounter extends AbstractRecordProcessor {\n private final ExtractorConfig fetcherConfig;\n private final CCIndexReaderCounter counter;\n private final Map<String, MutableLong> totalCounts = new HashMap<>();\n private final Map<String, MutableLong> truncatedCounts = new HashMap<>();\n public DetectedMimeCounter(ExtractorConfig fetcherConfig, CCIndexReaderCounter counter) {\n this.fetcherConfig = fetcherConfig;",
"score": 51.26857457206358
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " private final Fetcher fetcher;\n IndexWorker(ExtractorConfig fetcherConfig, ArrayBlockingQueue<FetchEmitTuple> indexUrls,\n AbstractRecordProcessor recordProcessor) throws TikaException {\n this.indexUrls = indexUrls;\n this.recordProcessor = recordProcessor;\n this.fetcher = fetcherConfig.newFetcher();\n }\n @Override\n public Long call() throws Exception {\n boolean shouldContinue = true;",
"score": 47.593045771452665
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCIndexFetcher.java",
"retrieved_chunk": " private static class IndexFetcher implements Callable<Long> {\n private final ExtractorConfig fetcherConfig;\n private final ArrayBlockingQueue<FetchEmitTuple> indexPathsList;\n public IndexFetcher(ExtractorConfig fetcherConfig,\n ArrayBlockingQueue<FetchEmitTuple> indexPathsList) {\n this.fetcherConfig = fetcherConfig;\n this.indexPathsList = indexPathsList;\n }\n @Override\n public Long call() throws Exception {",
"score": 42.843235407170994
},
{
"filename": "src/test/java/org/tallison/cc/index/FetcherConfigTest.java",
"retrieved_chunk": " assertEquals(BackoffHttpFetcher.class, fetcherConfig.newFetcher().getClass());\n assertEquals(FileSystemEmitter.class, fetcherConfig.newEmitter().getClass());\n }\n @Test\n public void testS3() throws Exception {\n Path p = Paths.get(getClass().getResource(\"/configs/basic-s3.json\").toURI());\n ExtractorConfig fetcherConfig = new ObjectMapper().readValue(p.toFile(), ExtractorConfig.class);\n //TODO -- add actual unit test that tests fetcher and emitter\n assertEquals(S3Fetcher.class, fetcherConfig.newFetcher().getClass());\n assertEquals(S3Emitter.class, fetcherConfig.newEmitter().getClass());",
"score": 34.63509082835452
}
] | java | .targetPathRewriter = fetcherConfig.getTargetPathRewriter(); |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.Optional;
import java.util.zip.GZIPInputStream;
import org.apache.commons.codec.binary.Base32;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.IOUtils;
import org.netpreserve.jwarc.MediaType;
import org.netpreserve.jwarc.WarcPayload;
import org.netpreserve.jwarc.WarcReader;
import org.netpreserve.jwarc.WarcRecord;
import org.netpreserve.jwarc.WarcResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.CCIndexReaderCounter;
import org.tallison.cc.index.CCIndexRecord;
import org.tallison.cc.index.io.TargetPathRewriter;
import org.apache.tika.exception.TikaConfigException;
import org.apache.tika.exception.TikaException;
import org.apache.tika.io.TikaInputStream;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.pipes.FetchEmitTuple;
import org.apache.tika.pipes.emitter.EmitKey;
import org.apache.tika.pipes.emitter.StreamEmitter;
import org.apache.tika.pipes.fetcher.FetchKey;
import org.apache.tika.pipes.fetcher.RangeFetcher;
public class FileFromCCWarcExtractor {
private static Logger LOGGER =
LoggerFactory.getLogger(FileFromCCWarcExtractor.class);
private static Logger EXTRACTED_LOGGER = LoggerFactory.getLogger("extracted-urls");
private static Logger EXTRACTED_ALL_LOGGER = LoggerFactory.getLogger("extracted-urls-all");
private final StreamEmitter emitter;
private final TargetPathRewriter targetPathRewriter;
private RangeFetcher fetcher;
private final boolean extractTruncated;
private Base32 base32 = new Base32();
private final CCIndexReaderCounter ccIndexReaderCounter;
public FileFromCCWarcExtractor(ExtractorConfig fetcherConfig,
CCIndexReaderCounter ccIndexReaderCounter) throws TikaConfigException {
this.emitter = fetcherConfig.newEmitter();
this.fetcher = (RangeFetcher) fetcherConfig.newFetcher();
this.targetPathRewriter = fetcherConfig.getTargetPathRewriter();
this.extractTruncated = fetcherConfig.isExtractTruncated();
this.ccIndexReaderCounter = ccIndexReaderCounter;
}
public void fetchToPath(CCIndexRecord record) throws InterruptedException {
LOGGER.debug("going to fetch {} {}->{}", record.getFilename(), record.getOffset(),
record.getLength());
FetchEmitTuple t = new FetchEmitTuple(record.getFilename(),
new FetchKey("", record.getFilename(), record.getOffset(),
record.getOffset() + record.getLength() - 1), new EmitKey());
byte[] warcRecordGZBytes;
try {
warcRecordGZBytes = fetchWarcBytes(t);
} catch (TikaException | IOException e) {
LOGGER.warn("couldn't get bytes from cc's warc " + t, e);
return;
}
String id = record.getUrl();
try {
parseWarc(id, record, warcRecordGZBytes);
} catch (IOException e) {
LOGGER.warn("problem parsing warc file", e);
}
}
private void fetchPayload(String id, CCIndexRecord ccIndexRecord, WarcRecord record)
throws IOException {
if (!((record instanceof WarcResponse) &&
record.contentType().base().equals(MediaType.HTTP))) {
return;
}
Optional<WarcPayload> payload = ((WarcResponse) record).payload();
if (!payload.isPresent()) {
LOGGER.debug("no payload {}", id);
ccIndexReaderCounter.getEmptyPayload().incrementAndGet();
return;
}
if (payload.get().body().size() == 0) {
LOGGER.debug("empty payload id={}", id);
ccIndexReaderCounter.getEmptyPayload().incrementAndGet();
return;
}
Path tmp = Files.createTempFile("ccfile-fetcher-", "");
try {
Files.copy(payload.get().body().stream(), tmp, StandardCopyOption.REPLACE_EXISTING);
String targetDigest = null;
String base32Sha1 = "";
try (InputStream is = Files.newInputStream(tmp)) {
base32Sha1 = base32.encodeAsString(DigestUtils.sha1(is));
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
if (!base32Sha1.equals(ccIndexRecord.getDigest())) {
LOGGER.warn("Bad digest for url={} ccindex={} sha1={}", id,
ccIndexRecord.getDigest(), base32Sha1);
}
//TODO: make digest and encoding configurable
try (InputStream is = Files.newInputStream(tmp)) {
targetDigest = DigestUtils.sha256Hex(is);
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
long length = -1;
try {
length = Files.size(tmp);
} catch (IOException e) {
LOGGER.warn("IOException during digesting: " + tmp.toAbsolutePath());
return;
}
String targetPath = targetPathRewriter.rewrite(targetDigest);
Metadata metadata = new Metadata();
try (InputStream is = TikaInputStream.get(tmp, metadata)) {
emitter.emit(targetPath, is, new Metadata());
logSuccess(ccIndexRecord, targetDigest, length, targetPath);
} catch (IOException | TikaException e) {
LOGGER.warn("problem writing id={}", id, e);
}
} finally {
try {
Files.delete(tmp);
} catch (IOException e) {
LOGGER.warn("can't delete " + tmp.toAbsolutePath(), e);
}
}
}
private void logSuccess(CCIndexRecord ccIndexRecord, String targetDigest, long length,
String targetPath) {
if (extractTruncated) {
EXTRACTED_ALL_LOGGER.info("", ccIndexRecord.getUrl(),
ccIndexRecord.getNormalizedMime(),
ccIndexRecord.getNormalizedMimeDetected(),
ccIndexRecord.getFilename(),
ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
| ccIndexRecord.getTruncated(), targetDigest, length,
targetPath); |
} else {
//new ObjectArray ?
//url,mime_detected,warc_file,warc_offset,warc_length,sha256,length,path
EXTRACTED_LOGGER.info("", ccIndexRecord.getUrl(),
ccIndexRecord.getNormalizedMime(),
ccIndexRecord.getNormalizedMimeDetected(),
ccIndexRecord.getFilename(),
ccIndexRecord.getOffset(), ccIndexRecord.getLength(),
targetDigest, length,
targetPath);
}
}
private void parseWarc(String id, CCIndexRecord ccIndexRecord, byte[] warcRecordGZBytes)
throws IOException {
//need to leave initial inputstream open while parsing warcrecord
//can't just parse record and return
try (InputStream is = new GZIPInputStream(new ByteArrayInputStream(warcRecordGZBytes))) {
try (WarcReader warcreader = new WarcReader(is)) {
//should be a single warc per file
//return the first
for (WarcRecord warcRecord : warcreader) {
fetchPayload(id, ccIndexRecord, warcRecord);
return;
}
}
}
}
private byte[] fetchWarcBytes(FetchEmitTuple t)
throws TikaException, InterruptedException, IOException {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
FetchKey k = t.getFetchKey();
try (InputStream is = fetcher.fetch(k.getFetchKey(), k.getRangeStart(), k.getRangeEnd(),
new Metadata())) {
IOUtils.copy(is, bos);
}
return bos.toByteArray();
}
}
| src/main/java/org/tallison/cc/index/extractor/FileFromCCWarcExtractor.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java",
"retrieved_chunk": " String url = r.getUrl();\n TRUNCATED_URLS_LOGGER.info(\"\", url);\n //url,mime,mime_detected,warc_file,warc_offset,warc_length,truncated\n TRUNCATED_URLS_FULL_LOGGER.info(\"\", url,\n r.getNormalizedMime(), r.getNormalizedMimeDetected(), r.getFilename(),\n r.getOffset(), r.getLength(), r.getTruncated());\n return true;\n }\n }\n private void fetchBytes(CCIndexRecord r) throws InterruptedException {",
"score": 39.738381940911005
},
{
"filename": "src/main/java/org/tallison/cc/index/CCIndexRecord.java",
"retrieved_chunk": " public void setLength(long length) {\n this.length = length;\n }\n public Integer getOffset() {\n return offset;\n }\n public void setOffset(int offset) {\n this.offset = offset;\n }\n public String getFilename() {",
"score": 29.75122825319761
},
{
"filename": "src/main/java/org/tallison/cc/index/CCIndexRecord.java",
"retrieved_chunk": " }\n public String getDigest() {\n return digest;\n }\n public void setDigest(String digest) {\n this.digest = digest;\n }\n public Long getLength() {\n return length;\n }",
"score": 18.069619352838995
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java",
"retrieved_chunk": " if (!fetcherConfig.getRecordSelector().select(r)) {\n return true;\n }\n increment(totalCounts, r.getNormalizedMimeDetected());\n if (!StringUtils.isBlank(r.getTruncated())) {\n long truncated = counter.getTruncated().incrementAndGet();\n if (fetcherConfig.getMaxFilesTruncated() > -1 &&\n truncated >= fetcherConfig.getMaxFilesTruncated()) {\n LOGGER.info(\"hit max truncated files\");\n return false;",
"score": 15.318731813688501
},
{
"filename": "src/main/java/org/tallison/cc/index/CCIndexRecord.java",
"retrieved_chunk": " return \"bytes=\" + offset + \"-\" + (offset + length - 1);\n }\n @Override\n public String toString() {\n return \"CCIndexRecord{\" + \"url='\" + url + '\\'' + \", mime='\" + mime + '\\'' +\n \", mimeDetected='\" + mimeDetected + '\\'' + \", status=\" + status + \", digest='\" +\n digest + '\\'' + \", length=\" + length + \", offset=\" + offset + \", filename='\" +\n filename + '\\'' + \", charset='\" + charset + '\\'' + \", languages='\" + languages +\n '\\'' + \", truncated='\" + truncated + '\\'' + \", redirect='\" + redirect + '\\'' + '}';\n }",
"score": 14.869806914180984
}
] | java | ccIndexRecord.getTruncated(), targetDigest, length,
targetPath); |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tallison.cc.index.extractor;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.zip.GZIPInputStream;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;
import org.apache.commons.lang3.mutable.MutableLong;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tallison.cc.index.AbstractRecordProcessor;
import org.tallison.cc.index.CCIndexReaderCounter;
import org.tallison.cc.index.CCIndexRecord;
import org.tallison.cc.index.IndexIterator;
import org.apache.tika.exception.TikaConfigException;
import org.apache.tika.exception.TikaException;
import org.apache.tika.io.TikaInputStream;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.pipes.FetchEmitTuple;
import org.apache.tika.pipes.fetcher.Fetcher;
import org.apache.tika.pipes.pipesiterator.CallablePipesIterator;
import org.apache.tika.pipes.pipesiterator.PipesIterator;
import org.apache.tika.utils.StringUtils;
/**
* This counts mime_detected. Use a regular file selector to include
* only urls that had a 200, e.g.
*/
public class CCMimeCounter {
private static final Long INDEX_WORKER_ID = 1l;
private static final Long INDEX_READER_ID = 2l;
private static final Logger LOGGER = LoggerFactory.getLogger(CCMimeCounter.class);
public static void main(String[] args) throws Exception {
ExtractorConfig fetcherConfig =
new ObjectMapper().readValue(new File(args[0]), ExtractorConfig.class);
execute(fetcherConfig);
}
private static void execute(ExtractorConfig fetcherConfig) throws IOException, TikaException {
ArrayBlockingQueue<FetchEmitTuple> indexPathsList = new ArrayBlockingQueue<>(1000);
//IndexPathsReader reads a file containing a list of cc-index.paths files
//and writes the literal gz files (cc-index/collections/CC-MAIN-2023-06/indexes/cdx-00000.gz)
//to indexPathsList
//IndexWorker reads a single index.gz file at a time and processes each record
//It fetches non truncated files and logs truncated files
int totalThreads = fetcherConfig.getNumThreads() + 1;
ExecutorService executorService = Executors.newFixedThreadPool(totalThreads);
ExecutorCompletionService<Long> executorCompletionService =
new ExecutorCompletionService<>(executorService);
IndexIterator indexIterator = fetcherConfig.getIndexIterator();
indexIterator.initialize(Collections.EMPTY_MAP);
executorCompletionService.submit(new CallablePipesIterator(indexIterator, indexPathsList));
CCIndexReaderCounter counter = new CCIndexReaderCounter();
int finishedWorkers = 0;
List<DetectedMimeCounter> detectedMimeCounters = new ArrayList<>();
try {
for (int i = 0; i < fetcherConfig.getNumThreads(); i++) {
DetectedMimeCounter processor = new DetectedMimeCounter(fetcherConfig, counter);
detectedMimeCounters.add(processor);
executorCompletionService.submit(
new IndexWorker(fetcherConfig, indexPathsList, processor));
}
while (finishedWorkers < totalThreads) {
//blocking
Future<Long> future = executorCompletionService.take();
if (future != null) {
Long f = future.get();
LOGGER.debug("completed {}", f);
if (f.equals(INDEX_WORKER_ID)) {
finishedWorkers++;
} else if (f.equals(INDEX_READER_ID)) {
LOGGER.info("Index paths reader successfully completed");
}
}
}
} catch (TikaConfigException e) {
LOGGER.error("main loop exception", e);
throw new RuntimeException(e);
} catch (ExecutionException e) {
LOGGER.error("main loop exception", e);
throw new RuntimeException(e);
} catch (InterruptedException e) {
LOGGER.warn("main loop interrupted exception", e);
throw new RuntimeException(e);
} finally {
executorService.shutdown();
executorService.shutdownNow();
}
summarize(detectedMimeCounters);
}
private static void summarize(List<DetectedMimeCounter> detectedMimeCounters)
throws IOException {
Map<String, Long> total = new HashMap<>();
Map<String, Long> truncated = new HashMap<>();
Map<String, Long> nonTruncated = new HashMap<>();
for (DetectedMimeCounter c : detectedMimeCounters) {
update(c.totalCounts, total);
update(c.truncatedCounts, truncated);
}
calcNonTruncated(truncated, total, nonTruncated);
report("total", total);
report("truncated", truncated);
report("non-truncated", nonTruncated);
}
private static void calcNonTruncated(Map<String, Long> truncated, Map<String, Long> total,
Map<String, Long> nonTruncated) {
for (Map.Entry<String, Long> e : total.entrySet()) {
Long val = e.getValue();
Long t = truncated.getOrDefault(e.getKey(), 0l);
val -= t;
nonTruncated.put(e.getKey(), val);
}
}
private static void report(String name, Map<String, Long> m) throws IOException {
try (BufferedWriter writer = Files.newBufferedWriter(Paths.get(name + ".csv"),
StandardCharsets.UTF_8)) {
try (CSVPrinter printer = new CSVPrinter(writer, CSVFormat.EXCEL)) {
printer.printRecord("mime", "count");
m.entrySet().stream().sorted(Collections.reverseOrder(Map.Entry.comparingByValue()))
.forEach(e -> {
try {
printer.printRecord(e.getKey(), e.getValue());
} catch (IOException ex) {
throw new RuntimeException(ex);
}
});
}
}
}
private static void update(Map<String, MutableLong> from, Map<String, Long> to) {
for (Map.Entry<String, MutableLong> e : from.entrySet()) {
Long cnt = to.get(e.getKey());
if (cnt == null) {
cnt = 0l;
}
cnt += e.getValue().getValue();
to.put(e.getKey(), cnt);
}
}
private static class IndexWorker implements Callable<Long> {
private final ArrayBlockingQueue<FetchEmitTuple> indexUrls;
private final AbstractRecordProcessor recordProcessor;
private final Fetcher fetcher;
IndexWorker(ExtractorConfig fetcherConfig, ArrayBlockingQueue<FetchEmitTuple> indexUrls,
AbstractRecordProcessor recordProcessor) throws TikaException {
this.indexUrls = indexUrls;
this.recordProcessor = recordProcessor;
this.fetcher = fetcherConfig.newFetcher();
}
@Override
public Long call() throws Exception {
boolean shouldContinue = true;
while (shouldContinue) {
FetchEmitTuple indexUrl = indexUrls.poll(120, TimeUnit.MINUTES);
if (indexUrl == null) {
throw new TimeoutException("waited 120 minutes for a new record");
}
if (indexUrl == PipesIterator.COMPLETED_SEMAPHORE) {
recordProcessor.close();
//can hang forever
indexUrls.put(PipesIterator.COMPLETED_SEMAPHORE);
return INDEX_WORKER_ID;
}
shouldContinue = processFile(indexUrl, recordProcessor);
}
return INDEX_WORKER_ID;
}
private boolean processFile(FetchEmitTuple fetchEmitTuple,
AbstractRecordProcessor recordProcessor)
throws InterruptedException {
long start = System.currentTimeMillis();
LOGGER.info("starting to fetch index gz: {}",
fetchEmitTuple.getFetchKey().getFetchKey());
try (TikaInputStream tis = (TikaInputStream) fetcher.fetch(
fetchEmitTuple.getFetchKey().getFetchKey(), new Metadata())) {
try (InputStream is = new BufferedInputStream(new GZIPInputStream(tis))) {
try (BufferedReader reader = new BufferedReader(
new InputStreamReader(is, StandardCharsets.UTF_8))) {
String line = reader.readLine();
int lines = 0;
long elapsed = System.currentTimeMillis() - start;
LOGGER.info("Finished fetching {} bytes in {} ms for index gz: {}",
String.format(Locale.US, "%,d", tis.getLength()),
String.format(Locale.US, "%,d", elapsed),
fetchEmitTuple.getFetchKey().getFetchKey());
while (line != null) {
LOGGER.trace("about to add a line");
if (StringUtils.isBlank(line)) {
line = reader.readLine();
continue;
}
try {
boolean shouldContinue = recordProcessor.process(line);
if (!shouldContinue) {
return shouldContinue;
}
} catch (IOException e) {
LOGGER.warn("bad json: " + line);
}
lines++;
line = reader.readLine();
}
}
}
} catch (TikaException | IOException e) {
LOGGER.error(
"failed while processing " + fetchEmitTuple.getFetchKey().getFetchKey(), e);
}
long elapsed = System.currentTimeMillis() - start;
LOGGER.info("finished processing index gz in ({}) ms: {}",
String.format(Locale.US, "%,d", elapsed),
fetchEmitTuple.getFetchKey().getFetchKey());
return true;
}
}
private static class DetectedMimeCounter extends AbstractRecordProcessor {
private final ExtractorConfig fetcherConfig;
private final CCIndexReaderCounter counter;
private final Map<String, MutableLong> totalCounts = new HashMap<>();
private final Map<String, MutableLong> truncatedCounts = new HashMap<>();
public DetectedMimeCounter(ExtractorConfig fetcherConfig, CCIndexReaderCounter counter) {
this.fetcherConfig = fetcherConfig;
this.counter = counter;
}
@Override
public boolean process(String json) throws IOException, InterruptedException {
long totalRead = counter.getRecordsRead().incrementAndGet();
if (totalRead % 1000000 == 0) {
LOGGER.info("processed: {}", counter);
}
if (fetcherConfig.getMaxRecords() > -1 && totalRead >= fetcherConfig.getMaxRecords()) {
LOGGER.info("hit max read");
return false;
}
//check for hit max
//return false;
Optional<CCIndexRecord> record = CCIndexRecord.parseRecord(json);
if (record.isEmpty()) {
//problem already logged
return true;
}
CCIndexRecord r = record.get();
if (!fetcherConfig.getRecordSelector().select(r)) {
return true;
}
increment(totalCounts, r.getNormalizedMimeDetected());
if (!StringUtils.isBlank(r.getTruncated())) {
long truncated = counter.getTruncated().incrementAndGet();
if (fetcherConfig.getMaxFilesTruncated() > -1 &&
truncated >= fetcherConfig.getMaxFilesTruncated()) {
LOGGER.info("hit max truncated files");
return false;
}
increment(truncatedCounts, | r.getNormalizedMimeDetected()); |
return true;
}
return true;
}
private void increment(Map<String, MutableLong> m, String k) {
MutableLong cnt = m.get(k);
if (cnt == null) {
cnt = new MutableLong(1);
m.put(k, cnt);
return;
} else {
cnt.increment();
}
}
@Override
public void close() throws IOException {
}
}
}
| src/main/java/org/tallison/cc/index/extractor/CCMimeCounter.java | tballison-commoncrawl-fetcher-lite-2c106d1 | [
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java",
"retrieved_chunk": " if (fetcherConfig.getMaxFilesTruncated() > -1 &&\n truncated >= fetcherConfig.getMaxFilesTruncated()) {\n LOGGER.info(\"hit max truncated files\");\n return false;\n }\n }\n if (fetcherConfig.isExtractTruncated() || StringUtils.isBlank(r.getTruncated())) {\n long extracted = counter.getFilesExtracted().incrementAndGet();\n if (fetcherConfig.getMaxFilesExtracted() > -1 &&\n extracted >= fetcherConfig.getMaxFilesExtracted()) {",
"score": 93.43243061893574
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java",
"retrieved_chunk": " //problem already logged\n return true;\n }\n CCIndexRecord r = record.get();\n if (!fetcherConfig.getRecordSelector().select(r)) {\n return true;\n }\n //if truncated, count appropriately and test for limits\n if (!StringUtils.isBlank(r.getTruncated())) {\n long truncated = counter.getTruncated().incrementAndGet();",
"score": 62.07498205521148
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java",
"retrieved_chunk": " String url = r.getUrl();\n TRUNCATED_URLS_LOGGER.info(\"\", url);\n //url,mime,mime_detected,warc_file,warc_offset,warc_length,truncated\n TRUNCATED_URLS_FULL_LOGGER.info(\"\", url,\n r.getNormalizedMime(), r.getNormalizedMimeDetected(), r.getFilename(),\n r.getOffset(), r.getLength(), r.getTruncated());\n return true;\n }\n }\n private void fetchBytes(CCIndexRecord r) throws InterruptedException {",
"score": 46.45105304702386
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java",
"retrieved_chunk": " LOGGER.info(\"hit max extracted files\");\n return false;\n }\n if (fetcherConfig.isDryRun()) {\n LOGGER.info(\"dry run, but would have extracted {}\", r);\n return true;\n }\n fetchBytes(r);\n return true;\n } else {",
"score": 46.0621435660239
},
{
"filename": "src/main/java/org/tallison/cc/index/extractor/CCFileExtractorRecordProcessor.java",
"retrieved_chunk": " LOGGER.info(\"processed: {}\", counter);\n }\n if (fetcherConfig.getMaxRecords() > -1 && totalRead >= fetcherConfig.getMaxRecords()) {\n LOGGER.info(\"hit max read\");\n return false;\n }\n //check for hit max\n //return false;\n Optional<CCIndexRecord> record = CCIndexRecord.parseRecord(json);\n if (record.isEmpty()) {",
"score": 39.293559879407454
}
] | java | r.getNormalizedMimeDetected()); |
/*
* Copyright © 2023 Deutsche Telekom AG ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.telekom.phonenumbernormalizer;
import de.telekom.phonenumbernormalizer.dto.DeviceContext;
import de.telekom.phonenumbernormalizer.dto.DeviceContextMapper;
import de.telekom.phonenumbernormalizer.numberplans.PhoneLibWrapper;
import de.telekom.phonenumbernormalizer.numberplans.NumberPlan;
import de.telekom.phonenumbernormalizer.numberplans.NumberPlanFactory;
import lombok.RequiredArgsConstructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
/**
* Concrete implementation of {@link PhoneNumberNormalizer} using {@link PhoneLibWrapper} to normalize a number by mitigating some inaccuracies when it comes to number plans of optional NDC and NAC as zero.
* <p>
* Also supports {@link DeviceContext} to enrich a phone number during normalization if the optional NDC is missing.
* </p>
*/
@RequiredArgsConstructor
@Component
public class PhoneNumberNormalizerImpl implements PhoneNumberNormalizer {
private static final Logger LOGGER = LoggerFactory.getLogger(PhoneNumberNormalizerImpl.class);
/**
* Storage for {@link PhoneNumberNormalizer#setFallbackRegionCode(String)}
*/
private String fallbackRegionCode = null;
@Override
public void setFallbackRegionCode(String fallBackRegionCode) {
if (PhoneLibWrapper.getCountryCodeForRegion(fallBackRegionCode) > 0) {
this.fallbackRegionCode = fallBackRegionCode;
} else {
this.fallbackRegionCode = null; //invalid region code!
}
}
/**
* Fallback normalization within the number-plan of the fallback region.
* @param number the original number to be normalized
* @param dialableNumber the original number reduced to dialable digits
* @return E164 formatted phone number or at least a dialable version of the number
*
* @see PhoneNumberNormalizer#setFallbackRegionCode(String)
* @see PhoneNumberNormalizer#normalizePhoneNumber(String, String)
*/
private String fallbackNormalizationFromDeviceContextToDefaultRegionCode(String number, String dialableNumber) {
if (this.fallbackRegionCode == null) {
LOGGER.debug("Fallback Region was set!");
return dialableNumber;
} else {
return this.normalizePhoneNumber(number, this.fallbackRegionCode);
}
}
/**
* Uses wrapper of PhoneLib to identify if special rules apply for normalization.<br/>
* Using device context for enriching the number make it normalizable to E164 format if NDC is optional in the used number plan, but not used in the phone number to be normalized.
* @param wrapper instanced wrapper of PhoneLib
* @param deviceContext information like CC, NDC and {@link de.telekom.phonenumbernormalizer.dto.DeviceContextLineType} from which the number is dialled
* @return E164 formatted phone number or dialable version of it or null
*/
private String normalize(PhoneLibWrapper wrapper, DeviceContext deviceContext) {
// international prefix has been added by PhoneLib even if it's not valid in the number plan.
if (wrapper == null) {
LOGGER.debug("PhoneLipWrapper was not initialized");
return null;
}
if (wrapper.getSemiNormalizedNumber() == null) {
return wrapper.getDialableNumber();
}
NumberPlan numberplan = null;
if (deviceContext != null) {
numberplan = NumberPlanFactory | .INSTANCE.getNumberPlan(deviceContext.getLineType(), deviceContext.getCountryCode()); |
}
if (wrapper.isShortNumber(numberplan)) {
//if it is a short number, we can't add area code nor country code, so returning the dialable.
return wrapper.getDialableNumber();
}
if (wrapper.hasRegionNationalAccessCode() && deviceContext != null) {
//Number plan is using a NationalPrefix aka Trunc Code ... so we could add Area Code if not included in the number.
return wrapper.extendNumberByDefaultAreaCodeAndCountryCode(wrapper.getNationalAccessCode(), deviceContext.getNationalDestinationCode());
}
// Number plan is not using NationalPrefix aka Trunc Code ... its also not a short number, so country code can be added:
return wrapper.getE164Formatted();
}
@Override
public String normalizePhoneNumber(String number, String regionCode) {
PhoneLibWrapper wrapper = new PhoneLibWrapper(number, regionCode);
if (wrapper.getSemiNormalizedNumber() == null) {
return wrapper.getDialableNumber();
}
if (wrapper.isShortNumber()) {
//if it is a short number, we can't add area code nor country code, so returning the dialable.
return wrapper.getDialableNumber();
}
// international prefix is added by the lib even if it's not valid in the number plan.
//checking if the input number is equal to the nationalNumber based on number plan and trunc code logic.
boolean hasNoCCAndNoNAC = wrapper.hasNoCountryCodeNorNationalAccessCode();
LOGGER.debug("Number has no CC and no NAC: {}.", hasNoCCAndNoNAC);
//if the number is definitely a short number or needs an area code but does not have it, we do not add the country code.
return (hasNoCCAndNoNAC) ?
wrapper.getDialableNumber() : wrapper.getE164Formatted();
}
@Override
public String normalizePhoneNumber(String number, DeviceContext deviceContext) {
// checking if the number has a special format or is not valid at all.
PhoneLibWrapper normalizerPhoneNumber = new PhoneLibWrapper(number, null);
if (! normalizerPhoneNumber.isNormalizingTried()) {
return normalizerPhoneNumber.getDialableNumber();
}
DeviceContext normalizedDeviceContext = DeviceContextMapper.normalized(deviceContext);
if (!normalizedDeviceContext.getCountryCode().equals(DeviceContext.UNKNOWN_VALUE)) {
String regionCode = PhoneLibWrapper.getRegionCodeForCountryCode(normalizedDeviceContext.getCountryCode());
// now working again with the region code
normalizerPhoneNumber = new PhoneLibWrapper(number, regionCode);
if (!normalizedDeviceContext.getNationalDestinationCode().equals(DeviceContext.UNKNOWN_VALUE)) {
// Number needs normalization:
return normalize(normalizerPhoneNumber, deviceContext);
}
// Device Context with CountryCode but without AreaCode ...
if (!(PhoneLibWrapper.UNKNOWN_REGIONCODE.equals(regionCode))) {
return this.normalizePhoneNumber(number, regionCode);
}
}
LOGGER.debug("Normalization based on DeviceContext did not work - falling back to normalization with fallback region.");
return this.fallbackNormalizationFromDeviceContextToDefaultRegionCode(number, normalizerPhoneNumber.getDialableNumber());
}
}
| src/main/java/de/telekom/phonenumbernormalizer/PhoneNumberNormalizerImpl.java | telekom-phonenumber-normalizer-9a1b7ad | [
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " *\n * @see PhoneLibWrapper#PhoneLibWrapper(String, String)\n */\n public boolean isShortNumber(NumberPlan numberplan) {\n if (numberplan == null) {\n return this.isShortNumber();\n }\n return numberplan.isMatchingShortNumber(this.getDialableNumber());\n }\n /**",
"score": 33.74523531965447
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/NumberPlanFactory.java",
"retrieved_chunk": " LOGGER.debug(\"no number plan for country available\");\n return null;\n }\n}",
"score": 16.29263419417334
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/NumberPlanFactory.java",
"retrieved_chunk": " */\npublic class NumberPlanFactory {\n private static final Logger LOGGER = LoggerFactory.getLogger(NumberPlanFactory.class);\n public static final NumberPlanFactory INSTANCE = new NumberPlanFactory();\n /**\n * Two-dimensional map - The first key is DeviceContextLineType and second key is the Country Calling Code while the value is a NumberPlan object.\n *\n * @see NumberPlan\n * @see DeviceContextLineType\n */",
"score": 14.719300555433719
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/PhoneNumberAreaLabelImpl.java",
"retrieved_chunk": " try {\n LOGGER.debug(\"init code files\");\n LOGGER.debug(\"read international country codes\");\n this.internationalCountryCodes = this.initResource(countryCodeResource);\n LOGGER.debug(\"read number plans folder\");\n this.areaCodes = new HashMap<>();\n for (Resource res : numberPlanResources) {\n String filename = res.getFilename();\n if (filename!=null) {\n LOGGER.debug(\"read number plan file: {}\", filename);",
"score": 14.580998676854396
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/dto/DeviceContextMapper.java",
"retrieved_chunk": " public static DeviceContext normalized(DeviceContext context) {\n DeviceContext result = new DeviceContextDto();\n if (context==null) {\n result.setLineType(DeviceContextLineType.UNKNOWN);\n result.setCountryCode(DeviceContext.UNKNOWN_VALUE);\n result.setNationalDestinationCode(DeviceContext.UNKNOWN_VALUE);\n } else {\n result.setLineType(normalizeType(context.getLineType()));\n result.setCountryCode(normalizeCountryCode(context.getCountryCode()));\n result.setNationalDestinationCode(normalizeNationalDestinationCode(context.getNationalDestinationCode()));",
"score": 14.47108979012427
}
] | java | .INSTANCE.getNumberPlan(deviceContext.getLineType(), deviceContext.getCountryCode()); |
/*
* Copyright © 2023 Deutsche Telekom AG ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.telekom.phonenumbernormalizer.numberplans;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import de.telekom.phonenumbernormalizer.dto.DeviceContextLineType;
import de.telekom.phonenumbernormalizer.numberplans.constants.DeFixedLineNumberPlan;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.Map;
/**
* A factory to retrieve a NumberPlan for a given line-type and country calling code. Currently supporting:
* <ul>
* <li>German Fixed-Line</li>
* </ul>
*
* @see NumberPlanFactory#getNumberPlan(DeviceContextLineType, String)
*/
public class NumberPlanFactory {
private static final Logger LOGGER = LoggerFactory.getLogger(NumberPlanFactory.class);
public static final NumberPlanFactory INSTANCE = new NumberPlanFactory();
/**
* Two-dimensional map - The first key is DeviceContextLineType and second key is the Country Calling Code while the value is a NumberPlan object.
*
* @see NumberPlan
* @see DeviceContextLineType
*/
private final Map<DeviceContextLineType, Map<String, NumberPlan>> numberPlans = new EnumMap<>(DeviceContextLineType.class);
/**
* Adding all coded NumberPlans to the factory
*/
private NumberPlanFactory() {
this.initFixedLineNumberPlans();
this.initMobileNumberPlans();
this.initFallBackNumberPlans();
}
/**
* Adds coded NumberPlans for fixed-line context to the factory.
*
* @see NumberPlan
* @see DeviceContextLineType#FIXEDLINE
*/
private void initFixedLineNumberPlans() {
Map<String, NumberPlan> fixedLineNumberPlans = new HashMap<>();
fixedLineNumberPlans | .put(DeFixedLineNumberPlan.getCountryCode(), new DeFixedLineNumberPlan()); |
numberPlans.put(DeviceContextLineType.FIXEDLINE, fixedLineNumberPlans);
}
/**
* Adds coded NumberPlans for mobile context to the factory.
*
* @see NumberPlan
* @see DeviceContextLineType#MOBILE
*/
private void initMobileNumberPlans() {
// TODO: Mobile Number Plan
}
/**
* Adds coded NumberPlans for unknown context to the factory. These are just the common rules for mobile and fixed-line, so that they could be applied regardless of which actual line context is used.
*
* @see NumberPlan
* @see DeviceContextLineType#UNKNOWN
*/
private void initFallBackNumberPlans() {
Map<String, NumberPlan> fixedLineNumberPlans = new HashMap<>();
// For Germany all short numbers of the fixed-line are also valid in mobile, so we can reuse it, if unknown.
fixedLineNumberPlans.put(DeFixedLineNumberPlan.getCountryCode(), new DeFixedLineNumberPlan());
numberPlans.put(DeviceContextLineType.UNKNOWN, fixedLineNumberPlans);
}
/**
* Gets a NumberPlan for a line-type of a specific country.
*
* @param numberPlanType line-type where the NumberPlan is valid
* @param countryCode country calling code for which the NumberPlan
* @return {@link NumberPlan} for further checks
*
* @see DeviceContextLineType
* @see NumberPlan#isMatchingShortNumber(String)
*/
public NumberPlan getNumberPlan(DeviceContextLineType numberPlanType, String countryCode) {
if (numberPlans.containsKey(numberPlanType)) {
LOGGER.debug("use number plan for type: {}", numberPlanType);
Map<String, NumberPlan> numberPlan = numberPlans.get(numberPlanType);
if (numberPlan.containsKey(countryCode)) {
LOGGER.debug("use number plan for country code: {}", countryCode);
return numberPlan.get(countryCode);
}
}
LOGGER.debug("no number plan for country available");
return null;
}
}
| src/main/java/de/telekom/phonenumbernormalizer/numberplans/NumberPlanFactory.java | telekom-phonenumber-normalizer-9a1b7ad | [
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/constants/DeFixedLineNumberPlan.java",
"retrieved_chunk": " *\n */\npublic class DeFixedLineNumberPlan extends NumberPlan {\n /**\n * Constant for German Country Calling Code\n */\n private static final String COUNTRY_CODE = \"49\";\n /**\n * Constant for German short numbers in fixed-line\n */",
"score": 28.150035536950366
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/NumberPlan.java",
"retrieved_chunk": " * Additionally, we designed the NumberPlanFactory to have a NumberPlan class for each DeviceContextLineType, so we can support short numbers which are valid only in fixed-line or mobile context.\n * </p>\n * @see NumberPlanFactory\n * @see de.telekom.phonenumbernormalizer.dto.DeviceContextLineType\n */\npublic abstract class NumberPlan {\n private static final Logger LOGGER = LoggerFactory.getLogger(NumberPlan.class);\n /**\n * A subclass needs to provide a Map<String, Integer> as tbe rules to identify short numbers\n * The key (String) is representing a prefix for the number and the value (Integer) is the total length of the short code (including the prefix)",
"score": 21.292782903963726
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/dto/DeviceContextLineType.java",
"retrieved_chunk": "import java.util.List;\nimport java.util.Locale;\n/**\n * The enum values define the line-type in the context of a device involved in the call\n * <p>\n * Currently supported values are for fixed-line, mobile and unknown (also for anything else).\n * </p>\n * @see DeviceContextLineType#FIXEDLINE\n * @see DeviceContextLineType#MOBILE\n * @see DeviceContextLineType#UNKNOWN",
"score": 19.152012024208343
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/dto/DeviceContextLineType.java",
"retrieved_chunk": " */\npublic enum DeviceContextLineType {\n /**\n * If the device from the user is a fixed-line device - like a smart speaker with a DECT connection over a DECT base on a fixed-line access\n *\n * @see DeviceContextLineType#FIXEDLINE_VALUE\n */\n @JsonProperty(DeviceContextLineType.FIXEDLINE_VALUE)\n FIXEDLINE(DeviceContextLineType.FIXEDLINE_VALUE),\n /**",
"score": 17.609230237327253
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/dto/DeviceContextMapper.java",
"retrieved_chunk": " public static DeviceContext normalized(DeviceContext context) {\n DeviceContext result = new DeviceContextDto();\n if (context==null) {\n result.setLineType(DeviceContextLineType.UNKNOWN);\n result.setCountryCode(DeviceContext.UNKNOWN_VALUE);\n result.setNationalDestinationCode(DeviceContext.UNKNOWN_VALUE);\n } else {\n result.setLineType(normalizeType(context.getLineType()));\n result.setCountryCode(normalizeCountryCode(context.getCountryCode()));\n result.setNationalDestinationCode(normalizeNationalDestinationCode(context.getNationalDestinationCode()));",
"score": 15.385509689987433
}
] | java | .put(DeFixedLineNumberPlan.getCountryCode(), new DeFixedLineNumberPlan()); |
/*
* Copyright © 2023 Deutsche Telekom AG ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.telekom.phonenumbernormalizer;
import de.telekom.phonenumbernormalizer.dto.DeviceContext;
import de.telekom.phonenumbernormalizer.dto.DeviceContextMapper;
import de.telekom.phonenumbernormalizer.numberplans.PhoneLibWrapper;
import de.telekom.phonenumbernormalizer.numberplans.NumberPlan;
import de.telekom.phonenumbernormalizer.numberplans.NumberPlanFactory;
import lombok.RequiredArgsConstructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
/**
* Concrete implementation of {@link PhoneNumberNormalizer} using {@link PhoneLibWrapper} to normalize a number by mitigating some inaccuracies when it comes to number plans of optional NDC and NAC as zero.
* <p>
* Also supports {@link DeviceContext} to enrich a phone number during normalization if the optional NDC is missing.
* </p>
*/
@RequiredArgsConstructor
@Component
public class PhoneNumberNormalizerImpl implements PhoneNumberNormalizer {
private static final Logger LOGGER = LoggerFactory.getLogger(PhoneNumberNormalizerImpl.class);
/**
* Storage for {@link PhoneNumberNormalizer#setFallbackRegionCode(String)}
*/
private String fallbackRegionCode = null;
@Override
public void setFallbackRegionCode(String fallBackRegionCode) {
if (PhoneLibWrapper.getCountryCodeForRegion(fallBackRegionCode) > 0) {
this.fallbackRegionCode = fallBackRegionCode;
} else {
this.fallbackRegionCode = null; //invalid region code!
}
}
/**
* Fallback normalization within the number-plan of the fallback region.
* @param number the original number to be normalized
* @param dialableNumber the original number reduced to dialable digits
* @return E164 formatted phone number or at least a dialable version of the number
*
* @see PhoneNumberNormalizer#setFallbackRegionCode(String)
* @see PhoneNumberNormalizer#normalizePhoneNumber(String, String)
*/
private String fallbackNormalizationFromDeviceContextToDefaultRegionCode(String number, String dialableNumber) {
if (this.fallbackRegionCode == null) {
LOGGER.debug("Fallback Region was set!");
return dialableNumber;
} else {
return this.normalizePhoneNumber(number, this.fallbackRegionCode);
}
}
/**
* Uses wrapper of PhoneLib to identify if special rules apply for normalization.<br/>
* Using device context for enriching the number make it normalizable to E164 format if NDC is optional in the used number plan, but not used in the phone number to be normalized.
* @param wrapper instanced wrapper of PhoneLib
* @param deviceContext information like CC, NDC and {@link de.telekom.phonenumbernormalizer.dto.DeviceContextLineType} from which the number is dialled
* @return E164 formatted phone number or dialable version of it or null
*/
private String normalize(PhoneLibWrapper wrapper, DeviceContext deviceContext) {
// international prefix has been added by PhoneLib even if it's not valid in the number plan.
if (wrapper == null) {
LOGGER.debug("PhoneLipWrapper was not initialized");
return null;
}
if (wrapper.getSemiNormalizedNumber() == null) {
return wrapper.getDialableNumber();
}
NumberPlan numberplan = null;
if (deviceContext != null) {
numberplan = NumberPlanFactory.INSTANCE.getNumberPlan(deviceContext.getLineType | (), deviceContext.getCountryCode()); |
}
if (wrapper.isShortNumber(numberplan)) {
//if it is a short number, we can't add area code nor country code, so returning the dialable.
return wrapper.getDialableNumber();
}
if (wrapper.hasRegionNationalAccessCode() && deviceContext != null) {
//Number plan is using a NationalPrefix aka Trunc Code ... so we could add Area Code if not included in the number.
return wrapper.extendNumberByDefaultAreaCodeAndCountryCode(wrapper.getNationalAccessCode(), deviceContext.getNationalDestinationCode());
}
// Number plan is not using NationalPrefix aka Trunc Code ... its also not a short number, so country code can be added:
return wrapper.getE164Formatted();
}
@Override
public String normalizePhoneNumber(String number, String regionCode) {
PhoneLibWrapper wrapper = new PhoneLibWrapper(number, regionCode);
if (wrapper.getSemiNormalizedNumber() == null) {
return wrapper.getDialableNumber();
}
if (wrapper.isShortNumber()) {
//if it is a short number, we can't add area code nor country code, so returning the dialable.
return wrapper.getDialableNumber();
}
// international prefix is added by the lib even if it's not valid in the number plan.
//checking if the input number is equal to the nationalNumber based on number plan and trunc code logic.
boolean hasNoCCAndNoNAC = wrapper.hasNoCountryCodeNorNationalAccessCode();
LOGGER.debug("Number has no CC and no NAC: {}.", hasNoCCAndNoNAC);
//if the number is definitely a short number or needs an area code but does not have it, we do not add the country code.
return (hasNoCCAndNoNAC) ?
wrapper.getDialableNumber() : wrapper.getE164Formatted();
}
@Override
public String normalizePhoneNumber(String number, DeviceContext deviceContext) {
// checking if the number has a special format or is not valid at all.
PhoneLibWrapper normalizerPhoneNumber = new PhoneLibWrapper(number, null);
if (! normalizerPhoneNumber.isNormalizingTried()) {
return normalizerPhoneNumber.getDialableNumber();
}
DeviceContext normalizedDeviceContext = DeviceContextMapper.normalized(deviceContext);
if (!normalizedDeviceContext.getCountryCode().equals(DeviceContext.UNKNOWN_VALUE)) {
String regionCode = PhoneLibWrapper.getRegionCodeForCountryCode(normalizedDeviceContext.getCountryCode());
// now working again with the region code
normalizerPhoneNumber = new PhoneLibWrapper(number, regionCode);
if (!normalizedDeviceContext.getNationalDestinationCode().equals(DeviceContext.UNKNOWN_VALUE)) {
// Number needs normalization:
return normalize(normalizerPhoneNumber, deviceContext);
}
// Device Context with CountryCode but without AreaCode ...
if (!(PhoneLibWrapper.UNKNOWN_REGIONCODE.equals(regionCode))) {
return this.normalizePhoneNumber(number, regionCode);
}
}
LOGGER.debug("Normalization based on DeviceContext did not work - falling back to normalization with fallback region.");
return this.fallbackNormalizationFromDeviceContextToDefaultRegionCode(number, normalizerPhoneNumber.getDialableNumber());
}
}
| src/main/java/de/telekom/phonenumbernormalizer/PhoneNumberNormalizerImpl.java | telekom-phonenumber-normalizer-9a1b7ad | [
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " *\n * @see PhoneLibWrapper#PhoneLibWrapper(String, String)\n */\n public boolean isShortNumber(NumberPlan numberplan) {\n if (numberplan == null) {\n return this.isShortNumber();\n }\n return numberplan.isMatchingShortNumber(this.getDialableNumber());\n }\n /**",
"score": 33.74523531965447
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/NumberPlanFactory.java",
"retrieved_chunk": " LOGGER.debug(\"no number plan for country available\");\n return null;\n }\n}",
"score": 16.29263419417334
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/NumberPlanFactory.java",
"retrieved_chunk": " */\npublic class NumberPlanFactory {\n private static final Logger LOGGER = LoggerFactory.getLogger(NumberPlanFactory.class);\n public static final NumberPlanFactory INSTANCE = new NumberPlanFactory();\n /**\n * Two-dimensional map - The first key is DeviceContextLineType and second key is the Country Calling Code while the value is a NumberPlan object.\n *\n * @see NumberPlan\n * @see DeviceContextLineType\n */",
"score": 14.719300555433719
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/PhoneNumberAreaLabelImpl.java",
"retrieved_chunk": " try {\n LOGGER.debug(\"init code files\");\n LOGGER.debug(\"read international country codes\");\n this.internationalCountryCodes = this.initResource(countryCodeResource);\n LOGGER.debug(\"read number plans folder\");\n this.areaCodes = new HashMap<>();\n for (Resource res : numberPlanResources) {\n String filename = res.getFilename();\n if (filename!=null) {\n LOGGER.debug(\"read number plan file: {}\", filename);",
"score": 14.580998676854396
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/dto/DeviceContextMapper.java",
"retrieved_chunk": " public static DeviceContext normalized(DeviceContext context) {\n DeviceContext result = new DeviceContextDto();\n if (context==null) {\n result.setLineType(DeviceContextLineType.UNKNOWN);\n result.setCountryCode(DeviceContext.UNKNOWN_VALUE);\n result.setNationalDestinationCode(DeviceContext.UNKNOWN_VALUE);\n } else {\n result.setLineType(normalizeType(context.getLineType()));\n result.setCountryCode(normalizeCountryCode(context.getCountryCode()));\n result.setNationalDestinationCode(normalizeNationalDestinationCode(context.getNationalDestinationCode()));",
"score": 14.47108979012427
}
] | java | (), deviceContext.getCountryCode()); |
/*
* Copyright © 2023 Deutsche Telekom AG ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.telekom.phonenumbernormalizer;
import de.telekom.phonenumbernormalizer.dto.DeviceContext;
import de.telekom.phonenumbernormalizer.dto.DeviceContextMapper;
import de.telekom.phonenumbernormalizer.numberplans.PhoneLibWrapper;
import de.telekom.phonenumbernormalizer.numberplans.NumberPlan;
import de.telekom.phonenumbernormalizer.numberplans.NumberPlanFactory;
import lombok.RequiredArgsConstructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
/**
* Concrete implementation of {@link PhoneNumberNormalizer} using {@link PhoneLibWrapper} to normalize a number by mitigating some inaccuracies when it comes to number plans of optional NDC and NAC as zero.
* <p>
* Also supports {@link DeviceContext} to enrich a phone number during normalization if the optional NDC is missing.
* </p>
*/
@RequiredArgsConstructor
@Component
public class PhoneNumberNormalizerImpl implements PhoneNumberNormalizer {
private static final Logger LOGGER = LoggerFactory.getLogger(PhoneNumberNormalizerImpl.class);
/**
* Storage for {@link PhoneNumberNormalizer#setFallbackRegionCode(String)}
*/
private String fallbackRegionCode = null;
@Override
public void setFallbackRegionCode(String fallBackRegionCode) {
if (PhoneLibWrapper.getCountryCodeForRegion(fallBackRegionCode) > 0) {
this.fallbackRegionCode = fallBackRegionCode;
} else {
this.fallbackRegionCode = null; //invalid region code!
}
}
/**
* Fallback normalization within the number-plan of the fallback region.
* @param number the original number to be normalized
* @param dialableNumber the original number reduced to dialable digits
* @return E164 formatted phone number or at least a dialable version of the number
*
* @see PhoneNumberNormalizer#setFallbackRegionCode(String)
* @see PhoneNumberNormalizer#normalizePhoneNumber(String, String)
*/
private String fallbackNormalizationFromDeviceContextToDefaultRegionCode(String number, String dialableNumber) {
if (this.fallbackRegionCode == null) {
LOGGER.debug("Fallback Region was set!");
return dialableNumber;
} else {
return this.normalizePhoneNumber(number, this.fallbackRegionCode);
}
}
/**
* Uses wrapper of PhoneLib to identify if special rules apply for normalization.<br/>
* Using device context for enriching the number make it normalizable to E164 format if NDC is optional in the used number plan, but not used in the phone number to be normalized.
* @param wrapper instanced wrapper of PhoneLib
* @param deviceContext information like CC, NDC and {@link de.telekom.phonenumbernormalizer.dto.DeviceContextLineType} from which the number is dialled
* @return E164 formatted phone number or dialable version of it or null
*/
private String normalize(PhoneLibWrapper wrapper, DeviceContext deviceContext) {
// international prefix has been added by PhoneLib even if it's not valid in the number plan.
if (wrapper == null) {
LOGGER.debug("PhoneLipWrapper was not initialized");
return null;
}
if (wrapper.getSemiNormalizedNumber() == null) {
return wrapper.getDialableNumber();
}
NumberPlan numberplan = null;
if (deviceContext != null) {
numberplan = NumberPlanFactory.INSTANCE.getNumberPlan(deviceContext.getLineType(), deviceContext.getCountryCode());
}
if (wrapper.isShortNumber(numberplan)) {
//if it is a short number, we can't add area code nor country code, so returning the dialable.
return wrapper.getDialableNumber();
}
if (wrapper.hasRegionNationalAccessCode() && deviceContext != null) {
//Number plan is using a NationalPrefix aka Trunc Code ... so we could add Area Code if not included in the number.
return wrapper.extendNumberByDefaultAreaCodeAndCountryCode(wrapper.getNationalAccessCode(), | deviceContext.getNationalDestinationCode()); |
}
// Number plan is not using NationalPrefix aka Trunc Code ... its also not a short number, so country code can be added:
return wrapper.getE164Formatted();
}
@Override
public String normalizePhoneNumber(String number, String regionCode) {
PhoneLibWrapper wrapper = new PhoneLibWrapper(number, regionCode);
if (wrapper.getSemiNormalizedNumber() == null) {
return wrapper.getDialableNumber();
}
if (wrapper.isShortNumber()) {
//if it is a short number, we can't add area code nor country code, so returning the dialable.
return wrapper.getDialableNumber();
}
// international prefix is added by the lib even if it's not valid in the number plan.
//checking if the input number is equal to the nationalNumber based on number plan and trunc code logic.
boolean hasNoCCAndNoNAC = wrapper.hasNoCountryCodeNorNationalAccessCode();
LOGGER.debug("Number has no CC and no NAC: {}.", hasNoCCAndNoNAC);
//if the number is definitely a short number or needs an area code but does not have it, we do not add the country code.
return (hasNoCCAndNoNAC) ?
wrapper.getDialableNumber() : wrapper.getE164Formatted();
}
@Override
public String normalizePhoneNumber(String number, DeviceContext deviceContext) {
// checking if the number has a special format or is not valid at all.
PhoneLibWrapper normalizerPhoneNumber = new PhoneLibWrapper(number, null);
if (! normalizerPhoneNumber.isNormalizingTried()) {
return normalizerPhoneNumber.getDialableNumber();
}
DeviceContext normalizedDeviceContext = DeviceContextMapper.normalized(deviceContext);
if (!normalizedDeviceContext.getCountryCode().equals(DeviceContext.UNKNOWN_VALUE)) {
String regionCode = PhoneLibWrapper.getRegionCodeForCountryCode(normalizedDeviceContext.getCountryCode());
// now working again with the region code
normalizerPhoneNumber = new PhoneLibWrapper(number, regionCode);
if (!normalizedDeviceContext.getNationalDestinationCode().equals(DeviceContext.UNKNOWN_VALUE)) {
// Number needs normalization:
return normalize(normalizerPhoneNumber, deviceContext);
}
// Device Context with CountryCode but without AreaCode ...
if (!(PhoneLibWrapper.UNKNOWN_REGIONCODE.equals(regionCode))) {
return this.normalizePhoneNumber(number, regionCode);
}
}
LOGGER.debug("Normalization based on DeviceContext did not work - falling back to normalization with fallback region.");
return this.fallbackNormalizationFromDeviceContextToDefaultRegionCode(number, normalizerPhoneNumber.getDialableNumber());
}
}
| src/main/java/de/telekom/phonenumbernormalizer/PhoneNumberNormalizerImpl.java | telekom-phonenumber-normalizer-9a1b7ad | [
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " //it seems we have nationalnumber with national prefix, so we could add country code:\n return getE164Formatted();\n }\n }\n /**\n * Some Special dial-able characters make a number either not necessary to be normalized (\"+\" is already normalized) or can't be normalized (\"*\" control codes)\n * @param value phone number representation\n * @return if phone number starts with special characters which makes normalization unable / not necessary\n */\n static boolean isSpecialFormat(String value) {",
"score": 36.76550375754628
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " *\n * @see PhoneLibWrapper#PhoneLibWrapper(String, String)\n */\n public boolean isShortNumber(NumberPlan numberplan) {\n if (numberplan == null) {\n return this.isShortNumber();\n }\n return numberplan.isMatchingShortNumber(this.getDialableNumber());\n }\n /**",
"score": 32.00877705895958
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " /**\n * Storing if PhoneLib has been used to parse the given number into semiNormalizedNumber.\n *\n * @see PhoneLibWrapper#PhoneLibWrapper(String, String)\n * @see PhoneLibWrapper#semiNormalizedNumber\n * @see PhoneLibWrapper#isNormalizingTried()\n */\n private boolean isNormalizingTried = false;\n /**\n * Initialize the wrapper by giving a phone number to be analyzed against a number plan of a given region",
"score": 30.4362488623633
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " *\n * @see PhoneLibWrapper#PhoneLibWrapper(String, String)\n */\n public String getE164Formatted() {\n return phoneUtil.format(this.semiNormalizedNumber, PhoneNumberUtil.PhoneNumberFormat.E164);\n }\n /**\n * If we know the given region for the given number {@link PhoneLibWrapper#hasRegionNationalAccessCode()}, this method checks if the given number does not start with a NAC nor a CC,\n * so we could permanently add a default NDC and NAC to the given number and for this new value the method directly return a E164 formatted representation.\n * @param nationalAccessCode the NAC to be added e.g. for Germany it would be \"0\"",
"score": 28.479901786827643
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " try {\n this.semiNormalizedNumber = phoneUtil.parse(extendedNumber, regionCode);\n // after area code has been added, we can add the country code by the lib:\n return getE164Formatted();\n } catch (NumberParseException e) {\n LOGGER.warn(\"could not parse extended number: {}\", extendedNumber);\n LOGGER.debug(\"{}\", e.getMessage());\n return dialableNumber;\n }\n } else {",
"score": 28.28183156330121
}
] | java | deviceContext.getNationalDestinationCode()); |
/*
* Copyright © 2023 Deutsche Telekom AG ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.telekom.phonenumbernormalizer;
import de.telekom.phonenumbernormalizer.dto.DeviceContext;
import de.telekom.phonenumbernormalizer.dto.DeviceContextMapper;
import de.telekom.phonenumbernormalizer.numberplans.PhoneLibWrapper;
import de.telekom.phonenumbernormalizer.numberplans.NumberPlan;
import de.telekom.phonenumbernormalizer.numberplans.NumberPlanFactory;
import lombok.RequiredArgsConstructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
/**
* Concrete implementation of {@link PhoneNumberNormalizer} using {@link PhoneLibWrapper} to normalize a number by mitigating some inaccuracies when it comes to number plans of optional NDC and NAC as zero.
* <p>
* Also supports {@link DeviceContext} to enrich a phone number during normalization if the optional NDC is missing.
* </p>
*/
@RequiredArgsConstructor
@Component
public class PhoneNumberNormalizerImpl implements PhoneNumberNormalizer {
private static final Logger LOGGER = LoggerFactory.getLogger(PhoneNumberNormalizerImpl.class);
/**
* Storage for {@link PhoneNumberNormalizer#setFallbackRegionCode(String)}
*/
private String fallbackRegionCode = null;
@Override
public void setFallbackRegionCode(String fallBackRegionCode) {
if (PhoneLibWrapper.getCountryCodeForRegion(fallBackRegionCode) > 0) {
this.fallbackRegionCode = fallBackRegionCode;
} else {
this.fallbackRegionCode = null; //invalid region code!
}
}
/**
* Fallback normalization within the number-plan of the fallback region.
* @param number the original number to be normalized
* @param dialableNumber the original number reduced to dialable digits
* @return E164 formatted phone number or at least a dialable version of the number
*
* @see PhoneNumberNormalizer#setFallbackRegionCode(String)
* @see PhoneNumberNormalizer#normalizePhoneNumber(String, String)
*/
private String fallbackNormalizationFromDeviceContextToDefaultRegionCode(String number, String dialableNumber) {
if (this.fallbackRegionCode == null) {
LOGGER.debug("Fallback Region was set!");
return dialableNumber;
} else {
return this.normalizePhoneNumber(number, this.fallbackRegionCode);
}
}
/**
* Uses wrapper of PhoneLib to identify if special rules apply for normalization.<br/>
* Using device context for enriching the number make it normalizable to E164 format if NDC is optional in the used number plan, but not used in the phone number to be normalized.
* @param wrapper instanced wrapper of PhoneLib
* @param deviceContext information like CC, NDC and {@link de.telekom.phonenumbernormalizer.dto.DeviceContextLineType} from which the number is dialled
* @return E164 formatted phone number or dialable version of it or null
*/
private String normalize(PhoneLibWrapper wrapper, DeviceContext deviceContext) {
// international prefix has been added by PhoneLib even if it's not valid in the number plan.
if (wrapper == null) {
LOGGER.debug("PhoneLipWrapper was not initialized");
return null;
}
if (wrapper.getSemiNormalizedNumber() == null) {
return wrapper.getDialableNumber();
}
NumberPlan numberplan = null;
if (deviceContext != null) {
numberplan = NumberPlanFactory.INSTANCE.getNumberPlan(deviceContext.getLineType(), deviceContext.getCountryCode());
}
if (wrapper.isShortNumber(numberplan)) {
//if it is a short number, we can't add area code nor country code, so returning the dialable.
return wrapper.getDialableNumber();
}
if (wrapper.hasRegionNationalAccessCode() && deviceContext != null) {
//Number plan is using a NationalPrefix aka Trunc Code ... so we could add Area Code if not included in the number.
return wrapper.extendNumberByDefaultAreaCodeAndCountryCode(wrapper.getNationalAccessCode(), deviceContext.getNationalDestinationCode());
}
// Number plan is not using NationalPrefix aka Trunc Code ... its also not a short number, so country code can be added:
return wrapper.getE164Formatted();
}
@Override
public String normalizePhoneNumber(String number, String regionCode) {
PhoneLibWrapper wrapper = new PhoneLibWrapper(number, regionCode);
if (wrapper.getSemiNormalizedNumber() == null) {
return wrapper.getDialableNumber();
}
if (wrapper.isShortNumber()) {
//if it is a short number, we can't add area code nor country code, so returning the dialable.
return wrapper.getDialableNumber();
}
// international prefix is added by the lib even if it's not valid in the number plan.
//checking if the input number is equal to the nationalNumber based on number plan and trunc code logic.
boolean hasNoCCAndNoNAC = wrapper.hasNoCountryCodeNorNationalAccessCode();
LOGGER.debug("Number has no CC and no NAC: {}.", hasNoCCAndNoNAC);
//if the number is definitely a short number or needs an area code but does not have it, we do not add the country code.
return (hasNoCCAndNoNAC) ?
wrapper.getDialableNumber() : wrapper.getE164Formatted();
}
@Override
public String normalizePhoneNumber(String number, DeviceContext deviceContext) {
// checking if the number has a special format or is not valid at all.
PhoneLibWrapper normalizerPhoneNumber = new PhoneLibWrapper(number, null);
if (! normalizerPhoneNumber.isNormalizingTried()) {
return normalizerPhoneNumber.getDialableNumber();
}
DeviceContext normalizedDeviceContext = DeviceContextMapper.normalized(deviceContext);
if | (!normalizedDeviceContext.getCountryCode().equals(DeviceContext.UNKNOWN_VALUE)) { |
String regionCode = PhoneLibWrapper.getRegionCodeForCountryCode(normalizedDeviceContext.getCountryCode());
// now working again with the region code
normalizerPhoneNumber = new PhoneLibWrapper(number, regionCode);
if (!normalizedDeviceContext.getNationalDestinationCode().equals(DeviceContext.UNKNOWN_VALUE)) {
// Number needs normalization:
return normalize(normalizerPhoneNumber, deviceContext);
}
// Device Context with CountryCode but without AreaCode ...
if (!(PhoneLibWrapper.UNKNOWN_REGIONCODE.equals(regionCode))) {
return this.normalizePhoneNumber(number, regionCode);
}
}
LOGGER.debug("Normalization based on DeviceContext did not work - falling back to normalization with fallback region.");
return this.fallbackNormalizationFromDeviceContextToDefaultRegionCode(number, normalizerPhoneNumber.getDialableNumber());
}
}
| src/main/java/de/telekom/phonenumbernormalizer/PhoneNumberNormalizerImpl.java | telekom-phonenumber-normalizer-9a1b7ad | [
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/PhoneNumberNormalizer.java",
"retrieved_chunk": " */\n String normalizePhoneNumber(String number, DeviceContext deviceContext);\n /**\n * Normalizes the number using PhoneLib with some additions to compensate.\n * <p>\n * Not as powerful as {@link PhoneNumberNormalizer#normalizePhoneNumber(String, DeviceContext)}, because no default NDC can be set.\n * </p>\n * @param number plain number to normalize\n * @param regionCode ISO2 code of the country, which number-plan is used for normalization\n * @return E164 formatted phone number or at least a dialable version of the number",
"score": 26.282384567934017
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " /**\n * If PhoneLib has been used to parse the given number into semiNormalizedNumber.\n *\n * @return {@link PhoneLibWrapper#isNormalizingTried}\n *\n * @see PhoneLibWrapper#PhoneLibWrapper(String, String)\n */\n public boolean isNormalizingTried() {\n return isNormalizingTried;\n }",
"score": 24.337573881920676
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/dto/DeviceContextMapper.java",
"retrieved_chunk": " public static DeviceContext normalized(DeviceContext context) {\n DeviceContext result = new DeviceContextDto();\n if (context==null) {\n result.setLineType(DeviceContextLineType.UNKNOWN);\n result.setCountryCode(DeviceContext.UNKNOWN_VALUE);\n result.setNationalDestinationCode(DeviceContext.UNKNOWN_VALUE);\n } else {\n result.setLineType(normalizeType(context.getLineType()));\n result.setCountryCode(normalizeCountryCode(context.getCountryCode()));\n result.setNationalDestinationCode(normalizeNationalDestinationCode(context.getNationalDestinationCode()));",
"score": 23.881483764947234
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " /**\n * Storing if PhoneLib has been used to parse the given number into semiNormalizedNumber.\n *\n * @see PhoneLibWrapper#PhoneLibWrapper(String, String)\n * @see PhoneLibWrapper#semiNormalizedNumber\n * @see PhoneLibWrapper#isNormalizingTried()\n */\n private boolean isNormalizingTried = false;\n /**\n * Initialize the wrapper by giving a phone number to be analyzed against a number plan of a given region",
"score": 22.14975941166715
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " *\n * @see PhoneLibWrapper#PhoneLibWrapper(String, String)\n */\n public boolean isShortNumber(NumberPlan numberplan) {\n if (numberplan == null) {\n return this.isShortNumber();\n }\n return numberplan.isMatchingShortNumber(this.getDialableNumber());\n }\n /**",
"score": 20.77919573296747
}
] | java | (!normalizedDeviceContext.getCountryCode().equals(DeviceContext.UNKNOWN_VALUE)) { |
/*
* Copyright © 2023 Deutsche Telekom AG ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.telekom.phonenumbernormalizer;
import de.telekom.phonenumbernormalizer.dto.DeviceContext;
import de.telekom.phonenumbernormalizer.dto.DeviceContextMapper;
import de.telekom.phonenumbernormalizer.numberplans.PhoneLibWrapper;
import de.telekom.phonenumbernormalizer.numberplans.NumberPlan;
import de.telekom.phonenumbernormalizer.numberplans.NumberPlanFactory;
import lombok.RequiredArgsConstructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
/**
* Concrete implementation of {@link PhoneNumberNormalizer} using {@link PhoneLibWrapper} to normalize a number by mitigating some inaccuracies when it comes to number plans of optional NDC and NAC as zero.
* <p>
* Also supports {@link DeviceContext} to enrich a phone number during normalization if the optional NDC is missing.
* </p>
*/
@RequiredArgsConstructor
@Component
public class PhoneNumberNormalizerImpl implements PhoneNumberNormalizer {
private static final Logger LOGGER = LoggerFactory.getLogger(PhoneNumberNormalizerImpl.class);
/**
* Storage for {@link PhoneNumberNormalizer#setFallbackRegionCode(String)}
*/
private String fallbackRegionCode = null;
@Override
public void setFallbackRegionCode(String fallBackRegionCode) {
if (PhoneLibWrapper.getCountryCodeForRegion(fallBackRegionCode) > 0) {
this.fallbackRegionCode = fallBackRegionCode;
} else {
this.fallbackRegionCode = null; //invalid region code!
}
}
/**
* Fallback normalization within the number-plan of the fallback region.
* @param number the original number to be normalized
* @param dialableNumber the original number reduced to dialable digits
* @return E164 formatted phone number or at least a dialable version of the number
*
* @see PhoneNumberNormalizer#setFallbackRegionCode(String)
* @see PhoneNumberNormalizer#normalizePhoneNumber(String, String)
*/
private String fallbackNormalizationFromDeviceContextToDefaultRegionCode(String number, String dialableNumber) {
if (this.fallbackRegionCode == null) {
LOGGER.debug("Fallback Region was set!");
return dialableNumber;
} else {
return this.normalizePhoneNumber(number, this.fallbackRegionCode);
}
}
/**
* Uses wrapper of PhoneLib to identify if special rules apply for normalization.<br/>
* Using device context for enriching the number make it normalizable to E164 format if NDC is optional in the used number plan, but not used in the phone number to be normalized.
* @param wrapper instanced wrapper of PhoneLib
* @param deviceContext information like CC, NDC and {@link de.telekom.phonenumbernormalizer.dto.DeviceContextLineType} from which the number is dialled
* @return E164 formatted phone number or dialable version of it or null
*/
private String normalize(PhoneLibWrapper wrapper, DeviceContext deviceContext) {
// international prefix has been added by PhoneLib even if it's not valid in the number plan.
if (wrapper == null) {
LOGGER.debug("PhoneLipWrapper was not initialized");
return null;
}
if (wrapper.getSemiNormalizedNumber() == null) {
return wrapper.getDialableNumber();
}
NumberPlan numberplan = null;
if (deviceContext != null) {
numberplan = NumberPlanFactory.INSTANCE.getNumberPlan(deviceContext.getLineType(), deviceContext.getCountryCode());
}
if (wrapper.isShortNumber(numberplan)) {
//if it is a short number, we can't add area code nor country code, so returning the dialable.
return wrapper.getDialableNumber();
}
if (wrapper.hasRegionNationalAccessCode() && deviceContext != null) {
//Number plan is using a NationalPrefix aka Trunc Code ... so we could add Area Code if not included in the number.
return wrapper.extendNumberByDefaultAreaCodeAndCountryCode(wrapper.getNationalAccessCode(), deviceContext.getNationalDestinationCode());
}
// Number plan is not using NationalPrefix aka Trunc Code ... its also not a short number, so country code can be added:
return wrapper.getE164Formatted();
}
@Override
public String normalizePhoneNumber(String number, String regionCode) {
PhoneLibWrapper wrapper = new PhoneLibWrapper(number, regionCode);
if (wrapper.getSemiNormalizedNumber() == null) {
return wrapper.getDialableNumber();
}
if (wrapper.isShortNumber()) {
//if it is a short number, we can't add area code nor country code, so returning the dialable.
return wrapper.getDialableNumber();
}
// international prefix is added by the lib even if it's not valid in the number plan.
//checking if the input number is equal to the nationalNumber based on number plan and trunc code logic.
boolean hasNoCCAndNoNAC = wrapper.hasNoCountryCodeNorNationalAccessCode();
LOGGER.debug("Number has no CC and no NAC: {}.", hasNoCCAndNoNAC);
//if the number is definitely a short number or needs an area code but does not have it, we do not add the country code.
return (hasNoCCAndNoNAC) ?
wrapper.getDialableNumber() : wrapper.getE164Formatted();
}
@Override
public String normalizePhoneNumber(String number, DeviceContext deviceContext) {
// checking if the number has a special format or is not valid at all.
PhoneLibWrapper normalizerPhoneNumber = new PhoneLibWrapper(number, null);
if (! normalizerPhoneNumber.isNormalizingTried()) {
return normalizerPhoneNumber.getDialableNumber();
}
DeviceContext normalizedDeviceContext = DeviceContextMapper.normalized(deviceContext);
if (!normalizedDeviceContext.getCountryCode().equals(DeviceContext.UNKNOWN_VALUE)) {
String regionCode | = PhoneLibWrapper.getRegionCodeForCountryCode(normalizedDeviceContext.getCountryCode()); |
// now working again with the region code
normalizerPhoneNumber = new PhoneLibWrapper(number, regionCode);
if (!normalizedDeviceContext.getNationalDestinationCode().equals(DeviceContext.UNKNOWN_VALUE)) {
// Number needs normalization:
return normalize(normalizerPhoneNumber, deviceContext);
}
// Device Context with CountryCode but without AreaCode ...
if (!(PhoneLibWrapper.UNKNOWN_REGIONCODE.equals(regionCode))) {
return this.normalizePhoneNumber(number, regionCode);
}
}
LOGGER.debug("Normalization based on DeviceContext did not work - falling back to normalization with fallback region.");
return this.fallbackNormalizationFromDeviceContextToDefaultRegionCode(number, normalizerPhoneNumber.getDialableNumber());
}
}
| src/main/java/de/telekom/phonenumbernormalizer/PhoneNumberNormalizerImpl.java | telekom-phonenumber-normalizer-9a1b7ad | [
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " /**\n * If PhoneLib has been used to parse the given number into semiNormalizedNumber.\n *\n * @return {@link PhoneLibWrapper#isNormalizingTried}\n *\n * @see PhoneLibWrapper#PhoneLibWrapper(String, String)\n */\n public boolean isNormalizingTried() {\n return isNormalizingTried;\n }",
"score": 29.45323342463658
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/PhoneNumberNormalizer.java",
"retrieved_chunk": " */\n String normalizePhoneNumber(String number, DeviceContext deviceContext);\n /**\n * Normalizes the number using PhoneLib with some additions to compensate.\n * <p>\n * Not as powerful as {@link PhoneNumberNormalizer#normalizePhoneNumber(String, DeviceContext)}, because no default NDC can be set.\n * </p>\n * @param number plain number to normalize\n * @param regionCode ISO2 code of the country, which number-plan is used for normalization\n * @return E164 formatted phone number or at least a dialable version of the number",
"score": 29.393257838144493
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " /**\n * Storing if PhoneLib has been used to parse the given number into semiNormalizedNumber.\n *\n * @see PhoneLibWrapper#PhoneLibWrapper(String, String)\n * @see PhoneLibWrapper#semiNormalizedNumber\n * @see PhoneLibWrapper#isNormalizingTried()\n */\n private boolean isNormalizingTried = false;\n /**\n * Initialize the wrapper by giving a phone number to be analyzed against a number plan of a given region",
"score": 26.94213112851205
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/dto/DeviceContextMapper.java",
"retrieved_chunk": " public static DeviceContext normalized(DeviceContext context) {\n DeviceContext result = new DeviceContextDto();\n if (context==null) {\n result.setLineType(DeviceContextLineType.UNKNOWN);\n result.setCountryCode(DeviceContext.UNKNOWN_VALUE);\n result.setNationalDestinationCode(DeviceContext.UNKNOWN_VALUE);\n } else {\n result.setLineType(normalizeType(context.getLineType()));\n result.setCountryCode(normalizeCountryCode(context.getCountryCode()));\n result.setNationalDestinationCode(normalizeNationalDestinationCode(context.getNationalDestinationCode()));",
"score": 26.589631795097297
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " String dialableNumber;\n /**\n * The given number normalized with PhoneLib, risking we get a incorrect normalization\n *\n * @see PhoneLibWrapper#PhoneLibWrapper(String, String)\n * @see PhoneLibWrapper#isNormalizingTried()\n * @see PhoneLibWrapper#getSemiNormalizedNumber()\n */\n Phonenumber.PhoneNumber semiNormalizedNumber;\n /**",
"score": 26.109301344306452
}
] | java | = PhoneLibWrapper.getRegionCodeForCountryCode(normalizedDeviceContext.getCountryCode()); |
/*
* Copyright © 2023 Deutsche Telekom AG ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.telekom.phonenumbernormalizer;
import de.telekom.phonenumbernormalizer.dto.DeviceContext;
import de.telekom.phonenumbernormalizer.dto.DeviceContextMapper;
import de.telekom.phonenumbernormalizer.numberplans.PhoneLibWrapper;
import de.telekom.phonenumbernormalizer.numberplans.NumberPlan;
import de.telekom.phonenumbernormalizer.numberplans.NumberPlanFactory;
import lombok.RequiredArgsConstructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
/**
* Concrete implementation of {@link PhoneNumberNormalizer} using {@link PhoneLibWrapper} to normalize a number by mitigating some inaccuracies when it comes to number plans of optional NDC and NAC as zero.
* <p>
* Also supports {@link DeviceContext} to enrich a phone number during normalization if the optional NDC is missing.
* </p>
*/
@RequiredArgsConstructor
@Component
public class PhoneNumberNormalizerImpl implements PhoneNumberNormalizer {
private static final Logger LOGGER = LoggerFactory.getLogger(PhoneNumberNormalizerImpl.class);
/**
* Storage for {@link PhoneNumberNormalizer#setFallbackRegionCode(String)}
*/
private String fallbackRegionCode = null;
@Override
public void setFallbackRegionCode(String fallBackRegionCode) {
if (PhoneLibWrapper.getCountryCodeForRegion(fallBackRegionCode) > 0) {
this.fallbackRegionCode = fallBackRegionCode;
} else {
this.fallbackRegionCode = null; //invalid region code!
}
}
/**
* Fallback normalization within the number-plan of the fallback region.
* @param number the original number to be normalized
* @param dialableNumber the original number reduced to dialable digits
* @return E164 formatted phone number or at least a dialable version of the number
*
* @see PhoneNumberNormalizer#setFallbackRegionCode(String)
* @see PhoneNumberNormalizer#normalizePhoneNumber(String, String)
*/
private String fallbackNormalizationFromDeviceContextToDefaultRegionCode(String number, String dialableNumber) {
if (this.fallbackRegionCode == null) {
LOGGER.debug("Fallback Region was set!");
return dialableNumber;
} else {
return this.normalizePhoneNumber(number, this.fallbackRegionCode);
}
}
/**
* Uses wrapper of PhoneLib to identify if special rules apply for normalization.<br/>
* Using device context for enriching the number make it normalizable to E164 format if NDC is optional in the used number plan, but not used in the phone number to be normalized.
* @param wrapper instanced wrapper of PhoneLib
* @param deviceContext information like CC, NDC and {@link de.telekom.phonenumbernormalizer.dto.DeviceContextLineType} from which the number is dialled
* @return E164 formatted phone number or dialable version of it or null
*/
private String normalize(PhoneLibWrapper wrapper, DeviceContext deviceContext) {
// international prefix has been added by PhoneLib even if it's not valid in the number plan.
if (wrapper == null) {
LOGGER.debug("PhoneLipWrapper was not initialized");
return null;
}
if (wrapper.getSemiNormalizedNumber() == null) {
return wrapper.getDialableNumber();
}
NumberPlan numberplan = null;
if (deviceContext != null) {
numberplan = NumberPlanFactory.INSTANCE.getNumberPlan(deviceContext.getLineType(), deviceContext.getCountryCode());
}
if (wrapper.isShortNumber(numberplan)) {
//if it is a short number, we can't add area code nor country code, so returning the dialable.
return wrapper.getDialableNumber();
}
if (wrapper.hasRegionNationalAccessCode() && deviceContext != null) {
//Number plan is using a NationalPrefix aka Trunc Code ... so we could add Area Code if not included in the number.
return wrapper.extendNumberByDefaultAreaCodeAndCountryCode(wrapper.getNationalAccessCode(), deviceContext.getNationalDestinationCode());
}
// Number plan is not using NationalPrefix aka Trunc Code ... its also not a short number, so country code can be added:
return wrapper.getE164Formatted();
}
@Override
public String normalizePhoneNumber(String number, String regionCode) {
PhoneLibWrapper wrapper = new PhoneLibWrapper(number, regionCode);
if (wrapper.getSemiNormalizedNumber() == null) {
return wrapper.getDialableNumber();
}
if (wrapper.isShortNumber()) {
//if it is a short number, we can't add area code nor country code, so returning the dialable.
return wrapper.getDialableNumber();
}
// international prefix is added by the lib even if it's not valid in the number plan.
//checking if the input number is equal to the nationalNumber based on number plan and trunc code logic.
boolean hasNoCCAndNoNAC = wrapper.hasNoCountryCodeNorNationalAccessCode();
LOGGER.debug("Number has no CC and no NAC: {}.", hasNoCCAndNoNAC);
//if the number is definitely a short number or needs an area code but does not have it, we do not add the country code.
return (hasNoCCAndNoNAC) ?
wrapper.getDialableNumber() : wrapper.getE164Formatted();
}
@Override
public String normalizePhoneNumber(String number, DeviceContext deviceContext) {
// checking if the number has a special format or is not valid at all.
PhoneLibWrapper normalizerPhoneNumber = new PhoneLibWrapper(number, null);
| if (! normalizerPhoneNumber.isNormalizingTried()) { |
return normalizerPhoneNumber.getDialableNumber();
}
DeviceContext normalizedDeviceContext = DeviceContextMapper.normalized(deviceContext);
if (!normalizedDeviceContext.getCountryCode().equals(DeviceContext.UNKNOWN_VALUE)) {
String regionCode = PhoneLibWrapper.getRegionCodeForCountryCode(normalizedDeviceContext.getCountryCode());
// now working again with the region code
normalizerPhoneNumber = new PhoneLibWrapper(number, regionCode);
if (!normalizedDeviceContext.getNationalDestinationCode().equals(DeviceContext.UNKNOWN_VALUE)) {
// Number needs normalization:
return normalize(normalizerPhoneNumber, deviceContext);
}
// Device Context with CountryCode but without AreaCode ...
if (!(PhoneLibWrapper.UNKNOWN_REGIONCODE.equals(regionCode))) {
return this.normalizePhoneNumber(number, regionCode);
}
}
LOGGER.debug("Normalization based on DeviceContext did not work - falling back to normalization with fallback region.");
return this.fallbackNormalizationFromDeviceContextToDefaultRegionCode(number, normalizerPhoneNumber.getDialableNumber());
}
}
| src/main/java/de/telekom/phonenumbernormalizer/PhoneNumberNormalizerImpl.java | telekom-phonenumber-normalizer-9a1b7ad | [
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " *\n * @see PhoneLibWrapper#PhoneLibWrapper(String, String)\n */\n public String getE164Formatted() {\n return phoneUtil.format(this.semiNormalizedNumber, PhoneNumberUtil.PhoneNumberFormat.E164);\n }\n /**\n * If we know the given region for the given number {@link PhoneLibWrapper#hasRegionNationalAccessCode()}, this method checks if the given number does not start with a NAC nor a CC,\n * so we could permanently add a default NDC and NAC to the given number and for this new value the method directly return a E164 formatted representation.\n * @param nationalAccessCode the NAC to be added e.g. for Germany it would be \"0\"",
"score": 42.10451876996135
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " * If we have a plain national number based on regions number plan and potential NAC logic.\n * <p>\n * For a number plan without NAC logic, it will always return false!\n * </p>\n * @return if given number could have CC and NAC, but does not have any of them.\n */\n public boolean hasNoCountryCodeNorNationalAccessCode() {\n // if given number has no NAC and no CC, it equals national phone number (without NAC).\n if (! Objects.equals(dialableNumber, this.getNationalPhoneNumberWithoutNationalAccessCode())) {\n return false;",
"score": 40.848286653229756
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " try {\n this.semiNormalizedNumber = phoneUtil.parse(extendedNumber, regionCode);\n // after area code has been added, we can add the country code by the lib:\n return getE164Formatted();\n } catch (NumberParseException e) {\n LOGGER.warn(\"could not parse extended number: {}\", extendedNumber);\n LOGGER.debug(\"{}\", e.getMessage());\n return dialableNumber;\n }\n } else {",
"score": 38.677199009400645
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " /**\n * Storing if PhoneLib has been used to parse the given number into semiNormalizedNumber.\n *\n * @see PhoneLibWrapper#PhoneLibWrapper(String, String)\n * @see PhoneLibWrapper#semiNormalizedNumber\n * @see PhoneLibWrapper#isNormalizingTried()\n */\n private boolean isNormalizingTried = false;\n /**\n * Initialize the wrapper by giving a phone number to be analyzed against a number plan of a given region",
"score": 38.584378590526406
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " /**\n * Using PhoneLib short number utility if it identifies the given number as a short number, which would not need a NAC.\n * <p>\n * This is a fallback for {@link PhoneLibWrapper#isShortNumber(NumberPlan)}, when we do not have an own number plan information.\n * </p>\n * @return if PhoneLib identifies given number as a short number\n *\n * @see PhoneLibWrapper#PhoneLibWrapper(String, String)\n * @see PhoneLibWrapper#isShortNumber(NumberPlan)\n */",
"score": 38.354958940126394
}
] | java | if (! normalizerPhoneNumber.isNormalizingTried()) { |
/*
* Copyright © 2023 Deutsche Telekom AG ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.telekom.phonenumbernormalizer;
import de.telekom.phonenumbernormalizer.dto.DeviceContext;
import de.telekom.phonenumbernormalizer.dto.DeviceContextMapper;
import de.telekom.phonenumbernormalizer.numberplans.PhoneLibWrapper;
import de.telekom.phonenumbernormalizer.numberplans.NumberPlan;
import de.telekom.phonenumbernormalizer.numberplans.NumberPlanFactory;
import lombok.RequiredArgsConstructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
/**
* Concrete implementation of {@link PhoneNumberNormalizer} using {@link PhoneLibWrapper} to normalize a number by mitigating some inaccuracies when it comes to number plans of optional NDC and NAC as zero.
* <p>
* Also supports {@link DeviceContext} to enrich a phone number during normalization if the optional NDC is missing.
* </p>
*/
@RequiredArgsConstructor
@Component
public class PhoneNumberNormalizerImpl implements PhoneNumberNormalizer {
private static final Logger LOGGER = LoggerFactory.getLogger(PhoneNumberNormalizerImpl.class);
/**
* Storage for {@link PhoneNumberNormalizer#setFallbackRegionCode(String)}
*/
private String fallbackRegionCode = null;
@Override
public void setFallbackRegionCode(String fallBackRegionCode) {
if | (PhoneLibWrapper.getCountryCodeForRegion(fallBackRegionCode) > 0) { |
this.fallbackRegionCode = fallBackRegionCode;
} else {
this.fallbackRegionCode = null; //invalid region code!
}
}
/**
* Fallback normalization within the number-plan of the fallback region.
* @param number the original number to be normalized
* @param dialableNumber the original number reduced to dialable digits
* @return E164 formatted phone number or at least a dialable version of the number
*
* @see PhoneNumberNormalizer#setFallbackRegionCode(String)
* @see PhoneNumberNormalizer#normalizePhoneNumber(String, String)
*/
private String fallbackNormalizationFromDeviceContextToDefaultRegionCode(String number, String dialableNumber) {
if (this.fallbackRegionCode == null) {
LOGGER.debug("Fallback Region was set!");
return dialableNumber;
} else {
return this.normalizePhoneNumber(number, this.fallbackRegionCode);
}
}
/**
* Uses wrapper of PhoneLib to identify if special rules apply for normalization.<br/>
* Using device context for enriching the number make it normalizable to E164 format if NDC is optional in the used number plan, but not used in the phone number to be normalized.
* @param wrapper instanced wrapper of PhoneLib
* @param deviceContext information like CC, NDC and {@link de.telekom.phonenumbernormalizer.dto.DeviceContextLineType} from which the number is dialled
* @return E164 formatted phone number or dialable version of it or null
*/
private String normalize(PhoneLibWrapper wrapper, DeviceContext deviceContext) {
// international prefix has been added by PhoneLib even if it's not valid in the number plan.
if (wrapper == null) {
LOGGER.debug("PhoneLipWrapper was not initialized");
return null;
}
if (wrapper.getSemiNormalizedNumber() == null) {
return wrapper.getDialableNumber();
}
NumberPlan numberplan = null;
if (deviceContext != null) {
numberplan = NumberPlanFactory.INSTANCE.getNumberPlan(deviceContext.getLineType(), deviceContext.getCountryCode());
}
if (wrapper.isShortNumber(numberplan)) {
//if it is a short number, we can't add area code nor country code, so returning the dialable.
return wrapper.getDialableNumber();
}
if (wrapper.hasRegionNationalAccessCode() && deviceContext != null) {
//Number plan is using a NationalPrefix aka Trunc Code ... so we could add Area Code if not included in the number.
return wrapper.extendNumberByDefaultAreaCodeAndCountryCode(wrapper.getNationalAccessCode(), deviceContext.getNationalDestinationCode());
}
// Number plan is not using NationalPrefix aka Trunc Code ... its also not a short number, so country code can be added:
return wrapper.getE164Formatted();
}
@Override
public String normalizePhoneNumber(String number, String regionCode) {
PhoneLibWrapper wrapper = new PhoneLibWrapper(number, regionCode);
if (wrapper.getSemiNormalizedNumber() == null) {
return wrapper.getDialableNumber();
}
if (wrapper.isShortNumber()) {
//if it is a short number, we can't add area code nor country code, so returning the dialable.
return wrapper.getDialableNumber();
}
// international prefix is added by the lib even if it's not valid in the number plan.
//checking if the input number is equal to the nationalNumber based on number plan and trunc code logic.
boolean hasNoCCAndNoNAC = wrapper.hasNoCountryCodeNorNationalAccessCode();
LOGGER.debug("Number has no CC and no NAC: {}.", hasNoCCAndNoNAC);
//if the number is definitely a short number or needs an area code but does not have it, we do not add the country code.
return (hasNoCCAndNoNAC) ?
wrapper.getDialableNumber() : wrapper.getE164Formatted();
}
@Override
public String normalizePhoneNumber(String number, DeviceContext deviceContext) {
// checking if the number has a special format or is not valid at all.
PhoneLibWrapper normalizerPhoneNumber = new PhoneLibWrapper(number, null);
if (! normalizerPhoneNumber.isNormalizingTried()) {
return normalizerPhoneNumber.getDialableNumber();
}
DeviceContext normalizedDeviceContext = DeviceContextMapper.normalized(deviceContext);
if (!normalizedDeviceContext.getCountryCode().equals(DeviceContext.UNKNOWN_VALUE)) {
String regionCode = PhoneLibWrapper.getRegionCodeForCountryCode(normalizedDeviceContext.getCountryCode());
// now working again with the region code
normalizerPhoneNumber = new PhoneLibWrapper(number, regionCode);
if (!normalizedDeviceContext.getNationalDestinationCode().equals(DeviceContext.UNKNOWN_VALUE)) {
// Number needs normalization:
return normalize(normalizerPhoneNumber, deviceContext);
}
// Device Context with CountryCode but without AreaCode ...
if (!(PhoneLibWrapper.UNKNOWN_REGIONCODE.equals(regionCode))) {
return this.normalizePhoneNumber(number, regionCode);
}
}
LOGGER.debug("Normalization based on DeviceContext did not work - falling back to normalization with fallback region.");
return this.fallbackNormalizationFromDeviceContextToDefaultRegionCode(number, normalizerPhoneNumber.getDialableNumber());
}
}
| src/main/java/de/telekom/phonenumbernormalizer/PhoneNumberNormalizerImpl.java | telekom-phonenumber-normalizer-9a1b7ad | [
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/PhoneNumberNormalizer.java",
"retrieved_chunk": "public interface PhoneNumberNormalizer {\n /**\n * Sets the ISO2 country code, which is used if the {@link DeviceContext} does not indicate one.\n * The country might represent a region, like \"US\" for North America.\n *\n * @param fallBackRegionCode ISO2 code of the country\n *\n * @see PhoneNumberNormalizer#normalizePhoneNumber(String, DeviceContext)\n */\n void setFallbackRegionCode(String fallBackRegionCode);",
"score": 43.42498579561563
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " */\n@Data\npublic class PhoneLibWrapper {\n private static final Logger LOGGER = LoggerFactory.getLogger(PhoneLibWrapper.class);\n public static final String UNKNOWN_REGIONCODE = \"ZZ\"; // see https://github.com/google/libphonenumber/blob/5e9507a46051405120bc73fcc13d0b0be1b93c29/java/libphonenumber/test/com/google/i18n/phonenumbers/RegionCode.java#L62\n /**\n * The given number reduced to characters which could be dialed\n *\n * @see PhoneLibWrapper#PhoneLibWrapper(String, String)\n */",
"score": 33.861334002401414
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/NumberPlanFactory.java",
"retrieved_chunk": " */\npublic class NumberPlanFactory {\n private static final Logger LOGGER = LoggerFactory.getLogger(NumberPlanFactory.class);\n public static final NumberPlanFactory INSTANCE = new NumberPlanFactory();\n /**\n * Two-dimensional map - The first key is DeviceContextLineType and second key is the Country Calling Code while the value is a NumberPlan object.\n *\n * @see NumberPlan\n * @see DeviceContextLineType\n */",
"score": 27.176261683286697
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/dto/DeviceContextMapper.java",
"retrieved_chunk": "/**\n * Helper class providing some static methods for setting up a DeviceContext instance with the correct value for unknown value if an attribute is not provided\n */\npublic class DeviceContextMapper {\n private static final Logger LOGGER = LoggerFactory.getLogger(DeviceContextMapper.class);\n /**\n * Since the class only provides some static methods, it mustn't be instantiated.\n * The initializer will always throw an IllegalStateException\n *\n * @see IllegalStateException",
"score": 25.58311917466099
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/PhoneNumberAreaLabelImpl.java",
"retrieved_chunk": "@RequiredArgsConstructor\n@Component\npublic class PhoneNumberAreaLabelImpl implements PhoneNumberAreaLabel {\n private static final Logger LOGGER = LoggerFactory.getLogger(PhoneNumberAreaLabelImpl.class);\n /**\n * Array of full (path + filename + extension) files, where:<br/>\n * filename: matches ISO2 country code of country\n * content: JSON array with one object. Each key is a phone number prefix (similar to NDC, but without NAC) and the value its corresponding label. If keys have overlapping, the one with the longest key will be used.\n */\n @Value(\"classpath:${service.areaLabel.nationalLabels}\")",
"score": 22.934987336496548
}
] | java | (PhoneLibWrapper.getCountryCodeForRegion(fallBackRegionCode) > 0) { |
/*
* Copyright © 2023 Deutsche Telekom AG ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.telekom.phonenumbernormalizer;
import de.telekom.phonenumbernormalizer.dto.DeviceContext;
import de.telekom.phonenumbernormalizer.dto.DeviceContextMapper;
import de.telekom.phonenumbernormalizer.numberplans.PhoneLibWrapper;
import de.telekom.phonenumbernormalizer.numberplans.NumberPlan;
import de.telekom.phonenumbernormalizer.numberplans.NumberPlanFactory;
import lombok.RequiredArgsConstructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
/**
* Concrete implementation of {@link PhoneNumberNormalizer} using {@link PhoneLibWrapper} to normalize a number by mitigating some inaccuracies when it comes to number plans of optional NDC and NAC as zero.
* <p>
* Also supports {@link DeviceContext} to enrich a phone number during normalization if the optional NDC is missing.
* </p>
*/
@RequiredArgsConstructor
@Component
public class PhoneNumberNormalizerImpl implements PhoneNumberNormalizer {
private static final Logger LOGGER = LoggerFactory.getLogger(PhoneNumberNormalizerImpl.class);
/**
* Storage for {@link PhoneNumberNormalizer#setFallbackRegionCode(String)}
*/
private String fallbackRegionCode = null;
@Override
public void setFallbackRegionCode(String fallBackRegionCode) {
if (PhoneLibWrapper.getCountryCodeForRegion(fallBackRegionCode) > 0) {
this.fallbackRegionCode = fallBackRegionCode;
} else {
this.fallbackRegionCode = null; //invalid region code!
}
}
/**
* Fallback normalization within the number-plan of the fallback region.
* @param number the original number to be normalized
* @param dialableNumber the original number reduced to dialable digits
* @return E164 formatted phone number or at least a dialable version of the number
*
* @see PhoneNumberNormalizer#setFallbackRegionCode(String)
* @see PhoneNumberNormalizer#normalizePhoneNumber(String, String)
*/
private String fallbackNormalizationFromDeviceContextToDefaultRegionCode(String number, String dialableNumber) {
if (this.fallbackRegionCode == null) {
LOGGER.debug("Fallback Region was set!");
return dialableNumber;
} else {
return this.normalizePhoneNumber(number, this.fallbackRegionCode);
}
}
/**
* Uses wrapper of PhoneLib to identify if special rules apply for normalization.<br/>
* Using device context for enriching the number make it normalizable to E164 format if NDC is optional in the used number plan, but not used in the phone number to be normalized.
* @param wrapper instanced wrapper of PhoneLib
* @param deviceContext information like CC, NDC and {@link de.telekom.phonenumbernormalizer.dto.DeviceContextLineType} from which the number is dialled
* @return E164 formatted phone number or dialable version of it or null
*/
private String normalize(PhoneLibWrapper wrapper, DeviceContext deviceContext) {
// international prefix has been added by PhoneLib even if it's not valid in the number plan.
if (wrapper == null) {
LOGGER.debug("PhoneLipWrapper was not initialized");
return null;
}
if (wrapper.getSemiNormalizedNumber() == null) {
return wrapper.getDialableNumber();
}
NumberPlan numberplan = null;
if (deviceContext != null) {
numberplan = NumberPlanFactory.INSTANCE.getNumberPlan(deviceContext.getLineType(), deviceContext.getCountryCode());
}
if (wrapper.isShortNumber(numberplan)) {
//if it is a short number, we can't add area code nor country code, so returning the dialable.
return wrapper.getDialableNumber();
}
if | (wrapper.hasRegionNationalAccessCode() && deviceContext != null) { |
//Number plan is using a NationalPrefix aka Trunc Code ... so we could add Area Code if not included in the number.
return wrapper.extendNumberByDefaultAreaCodeAndCountryCode(wrapper.getNationalAccessCode(), deviceContext.getNationalDestinationCode());
}
// Number plan is not using NationalPrefix aka Trunc Code ... its also not a short number, so country code can be added:
return wrapper.getE164Formatted();
}
@Override
public String normalizePhoneNumber(String number, String regionCode) {
PhoneLibWrapper wrapper = new PhoneLibWrapper(number, regionCode);
if (wrapper.getSemiNormalizedNumber() == null) {
return wrapper.getDialableNumber();
}
if (wrapper.isShortNumber()) {
//if it is a short number, we can't add area code nor country code, so returning the dialable.
return wrapper.getDialableNumber();
}
// international prefix is added by the lib even if it's not valid in the number plan.
//checking if the input number is equal to the nationalNumber based on number plan and trunc code logic.
boolean hasNoCCAndNoNAC = wrapper.hasNoCountryCodeNorNationalAccessCode();
LOGGER.debug("Number has no CC and no NAC: {}.", hasNoCCAndNoNAC);
//if the number is definitely a short number or needs an area code but does not have it, we do not add the country code.
return (hasNoCCAndNoNAC) ?
wrapper.getDialableNumber() : wrapper.getE164Formatted();
}
@Override
public String normalizePhoneNumber(String number, DeviceContext deviceContext) {
// checking if the number has a special format or is not valid at all.
PhoneLibWrapper normalizerPhoneNumber = new PhoneLibWrapper(number, null);
if (! normalizerPhoneNumber.isNormalizingTried()) {
return normalizerPhoneNumber.getDialableNumber();
}
DeviceContext normalizedDeviceContext = DeviceContextMapper.normalized(deviceContext);
if (!normalizedDeviceContext.getCountryCode().equals(DeviceContext.UNKNOWN_VALUE)) {
String regionCode = PhoneLibWrapper.getRegionCodeForCountryCode(normalizedDeviceContext.getCountryCode());
// now working again with the region code
normalizerPhoneNumber = new PhoneLibWrapper(number, regionCode);
if (!normalizedDeviceContext.getNationalDestinationCode().equals(DeviceContext.UNKNOWN_VALUE)) {
// Number needs normalization:
return normalize(normalizerPhoneNumber, deviceContext);
}
// Device Context with CountryCode but without AreaCode ...
if (!(PhoneLibWrapper.UNKNOWN_REGIONCODE.equals(regionCode))) {
return this.normalizePhoneNumber(number, regionCode);
}
}
LOGGER.debug("Normalization based on DeviceContext did not work - falling back to normalization with fallback region.");
return this.fallbackNormalizationFromDeviceContextToDefaultRegionCode(number, normalizerPhoneNumber.getDialableNumber());
}
}
| src/main/java/de/telekom/phonenumbernormalizer/PhoneNumberNormalizerImpl.java | telekom-phonenumber-normalizer-9a1b7ad | [
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " *\n * @see PhoneLibWrapper#PhoneLibWrapper(String, String)\n */\n public boolean isShortNumber(NumberPlan numberplan) {\n if (numberplan == null) {\n return this.isShortNumber();\n }\n return numberplan.isMatchingShortNumber(this.getDialableNumber());\n }\n /**",
"score": 46.56794549226088
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " public boolean isShortNumber() {\n return shortNumberUtil.isPossibleShortNumber(this.getSemiNormalizedNumber());\n }\n /**\n * Using own {@link NumberPlan} to identify if the given number is a short number, which would not need a NAC.\n * <p>\n * If no number plan is given, {@link PhoneLibWrapper#isShortNumber} is used as fallback.\n * </p>\n * @param numberplan the number plan we identified to be used for a check\n * @return if number plan or as fallback PhoneLib identifies given number as a short number",
"score": 24.56940566057481
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " //it seems we have nationalnumber with national prefix, so we could add country code:\n return getE164Formatted();\n }\n }\n /**\n * Some Special dial-able characters make a number either not necessary to be normalized (\"+\" is already normalized) or can't be normalized (\"*\" control codes)\n * @param value phone number representation\n * @return if phone number starts with special characters which makes normalization unable / not necessary\n */\n static boolean isSpecialFormat(String value) {",
"score": 24.525746736060263
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/PhoneNumberNormalizer.java",
"retrieved_chunk": " */\n String normalizePhoneNumber(String number, DeviceContext deviceContext);\n /**\n * Normalizes the number using PhoneLib with some additions to compensate.\n * <p>\n * Not as powerful as {@link PhoneNumberNormalizer#normalizePhoneNumber(String, DeviceContext)}, because no default NDC can be set.\n * </p>\n * @param number plain number to normalize\n * @param regionCode ISO2 code of the country, which number-plan is used for normalization\n * @return E164 formatted phone number or at least a dialable version of the number",
"score": 22.23512020858455
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/PhoneNumberNormalizer.java",
"retrieved_chunk": " /**\n * Normalizes the number using PhoneLib with some additions to compensate.\n * <p>\n * Preferable to {@link PhoneNumberNormalizer#normalizePhoneNumber(String, String)}, because default NDC can be provided, so that more compensation for generating a valid E164 can be done.\n * </p>\n * @param number plain number to normalize\n * @param deviceContext information like CC, NDC and {@link de.telekom.phonenumbernormalizer.dto.DeviceContextLineType} from which the number is dialled\n * @return E164 formatted phone number or at least a dialable version of the number\n *\n * @see PhoneNumberNormalizer#setFallbackRegionCode(String)",
"score": 19.51684242504281
}
] | java | (wrapper.hasRegionNationalAccessCode() && deviceContext != null) { |
/*
* Copyright © 2023 Deutsche Telekom AG ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.telekom.phonenumbernormalizer;
import de.telekom.phonenumbernormalizer.dto.DeviceContext;
import de.telekom.phonenumbernormalizer.dto.DeviceContextMapper;
import de.telekom.phonenumbernormalizer.numberplans.PhoneLibWrapper;
import de.telekom.phonenumbernormalizer.numberplans.NumberPlan;
import de.telekom.phonenumbernormalizer.numberplans.NumberPlanFactory;
import lombok.RequiredArgsConstructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
/**
* Concrete implementation of {@link PhoneNumberNormalizer} using {@link PhoneLibWrapper} to normalize a number by mitigating some inaccuracies when it comes to number plans of optional NDC and NAC as zero.
* <p>
* Also supports {@link DeviceContext} to enrich a phone number during normalization if the optional NDC is missing.
* </p>
*/
@RequiredArgsConstructor
@Component
public class PhoneNumberNormalizerImpl implements PhoneNumberNormalizer {
private static final Logger LOGGER = LoggerFactory.getLogger(PhoneNumberNormalizerImpl.class);
/**
* Storage for {@link PhoneNumberNormalizer#setFallbackRegionCode(String)}
*/
private String fallbackRegionCode = null;
@Override
public void setFallbackRegionCode(String fallBackRegionCode) {
if (PhoneLibWrapper.getCountryCodeForRegion(fallBackRegionCode) > 0) {
this.fallbackRegionCode = fallBackRegionCode;
} else {
this.fallbackRegionCode = null; //invalid region code!
}
}
/**
* Fallback normalization within the number-plan of the fallback region.
* @param number the original number to be normalized
* @param dialableNumber the original number reduced to dialable digits
* @return E164 formatted phone number or at least a dialable version of the number
*
* @see PhoneNumberNormalizer#setFallbackRegionCode(String)
* @see PhoneNumberNormalizer#normalizePhoneNumber(String, String)
*/
private String fallbackNormalizationFromDeviceContextToDefaultRegionCode(String number, String dialableNumber) {
if (this.fallbackRegionCode == null) {
LOGGER.debug("Fallback Region was set!");
return dialableNumber;
} else {
return this.normalizePhoneNumber(number, this.fallbackRegionCode);
}
}
/**
* Uses wrapper of PhoneLib to identify if special rules apply for normalization.<br/>
* Using device context for enriching the number make it normalizable to E164 format if NDC is optional in the used number plan, but not used in the phone number to be normalized.
* @param wrapper instanced wrapper of PhoneLib
* @param deviceContext information like CC, NDC and {@link de.telekom.phonenumbernormalizer.dto.DeviceContextLineType} from which the number is dialled
* @return E164 formatted phone number or dialable version of it or null
*/
private String normalize(PhoneLibWrapper wrapper, DeviceContext deviceContext) {
// international prefix has been added by PhoneLib even if it's not valid in the number plan.
if (wrapper == null) {
LOGGER.debug("PhoneLipWrapper was not initialized");
return null;
}
if (wrapper.getSemiNormalizedNumber() == null) {
return wrapper.getDialableNumber();
}
NumberPlan numberplan = null;
if (deviceContext != null) {
numberplan = NumberPlanFactory.INSTANCE.getNumberPlan(deviceContext.getLineType(), deviceContext.getCountryCode());
}
if (wrapper.isShortNumber(numberplan)) {
//if it is a short number, we can't add area code nor country code, so returning the dialable.
return wrapper.getDialableNumber();
}
if (wrapper.hasRegionNationalAccessCode() && deviceContext != null) {
//Number plan is using a NationalPrefix aka Trunc Code ... so we could add Area Code if not included in the number.
return wrapper | .extendNumberByDefaultAreaCodeAndCountryCode(wrapper.getNationalAccessCode(), deviceContext.getNationalDestinationCode()); |
}
// Number plan is not using NationalPrefix aka Trunc Code ... its also not a short number, so country code can be added:
return wrapper.getE164Formatted();
}
@Override
public String normalizePhoneNumber(String number, String regionCode) {
PhoneLibWrapper wrapper = new PhoneLibWrapper(number, regionCode);
if (wrapper.getSemiNormalizedNumber() == null) {
return wrapper.getDialableNumber();
}
if (wrapper.isShortNumber()) {
//if it is a short number, we can't add area code nor country code, so returning the dialable.
return wrapper.getDialableNumber();
}
// international prefix is added by the lib even if it's not valid in the number plan.
//checking if the input number is equal to the nationalNumber based on number plan and trunc code logic.
boolean hasNoCCAndNoNAC = wrapper.hasNoCountryCodeNorNationalAccessCode();
LOGGER.debug("Number has no CC and no NAC: {}.", hasNoCCAndNoNAC);
//if the number is definitely a short number or needs an area code but does not have it, we do not add the country code.
return (hasNoCCAndNoNAC) ?
wrapper.getDialableNumber() : wrapper.getE164Formatted();
}
@Override
public String normalizePhoneNumber(String number, DeviceContext deviceContext) {
// checking if the number has a special format or is not valid at all.
PhoneLibWrapper normalizerPhoneNumber = new PhoneLibWrapper(number, null);
if (! normalizerPhoneNumber.isNormalizingTried()) {
return normalizerPhoneNumber.getDialableNumber();
}
DeviceContext normalizedDeviceContext = DeviceContextMapper.normalized(deviceContext);
if (!normalizedDeviceContext.getCountryCode().equals(DeviceContext.UNKNOWN_VALUE)) {
String regionCode = PhoneLibWrapper.getRegionCodeForCountryCode(normalizedDeviceContext.getCountryCode());
// now working again with the region code
normalizerPhoneNumber = new PhoneLibWrapper(number, regionCode);
if (!normalizedDeviceContext.getNationalDestinationCode().equals(DeviceContext.UNKNOWN_VALUE)) {
// Number needs normalization:
return normalize(normalizerPhoneNumber, deviceContext);
}
// Device Context with CountryCode but without AreaCode ...
if (!(PhoneLibWrapper.UNKNOWN_REGIONCODE.equals(regionCode))) {
return this.normalizePhoneNumber(number, regionCode);
}
}
LOGGER.debug("Normalization based on DeviceContext did not work - falling back to normalization with fallback region.");
return this.fallbackNormalizationFromDeviceContextToDefaultRegionCode(number, normalizerPhoneNumber.getDialableNumber());
}
}
| src/main/java/de/telekom/phonenumbernormalizer/PhoneNumberNormalizerImpl.java | telekom-phonenumber-normalizer-9a1b7ad | [
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " //it seems we have nationalnumber with national prefix, so we could add country code:\n return getE164Formatted();\n }\n }\n /**\n * Some Special dial-able characters make a number either not necessary to be normalized (\"+\" is already normalized) or can't be normalized (\"*\" control codes)\n * @param value phone number representation\n * @return if phone number starts with special characters which makes normalization unable / not necessary\n */\n static boolean isSpecialFormat(String value) {",
"score": 36.76550375754628
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " *\n * @see PhoneLibWrapper#PhoneLibWrapper(String, String)\n */\n public boolean isShortNumber(NumberPlan numberplan) {\n if (numberplan == null) {\n return this.isShortNumber();\n }\n return numberplan.isMatchingShortNumber(this.getDialableNumber());\n }\n /**",
"score": 32.00877705895958
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " /**\n * Storing if PhoneLib has been used to parse the given number into semiNormalizedNumber.\n *\n * @see PhoneLibWrapper#PhoneLibWrapper(String, String)\n * @see PhoneLibWrapper#semiNormalizedNumber\n * @see PhoneLibWrapper#isNormalizingTried()\n */\n private boolean isNormalizingTried = false;\n /**\n * Initialize the wrapper by giving a phone number to be analyzed against a number plan of a given region",
"score": 30.4362488623633
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " *\n * @see PhoneLibWrapper#PhoneLibWrapper(String, String)\n */\n public String getE164Formatted() {\n return phoneUtil.format(this.semiNormalizedNumber, PhoneNumberUtil.PhoneNumberFormat.E164);\n }\n /**\n * If we know the given region for the given number {@link PhoneLibWrapper#hasRegionNationalAccessCode()}, this method checks if the given number does not start with a NAC nor a CC,\n * so we could permanently add a default NDC and NAC to the given number and for this new value the method directly return a E164 formatted representation.\n * @param nationalAccessCode the NAC to be added e.g. for Germany it would be \"0\"",
"score": 28.479901786827643
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " try {\n this.semiNormalizedNumber = phoneUtil.parse(extendedNumber, regionCode);\n // after area code has been added, we can add the country code by the lib:\n return getE164Formatted();\n } catch (NumberParseException e) {\n LOGGER.warn(\"could not parse extended number: {}\", extendedNumber);\n LOGGER.debug(\"{}\", e.getMessage());\n return dialableNumber;\n }\n } else {",
"score": 28.28183156330121
}
] | java | .extendNumberByDefaultAreaCodeAndCountryCode(wrapper.getNationalAccessCode(), deviceContext.getNationalDestinationCode()); |
/*
* Copyright © 2023 Deutsche Telekom AG ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.telekom.phonenumbernormalizer;
import de.telekom.phonenumbernormalizer.dto.DeviceContext;
import de.telekom.phonenumbernormalizer.dto.DeviceContextMapper;
import de.telekom.phonenumbernormalizer.numberplans.PhoneLibWrapper;
import de.telekom.phonenumbernormalizer.numberplans.NumberPlan;
import de.telekom.phonenumbernormalizer.numberplans.NumberPlanFactory;
import lombok.RequiredArgsConstructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
/**
* Concrete implementation of {@link PhoneNumberNormalizer} using {@link PhoneLibWrapper} to normalize a number by mitigating some inaccuracies when it comes to number plans of optional NDC and NAC as zero.
* <p>
* Also supports {@link DeviceContext} to enrich a phone number during normalization if the optional NDC is missing.
* </p>
*/
@RequiredArgsConstructor
@Component
public class PhoneNumberNormalizerImpl implements PhoneNumberNormalizer {
private static final Logger LOGGER = LoggerFactory.getLogger(PhoneNumberNormalizerImpl.class);
/**
* Storage for {@link PhoneNumberNormalizer#setFallbackRegionCode(String)}
*/
private String fallbackRegionCode = null;
@Override
public void setFallbackRegionCode(String fallBackRegionCode) {
if (PhoneLibWrapper.getCountryCodeForRegion(fallBackRegionCode) > 0) {
this.fallbackRegionCode = fallBackRegionCode;
} else {
this.fallbackRegionCode = null; //invalid region code!
}
}
/**
* Fallback normalization within the number-plan of the fallback region.
* @param number the original number to be normalized
* @param dialableNumber the original number reduced to dialable digits
* @return E164 formatted phone number or at least a dialable version of the number
*
* @see PhoneNumberNormalizer#setFallbackRegionCode(String)
* @see PhoneNumberNormalizer#normalizePhoneNumber(String, String)
*/
private String fallbackNormalizationFromDeviceContextToDefaultRegionCode(String number, String dialableNumber) {
if (this.fallbackRegionCode == null) {
LOGGER.debug("Fallback Region was set!");
return dialableNumber;
} else {
return this.normalizePhoneNumber(number, this.fallbackRegionCode);
}
}
/**
* Uses wrapper of PhoneLib to identify if special rules apply for normalization.<br/>
* Using device context for enriching the number make it normalizable to E164 format if NDC is optional in the used number plan, but not used in the phone number to be normalized.
* @param wrapper instanced wrapper of PhoneLib
* @param deviceContext information like CC, NDC and {@link de.telekom.phonenumbernormalizer.dto.DeviceContextLineType} from which the number is dialled
* @return E164 formatted phone number or dialable version of it or null
*/
private String normalize(PhoneLibWrapper wrapper, DeviceContext deviceContext) {
// international prefix has been added by PhoneLib even if it's not valid in the number plan.
if (wrapper == null) {
LOGGER.debug("PhoneLipWrapper was not initialized");
return null;
}
if (wrapper.getSemiNormalizedNumber() == null) {
return wrapper.getDialableNumber();
}
NumberPlan numberplan = null;
if (deviceContext != null) {
numberplan = NumberPlanFactory.INSTANCE.getNumberPlan(deviceContext.getLineType(), deviceContext.getCountryCode());
}
if (wrapper.isShortNumber(numberplan)) {
//if it is a short number, we can't add area code nor country code, so returning the dialable.
return wrapper.getDialableNumber();
}
if (wrapper.hasRegionNationalAccessCode() && deviceContext != null) {
//Number plan is using a NationalPrefix aka Trunc Code ... so we could add Area Code if not included in the number.
return wrapper.extendNumberByDefaultAreaCodeAndCountryCode(wrapper.getNationalAccessCode(), deviceContext.getNationalDestinationCode());
}
// Number plan is not using NationalPrefix aka Trunc Code ... its also not a short number, so country code can be added:
return wrapper.getE164Formatted();
}
@Override
public String normalizePhoneNumber(String number, String regionCode) {
PhoneLibWrapper wrapper = new PhoneLibWrapper(number, regionCode);
if (wrapper.getSemiNormalizedNumber() == null) {
return wrapper.getDialableNumber();
}
if (wrapper.isShortNumber()) {
//if it is a short number, we can't add area code nor country code, so returning the dialable.
return wrapper.getDialableNumber();
}
// international prefix is added by the lib even if it's not valid in the number plan.
//checking if the input number is equal to the nationalNumber based on number plan and trunc code logic.
boolean hasNoCCAndNoNAC = wrapper.hasNoCountryCodeNorNationalAccessCode();
LOGGER.debug("Number has no CC and no NAC: {}.", hasNoCCAndNoNAC);
//if the number is definitely a short number or needs an area code but does not have it, we do not add the country code.
return (hasNoCCAndNoNAC) ?
wrapper.getDialableNumber( | ) : wrapper.getE164Formatted(); |
}
@Override
public String normalizePhoneNumber(String number, DeviceContext deviceContext) {
// checking if the number has a special format or is not valid at all.
PhoneLibWrapper normalizerPhoneNumber = new PhoneLibWrapper(number, null);
if (! normalizerPhoneNumber.isNormalizingTried()) {
return normalizerPhoneNumber.getDialableNumber();
}
DeviceContext normalizedDeviceContext = DeviceContextMapper.normalized(deviceContext);
if (!normalizedDeviceContext.getCountryCode().equals(DeviceContext.UNKNOWN_VALUE)) {
String regionCode = PhoneLibWrapper.getRegionCodeForCountryCode(normalizedDeviceContext.getCountryCode());
// now working again with the region code
normalizerPhoneNumber = new PhoneLibWrapper(number, regionCode);
if (!normalizedDeviceContext.getNationalDestinationCode().equals(DeviceContext.UNKNOWN_VALUE)) {
// Number needs normalization:
return normalize(normalizerPhoneNumber, deviceContext);
}
// Device Context with CountryCode but without AreaCode ...
if (!(PhoneLibWrapper.UNKNOWN_REGIONCODE.equals(regionCode))) {
return this.normalizePhoneNumber(number, regionCode);
}
}
LOGGER.debug("Normalization based on DeviceContext did not work - falling back to normalization with fallback region.");
return this.fallbackNormalizationFromDeviceContextToDefaultRegionCode(number, normalizerPhoneNumber.getDialableNumber());
}
}
| src/main/java/de/telekom/phonenumbernormalizer/PhoneNumberNormalizerImpl.java | telekom-phonenumber-normalizer-9a1b7ad | [
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " * If we have a plain national number based on regions number plan and potential NAC logic.\n * <p>\n * For a number plan without NAC logic, it will always return false!\n * </p>\n * @return if given number could have CC and NAC, but does not have any of them.\n */\n public boolean hasNoCountryCodeNorNationalAccessCode() {\n // if given number has no NAC and no CC, it equals national phone number (without NAC).\n if (! Objects.equals(dialableNumber, this.getNationalPhoneNumberWithoutNationalAccessCode())) {\n return false;",
"score": 60.4282814064965
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " } else {\n if (!isSpecialFormat(dialableNumber)) {\n // Number needs normalization:\n // international prefix is added by the lib even if it's not valid in the number plan.\n this.isNormalizingTried = true;\n this.semiNormalizedNumber = PhoneLibWrapper.parseNumber(dialableNumber, regionCode);\n }\n }\n }\n }",
"score": 58.47769577563028
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " * @param regionCode ISO2 code for the regions number plan used for parsing the number\n * @return either the parsed {@link Phonenumber.PhoneNumber} or null\n */\n private static Phonenumber.PhoneNumber parseNumber(String number, String regionCode) {\n try {\n return phoneUtil.parse(number, regionCode);\n // international prefix is added by the lib even if it's not valid in the number plan.\n } catch (NumberParseException e) {\n LOGGER.warn(\"could not parse normalize number: {}\", number);\n LOGGER.debug(\"{}\", e.getMessage());",
"score": 51.7842478914959
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " try {\n this.semiNormalizedNumber = phoneUtil.parse(extendedNumber, regionCode);\n // after area code has been added, we can add the country code by the lib:\n return getE164Formatted();\n } catch (NumberParseException e) {\n LOGGER.warn(\"could not parse extended number: {}\", extendedNumber);\n LOGGER.debug(\"{}\", e.getMessage());\n return dialableNumber;\n }\n } else {",
"score": 50.43919474173701
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " /**\n * Storing if PhoneLib has been used to parse the given number into semiNormalizedNumber.\n *\n * @see PhoneLibWrapper#PhoneLibWrapper(String, String)\n * @see PhoneLibWrapper#semiNormalizedNumber\n * @see PhoneLibWrapper#isNormalizingTried()\n */\n private boolean isNormalizingTried = false;\n /**\n * Initialize the wrapper by giving a phone number to be analyzed against a number plan of a given region",
"score": 42.03369540612384
}
] | java | ) : wrapper.getE164Formatted(); |
/*
* Copyright © 2023 Deutsche Telekom AG ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.telekom.phonenumbernormalizer;
import de.telekom.phonenumbernormalizer.dto.DeviceContext;
import de.telekom.phonenumbernormalizer.dto.DeviceContextMapper;
import de.telekom.phonenumbernormalizer.numberplans.PhoneLibWrapper;
import de.telekom.phonenumbernormalizer.numberplans.NumberPlan;
import de.telekom.phonenumbernormalizer.numberplans.NumberPlanFactory;
import lombok.RequiredArgsConstructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
/**
* Concrete implementation of {@link PhoneNumberNormalizer} using {@link PhoneLibWrapper} to normalize a number by mitigating some inaccuracies when it comes to number plans of optional NDC and NAC as zero.
* <p>
* Also supports {@link DeviceContext} to enrich a phone number during normalization if the optional NDC is missing.
* </p>
*/
@RequiredArgsConstructor
@Component
public class PhoneNumberNormalizerImpl implements PhoneNumberNormalizer {
private static final Logger LOGGER = LoggerFactory.getLogger(PhoneNumberNormalizerImpl.class);
/**
* Storage for {@link PhoneNumberNormalizer#setFallbackRegionCode(String)}
*/
private String fallbackRegionCode = null;
@Override
public void setFallbackRegionCode(String fallBackRegionCode) {
if (PhoneLibWrapper.getCountryCodeForRegion(fallBackRegionCode) > 0) {
this.fallbackRegionCode = fallBackRegionCode;
} else {
this.fallbackRegionCode = null; //invalid region code!
}
}
/**
* Fallback normalization within the number-plan of the fallback region.
* @param number the original number to be normalized
* @param dialableNumber the original number reduced to dialable digits
* @return E164 formatted phone number or at least a dialable version of the number
*
* @see PhoneNumberNormalizer#setFallbackRegionCode(String)
* @see PhoneNumberNormalizer#normalizePhoneNumber(String, String)
*/
private String fallbackNormalizationFromDeviceContextToDefaultRegionCode(String number, String dialableNumber) {
if (this.fallbackRegionCode == null) {
LOGGER.debug("Fallback Region was set!");
return dialableNumber;
} else {
return this.normalizePhoneNumber(number, this.fallbackRegionCode);
}
}
/**
* Uses wrapper of PhoneLib to identify if special rules apply for normalization.<br/>
* Using device context for enriching the number make it normalizable to E164 format if NDC is optional in the used number plan, but not used in the phone number to be normalized.
* @param wrapper instanced wrapper of PhoneLib
* @param deviceContext information like CC, NDC and {@link de.telekom.phonenumbernormalizer.dto.DeviceContextLineType} from which the number is dialled
* @return E164 formatted phone number or dialable version of it or null
*/
private String normalize(PhoneLibWrapper wrapper, DeviceContext deviceContext) {
// international prefix has been added by PhoneLib even if it's not valid in the number plan.
if (wrapper == null) {
LOGGER.debug("PhoneLipWrapper was not initialized");
return null;
}
if (wrapper.getSemiNormalizedNumber() == null) {
return wrapper.getDialableNumber();
}
NumberPlan numberplan = null;
if (deviceContext != null) {
numberplan = NumberPlanFactory.INSTANCE.getNumberPlan(deviceContext.getLineType(), deviceContext.getCountryCode());
}
if (wrapper.isShortNumber(numberplan)) {
//if it is a short number, we can't add area code nor country code, so returning the dialable.
return wrapper.getDialableNumber();
}
if (wrapper.hasRegionNationalAccessCode() && deviceContext != null) {
//Number plan is using a NationalPrefix aka Trunc Code ... so we could add Area Code if not included in the number.
return wrapper.extendNumberByDefaultAreaCodeAndCountryCode(wrapper.getNationalAccessCode(), deviceContext.getNationalDestinationCode());
}
// Number plan is not using NationalPrefix aka Trunc Code ... its also not a short number, so country code can be added:
| return wrapper.getE164Formatted(); |
}
@Override
public String normalizePhoneNumber(String number, String regionCode) {
PhoneLibWrapper wrapper = new PhoneLibWrapper(number, regionCode);
if (wrapper.getSemiNormalizedNumber() == null) {
return wrapper.getDialableNumber();
}
if (wrapper.isShortNumber()) {
//if it is a short number, we can't add area code nor country code, so returning the dialable.
return wrapper.getDialableNumber();
}
// international prefix is added by the lib even if it's not valid in the number plan.
//checking if the input number is equal to the nationalNumber based on number plan and trunc code logic.
boolean hasNoCCAndNoNAC = wrapper.hasNoCountryCodeNorNationalAccessCode();
LOGGER.debug("Number has no CC and no NAC: {}.", hasNoCCAndNoNAC);
//if the number is definitely a short number or needs an area code but does not have it, we do not add the country code.
return (hasNoCCAndNoNAC) ?
wrapper.getDialableNumber() : wrapper.getE164Formatted();
}
@Override
public String normalizePhoneNumber(String number, DeviceContext deviceContext) {
// checking if the number has a special format or is not valid at all.
PhoneLibWrapper normalizerPhoneNumber = new PhoneLibWrapper(number, null);
if (! normalizerPhoneNumber.isNormalizingTried()) {
return normalizerPhoneNumber.getDialableNumber();
}
DeviceContext normalizedDeviceContext = DeviceContextMapper.normalized(deviceContext);
if (!normalizedDeviceContext.getCountryCode().equals(DeviceContext.UNKNOWN_VALUE)) {
String regionCode = PhoneLibWrapper.getRegionCodeForCountryCode(normalizedDeviceContext.getCountryCode());
// now working again with the region code
normalizerPhoneNumber = new PhoneLibWrapper(number, regionCode);
if (!normalizedDeviceContext.getNationalDestinationCode().equals(DeviceContext.UNKNOWN_VALUE)) {
// Number needs normalization:
return normalize(normalizerPhoneNumber, deviceContext);
}
// Device Context with CountryCode but without AreaCode ...
if (!(PhoneLibWrapper.UNKNOWN_REGIONCODE.equals(regionCode))) {
return this.normalizePhoneNumber(number, regionCode);
}
}
LOGGER.debug("Normalization based on DeviceContext did not work - falling back to normalization with fallback region.");
return this.fallbackNormalizationFromDeviceContextToDefaultRegionCode(number, normalizerPhoneNumber.getDialableNumber());
}
}
| src/main/java/de/telekom/phonenumbernormalizer/PhoneNumberNormalizerImpl.java | telekom-phonenumber-normalizer-9a1b7ad | [
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " //it seems we have nationalnumber with national prefix, so we could add country code:\n return getE164Formatted();\n }\n }\n /**\n * Some Special dial-able characters make a number either not necessary to be normalized (\"+\" is already normalized) or can't be normalized (\"*\" control codes)\n * @param value phone number representation\n * @return if phone number starts with special characters which makes normalization unable / not necessary\n */\n static boolean isSpecialFormat(String value) {",
"score": 48.86867944726136
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " try {\n this.semiNormalizedNumber = phoneUtil.parse(extendedNumber, regionCode);\n // after area code has been added, we can add the country code by the lib:\n return getE164Formatted();\n } catch (NumberParseException e) {\n LOGGER.warn(\"could not parse extended number: {}\", extendedNumber);\n LOGGER.debug(\"{}\", e.getMessage());\n return dialableNumber;\n }\n } else {",
"score": 43.85113094247566
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " *\n * @see PhoneLibWrapper#PhoneLibWrapper(String, String)\n */\n public String getE164Formatted() {\n return phoneUtil.format(this.semiNormalizedNumber, PhoneNumberUtil.PhoneNumberFormat.E164);\n }\n /**\n * If we know the given region for the given number {@link PhoneLibWrapper#hasRegionNationalAccessCode()}, this method checks if the given number does not start with a NAC nor a CC,\n * so we could permanently add a default NDC and NAC to the given number and for this new value the method directly return a E164 formatted representation.\n * @param nationalAccessCode the NAC to be added e.g. for Germany it would be \"0\"",
"score": 38.803199717684784
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " /**\n * Storing if PhoneLib has been used to parse the given number into semiNormalizedNumber.\n *\n * @see PhoneLibWrapper#PhoneLibWrapper(String, String)\n * @see PhoneLibWrapper#semiNormalizedNumber\n * @see PhoneLibWrapper#isNormalizingTried()\n */\n private boolean isNormalizingTried = false;\n /**\n * Initialize the wrapper by giving a phone number to be analyzed against a number plan of a given region",
"score": 38.75296116622141
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " /**\n * Using PhoneLib to get the national number from a parsed phone number with leading zeros, if those are not representing a National Access Code.\n * <p/>\n * This is necessary, because PhoneLib is storing the national number as a long, so leading \"0\" Digits as part of it are stored in other attributes.\n * @param phoneNumber A PhoneLib parsed phone number\n * @return national number part without NationalPrefix (aka NAC) but any other leading zero.\n */\n private static String nationalPhoneNumberWithoutNationalPrefix(Phonenumber.PhoneNumber phoneNumber) {\n if (phoneNumber==null) {\n return null;",
"score": 34.9073691308199
}
] | java | return wrapper.getE164Formatted(); |
/*
* Copyright © 2023 Deutsche Telekom AG ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.telekom.phonenumbernormalizer;
import de.telekom.phonenumbernormalizer.dto.DeviceContext;
import de.telekom.phonenumbernormalizer.dto.DeviceContextMapper;
import de.telekom.phonenumbernormalizer.numberplans.PhoneLibWrapper;
import de.telekom.phonenumbernormalizer.numberplans.NumberPlan;
import de.telekom.phonenumbernormalizer.numberplans.NumberPlanFactory;
import lombok.RequiredArgsConstructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
/**
* Concrete implementation of {@link PhoneNumberNormalizer} using {@link PhoneLibWrapper} to normalize a number by mitigating some inaccuracies when it comes to number plans of optional NDC and NAC as zero.
* <p>
* Also supports {@link DeviceContext} to enrich a phone number during normalization if the optional NDC is missing.
* </p>
*/
@RequiredArgsConstructor
@Component
public class PhoneNumberNormalizerImpl implements PhoneNumberNormalizer {
private static final Logger LOGGER = LoggerFactory.getLogger(PhoneNumberNormalizerImpl.class);
/**
* Storage for {@link PhoneNumberNormalizer#setFallbackRegionCode(String)}
*/
private String fallbackRegionCode = null;
@Override
public void setFallbackRegionCode(String fallBackRegionCode) {
if (PhoneLibWrapper.getCountryCodeForRegion(fallBackRegionCode) > 0) {
this.fallbackRegionCode = fallBackRegionCode;
} else {
this.fallbackRegionCode = null; //invalid region code!
}
}
/**
* Fallback normalization within the number-plan of the fallback region.
* @param number the original number to be normalized
* @param dialableNumber the original number reduced to dialable digits
* @return E164 formatted phone number or at least a dialable version of the number
*
* @see PhoneNumberNormalizer#setFallbackRegionCode(String)
* @see PhoneNumberNormalizer#normalizePhoneNumber(String, String)
*/
private String fallbackNormalizationFromDeviceContextToDefaultRegionCode(String number, String dialableNumber) {
if (this.fallbackRegionCode == null) {
LOGGER.debug("Fallback Region was set!");
return dialableNumber;
} else {
return this.normalizePhoneNumber(number, this.fallbackRegionCode);
}
}
/**
* Uses wrapper of PhoneLib to identify if special rules apply for normalization.<br/>
* Using device context for enriching the number make it normalizable to E164 format if NDC is optional in the used number plan, but not used in the phone number to be normalized.
* @param wrapper instanced wrapper of PhoneLib
* @param deviceContext information like CC, NDC and {@link de.telekom.phonenumbernormalizer.dto.DeviceContextLineType} from which the number is dialled
* @return E164 formatted phone number or dialable version of it or null
*/
private String normalize(PhoneLibWrapper wrapper, DeviceContext deviceContext) {
// international prefix has been added by PhoneLib even if it's not valid in the number plan.
if (wrapper == null) {
LOGGER.debug("PhoneLipWrapper was not initialized");
return null;
}
if (wrapper.getSemiNormalizedNumber() == null) {
return wrapper.getDialableNumber();
}
NumberPlan numberplan = null;
if (deviceContext != null) {
numberplan = NumberPlanFactory.INSTANCE.getNumberPlan(deviceContext.getLineType(), deviceContext.getCountryCode());
}
if (wrapper.isShortNumber(numberplan)) {
//if it is a short number, we can't add area code nor country code, so returning the dialable.
return wrapper.getDialableNumber();
}
if (wrapper.hasRegionNationalAccessCode() && deviceContext != null) {
//Number plan is using a NationalPrefix aka Trunc Code ... so we could add Area Code if not included in the number.
return wrapper.extendNumberByDefaultAreaCodeAndCountryCode(wrapper.getNationalAccessCode(), deviceContext.getNationalDestinationCode());
}
// Number plan is not using NationalPrefix aka Trunc Code ... its also not a short number, so country code can be added:
return wrapper.getE164Formatted();
}
@Override
public String normalizePhoneNumber(String number, String regionCode) {
PhoneLibWrapper wrapper = new PhoneLibWrapper(number, regionCode);
if (wrapper.getSemiNormalizedNumber() == null) {
return wrapper.getDialableNumber();
}
if (wrapper.isShortNumber()) {
//if it is a short number, we can't add area code nor country code, so returning the dialable.
return wrapper.getDialableNumber();
}
// international prefix is added by the lib even if it's not valid in the number plan.
//checking if the input number is equal to the nationalNumber based on number plan and trunc code logic.
| boolean hasNoCCAndNoNAC = wrapper.hasNoCountryCodeNorNationalAccessCode(); |
LOGGER.debug("Number has no CC and no NAC: {}.", hasNoCCAndNoNAC);
//if the number is definitely a short number or needs an area code but does not have it, we do not add the country code.
return (hasNoCCAndNoNAC) ?
wrapper.getDialableNumber() : wrapper.getE164Formatted();
}
@Override
public String normalizePhoneNumber(String number, DeviceContext deviceContext) {
// checking if the number has a special format or is not valid at all.
PhoneLibWrapper normalizerPhoneNumber = new PhoneLibWrapper(number, null);
if (! normalizerPhoneNumber.isNormalizingTried()) {
return normalizerPhoneNumber.getDialableNumber();
}
DeviceContext normalizedDeviceContext = DeviceContextMapper.normalized(deviceContext);
if (!normalizedDeviceContext.getCountryCode().equals(DeviceContext.UNKNOWN_VALUE)) {
String regionCode = PhoneLibWrapper.getRegionCodeForCountryCode(normalizedDeviceContext.getCountryCode());
// now working again with the region code
normalizerPhoneNumber = new PhoneLibWrapper(number, regionCode);
if (!normalizedDeviceContext.getNationalDestinationCode().equals(DeviceContext.UNKNOWN_VALUE)) {
// Number needs normalization:
return normalize(normalizerPhoneNumber, deviceContext);
}
// Device Context with CountryCode but without AreaCode ...
if (!(PhoneLibWrapper.UNKNOWN_REGIONCODE.equals(regionCode))) {
return this.normalizePhoneNumber(number, regionCode);
}
}
LOGGER.debug("Normalization based on DeviceContext did not work - falling back to normalization with fallback region.");
return this.fallbackNormalizationFromDeviceContextToDefaultRegionCode(number, normalizerPhoneNumber.getDialableNumber());
}
}
| src/main/java/de/telekom/phonenumbernormalizer/PhoneNumberNormalizerImpl.java | telekom-phonenumber-normalizer-9a1b7ad | [
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " } else {\n if (!isSpecialFormat(dialableNumber)) {\n // Number needs normalization:\n // international prefix is added by the lib even if it's not valid in the number plan.\n this.isNormalizingTried = true;\n this.semiNormalizedNumber = PhoneLibWrapper.parseNumber(dialableNumber, regionCode);\n }\n }\n }\n }",
"score": 48.3125607271687
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " * @param regionCode ISO2 code for the regions number plan used for parsing the number\n * @return either the parsed {@link Phonenumber.PhoneNumber} or null\n */\n private static Phonenumber.PhoneNumber parseNumber(String number, String regionCode) {\n try {\n return phoneUtil.parse(number, regionCode);\n // international prefix is added by the lib even if it's not valid in the number plan.\n } catch (NumberParseException e) {\n LOGGER.warn(\"could not parse normalize number: {}\", number);\n LOGGER.debug(\"{}\", e.getMessage());",
"score": 44.647773921468264
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " /**\n * Storing if PhoneLib has been used to parse the given number into semiNormalizedNumber.\n *\n * @see PhoneLibWrapper#PhoneLibWrapper(String, String)\n * @see PhoneLibWrapper#semiNormalizedNumber\n * @see PhoneLibWrapper#isNormalizingTried()\n */\n private boolean isNormalizingTried = false;\n /**\n * Initialize the wrapper by giving a phone number to be analyzed against a number plan of a given region",
"score": 42.37324167097835
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " try {\n this.semiNormalizedNumber = phoneUtil.parse(extendedNumber, regionCode);\n // after area code has been added, we can add the country code by the lib:\n return getE164Formatted();\n } catch (NumberParseException e) {\n LOGGER.warn(\"could not parse extended number: {}\", extendedNumber);\n LOGGER.debug(\"{}\", e.getMessage());\n return dialableNumber;\n }\n } else {",
"score": 35.165678772604664
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " //it seems we have nationalnumber with national prefix, so we could add country code:\n return getE164Formatted();\n }\n }\n /**\n * Some Special dial-able characters make a number either not necessary to be normalized (\"+\" is already normalized) or can't be normalized (\"*\" control codes)\n * @param value phone number representation\n * @return if phone number starts with special characters which makes normalization unable / not necessary\n */\n static boolean isSpecialFormat(String value) {",
"score": 34.556999354808745
}
] | java | boolean hasNoCCAndNoNAC = wrapper.hasNoCountryCodeNorNationalAccessCode(); |
package com.rosymaple.hitindication.latesthits;
import com.rosymaple.hitindication.config.HitIndicatorClientConfigs;
import net.minecraft.client.Minecraft;
import net.minecraft.world.entity.Entity;
import net.minecraftforge.api.distmarker.Dist;
import net.minecraftforge.api.distmarker.OnlyIn;
import java.util.ArrayList;
import java.util.UUID;
@OnlyIn(Dist.CLIENT)
public class ClientLatestHits {
public static ArrayList<HitIndicator> latestHitIndicators = new ArrayList<>();
public static HitMarker currentHitMarker = null;
public static void addHitIndicator(double x, double y, double z, int type, int damagePercent, boolean hasNegativeEffects) {
HitIndicatorType hitIndicatorType = HitIndicatorType.fromInt(type);
if(!HitIndicatorClientConfigs.EnableHitIndication.get())
return;
if(hitIndicatorType == HitIndicatorType.BLUE && !HitIndicatorClientConfigs.ShowBlueIndicators.get())
return;
if(hasNegativeEffects && !HitIndicatorClientConfigs.DisplayHitsFromNegativePotions.get())
return;
latestHitIndicators.add(new HitIndicator(x, y, z, hitIndicatorType, damagePercent));
if(HitIndicatorClientConfigs.MaxIndicatorCount.get() > 0 && latestHitIndicators.size() > HitIndicatorClientConfigs.MaxIndicatorCount.get())
latestHitIndicators.remove(0);
}
public static void setHitMarker(int type) {
HitMarkerType hitMarkerType = HitMarkerType.fromInt(type);
if(!HitIndicatorClientConfigs.EnableHitMarkers.get())
return;
currentHitMarker = new HitMarker(hitMarkerType);
}
public static void tick() {
for(int i = latestHitIndicators.size()-1; i >= 0; i--) {
HitIndicator hitIndicator = latestHitIndicators.get(i);
hitIndicator.tick();
if | (hitIndicator.expired())
latestHitIndicators.remove(i); |
}
if(currentHitMarker != null) {
currentHitMarker.tick();
if(currentHitMarker.expired())
currentHitMarker = null;
}
}
public static void clear() {
latestHitIndicators.clear();
currentHitMarker = null;
}
}
| src/main/java/com/rosymaple/hitindication/latesthits/ClientLatestHits.java | TheHamester-HitIndicator-261bcb9 | [
{
"filename": "src/main/java/com/rosymaple/hitindication/event/HitEvents.java",
"retrieved_chunk": " return false;\n }\n private static float applyPotionDamageCalculations(ServerPlayer player, DamageSource pSource, float pDamage)\n {\n if (pSource.isBypassMagic()) {\n return pDamage;\n } else {\n if (player.hasEffect(MobEffects.DAMAGE_RESISTANCE) && pSource != DamageSource.OUT_OF_WORLD) {\n int i = (player.getEffect(MobEffects.DAMAGE_RESISTANCE).getAmplifier() + 1) * 5;\n int j = 25 - i;",
"score": 32.525168241599616
},
{
"filename": "src/main/java/com/rosymaple/hitindication/event/RenderEvents.java",
"retrieved_chunk": " for(HitIndicator hit : ClientLatestHits.latestHitIndicators) {\n drawIndicator(event.getMatrixStack(), hit, screenMiddleX, screenMiddleY, playerPos, lookVec);\n }\n if(ClientLatestHits.currentHitMarker != null)\n drawHitMarker(event.getMatrixStack(), ClientLatestHits.currentHitMarker, screenMiddleX, screenMiddleY);\n }\n private static void drawHitMarker(PoseStack stack, HitMarker hitMarker, int screenMiddleX, int screenMiddleY) {\n float opacity = hitMarker.getType() == HitMarkerType.CRIT ? 30 : 60;\n opacity /= 100.0f;\n bindMarkerTexture(hitMarker.getType(), hitMarker.getLifeTime());",
"score": 26.468162599973695
},
{
"filename": "src/main/java/com/rosymaple/hitindication/latesthits/HitMarker.java",
"retrieved_chunk": "package com.rosymaple.hitindication.latesthits;\npublic class HitMarker {\n private HitMarkerType hitMarkerType;\n private int lifetime;\n public HitMarker(HitMarkerType hitIndicatorType) {\n this.hitMarkerType = hitIndicatorType;\n lifetime = 9;\n }\n public void tick() {\n lifetime--;",
"score": 18.997966886507605
},
{
"filename": "src/main/java/com/rosymaple/hitindication/latesthits/HitIndicator.java",
"retrieved_chunk": " this.hitIndicatorType = hitIndicatorType;\n this.damagePercent = damagePercent;\n lifetime = HitIndicatorClientConfigs.FadeRate.get();\n }\n public void tick() {\n lifetime--;\n }\n public boolean expired() {\n return lifetime <= 0;\n }",
"score": 17.47148222656506
},
{
"filename": "src/main/java/com/rosymaple/hitindication/event/ClientEvents.java",
"retrieved_chunk": " public static void onTick(TickEvent.ClientTickEvent event) {\n if(event.phase == TickEvent.Phase.END)\n return;\n ClientLatestHits.tick();\n }\n}",
"score": 11.66195827439625
}
] | java | (hitIndicator.expired())
latestHitIndicators.remove(i); |
/*
* Copyright © 2023 Deutsche Telekom AG ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.telekom.phonenumbernormalizer;
import de.telekom.phonenumbernormalizer.dto.DeviceContext;
import de.telekom.phonenumbernormalizer.dto.DeviceContextMapper;
import de.telekom.phonenumbernormalizer.numberplans.PhoneLibWrapper;
import de.telekom.phonenumbernormalizer.numberplans.NumberPlan;
import de.telekom.phonenumbernormalizer.numberplans.NumberPlanFactory;
import lombok.RequiredArgsConstructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
/**
* Concrete implementation of {@link PhoneNumberNormalizer} using {@link PhoneLibWrapper} to normalize a number by mitigating some inaccuracies when it comes to number plans of optional NDC and NAC as zero.
* <p>
* Also supports {@link DeviceContext} to enrich a phone number during normalization if the optional NDC is missing.
* </p>
*/
@RequiredArgsConstructor
@Component
public class PhoneNumberNormalizerImpl implements PhoneNumberNormalizer {
private static final Logger LOGGER = LoggerFactory.getLogger(PhoneNumberNormalizerImpl.class);
/**
* Storage for {@link PhoneNumberNormalizer#setFallbackRegionCode(String)}
*/
private String fallbackRegionCode = null;
@Override
public void setFallbackRegionCode(String fallBackRegionCode) {
if (PhoneLibWrapper.getCountryCodeForRegion(fallBackRegionCode) > 0) {
this.fallbackRegionCode = fallBackRegionCode;
} else {
this.fallbackRegionCode = null; //invalid region code!
}
}
/**
* Fallback normalization within the number-plan of the fallback region.
* @param number the original number to be normalized
* @param dialableNumber the original number reduced to dialable digits
* @return E164 formatted phone number or at least a dialable version of the number
*
* @see PhoneNumberNormalizer#setFallbackRegionCode(String)
* @see PhoneNumberNormalizer#normalizePhoneNumber(String, String)
*/
private String fallbackNormalizationFromDeviceContextToDefaultRegionCode(String number, String dialableNumber) {
if (this.fallbackRegionCode == null) {
LOGGER.debug("Fallback Region was set!");
return dialableNumber;
} else {
return this.normalizePhoneNumber(number, this.fallbackRegionCode);
}
}
/**
* Uses wrapper of PhoneLib to identify if special rules apply for normalization.<br/>
* Using device context for enriching the number make it normalizable to E164 format if NDC is optional in the used number plan, but not used in the phone number to be normalized.
* @param wrapper instanced wrapper of PhoneLib
* @param deviceContext information like CC, NDC and {@link de.telekom.phonenumbernormalizer.dto.DeviceContextLineType} from which the number is dialled
* @return E164 formatted phone number or dialable version of it or null
*/
private String normalize(PhoneLibWrapper wrapper, DeviceContext deviceContext) {
// international prefix has been added by PhoneLib even if it's not valid in the number plan.
if (wrapper == null) {
LOGGER.debug("PhoneLipWrapper was not initialized");
return null;
}
if (wrapper.getSemiNormalizedNumber() == null) {
return wrapper.getDialableNumber();
}
NumberPlan numberplan = null;
if (deviceContext != null) {
numberplan = NumberPlanFactory.INSTANCE.getNumberPlan(deviceContext.getLineType(), deviceContext.getCountryCode());
}
if (wrapper.isShortNumber(numberplan)) {
//if it is a short number, we can't add area code nor country code, so returning the dialable.
return wrapper.getDialableNumber();
}
if (wrapper.hasRegionNationalAccessCode() && deviceContext != null) {
//Number plan is using a NationalPrefix aka Trunc Code ... so we could add Area Code if not included in the number.
return wrapper.extendNumberByDefaultAreaCodeAndCountryCode(wrapper.getNationalAccessCode(), deviceContext.getNationalDestinationCode());
}
// Number plan is not using NationalPrefix aka Trunc Code ... its also not a short number, so country code can be added:
return wrapper.getE164Formatted();
}
@Override
public String normalizePhoneNumber(String number, String regionCode) {
PhoneLibWrapper wrapper = new PhoneLibWrapper(number, regionCode);
if (wrapper.getSemiNormalizedNumber() == null) {
return wrapper.getDialableNumber();
}
if (wrapper.isShortNumber()) {
//if it is a short number, we can't add area code nor country code, so returning the dialable.
return wrapper.getDialableNumber();
}
// international prefix is added by the lib even if it's not valid in the number plan.
//checking if the input number is equal to the nationalNumber based on number plan and trunc code logic.
boolean hasNoCCAndNoNAC = wrapper.hasNoCountryCodeNorNationalAccessCode();
LOGGER.debug("Number has no CC and no NAC: {}.", hasNoCCAndNoNAC);
//if the number is definitely a short number or needs an area code but does not have it, we do not add the country code.
return (hasNoCCAndNoNAC) ?
wrapper.getDialableNumber() : wrapper.getE164Formatted();
}
@Override
public String normalizePhoneNumber(String number, DeviceContext deviceContext) {
// checking if the number has a special format or is not valid at all.
PhoneLibWrapper normalizerPhoneNumber = new PhoneLibWrapper(number, null);
if (! normalizerPhoneNumber.isNormalizingTried()) {
return normalizerPhoneNumber.getDialableNumber();
}
DeviceContext normalizedDeviceContext = DeviceContextMapper.normalized(deviceContext);
if (!normalizedDeviceContext.getCountryCode().equals(DeviceContext.UNKNOWN_VALUE)) {
String regionCode = PhoneLibWrapper.getRegionCodeForCountryCode(normalizedDeviceContext.getCountryCode());
// now working again with the region code
normalizerPhoneNumber = new PhoneLibWrapper(number, regionCode);
if ( | !normalizedDeviceContext.getNationalDestinationCode().equals(DeviceContext.UNKNOWN_VALUE)) { |
// Number needs normalization:
return normalize(normalizerPhoneNumber, deviceContext);
}
// Device Context with CountryCode but without AreaCode ...
if (!(PhoneLibWrapper.UNKNOWN_REGIONCODE.equals(regionCode))) {
return this.normalizePhoneNumber(number, regionCode);
}
}
LOGGER.debug("Normalization based on DeviceContext did not work - falling back to normalization with fallback region.");
return this.fallbackNormalizationFromDeviceContextToDefaultRegionCode(number, normalizerPhoneNumber.getDialableNumber());
}
}
| src/main/java/de/telekom/phonenumbernormalizer/PhoneNumberNormalizerImpl.java | telekom-phonenumber-normalizer-9a1b7ad | [
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/dto/DeviceContextMapper.java",
"retrieved_chunk": " public static DeviceContext normalized(DeviceContext context) {\n DeviceContext result = new DeviceContextDto();\n if (context==null) {\n result.setLineType(DeviceContextLineType.UNKNOWN);\n result.setCountryCode(DeviceContext.UNKNOWN_VALUE);\n result.setNationalDestinationCode(DeviceContext.UNKNOWN_VALUE);\n } else {\n result.setLineType(normalizeType(context.getLineType()));\n result.setCountryCode(normalizeCountryCode(context.getCountryCode()));\n result.setNationalDestinationCode(normalizeNationalDestinationCode(context.getNationalDestinationCode()));",
"score": 30.001861332220106
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/numberplans/PhoneLibWrapper.java",
"retrieved_chunk": " * The given region code for which the given number should be normalized.<br/>\n * This is an ISO2 code for the country.\n *\n * @see PhoneLibWrapper#PhoneLibWrapper(String, String)\n */\n String regionCode;\n /**\n * The number plan metadata which PhoneLib is using for the given region code.\n *\n * @see PhoneLibWrapper#PhoneLibWrapper(String, String)",
"score": 23.217122025418504
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/dto/DeviceContext.java",
"retrieved_chunk": " * @see DeviceContextLineType\n */\npublic interface DeviceContext {\n /**\n * Indicates the value to be hold in an attribute is not known.\n *\n * @see DeviceContext#getCountryCode()\n * @see DeviceContext#setCountryCode(String)\n * @see DeviceContext#getNationalDestinationCode()\n * @see DeviceContext#setNationalDestinationCode(String)",
"score": 22.589927930204755
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/dto/DeviceContext.java",
"retrieved_chunk": " * Without international dialing prefix nor trunc code. If not known or not set, it should return DeviceContext.UNKNOWN_VALUE.\n * <p>\n * E.G. \"49\" for Germany\n * </p>\n * @return either a string containing one to three digits representing a country calling code or \"unknown\"\n *\n * @see DeviceContext#getCountryCode()\n * @see DeviceContext#UNKNOWN_VALUE\n */\n String getCountryCode();",
"score": 21.808358345993692
},
{
"filename": "src/main/java/de/telekom/phonenumbernormalizer/PhoneNumberAreaLabelImpl.java",
"retrieved_chunk": " try {\n Phonenumber.PhoneNumber pn = phoneUtil.parse(e164number, \"\");\n Optional<String> locationName = Optional.empty();\n if (pn!=null) {\n String regionCode=phoneUtil.getRegionCodeForCountryCode(pn.getCountryCode());\n locationName = this.getLocationByNationalNumberAndRegionCode(String.valueOf(pn.getNationalNumber()), regionCode);\n if (locationName.isEmpty()) {\n return this.getCountryNameByCountryCode(String.valueOf(pn.getCountryCode()));\n }\n }",
"score": 21.059647464633553
}
] | java | !normalizedDeviceContext.getNationalDestinationCode().equals(DeviceContext.UNKNOWN_VALUE)) { |
package net.chauvedev.woodencog.mixin;
import com.simibubi.create.content.kinetics.KineticNetwork;
import com.simibubi.create.content.kinetics.base.KineticBlockEntity;
import com.simibubi.create.content.kinetics.crusher.CrushingWheelBlockEntity;
import com.simibubi.create.content.kinetics.transmission.ClutchBlockEntity;
import net.chauvedev.woodencog.WoodenCog;
import net.chauvedev.woodencog.capability.MachineCapacity;
import net.chauvedev.woodencog.capability.MachineCapacityProvider;
import net.chauvedev.woodencog.core.MachineCapacityEntry;
import net.chauvedev.woodencog.core.MachineCapacityStorage;
import net.minecraft.core.BlockPos;
import net.minecraft.core.particles.ParticleTypes;
import net.minecraft.nbt.CompoundTag;
import net.minecraft.sounds.SoundEvent;
import net.minecraft.sounds.SoundEvents;
import net.minecraft.sounds.SoundSource;
import net.minecraft.world.level.Level;
import net.minecraft.world.level.block.Block;
import net.minecraft.world.level.block.Blocks;
import net.minecraft.world.level.block.entity.BlockEntity;
import net.minecraft.world.level.block.entity.BlockEntityType;
import net.minecraft.world.level.block.state.BlockState;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Shadow;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
import java.awt.*;
import java.util.Objects;
@Mixin(value = KineticBlockEntity.class, remap = false)
public abstract class MixinKineticBlockEntity{
private boolean lifeBlockBlocked = false;
private float lifeBlock = 20*10;
@Shadow public abstract KineticNetwork getOrCreateNetwork();
@Shadow protected float speed;
@Shadow protected float stress;
@Shadow protected boolean overStressed;
@Shadow public abstract void setSpeed(float speed);
@Shadow protected float capacity;
@Shadow public abstract float calculateAddedStressCapacity();
@Shadow public abstract float calculateStressApplied();
@Shadow protected float lastStressApplied;
@Shadow protected float lastCapacityProvided;
@Shadow public abstract void detachKinetics();
@Shadow public abstract float getSpeed();
boolean destroyed = false;
@Inject(
method = {"tick"},
at = {@At("HEAD")},
remap = false)
public void tick(CallbackInfo ci){
KineticBlockEntity block = (KineticBlockEntity)((Object)this) ;
try{
this.tickDamagedTick(block);
}catch (Error error){
WoodenCog.LOGGER.debug("Error pendant le system tick");
error.printStackTrace();
}
}
public void tickDamagedTick(KineticBlockEntity block){
if (!MachineCapacityStorage.getInstance().active){
return;
}
if (MachineCapacityStorage.getInstance().isBlackListBlock(block)){
return;
}
MachineCapacityEntry config = MachineCapacityStorage.getInstance().getCapacity(block.getBlockState().getBlock());
MachineCapacity capacity = block.getCapability(MachineCapacityProvider.MACHINE_CAPACITY).resolve().get();
int chance = block.getLevel().random.nextInt(0,101);
float left = config.durabilityMax - capacity.getDurability();
if (chance > (100-config.damageChance) && left > 0 && block.getSpeed() > 0){
int damage_for_speed = (int) getSpeed()/10;
capacity.setDurability(capacity.getDurability()+damage_for_speed);
}
left = config.durabilityMax - capacity.getDurability();
if(left >= 10 && left < config.durabilityMax/2) {
block.getLevel().addParticle(ParticleTypes.SMOKE,false,block.getBlockPos().getX()+0.5f,block.getBlockPos().getY()+0.5f,block.getBlockPos().getZ()+0.5f,0,0.01,0);
}
if (left<10){
if (getOrCreateNetwork()!=null){
| if (!destroyed || !capacity.isDestroyed() || this.stress != Integer.MAX_VALUE){ |
destroyed = true;
getOrCreateNetwork().updateCapacityFor(block,0);
getOrCreateNetwork().updateStressFor(block, Integer.MAX_VALUE);
getOrCreateNetwork().updateNetwork();
getOrCreateNetwork().sync();
capacity.setDestroyed(true);
}
}
block.getLevel().addParticle(ParticleTypes.CAMPFIRE_SIGNAL_SMOKE,false,block.getBlockPos().getX()+0.5f,block.getBlockPos().getY()+0.5f,block.getBlockPos().getZ()+0.5f,0,0.01,0);
}
}
}
| src/main/java/net/chauvedev/woodencog/mixin/MixinKineticBlockEntity.java | Sparks-and-Rotation-woodencog-bb3d3ef | [
{
"filename": "src/main/java/net/chauvedev/woodencog/mixin/MixinKineticBlock.java",
"retrieved_chunk": " } else {\n capacity.setDurability(0);\n int durability_to_remove = (int)(capacity.getDurability() / 10) + 1;\n itemInHand.hurtAndBreak(durability_to_remove, pPlayer, (player -> player.broadcastBreakEvent(player.getUsedItemHand())));\n pLevel.playLocalSound(\n pPos.getX(),\n pPos.getY(),\n pPos.getZ(),\n SoundEvents.ANVIL_USE,\n SoundSource.BLOCKS,",
"score": 86.84307226694133
},
{
"filename": "src/main/java/net/chauvedev/woodencog/ponder/Heating.java",
"retrieved_chunk": " ItemStack ingot = Objects.requireNonNull(ForgeRegistries.ITEMS.getValue(\n new ResourceLocation(\"tfc:metal/ingot/copper\")\n )).getDefaultInstance();\n scene.configureBasePlate(0, 0, 5);\n scene.world.showSection(util.select.fromTo(0, 0, 0, 4, 0, 4), Direction.UP);\n scene.idle(5);\n scene.world.showSection(util.select.fromTo(2, 1, 0, 4, 1, 4), Direction.DOWN);\n scene.idle(10);\n scene.world.showSection(util.select.fromTo(0, 1, 0, 1, 1, 4), Direction.DOWN);\n scene.idle(10);",
"score": 42.69520478773765
},
{
"filename": "src/main/java/net/chauvedev/woodencog/item/fluids/can/FireclayCrucibleModel.java",
"retrieved_chunk": " int luminosity = this.applyFluidLuminosity ? attributes.getLuminosity(this.fluid) : 0;\n int color = attributes.getColor(this.fluid);\n builder.addQuads(ItemLayerModel.getLayerRenderType(luminosity > 0), ItemTextureQuadConverter.convertTexture(transform, baseSprite, fluidSprite, 0.468625F, Direction.NORTH, color, -1, luminosity));\n builder.addQuads(ItemLayerModel.getLayerRenderType(luminosity > 0), ItemTextureQuadConverter.convertTexture(transform, baseSprite, fluidSprite, 0.531375F, Direction.SOUTH, color, -1, luminosity));\n }\n }\n if (this.coverIsMask) {\n if (coverSprite != null) {\n baseSprite = spriteGetter.apply(baseLocation);\n builder.addQuads(ItemLayerModel.getLayerRenderType(false), ItemTextureQuadConverter.convertTexture(transform, coverSprite, baseSprite, 0.4685F, Direction.NORTH, -1, 2));",
"score": 42.208030670085584
},
{
"filename": "src/main/java/net/chauvedev/woodencog/ponder/Heating.java",
"retrieved_chunk": " scene.idle(30);\n scene.overlay.showSelectionWithText(item_stand_selection, 70)\n .attachKeyFrame()\n .colored(PonderPalette.BLUE)\n .text(\"Heated item can be cooled down with water.\");\n scene.effects.emitParticles(\n itemVec,\n Emitter.simple(ParticleTypes.SMOKE, new Vec3(0, 0.01, 0)),\n 1,\n 25",
"score": 38.72235390966863
},
{
"filename": "src/main/java/net/chauvedev/woodencog/item/fluids/can/FireclayCrucibleModel.java",
"retrieved_chunk": " builder.addQuads(ItemLayerModel.getLayerRenderType(false), ItemTextureQuadConverter.convertTexture(transform, coverSprite, baseSprite, 0.5315F, Direction.SOUTH, -1, 2));\n }\n } else if (coverSprite != null) {\n builder.addQuads(ItemLayerModel.getLayerRenderType(false), new BakedQuad[]{ItemTextureQuadConverter.genQuad(transform, 0.0F, 0.0F, 16.0F, 16.0F, 0.4685F, coverSprite, Direction.NORTH, -1, 2)});\n builder.addQuads(ItemLayerModel.getLayerRenderType(false), new BakedQuad[]{ItemTextureQuadConverter.genQuad(transform, 0.0F, 0.0F, 16.0F, 16.0F, 0.5315F, coverSprite, Direction.SOUTH, -1, 2)});\n }\n builder.setParticle(particleSprite);\n return builder.build();\n }\n public Collection<Material> getTextures(IModelConfiguration owner, Function<ResourceLocation, UnbakedModel> modelGetter, Set<Pair<String, String>> missingTextureErrors) {",
"score": 36.77740219397565
}
] | java | if (!destroyed || !capacity.isDestroyed() || this.stress != Integer.MAX_VALUE){ |
package net.chauvedev.woodencog.mixin;
import com.simibubi.create.content.kinetics.KineticNetwork;
import com.simibubi.create.content.kinetics.base.KineticBlockEntity;
import com.simibubi.create.content.kinetics.crusher.CrushingWheelBlockEntity;
import com.simibubi.create.content.kinetics.transmission.ClutchBlockEntity;
import net.chauvedev.woodencog.WoodenCog;
import net.chauvedev.woodencog.capability.MachineCapacity;
import net.chauvedev.woodencog.capability.MachineCapacityProvider;
import net.chauvedev.woodencog.core.MachineCapacityEntry;
import net.chauvedev.woodencog.core.MachineCapacityStorage;
import net.minecraft.core.BlockPos;
import net.minecraft.core.particles.ParticleTypes;
import net.minecraft.nbt.CompoundTag;
import net.minecraft.sounds.SoundEvent;
import net.minecraft.sounds.SoundEvents;
import net.minecraft.sounds.SoundSource;
import net.minecraft.world.level.Level;
import net.minecraft.world.level.block.Block;
import net.minecraft.world.level.block.Blocks;
import net.minecraft.world.level.block.entity.BlockEntity;
import net.minecraft.world.level.block.entity.BlockEntityType;
import net.minecraft.world.level.block.state.BlockState;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Shadow;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
import java.awt.*;
import java.util.Objects;
@Mixin(value = KineticBlockEntity.class, remap = false)
public abstract class MixinKineticBlockEntity{
private boolean lifeBlockBlocked = false;
private float lifeBlock = 20*10;
@Shadow public abstract KineticNetwork getOrCreateNetwork();
@Shadow protected float speed;
@Shadow protected float stress;
@Shadow protected boolean overStressed;
@Shadow public abstract void setSpeed(float speed);
@Shadow protected float capacity;
@Shadow public abstract float calculateAddedStressCapacity();
@Shadow public abstract float calculateStressApplied();
@Shadow protected float lastStressApplied;
@Shadow protected float lastCapacityProvided;
@Shadow public abstract void detachKinetics();
@Shadow public abstract float getSpeed();
boolean destroyed = false;
@Inject(
method = {"tick"},
at = {@At("HEAD")},
remap = false)
public void tick(CallbackInfo ci){
KineticBlockEntity block = (KineticBlockEntity)((Object)this) ;
try{
this.tickDamagedTick(block);
}catch (Error error){
WoodenCog.LOGGER.debug("Error pendant le system tick");
error.printStackTrace();
}
}
public void tickDamagedTick(KineticBlockEntity block){
if (!MachineCapacityStorage.getInstance().active){
return;
}
if (MachineCapacityStorage.getInstance().isBlackListBlock(block)){
return;
}
MachineCapacityEntry config = MachineCapacityStorage.getInstance().getCapacity(block.getBlockState().getBlock());
MachineCapacity capacity = block.getCapability(MachineCapacityProvider.MACHINE_CAPACITY).resolve().get();
int chance = block.getLevel().random.nextInt(0,101);
float left = config.durabilityMax - capacity.getDurability();
if (chance > (100-config.damageChance) && left > 0 && block.getSpeed() > 0){
int damage_for_speed = (int) getSpeed()/10;
capacity.setDurability(capacity.getDurability()+damage_for_speed);
}
left = config.durabilityMax - capacity.getDurability();
if(left >= 10 && left < config.durabilityMax/2) {
block.getLevel().addParticle(ParticleTypes.SMOKE,false,block.getBlockPos().getX()+0.5f,block.getBlockPos().getY()+0.5f,block.getBlockPos().getZ()+0.5f,0,0.01,0);
}
if (left<10){
if (getOrCreateNetwork()!=null){
if (!destroyed || !capacity.isDestroyed() || this.stress != Integer.MAX_VALUE){
destroyed = true;
getOrCreateNetwork().updateCapacityFor(block,0);
getOrCreateNetwork().updateStressFor(block, Integer.MAX_VALUE);
getOrCreateNetwork().updateNetwork();
getOrCreateNetwork().sync();
| capacity.setDestroyed(true); |
}
}
block.getLevel().addParticle(ParticleTypes.CAMPFIRE_SIGNAL_SMOKE,false,block.getBlockPos().getX()+0.5f,block.getBlockPos().getY()+0.5f,block.getBlockPos().getZ()+0.5f,0,0.01,0);
}
}
}
| src/main/java/net/chauvedev/woodencog/mixin/MixinKineticBlockEntity.java | Sparks-and-Rotation-woodencog-bb3d3ef | [
{
"filename": "src/main/java/net/chauvedev/woodencog/capability/MachineCapacity.java",
"retrieved_chunk": " this.machine_capacity = var1;\n }\n public boolean isDestroyed(){\n return this.destroyed;\n }\n public void setDestroyed(boolean var1){\n this.destroyed = var1;\n }\n public CompoundTag toTag(){\n CompoundTag tag = new CompoundTag();",
"score": 25.75479857887546
},
{
"filename": "src/main/java/net/chauvedev/woodencog/mixin/MixinKineticBlock.java",
"retrieved_chunk": " super(pProperties);\n }\n @Override\n public @NotNull InteractionResult use(BlockState pState, Level pLevel, BlockPos pPos, Player pPlayer, InteractionHand pHand, BlockHitResult pHit) {\n BlockEntity blockEntity = pLevel.getBlockEntity(pPos);\n if (blockEntity instanceof KineticBlockEntity kineticBlockEntity) {\n MachineCapacity capacity = kineticBlockEntity.getCapability(MachineCapacityProvider.MACHINE_CAPACITY).resolve().get();\n ItemStack itemInHand = pPlayer.getMainHandItem();\n if(itemInHand.is(TFCTags.Items.HAMMERS)) {\n if(capacity.isDestroyed()) {",
"score": 19.471871877056707
},
{
"filename": "src/main/java/net/chauvedev/woodencog/mixin/MixinKineticBlock.java",
"retrieved_chunk": " } else {\n capacity.setDurability(0);\n int durability_to_remove = (int)(capacity.getDurability() / 10) + 1;\n itemInHand.hurtAndBreak(durability_to_remove, pPlayer, (player -> player.broadcastBreakEvent(player.getUsedItemHand())));\n pLevel.playLocalSound(\n pPos.getX(),\n pPos.getY(),\n pPos.getZ(),\n SoundEvents.ANVIL_USE,\n SoundSource.BLOCKS,",
"score": 18.984884480585247
},
{
"filename": "src/main/java/net/chauvedev/woodencog/capability/MachineCapacity.java",
"retrieved_chunk": " tag.putFloat(\"machine_usage\",this.machine_capacity);\n tag.putBoolean(\"machine_destroyed\",this.destroyed);\n return tag;\n }\n public void toTag(CompoundTag tag){\n if (tag.contains(\"machine_usage\")){\n this.machine_capacity = tag.getFloat(\"machine_usage\");\n }\n if (tag.contains(\"machine_destroyed\")){\n this.destroyed = tag.getBoolean(\"machine_destroyed\");",
"score": 17.927611932228615
},
{
"filename": "src/main/java/net/chauvedev/woodencog/core/MachineCapacityStorage.java",
"retrieved_chunk": " public boolean active = true;\n public MachineCapacityStorage(){\n MachineCapacityStorage.INSTANCE = this;\n this.machineCapacityEntryMap_cache = new HashMap<>();\n }\n public boolean isBlackListBlock(BlockEntity blockEntity){\n Block block = blockEntity.getBlockState().getBlock();\n String full = block.getRegistryName().getNamespace()+\":\"+block.getRegistryName().getPath();\n return WoodenCogCommonConfigs.WEAR_BLACKLIST.get().contains(full);\n }",
"score": 12.992120634760672
}
] | java | capacity.setDestroyed(true); |
package net.chauvedev.woodencog.mixin;
import com.simibubi.create.content.kinetics.KineticNetwork;
import com.simibubi.create.content.kinetics.base.KineticBlockEntity;
import com.simibubi.create.content.kinetics.crusher.CrushingWheelBlockEntity;
import com.simibubi.create.content.kinetics.transmission.ClutchBlockEntity;
import net.chauvedev.woodencog.WoodenCog;
import net.chauvedev.woodencog.capability.MachineCapacity;
import net.chauvedev.woodencog.capability.MachineCapacityProvider;
import net.chauvedev.woodencog.core.MachineCapacityEntry;
import net.chauvedev.woodencog.core.MachineCapacityStorage;
import net.minecraft.core.BlockPos;
import net.minecraft.core.particles.ParticleTypes;
import net.minecraft.nbt.CompoundTag;
import net.minecraft.sounds.SoundEvent;
import net.minecraft.sounds.SoundEvents;
import net.minecraft.sounds.SoundSource;
import net.minecraft.world.level.Level;
import net.minecraft.world.level.block.Block;
import net.minecraft.world.level.block.Blocks;
import net.minecraft.world.level.block.entity.BlockEntity;
import net.minecraft.world.level.block.entity.BlockEntityType;
import net.minecraft.world.level.block.state.BlockState;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Shadow;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
import java.awt.*;
import java.util.Objects;
@Mixin(value = KineticBlockEntity.class, remap = false)
public abstract class MixinKineticBlockEntity{
private boolean lifeBlockBlocked = false;
private float lifeBlock = 20*10;
@Shadow public abstract KineticNetwork getOrCreateNetwork();
@Shadow protected float speed;
@Shadow protected float stress;
@Shadow protected boolean overStressed;
@Shadow public abstract void setSpeed(float speed);
@Shadow protected float capacity;
@Shadow public abstract float calculateAddedStressCapacity();
@Shadow public abstract float calculateStressApplied();
@Shadow protected float lastStressApplied;
@Shadow protected float lastCapacityProvided;
@Shadow public abstract void detachKinetics();
@Shadow public abstract float getSpeed();
boolean destroyed = false;
@Inject(
method = {"tick"},
at = {@At("HEAD")},
remap = false)
public void tick(CallbackInfo ci){
KineticBlockEntity block = (KineticBlockEntity)((Object)this) ;
try{
this.tickDamagedTick(block);
}catch (Error error){
WoodenCog.LOGGER.debug("Error pendant le system tick");
error.printStackTrace();
}
}
public void tickDamagedTick(KineticBlockEntity block){
if (!MachineCapacityStorage.getInstance().active){
return;
}
if (MachineCapacityStorage.getInstance().isBlackListBlock(block)){
return;
}
| MachineCapacityEntry config = MachineCapacityStorage.getInstance().getCapacity(block.getBlockState().getBlock()); |
MachineCapacity capacity = block.getCapability(MachineCapacityProvider.MACHINE_CAPACITY).resolve().get();
int chance = block.getLevel().random.nextInt(0,101);
float left = config.durabilityMax - capacity.getDurability();
if (chance > (100-config.damageChance) && left > 0 && block.getSpeed() > 0){
int damage_for_speed = (int) getSpeed()/10;
capacity.setDurability(capacity.getDurability()+damage_for_speed);
}
left = config.durabilityMax - capacity.getDurability();
if(left >= 10 && left < config.durabilityMax/2) {
block.getLevel().addParticle(ParticleTypes.SMOKE,false,block.getBlockPos().getX()+0.5f,block.getBlockPos().getY()+0.5f,block.getBlockPos().getZ()+0.5f,0,0.01,0);
}
if (left<10){
if (getOrCreateNetwork()!=null){
if (!destroyed || !capacity.isDestroyed() || this.stress != Integer.MAX_VALUE){
destroyed = true;
getOrCreateNetwork().updateCapacityFor(block,0);
getOrCreateNetwork().updateStressFor(block, Integer.MAX_VALUE);
getOrCreateNetwork().updateNetwork();
getOrCreateNetwork().sync();
capacity.setDestroyed(true);
}
}
block.getLevel().addParticle(ParticleTypes.CAMPFIRE_SIGNAL_SMOKE,false,block.getBlockPos().getX()+0.5f,block.getBlockPos().getY()+0.5f,block.getBlockPos().getZ()+0.5f,0,0.01,0);
}
}
}
| src/main/java/net/chauvedev/woodencog/mixin/MixinKineticBlockEntity.java | Sparks-and-Rotation-woodencog-bb3d3ef | [
{
"filename": "src/main/java/net/chauvedev/woodencog/core/MachineCapacityStorage.java",
"retrieved_chunk": " public boolean active = true;\n public MachineCapacityStorage(){\n MachineCapacityStorage.INSTANCE = this;\n this.machineCapacityEntryMap_cache = new HashMap<>();\n }\n public boolean isBlackListBlock(BlockEntity blockEntity){\n Block block = blockEntity.getBlockState().getBlock();\n String full = block.getRegistryName().getNamespace()+\":\"+block.getRegistryName().getPath();\n return WoodenCogCommonConfigs.WEAR_BLACKLIST.get().contains(full);\n }",
"score": 44.586582741151155
},
{
"filename": "src/main/java/net/chauvedev/woodencog/core/MachineCapacityStorage.java",
"retrieved_chunk": "import java.util.HashMap;\nimport java.util.Map;\nimport java.util.UUID;\nimport java.util.logging.Logger;\npublic class MachineCapacityStorage {\n private static MachineCapacityStorage INSTANCE = null;\n public static MachineCapacityStorage getInstance(){\n return INSTANCE;\n }\n public HashMap<String,MachineCapacityEntry> machineCapacityEntryMap_cache;",
"score": 36.70899454696873
},
{
"filename": "src/main/java/net/chauvedev/woodencog/core/MachineCapacityStorage.java",
"retrieved_chunk": " public MachineCapacityEntry getCapacity(Block block){\n String full = block.getRegistryName().getNamespace()+\":\"+block.getRegistryName().getPath();\n try {\n CustomBlockConfig.BlockInformation info = CustomBlockConfig.registeredBlocks.get(full);\n return MachineCapacityEntry.createEntryBlock(false,\n info.durability,\n info.chance\n );\n } catch (NullPointerException e) {\n return MachineCapacityEntry.createEntryBlock(false,",
"score": 23.671722180272255
},
{
"filename": "src/main/java/net/chauvedev/woodencog/WoodenCog.java",
"retrieved_chunk": " final MachineCapacityStorage machineCapacityStorage;\n public WoodenCog()\n {\n FMLJavaModLoadingContext.get().getModEventBus().addListener(this::setup);\n CustomArmInteractionPointTypes.registerAll();\n MinecraftForge.EVENT_BUS.register(this);\n CustomBlockConfig.init(new File(FMLPaths.CONFIGDIR.get().toString(), \"woodencog-custom-block.json\"));\n CustomArmInteractionPointTypes.registerAll();\n ItemEntry<FireclayCrucibleItem> FIRECLAY_CRUCIBLE_ITEM = WoodenCog.REGISTRATE.item(\"fireclay_crucible\", FireclayCrucibleItem::new)\n .properties(properties -> properties.stacksTo(1))",
"score": 15.344105990200424
},
{
"filename": "src/main/java/net/chauvedev/woodencog/WoodenCogEvents.java",
"retrieved_chunk": " Block block = ((BlockItem) event.getObject().getItem()).getBlock();\n if (block instanceof KineticBlock){\n event.addCapability(MachineCapacityProvider.MACHINE_CAPACITY_KEY, new MachineCapacityProvider());\n }\n }\n }catch (Error error){\n WoodenCog.LOGGER.debug(\"-------ERROR---\");\n WoodenCog.LOGGER.error(\"Error found\",error);\n }\n }",
"score": 14.485343793226006
}
] | java | MachineCapacityEntry config = MachineCapacityStorage.getInstance().getCapacity(block.getBlockState().getBlock()); |
package net.chauvedev.woodencog.mixin;
import com.simibubi.create.content.kinetics.KineticNetwork;
import com.simibubi.create.content.kinetics.base.KineticBlockEntity;
import com.simibubi.create.content.kinetics.crusher.CrushingWheelBlockEntity;
import com.simibubi.create.content.kinetics.transmission.ClutchBlockEntity;
import net.chauvedev.woodencog.WoodenCog;
import net.chauvedev.woodencog.capability.MachineCapacity;
import net.chauvedev.woodencog.capability.MachineCapacityProvider;
import net.chauvedev.woodencog.core.MachineCapacityEntry;
import net.chauvedev.woodencog.core.MachineCapacityStorage;
import net.minecraft.core.BlockPos;
import net.minecraft.core.particles.ParticleTypes;
import net.minecraft.nbt.CompoundTag;
import net.minecraft.sounds.SoundEvent;
import net.minecraft.sounds.SoundEvents;
import net.minecraft.sounds.SoundSource;
import net.minecraft.world.level.Level;
import net.minecraft.world.level.block.Block;
import net.minecraft.world.level.block.Blocks;
import net.minecraft.world.level.block.entity.BlockEntity;
import net.minecraft.world.level.block.entity.BlockEntityType;
import net.minecraft.world.level.block.state.BlockState;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Shadow;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
import java.awt.*;
import java.util.Objects;
@Mixin(value = KineticBlockEntity.class, remap = false)
public abstract class MixinKineticBlockEntity{
private boolean lifeBlockBlocked = false;
private float lifeBlock = 20*10;
@Shadow public abstract KineticNetwork getOrCreateNetwork();
@Shadow protected float speed;
@Shadow protected float stress;
@Shadow protected boolean overStressed;
@Shadow public abstract void setSpeed(float speed);
@Shadow protected float capacity;
@Shadow public abstract float calculateAddedStressCapacity();
@Shadow public abstract float calculateStressApplied();
@Shadow protected float lastStressApplied;
@Shadow protected float lastCapacityProvided;
@Shadow public abstract void detachKinetics();
@Shadow public abstract float getSpeed();
boolean destroyed = false;
@Inject(
method = {"tick"},
at = {@At("HEAD")},
remap = false)
public void tick(CallbackInfo ci){
KineticBlockEntity block = (KineticBlockEntity)((Object)this) ;
try{
this.tickDamagedTick(block);
}catch (Error error){
WoodenCog.LOGGER.debug("Error pendant le system tick");
error.printStackTrace();
}
}
public void tickDamagedTick(KineticBlockEntity block){
if (!MachineCapacityStorage.getInstance().active){
return;
}
if (MachineCapacityStorage.getInstance().isBlackListBlock(block)){
return;
}
MachineCapacityEntry config = MachineCapacityStorage.getInstance().getCapacity(block.getBlockState().getBlock());
MachineCapacity capacity = block.getCapability(MachineCapacityProvider.MACHINE_CAPACITY).resolve().get();
int chance = block.getLevel().random.nextInt(0,101);
float left = config.durabilityMax - capacity.getDurability();
if (chance > (100-config.damageChance) && left > 0 && block.getSpeed() > 0){
int damage_for_speed = (int) getSpeed()/10;
capacity.setDurability | (capacity.getDurability()+damage_for_speed); |
}
left = config.durabilityMax - capacity.getDurability();
if(left >= 10 && left < config.durabilityMax/2) {
block.getLevel().addParticle(ParticleTypes.SMOKE,false,block.getBlockPos().getX()+0.5f,block.getBlockPos().getY()+0.5f,block.getBlockPos().getZ()+0.5f,0,0.01,0);
}
if (left<10){
if (getOrCreateNetwork()!=null){
if (!destroyed || !capacity.isDestroyed() || this.stress != Integer.MAX_VALUE){
destroyed = true;
getOrCreateNetwork().updateCapacityFor(block,0);
getOrCreateNetwork().updateStressFor(block, Integer.MAX_VALUE);
getOrCreateNetwork().updateNetwork();
getOrCreateNetwork().sync();
capacity.setDestroyed(true);
}
}
block.getLevel().addParticle(ParticleTypes.CAMPFIRE_SIGNAL_SMOKE,false,block.getBlockPos().getX()+0.5f,block.getBlockPos().getY()+0.5f,block.getBlockPos().getZ()+0.5f,0,0.01,0);
}
}
}
| src/main/java/net/chauvedev/woodencog/mixin/MixinKineticBlockEntity.java | Sparks-and-Rotation-woodencog-bb3d3ef | [
{
"filename": "src/main/java/net/chauvedev/woodencog/mixin/MixinKineticBlock.java",
"retrieved_chunk": " } else {\n capacity.setDurability(0);\n int durability_to_remove = (int)(capacity.getDurability() / 10) + 1;\n itemInHand.hurtAndBreak(durability_to_remove, pPlayer, (player -> player.broadcastBreakEvent(player.getUsedItemHand())));\n pLevel.playLocalSound(\n pPos.getX(),\n pPos.getY(),\n pPos.getZ(),\n SoundEvents.ANVIL_USE,\n SoundSource.BLOCKS,",
"score": 61.0193330444728
},
{
"filename": "src/main/java/net/chauvedev/woodencog/core/MachineCapacityEntry.java",
"retrieved_chunk": "package net.chauvedev.woodencog.core;\nimport net.chauvedev.woodencog.capability.MachineCapacity;\nimport net.minecraft.world.level.block.Block;\npublic class MachineCapacityEntry {\n public boolean isBlackList = false;\n public String registryName = \"\";\n public float durabilityMax = 500;\n public int damageChance = 100;\n public static MachineCapacityEntry createEntryBlock(boolean isBlackList,float durabilityMax,int damageChance){\n MachineCapacityEntry entry = new MachineCapacityEntry();",
"score": 44.02929211688401
},
{
"filename": "src/main/java/net/chauvedev/woodencog/mixin/MixinKineticBlock.java",
"retrieved_chunk": " super(pProperties);\n }\n @Override\n public @NotNull InteractionResult use(BlockState pState, Level pLevel, BlockPos pPos, Player pPlayer, InteractionHand pHand, BlockHitResult pHit) {\n BlockEntity blockEntity = pLevel.getBlockEntity(pPos);\n if (blockEntity instanceof KineticBlockEntity kineticBlockEntity) {\n MachineCapacity capacity = kineticBlockEntity.getCapability(MachineCapacityProvider.MACHINE_CAPACITY).resolve().get();\n ItemStack itemInHand = pPlayer.getMainHandItem();\n if(itemInHand.is(TFCTags.Items.HAMMERS)) {\n if(capacity.isDestroyed()) {",
"score": 41.10104497132714
},
{
"filename": "src/main/java/net/chauvedev/woodencog/core/MachineCapacityStorage.java",
"retrieved_chunk": " public MachineCapacityEntry getCapacity(Block block){\n String full = block.getRegistryName().getNamespace()+\":\"+block.getRegistryName().getPath();\n try {\n CustomBlockConfig.BlockInformation info = CustomBlockConfig.registeredBlocks.get(full);\n return MachineCapacityEntry.createEntryBlock(false,\n info.durability,\n info.chance\n );\n } catch (NullPointerException e) {\n return MachineCapacityEntry.createEntryBlock(false,",
"score": 35.56804212475441
},
{
"filename": "src/main/java/net/chauvedev/woodencog/config/WoodenCogCommonConfigs.java",
"retrieved_chunk": " .define(\"durability\", 144000);\n DEFAULT_DAMAGE_CHANCE = BUILDER\n .comment(\"chance of machine getting damage (from 0 to 100 number over 100 are the same as 100)\")\n .define(\"chance\", 10);\n WEAR_BLACKLIST = BUILDER\n .comment(\"This list contains block that should not damage over time\")\n .defineList(\"blacklist\", List.of(\n \"create:shaft\",\n \"create:cogwheel\",\n \"create:large_cogwheel\",",
"score": 31.957759089726114
}
] | java | (capacity.getDurability()+damage_for_speed); |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.