| column | type | length / range |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 5 to 2.06M |
| ext | string | 11 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 3 to 251 |
| max_stars_repo_name | string | length 4 to 130 |
| max_stars_repo_head_hexsha | string | length 40 to 78 |
| max_stars_repo_licenses | list | length 1 to 10 |
| max_stars_count | int64 | 1 to 191k, nullable |
| max_stars_repo_stars_event_min_datetime | string | length 24, nullable |
| max_stars_repo_stars_event_max_datetime | string | length 24, nullable |
| max_issues_repo_path | string | length 3 to 251 |
| max_issues_repo_name | string | length 4 to 130 |
| max_issues_repo_head_hexsha | string | length 40 to 78 |
| max_issues_repo_licenses | list | length 1 to 10 |
| max_issues_count | int64 | 1 to 116k, nullable |
| max_issues_repo_issues_event_min_datetime | string | length 24, nullable |
| max_issues_repo_issues_event_max_datetime | string | length 24, nullable |
| max_forks_repo_path | string | length 3 to 251 |
| max_forks_repo_name | string | length 4 to 130 |
| max_forks_repo_head_hexsha | string | length 40 to 78 |
| max_forks_repo_licenses | list | length 1 to 10 |
| max_forks_count | int64 | 1 to 105k, nullable |
| max_forks_repo_forks_event_min_datetime | string | length 24, nullable |
| max_forks_repo_forks_event_max_datetime | string | length 24, nullable |
| content | string | length 1 to 1.05M |
| avg_line_length | float64 | 1 to 1.02M |
| max_line_length | int64 | 3 to 1.04M |
| alphanum_fraction | float64 | 0 to 1 |
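Each record below is one row of this schema: the `content` field holds the file text, and the three `max_*` column groups give the file's provenance in the most-starred, most-issued, and most-forked repository containing it. A minimal sketch of iterating such rows with the Hugging Face `datasets` library; the dataset identifier is a hypothetical placeholder, not the real name of this dump:

```python
from datasets import load_dataset

# Hypothetical dataset id standing in for this dump.
ds = load_dataset("example-org/source-files", split="train", streaming=True)

for row in ds:
    # Field names follow the schema table above.
    print(row["hexsha"], row["ext"], row["max_stars_repo_path"])
    text = row["content"]                   # full file contents
    if row["max_stars_count"] is not None:  # nullable int64
        print("stars:", row["max_stars_count"])
    break
```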
hexsha: ed51e5aefc8aa4c007f752784c838fb5f4f57c1c | size: 2,297 | ext: py | lang: Python
path: network/dataset/image_loading.py | repo: imsb-uke/podometric_u_net @ a33afcc186d618889df73c7ab2941dfbb63574ac | licenses: ["MIT"] | stars: null | issues: null | forks: null
import os
import numpy as np
from skimage.io import imread
# Function to load image
# Function to load mask
avg_line_length: 37.048387 | max_line_length: 124 | alphanum_fraction: 0.652155
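The content above breaks off after its imports; the two loaders its trailing comments announce are missing. A minimal sketch of what they might look like, assuming `skimage.io.imread`-based loading (both function bodies are hypothetical, not the repository's actual code):

```python
import numpy as np
from skimage.io import imread

def load_image(path):
    # Hypothetical loader: read an image into a float32 numpy array.
    return np.asarray(imread(path), dtype=np.float32)

def load_mask(path):
    # Hypothetical loader: read a segmentation mask as a binary uint8 array.
    return (np.asarray(imread(path)) > 0).astype(np.uint8)
```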
hexsha: ed52fe7003cd3391400a3e6ca8a3b67edfc17d59 | size: 6,769 | ext: py | lang: Python
path: series/simple/numeric_series.py | repo: kefir/snakee @ a17734d4b2d7dfd3e6c7b195baa128fbc84d197b | licenses: ["MIT"] | stars: null | issues: null | forks: null
from typing import Optional, Callable

try:  # Assume we're a sub-module in a package.
    from series import series_classes as sc
    from utils import numeric as nm
except ImportError:  # Apparently no higher-level package has been imported, fall back to a local import.
    from .. import series_classes as sc
    from ...utils import numeric as nm

Native = sc.AnySeries

DEFAULT_NUMERIC = True
WINDOW_DEFAULT = (-1, 0, 1)
WINDOW_WO_CENTER = (-2, -1, 0, 1, 2)
WINDOW_NEIGHBORS = (-1, 0)
avg_line_length: 34.186869 | max_line_length: 116 | alphanum_fraction: 0.601418
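The `WINDOW_*` tuples are relative row offsets for sliding-window operations over a series. A small sketch of how such offsets could be applied; the `values` list and `window_at` helper are illustrative, not part of the repository:

```python
WINDOW_DEFAULT = (-1, 0, 1)

def window_at(values, i, offsets=WINDOW_DEFAULT):
    # Collect the neighbors of values[i] at the given relative offsets,
    # skipping positions that fall outside the series.
    return [values[i + d] for d in offsets if 0 <= i + d < len(values)]

print(window_at([10, 20, 30, 40], 2))  # [20, 30, 40]
```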
hexsha: ed5484913e5b4d8984ea6ca02d8e86830b881e11 | size: 4,857 | ext: py | lang: Python
path: app/internal/module/video/database.py | repo: kuropengin/SHINtube-video-api @ 8a4b068fb95a9a2736b3dba3782dbbbf73815290 | licenses: ["MIT"] | stars: null | issues: null | forks: null
import glob
import pathlib

from .filemanager import filemanager_class

# NOTE: database_class is defined in the full file; its body is not included
# in this excerpt.
database = database_class()
avg_line_length: 33.267123 | max_line_length: 71 | alphanum_fraction: 0.558369
hexsha: ed548c718f56038d0a32759b322ccf9c4f9f5e93 | size: 29,735 | ext: py | lang: Python
path: python/OpenGeoTile.py | repo: scoofy/open-geotiling @ 0b1305d4482d6df46104135662ffe4565f92f9f0 | licenses: ["Apache-2.0"] | stars: null | issues: null | forks: null
from openlocationcode import openlocationcode as olc
from enum import Enum
import math, re
# Copy from OpenLocationCode.java
# A separator used to break the code into two parts to aid memorability.
SEPARATOR = '+'
# Copy from OpenLocationCode.java
# The character used to pad codes.
PADDING_CHARACTER = '0'
PADDING_2 = "00"
PADDING_4 = "0000"
PADDING_6 = "000000"
CODE_ALPHABET = olc.CODE_ALPHABET_
BASE_20_SET = {x+y for x in CODE_ALPHABET for y in CODE_ALPHABET}
BASE_20_BORDER_SET = {x for x in BASE_20_SET if x[0] in ['2', 'X'] or x[1] in ['2', 'X']}
NORTH_DIGITS = {x for x in BASE_20_BORDER_SET if x[0] == 'X'}
EAST_DIGITS = {x for x in BASE_20_BORDER_SET if x[1] == 'X'}
SOUTH_DIGITS = {x for x in BASE_20_BORDER_SET if x[0] == '2'}
WEST_DIGITS = {x for x in BASE_20_BORDER_SET if x[1] == '2'}
memoized_digit_dict = {
    "N1": NORTH_DIGITS,
    "E1": EAST_DIGITS,
    "S1": SOUTH_DIGITS,
    "W1": WEST_DIGITS,
}
avg_line_length: 43.535871 | max_line_length: 150 | alphanum_fraction: 0.613351
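The border sets classify a tile's trailing digit pair by which edge of its parent tile it touches: '2' is the lowest character in the Open Location Code alphabet and 'X' the highest, so they mark the south/west and north/east extremes. A usage sketch under that reading; the `edge_of` helper is illustrative, not from the file:

```python
def edge_of(pair):
    # Illustrative helper: report which parent-tile edges a digit pair touches.
    edges = []
    if pair in NORTH_DIGITS:
        edges.append("north")
    if pair in SOUTH_DIGITS:
        edges.append("south")
    if pair in EAST_DIGITS:
        edges.append("east")
    if pair in WEST_DIGITS:
        edges.append("west")
    return edges or ["interior"]

print(edge_of("X2"))  # ['north', 'west']
print(edge_of("55"))  # ['interior']
```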
hexsha: ed55484ea14f91f98d1615b910fc743371e53922 | size: 13,543 | ext: py | lang: Python
path: deep-rl/lib/python2.7/site-packages/OpenGL/arrays/arraydatatype.py | repo: ShujaKhalid/deep-rl @ 99c6ba6c3095d1bfdab81bd01395ced96bddd611 | licenses: ["MIT"]
stars: 87 (2015-04-09T16:57:27.000Z to 2022-02-21T13:21:12.000Z) | issues: 47 (2015-04-09T21:05:30.000Z to 2021-06-22T15:21:18.000Z) | forks: 16 (2015-04-09T19:10:22.000Z to 2020-07-19T05:41:06.000Z)
"""Array data-type implementations (abstraction points for GL array types"""
import ctypes
import OpenGL
from OpenGL.raw.GL import _types
from OpenGL import plugins
from OpenGL.arrays import formathandler, _arrayconstants as GL_1_1
from OpenGL import logs
_log = logs.getLog( 'OpenGL.arrays.arraydatatype' )
from OpenGL import acceleratesupport
ADT = None
if acceleratesupport.ACCELERATE_AVAILABLE:
try:
from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT
except ImportError as err:
_log.warn(
"Unable to load ArrayDatatype accelerator from OpenGL_accelerate"
)
if ADT is None:
# Python-coded version
GLOBAL_REGISTRY = HandlerRegistry( plugins.FormatHandler.match)
formathandler.FormatHandler.TYPE_REGISTRY = GLOBAL_REGISTRY
# the final array data-type classes...
GLcharARBArray = GLcharArray
GLbooleanArray = GLubyteArray
else:
# Cython-coded array handler
_log.info( 'Using accelerated ArrayDatatype' )
ArrayDatatype = ADT( None, None )
GLclampdArray = ADT( GL_1_1.GL_DOUBLE, _types.GLclampd )
GLclampfArray = ADT( GL_1_1.GL_FLOAT, _types.GLclampf )
GLdoubleArray = ADT( GL_1_1.GL_DOUBLE, _types.GLdouble )
GLfloatArray = ADT( GL_1_1.GL_FLOAT, _types.GLfloat )
GLbyteArray = ADT( GL_1_1.GL_BYTE, _types.GLbyte )
GLcharArray = GLcharARBArray = ADT( GL_1_1.GL_BYTE, _types.GLchar )
GLshortArray = ADT( GL_1_1.GL_SHORT, _types.GLshort )
GLintArray = ADT( GL_1_1.GL_INT, _types.GLint )
GLubyteArray = GLbooleanArray = ADT( GL_1_1.GL_UNSIGNED_BYTE, _types.GLubyte )
GLushortArray = ADT( GL_1_1.GL_UNSIGNED_SHORT, _types.GLushort )
GLuintArray = ADT( GL_1_1.GL_UNSIGNED_INT, _types.GLuint )
GLint64Array = ADT( None, _types.GLint64 )
GLuint64Array = ADT( GL_1_1.GL_UNSIGNED_INT64, _types.GLuint64 )
GLenumArray = ADT( GL_1_1.GL_UNSIGNED_INT, _types.GLenum )
GLsizeiArray = ADT( GL_1_1.GL_INT, _types.GLsizei )
GLvoidpArray = ADT( _types.GL_VOID_P, _types.GLvoidp )
GL_CONSTANT_TO_ARRAY_TYPE = {
GL_1_1.GL_DOUBLE : GLclampdArray,
GL_1_1.GL_FLOAT : GLclampfArray,
GL_1_1.GL_FLOAT : GLfloatArray,
GL_1_1.GL_DOUBLE : GLdoubleArray,
GL_1_1.GL_BYTE : GLbyteArray,
GL_1_1.GL_SHORT : GLshortArray,
GL_1_1.GL_INT : GLintArray,
GL_1_1.GL_UNSIGNED_BYTE : GLubyteArray,
GL_1_1.GL_UNSIGNED_SHORT : GLushortArray,
GL_1_1.GL_UNSIGNED_INT : GLuintArray,
#GL_1_1.GL_UNSIGNED_INT : GLenumArray,
}
avg_line_length: 44.844371 | max_line_length: 100 | alphanum_fraction: 0.616333
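These classes are PyOpenGL's bridge between Python sequences and typed GL arrays. A short usage sketch, shown as an illustration of the registry rather than as part of this file (standard PyOpenGL calls, to the best of my knowledge):

```python
from OpenGL.arrays.arraydatatype import GLfloatArray, GL_CONSTANT_TO_ARRAY_TYPE
from OpenGL.arrays import _arrayconstants as GL_1_1

data = GLfloatArray.asArray([0.0, 0.5, 1.0])  # coerce a list to a GLfloat array
print(GLfloatArray.arraySize(data))           # number of elements: 3

# The constant-to-type table picks the handler class for a given GL enum.
handler = GL_CONSTANT_TO_ARRAY_TYPE[GL_1_1.GL_FLOAT]
```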
hexsha: ed57f1712b86394159992dc11fd79688181d493e | size: 13,851 | ext: bzl | lang: Python
path: tensorflow_probability/python/build_defs.bzl | repo: jbergmanster/probability @ e15b307066e7485b8fe9faf3d289c739ab8d3806 | licenses: ["Apache-2.0"] | stars: null | issues: null | forks: null
# Copyright 2019 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Build defs for TF/NumPy/JAX-variadic libraries & tests."""
# [internal] load python3.bzl
NO_REWRITE_NEEDED = [
    "internal:all_util",
    "internal:docstring_util",
    "internal:reparameterization",
    "layers",
    "platform_google",
]
REWRITER_TARGET = "//tensorflow_probability/substrates/meta:rewrite"
RUNFILES_ROOT = "tensorflow_probability/"
def _substrate_src(src, substrate):
    """Rewrite a single src filename for the given substrate."""
    return "_{}/_generated_{}".format(substrate, src)

def _substrate_srcs(srcs, substrate):
    """Rewrite src filenames for the given substrate."""
    return [_substrate_src(src, substrate) for src in srcs]

def _substrate_dep(dep, substrate):
    """Convert a single dep to one appropriate for the given substrate."""
    dep_to_check = dep
    if dep.startswith(":"):
        dep_to_check = "{}{}".format(native.package_name(), dep)
    for no_rewrite in NO_REWRITE_NEEDED:
        if no_rewrite in dep_to_check:
            return dep
    if "tensorflow_probability/" in dep or dep.startswith(":"):
        if "internal/backend" in dep:
            return dep
        if ":" in dep:
            return "{}.{}".format(dep, substrate)
        return "{}:{}.{}".format(dep, dep.split("/")[-1], substrate)
    return dep

def _substrate_deps(deps, substrate):
    """Convert deps to those appropriate for the given substrate."""
    new_deps = [_substrate_dep(dep, substrate) for dep in deps]
    backend_dep = "//tensorflow_probability/python/internal/backend/{}".format(substrate)
    if backend_dep not in new_deps:
        new_deps.append(backend_dep)
    return new_deps
# This is needed for the transitional period during which we have the internal
# py2and3_test and py_test comingling in BUILD files. Otherwise the OSS export
# rewrite process becomes irreversible.
def py3_test(*args, **kwargs):
    """Internal/external reversibility, denotes py3-only vs py2+3 tests.

    Args:
      *args: Passed to underlying py_test.
      **kwargs: Passed to underlying py_test. srcs_version and python_version
        are added (with value `"PY3"`) if not specified.
    """
    kwargs = dict(kwargs)
    if "srcs_version" not in kwargs:
        kwargs["srcs_version"] = "PY3"
    if "python_version" not in kwargs:
        kwargs["python_version"] = "PY3"
    native.py_test(*args, **kwargs)

def _resolve_omit_dep(dep):
    """Resolves a `substrates_omit_deps` item to full target."""
    if ":" not in dep:
        dep = "{}:{}".format(dep, dep.split("/")[-1])
    if dep.startswith(":"):
        dep = "{}{}".format(native.package_name(), dep)
    return dep
def _substrate_runfiles_symlinks_impl(ctx):
    """A custom BUILD rule to generate python runfiles symlinks.

    A custom build rule which adds runfiles symlinks for files matching a
    substrate genrule file pattern, i.e. `'_jax/_generated_normal.py'`.
    This rule will aggregate and pass along deps while adding the given
    symlinks to the runfiles structure.

    Build rule attributes:
    - substrate: One of 'jax' or 'numpy'; which substrate this applies to.
    - deps: A list of py_library labels. These are passed along.

    Args:
      ctx: Rule analysis context.

    Returns:
      Info objects to propagate deps and add runfiles symlinks.
    """

    # Aggregate the depset inputs to resolve transitive dependencies.
    transitive_sources = []
    uses_shared_libraries = []
    imports = []
    has_py2_only_sources = []
    has_py3_only_sources = []
    cc_infos = []
    for dep in ctx.attr.deps:
        if PyInfo in dep:
            transitive_sources.append(dep[PyInfo].transitive_sources)
            uses_shared_libraries.append(dep[PyInfo].uses_shared_libraries)
            imports.append(dep[PyInfo].imports)
            has_py2_only_sources.append(dep[PyInfo].has_py2_only_sources)
            has_py3_only_sources.append(dep[PyInfo].has_py3_only_sources)
        # if PyCcLinkParamsProvider in dep:  # DisableOnExport
        #     cc_infos.append(dep[PyCcLinkParamsProvider].cc_info)  # DisableOnExport
        if CcInfo in dep:
            cc_infos.append(dep[CcInfo])

    # Determine the set of symlinks to generate.
    transitive_sources = depset(transitive = transitive_sources)
    runfiles_dict = {}
    substrate = ctx.attr.substrate
    file_substr = "_{}/_generated_".format(substrate)
    for f in transitive_sources.to_list():
        if "tensorflow_probability" in f.dirname and file_substr in f.short_path:
            pre, post = f.short_path.split("/python/")
            out_path = "{}/substrates/{}/{}".format(
                pre,
                substrate,
                post.replace(file_substr, ""),
            )
            runfiles_dict[RUNFILES_ROOT + out_path] = f

    # Construct the output structures to pass along Python srcs/deps/etc.
    py_info = PyInfo(
        transitive_sources = transitive_sources,
        uses_shared_libraries = any(uses_shared_libraries),
        imports = depset(transitive = imports),
        has_py2_only_sources = any(has_py2_only_sources),
        has_py3_only_sources = any(has_py3_only_sources),
    )
    py_cc_link_info = cc_common.merge_cc_infos(cc_infos = cc_infos)
    py_runfiles = depset(
        transitive = [depset(transitive = [
            dep[DefaultInfo].data_runfiles.files,
            dep[DefaultInfo].default_runfiles.files,
        ]) for dep in ctx.attr.deps],
    )
    runfiles = DefaultInfo(runfiles = ctx.runfiles(
        transitive_files = py_runfiles,
        root_symlinks = runfiles_dict,
    ))
    return py_info, py_cc_link_info, runfiles

# See documentation at:
# https://docs.bazel.build/versions/3.4.0/skylark/rules.html
substrate_runfiles_symlinks = rule(
    implementation = _substrate_runfiles_symlinks_impl,
    attrs = {
        "substrate": attr.string(),
        "deps": attr.label_list(),
    },
)
def multi_substrate_py_library(
        name,
        srcs = [],
        deps = [],
        substrates_omit_deps = [],
        jax_omit_deps = [],
        numpy_omit_deps = [],
        testonly = 0,
        srcs_version = "PY2AND3"):
    """A TFP `py_library` for each of TF, NumPy, and JAX.

    Args:
      name: The TF `py_library` name. NumPy and JAX libraries have '.numpy' and
        '.jax' appended.
      srcs: As with `py_library`. A `genrule` is used to rewrite srcs for NumPy
        and JAX substrates.
      deps: As with `py_library`. The list is rewritten to depend on
        substrate-specific libraries for substrate variants.
      substrates_omit_deps: List of deps to omit if those libraries are not
        rewritten for the substrates.
      jax_omit_deps: List of deps to omit for the JAX substrate.
      numpy_omit_deps: List of deps to omit for the NumPy substrate.
      testonly: As with `py_library`.
      srcs_version: As with `py_library`.
    """
    native.py_library(
        name = name,
        srcs = srcs,
        deps = deps,
        srcs_version = srcs_version,
        testonly = testonly,
    )
    remove_deps = [
        "//third_party/py/tensorflow",
        "//third_party/py/tensorflow:tensorflow",
    ]
    trimmed_deps = [dep for dep in deps if (dep not in substrates_omit_deps and
                                            dep not in remove_deps)]
    resolved_omit_deps_numpy = [
        _resolve_omit_dep(dep)
        for dep in substrates_omit_deps + numpy_omit_deps
    ]
    for src in srcs:
        native.genrule(
            name = "rewrite_{}_numpy".format(src.replace(".", "_")),
            srcs = [src],
            outs = [_substrate_src(src, "numpy")],
            cmd = "$(location {}) $(SRCS) --omit_deps={} > $@".format(
                REWRITER_TARGET,
                ",".join(resolved_omit_deps_numpy),
            ),
            tools = [REWRITER_TARGET],
        )
    native.py_library(
        name = "{}.numpy.raw".format(name),
        srcs = _substrate_srcs(srcs, "numpy"),
        deps = _substrate_deps(trimmed_deps, "numpy"),
        srcs_version = srcs_version,
        testonly = testonly,
    )

    # Add symlinks under tfp/substrates/numpy.
    substrate_runfiles_symlinks(
        name = "{}.numpy".format(name),
        substrate = "numpy",
        deps = [":{}.numpy.raw".format(name)],
        testonly = testonly,
    )
    resolved_omit_deps_jax = [
        _resolve_omit_dep(dep)
        for dep in substrates_omit_deps + jax_omit_deps
    ]
    jax_srcs = _substrate_srcs(srcs, "jax")
    for src in srcs:
        native.genrule(
            name = "rewrite_{}_jax".format(src.replace(".", "_")),
            srcs = [src],
            outs = [_substrate_src(src, "jax")],
            cmd = "$(location {}) $(SRCS) --omit_deps={} --numpy_to_jax > $@".format(
                REWRITER_TARGET,
                ",".join(resolved_omit_deps_jax),
            ),
            tools = [REWRITER_TARGET],
        )
    native.py_library(
        name = "{}.jax.raw".format(name),
        srcs = jax_srcs,
        deps = _substrate_deps(trimmed_deps, "jax"),
        srcs_version = srcs_version,
        testonly = testonly,
    )

    # Add symlinks under tfp/substrates/jax.
    substrate_runfiles_symlinks(
        name = "{}.jax".format(name),
        substrate = "jax",
        deps = [":{}.jax.raw".format(name)],
        testonly = testonly,
    )
def multi_substrate_py_test(
        name,
        size = "small",
        jax_size = None,
        numpy_size = None,
        srcs = [],
        deps = [],
        tags = [],
        numpy_tags = [],
        jax_tags = [],
        disabled_substrates = [],
        srcs_version = "PY2AND3",
        timeout = None,
        shard_count = None):
    """A TFP `py2and3_test` for each of TF, NumPy, and JAX.

    Args:
      name: Name of the `test_suite` which covers TF, NumPy and JAX variants
        of the test. Each substrate will have a dedicated `py2and3_test`
        suffixed with '.tf', '.numpy', or '.jax' as appropriate.
      size: As with `py_test`.
      jax_size: A size override for the JAX target.
      numpy_size: A size override for the numpy target.
      srcs: As with `py_test`. These will have a `genrule` emitted to rewrite
        NumPy and JAX variants, writing the test file into a subdirectory.
      deps: As with `py_test`. The list is rewritten to depend on
        substrate-specific libraries for substrate variants.
      tags: Tags global to this test target. NumPy also gets a `'tfp_numpy'`
        tag, and JAX gets a `'tfp_jax'` tag. A `f'_{name}'` tag is used
        to produce the `test_suite`.
      numpy_tags: Tags specific to the NumPy test. (e.g. `"notap"`).
      jax_tags: Tags specific to the JAX test. (e.g. `"notap"`).
      disabled_substrates: Iterable of substrates to disable, items from
        ["numpy", "jax"].
      srcs_version: As with `py_test`.
      timeout: As with `py_test`.
      shard_count: As with `py_test`.
    """
    name_tag = "_{}".format(name)
    tags = [t for t in tags]
    tags.append(name_tag)
    tags.append("multi_substrate")
    native.py_test(
        name = "{}.tf".format(name),
        size = size,
        srcs = srcs,
        main = "{}.py".format(name),
        deps = deps,
        tags = tags,
        srcs_version = srcs_version,
        timeout = timeout,
        shard_count = shard_count,
    )
    if "numpy" not in disabled_substrates:
        numpy_srcs = _substrate_srcs(srcs, "numpy")
        native.genrule(
            name = "rewrite_{}_numpy".format(name),
            srcs = srcs,
            outs = numpy_srcs,
            cmd = "$(location {}) $(SRCS) > $@".format(REWRITER_TARGET),
            tools = [REWRITER_TARGET],
        )
        py3_test(
            name = "{}.numpy".format(name),
            size = numpy_size or size,
            srcs = numpy_srcs,
            main = _substrate_src("{}.py".format(name), "numpy"),
            deps = _substrate_deps(deps, "numpy"),
            tags = tags + ["tfp_numpy"] + numpy_tags,
            srcs_version = srcs_version,
            python_version = "PY3",
            timeout = timeout,
            shard_count = shard_count,
        )
    if "jax" not in disabled_substrates:
        jax_srcs = _substrate_srcs(srcs, "jax")
        native.genrule(
            name = "rewrite_{}_jax".format(name),
            srcs = srcs,
            outs = jax_srcs,
            cmd = "$(location {}) $(SRCS) --numpy_to_jax > $@".format(REWRITER_TARGET),
            tools = [REWRITER_TARGET],
        )
        jax_deps = _substrate_deps(deps, "jax")

        # [internal] Add JAX build dep
        py3_test(
            name = "{}.jax".format(name),
            size = jax_size or size,
            srcs = jax_srcs,
            main = _substrate_src("{}.py".format(name), "jax"),
            deps = jax_deps,
            tags = tags + ["tfp_jax"] + jax_tags,
            srcs_version = srcs_version,
            python_version = "PY3",
            timeout = timeout,
            shard_count = shard_count,
        )
    native.test_suite(
        name = name,
        tags = [name_tag],
    )
avg_line_length: 35.698454 | max_line_length: 89 | alphanum_fraction: 0.608043
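A sketch of how these macros are typically invoked from a BUILD file; the target and dependency names below are made up for illustration:

```python
load(
    "//tensorflow_probability/python:build_defs.bzl",
    "multi_substrate_py_library",
    "multi_substrate_py_test",
)

multi_substrate_py_library(
    name = "normal",              # also emits normal.numpy and normal.jax
    srcs = ["normal.py"],
    deps = [":distribution"],     # rewritten per substrate
)

multi_substrate_py_test(
    name = "normal_test",         # test_suite over the .tf/.numpy/.jax variants
    srcs = ["normal_test.py"],
    deps = [":normal"],
)
```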
hexsha: ed583ccefc13cf5fca32a4b108662e62505e92e1 | size: 5,425 | ext: py | lang: Python
path: src/wikidated/wikidata/wikidata_dump.py | repo: lschmelzeisen/wikidata-history-analyzer @ 8673639b61839d2dca271fbbaf2feb8563b75f2d | licenses: ["ECL-2.0", "Apache-2.0"]
stars: 6 (2021-06-10T09:26:44.000Z to 2021-07-07T13:49:00.000Z) | issues: null | forks: null
#
# Copyright 2021 Lukas Schmelzeisen
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import annotations
import json
from datetime import date, datetime
from logging import getLogger
from pathlib import Path
from typing import Mapping, MutableSequence, Sequence, Type, TypeVar
import requests
from pydantic import BaseModel as PydanticModel
from pydantic import validator
from tqdm import tqdm # type: ignore
from typing_extensions import Final
from wikidated._utils import RangeMap
from wikidated.wikidata.wikidata_dump_file import WikidataDumpFile
from wikidated.wikidata.wikidata_dump_pages_meta_history import (
    WikidataDumpPagesMetaHistory,
)
from wikidated.wikidata.wikidata_dump_sites_table import WikidataDumpSitesTable
_LOGGER = getLogger(__name__)
_T_WikidataDumpFile = TypeVar("_T_WikidataDumpFile", bound=WikidataDumpFile)
avg_line_length: 33.487654 | max_line_length: 87 | alphanum_fraction: 0.654194
hexsha: ed58570015c33daeb7f03921904a43571a44e66f | size: 18,726 | ext: py | lang: Python
path: tcapygen/layoutgen.py | repo: Ahrvo-Trading-Systems/tcapy @ df8439aa5c754fc9a7fde463c44c489b27112f76 | licenses: ["Apache-2.0"]
stars: 189 (2020-03-20T17:03:04.000Z to 2022-03-30T13:33:27.000Z) | issues: 4 (2020-06-06T14:58:21.000Z to 2022-03-10T22:31:15.000Z) | forks: 60 (2020-03-20T17:06:56.000Z to 2022-03-26T02:48:58.000Z)
from __future__ import division, print_function
__author__ = 'saeedamen' # Saeed Amen / [email protected]
#
# Copyright 2017 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
#
# See the License for the specific language governing permissions and limitations under the License.
#
## Web server components
import dash_core_components as dcc
import dash_html_components as html
import base64
import os
## Date/time components
import pandas as pd
import datetime
from datetime import timedelta
from collections import OrderedDict
from pandas.tseries.offsets import *
from tcapy.vis.layoutdash import LayoutDash
########################################################################################################################
avg_line_length: 58.51875 | max_line_length: 154 | alphanum_fraction: 0.595215
hexsha: ed585ed9b9e64b5cd7e7fef27facda2ab0843b74 | size: 4,320 | ext: py | lang: Python
max_stars: tests/molecular/molecules/molecule/fixtures/cof/periodic_kagome.py | andrewtarzia/stk @ 1ac2ecbb5c9940fe49ce04cbf5603fd7538c475a | ["MIT"] | 21 (2018-04-12T16:25:24.000Z to 2022-02-14T23:05:43.000Z)
max_issues: tests/molecular/molecules/molecule/fixtures/cof/periodic_kagome.py | JelfsMaterialsGroup/stk @ 0d3e1b0207aa6fa4d4d5ee8dfe3a29561abb08a2 | ["MIT"] | 8 (2019-03-19T12:36:36.000Z to 2020-11-11T12:46:00.000Z)
max_forks: tests/molecular/molecules/molecule/fixtures/cof/periodic_kagome.py | supramolecular-toolkit/stk @ 0d3e1b0207aa6fa4d4d5ee8dfe3a29561abb08a2 | ["MIT"] | 5 (2018-08-07T13:00:16.000Z to 2021-11-01T00:55:10.000Z)
import pytest
import stk
from ...case_data import CaseData
avg_line_length: 45.473684 | max_line_length: 71 | alphanum_fraction: 0.344907
hexsha: ed587bf56577619d8ec39ef62825f11e9ce7e776 | size: 3,511 | ext: py | lang: Python
path: projects/MAE/utils/weight_convert.py | repo: Oneflow-Inc/libai @ e473bd3962f07b1e37232d2be39c8257df0ec0f3 | licenses: ["Apache-2.0"]
stars: 55 (2021-12-10T08:47:06.000Z to 2022-03-28T09:02:15.000Z) | issues: 106 (2021-11-03T05:16:45.000Z to 2022-03-31T06:16:23.000Z) | forks: 13 (2021-12-29T08:12:08.000Z to 2022-03-28T06:59:45.000Z)
# coding=utf-8
# Copyright 2021 The OneFlow Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import oneflow as flow
import torch
logger = logging.getLogger(__name__)
def convert_qkv_weight(cfg, value):
    """
    Convert qkv.weight to be compatible with LiBai transformer layer

    Args:
        cfg: config file
        value: qkv.weight in the loaded checkpoint
    """
    num_heads = cfg.model.num_heads
    hidden_size = cfg.model.embed_dim
    head_size = int(hidden_size / num_heads)
    qkv_weight = (
        value.view([3, num_heads, head_size, hidden_size])
        .permute(1, 0, 2, 3)
        .contiguous()
        .view(hidden_size * 3, hidden_size)
    )
    return qkv_weight


def convert_qkv_bias(cfg, value):
    """
    Convert qkv.bias to be compatible with LiBai transformer layer

    Args:
        cfg: config file
        value: qkv.bias in the loaded checkpoint
    """
    num_heads = cfg.model.num_heads
    hidden_size = cfg.model.embed_dim
    head_size = int(hidden_size / num_heads)
    qkv_bias = (
        value.view(3, num_heads, head_size).permute(1, 0, 2).contiguous().view(hidden_size * 3)
    )
    return qkv_bias


def filter_keys(key, value, cfg):
    """
    Filtering the state_dict keys and values to match LiBai's MAE model
    """
    if "norm1" in key:
        key = key.replace("norm1", "input_layernorm")
    elif "attn.qkv" in key:
        key = key.replace("attn.qkv", "self_attention.query_key_value")
        if "weight" in key:
            value = convert_qkv_weight(cfg, value)
        if "bias" in key:
            value = convert_qkv_bias(cfg, value)
    elif "attn.proj" in key:
        key = key.replace("attn.proj", "self_attention.dense")
    elif "norm2" in key:
        key = key.replace("norm2", "post_attention_layernorm")
    elif "mlp.fc1" in key:
        key = key.replace("mlp.fc1", "mlp.dense_h_to_4h")
    elif "mlp.fc2" in key:
        key = key.replace("mlp.fc2", "mlp.dense_4h_to_h")
    elif "fc_norm" in key:
        key = key.replace("fc_norm", "norm")
    return key, value


def load_torch_checkpoint(model, cfg, path="./mae_finetuned_vit_base.pth", strict=False):
    """
    Load checkpoint from the given torch weights.
    Torch weight can be downloaded from the original repo:
    https://github.com/facebookresearch/mae
    """
    torch_dict = torch.load(path, map_location="cpu")["model"]
    parameters = torch_dict
    new_parameters = dict()
    for key, value in parameters.items():
        if "num_batches_tracked" not in key:
            # to global tensor
            key, val = filter_keys(key, value, cfg)
            val = val.detach().cpu().numpy()
            val = flow.tensor(val).to_global(
                sbp=flow.sbp.broadcast, placement=flow.placement("cuda", ranks=[0])
            )
            new_parameters[key] = val
    model.load_state_dict(new_parameters, strict=strict)
    print("Successfully load torch mae checkpoint.")
    return model
avg_line_length: 32.509259 | max_line_length: 95 | alphanum_fraction: 0.656508
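A short sketch of invoking the converter; the `cfg` and `model` objects stand in for LiBai's config and MAE ViT model, which come from the surrounding project:

```python
# Hypothetical caller: cfg carries model.num_heads / model.embed_dim,
# model is LiBai's MAE ViT; both are constructed elsewhere in the project.
model = load_torch_checkpoint(
    model,
    cfg,
    path="./mae_finetuned_vit_base.pth",  # torch weights from facebookresearch/mae
    strict=False,
)
```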
hexsha: ed5a1b538137819cd768e00bc5a8aa7705765275 | size: 37 | ext: py | lang: Python
path: dthm4kaiako/events/__init__.py | repo: taskmaker1/dthm4kaiako @ 681babc10b3223b5ae7fdf19b98c53d2bef4ea1a | licenses: ["MIT"]
stars: 3 (2018-12-10T07:03:02.000Z to 2021-04-12T02:18:30.000Z) | issues: 566 (2018-09-30T02:54:28.000Z to 2022-03-28T01:20:01.000Z) | forks: 3 (2019-04-04T19:53:39.000Z to 2021-05-16T02:04:46.000Z)
"""Module for events application."""
avg_line_length: 18.5 | max_line_length: 36 | alphanum_fraction: 0.702703
hexsha: ed5a4c9715fe0f81c7675d25ae101b58391d1929 | size: 8,462 | ext: py | lang: Python
max_stars: spot/level1.py | K0gata/SGLI_Python_output_tool @ 1368e0408edd737a5109d0523db6c147faa80b97 | ["MIT"] | 1 (2020-08-04T04:17:49.000Z to 2020-08-04T04:17:49.000Z)
max_issues: spot/level1.py | K0gata/SGLI_Python_Open_Tool @ 1368e0408edd737a5109d0523db6c147faa80b97 | ["MIT"] | null
max_forks: spot/level1.py | K0gata/SGLI_Python_Open_Tool @ 1368e0408edd737a5109d0523db6c147faa80b97 | ["MIT"] | null
import numpy as np
import logging
from decimal import Decimal, ROUND_HALF_UP
from abc import ABC, abstractmethod, abstractproperty
from spot.utility import bilin_2d
from spot.config import PROJ_TYPE
# =============================
# Level-1 template class
# =============================
# NOTE: the class statement itself is not included in this excerpt; the two
# methods below belong to the omitted Level-1 template class.

def get_product_data_list(self):
    return list(self.h5_file['/Image_data'].keys())

def get_unit(self, prod_name: str):
    if 'Rt_' in prod_name:
        return 'NA'

    # Get attrs set
    unit_name = 'Unit'
    attrs = self.h5_file['/Image_data/' + prod_name].attrs

    # Get unit
    if unit_name not in attrs:
        return 'NA'
    return attrs[unit_name][0].decode('UTF-8')

# =============================
# Level-1 map-projection class
# =============================

# =============================
# Level-1 sub-processing level class
# =============================

# EOF
avg_line_length: 34.538776 | max_line_length: 120 | alphanum_fraction: 0.610021
hexsha: ed5ab9e7476a3e24312d9ef871509f4e43e86312 | size: 18,788 | ext: py | lang: Python
max_stars: devil/devil/utils/cmd_helper.py | Martijnve23/catapult @ 5c63b19d221af6a12889e8727acc85d93892cab7 | ["BSD-3-Clause"] | 1,894 (2015-04-17T18:29:53.000Z to 2022-03-28T22:41:06.000Z)
max_issues: devil/devil/utils/cmd_helper.py | Martijnve23/catapult @ 5c63b19d221af6a12889e8727acc85d93892cab7 | ["BSD-3-Clause"] | 4,640 (2015-07-08T16:19:08.000Z to 2019-12-02T15:01:27.000Z)
max_forks: infra/services/android_docker/third_party/devil/utils/cmd_helper.py | NDevTK/chromium-infra @ d38e088e158d81f7f2065a38aa1ea1894f735ec4 | ["BSD-3-Clause"] | 698 (2015-06-02T19:18:35.000Z to 2022-03-29T16:57:15.000Z)
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A wrapper for subprocess to make calling shell commands easier."""
import codecs
import logging
import os
import pipes
import select
import signal
import string
import subprocess
import sys
import time
CATAPULT_ROOT_PATH = os.path.abspath(
    os.path.join(os.path.dirname(__file__), '..', '..', '..'))
SIX_PATH = os.path.join(CATAPULT_ROOT_PATH, 'third_party', 'six')
if SIX_PATH not in sys.path:
  sys.path.append(SIX_PATH)
import six
from devil import base_error
logger = logging.getLogger(__name__)
_SafeShellChars = frozenset(string.ascii_letters + string.digits + '@%_-+=:,./')
# Cache the string-escape codec to ensure subprocess can find it
# later. Return value doesn't matter.
if six.PY2:
  codecs.lookup('string-escape')
def SingleQuote(s):
  """Return an shell-escaped version of the string using single quotes.

  Reliably quote a string which may contain unsafe characters (e.g. space,
  quote, or other special characters such as '$').

  The returned value can be used in a shell command line as one token that gets
  to be interpreted literally.

  Args:
    s: The string to quote.

  Return:
    The string quoted using single quotes.
  """
  return pipes.quote(s)


def DoubleQuote(s):
  """Return an shell-escaped version of the string using double quotes.

  Reliably quote a string which may contain unsafe characters (e.g. space
  or quote characters), while retaining some shell features such as variable
  interpolation.

  The returned value can be used in a shell command line as one token that gets
  to be further interpreted by the shell.

  The set of characters that retain their special meaning may depend on the
  shell implementation. This set usually includes: '$', '`', '\', '!', '*',
  and '@'.

  Args:
    s: The string to quote.

  Return:
    The string quoted using double quotes.
  """
  if not s:
    return '""'
  elif all(c in _SafeShellChars for c in s):
    return s
  else:
    return '"' + s.replace('"', '\\"') + '"'


def ShrinkToSnippet(cmd_parts, var_name, var_value):
  """Constructs a shell snippet for a command using a variable to shrink it.

  Takes into account all quoting that needs to happen.

  Args:
    cmd_parts: A list of command arguments.
    var_name: The variable that holds var_value.
    var_value: The string to replace in cmd_parts with $var_name

  Returns:
    A shell snippet that does not include setting the variable.
  """
  # NOTE: the nested shrink() helper is defined in the full file; it is not
  # part of this excerpt.
  return ' '.join(shrink(part) for part in cmd_parts)
# NOTE: the Call and Popen wrappers used below are defined elsewhere in the
# full cmd_helper.py; they are not part of this excerpt.
def RunCmd(args, cwd=None):
  """Opens a subprocess to execute a program and returns its return value.

  Args:
    args: A string or a sequence of program arguments. The program to execute is
      the string or the first item in the args sequence.
    cwd: If not None, the subprocess's current directory will be changed to
      |cwd| before it's executed.

  Returns:
    Return code from the command execution.
  """
  logger.debug(str(args) + ' ' + (cwd or ''))
  return Call(args, cwd=cwd)


def GetCmdOutput(args, cwd=None, shell=False, env=None):
  """Open a subprocess to execute a program and returns its output.

  Args:
    args: A string or a sequence of program arguments. The program to execute is
      the string or the first item in the args sequence.
    cwd: If not None, the subprocess's current directory will be changed to
      |cwd| before it's executed.
    shell: Whether to execute args as a shell command.
    env: If not None, a mapping that defines environment variables for the
      subprocess.

  Returns:
    Captures and returns the command's stdout.
    Prints the command's stderr to logger (which defaults to stdout).
  """
  (_, output) = GetCmdStatusAndOutput(args, cwd, shell, env)
  return output


def GetCmdStatusAndOutput(args,
                          cwd=None,
                          shell=False,
                          env=None,
                          merge_stderr=False):
  """Executes a subprocess and returns its exit code and output.

  Args:
    args: A string or a sequence of program arguments. The program to execute is
      the string or the first item in the args sequence.
    cwd: If not None, the subprocess's current directory will be changed to
      |cwd| before it's executed.
    shell: Whether to execute args as a shell command. Must be True if args
      is a string and False if args is a sequence.
    env: If not None, a mapping that defines environment variables for the
      subprocess.
    merge_stderr: If True, captures stderr as part of stdout.

  Returns:
    The 2-tuple (exit code, stdout).
  """
  status, stdout, stderr = GetCmdStatusOutputAndError(
      args, cwd=cwd, shell=shell, env=env, merge_stderr=merge_stderr)
  if stderr:
    logger.critical('STDERR: %s', stderr)
  logger.debug('STDOUT: %s%s', stdout[:4096].rstrip(),
               '<truncated>' if len(stdout) > 4096 else '')
  return (status, stdout)
def StartCmd(args, cwd=None, shell=False, env=None):
  """Starts a subprocess and returns a handle to the process.

  Args:
    args: A string or a sequence of program arguments. The program to execute is
      the string or the first item in the args sequence.
    cwd: If not None, the subprocess's current directory will be changed to
      |cwd| before it's executed.
    shell: Whether to execute args as a shell command. Must be True if args
      is a string and False if args is a sequence.
    env: If not None, a mapping that defines environment variables for the
      subprocess.

  Returns:
    A process handle from subprocess.Popen.
  """
  _ValidateAndLogCommand(args, cwd, shell)
  return Popen(
      args,
      stdout=subprocess.PIPE,
      stderr=subprocess.PIPE,
      shell=shell,
      cwd=cwd,
      env=env)


def GetCmdStatusOutputAndError(args,
                               cwd=None,
                               shell=False,
                               env=None,
                               merge_stderr=False):
  """Executes a subprocess and returns its exit code, output, and errors.

  Args:
    args: A string or a sequence of program arguments. The program to execute is
      the string or the first item in the args sequence.
    cwd: If not None, the subprocess's current directory will be changed to
      |cwd| before it's executed.
    shell: Whether to execute args as a shell command. Must be True if args
      is a string and False if args is a sequence.
    env: If not None, a mapping that defines environment variables for the
      subprocess.
    merge_stderr: If True, captures stderr as part of stdout.

  Returns:
    The 3-tuple (exit code, stdout, stderr).
  """
  _ValidateAndLogCommand(args, cwd, shell)
  stderr = subprocess.STDOUT if merge_stderr else subprocess.PIPE
  pipe = Popen(
      args,
      stdout=subprocess.PIPE,
      stderr=stderr,
      shell=shell,
      cwd=cwd,
      env=env)
  stdout, stderr = pipe.communicate()
  return (pipe.returncode, stdout, stderr)
# NOTE: the platform-specific generators _IterProcessStdoutQueue and
# _IterProcessStdoutFcntl are defined elsewhere in the full file.
_IterProcessStdout = (_IterProcessStdoutQueue
                      if sys.platform == 'win32' else _IterProcessStdoutFcntl)
"""Iterate over a process's stdout.

This is intentionally not public.

Args:
  process: The process in question.
  iter_timeout: An optional length of time, in seconds, to wait in
    between each iteration. If no output is received in the given
    time, this generator will yield None.
  timeout: An optional length of time, in seconds, during which
    the process must finish. If it fails to do so, a TimeoutError
    will be raised.
  buffer_size: The maximum number of bytes to read (and thus yield) at once.
  poll_interval: The length of time to wait in calls to `select.select`.
    If iter_timeout is set, the remaining length of time in the iteration
    may take precedence.

Raises:
  TimeoutError: if timeout is set and the process does not complete.

Yields:
  basestrings of data or None.
"""
def GetCmdStatusAndOutputWithTimeout(args,
                                     timeout,
                                     cwd=None,
                                     shell=False,
                                     logfile=None,
                                     env=None):
  """Executes a subprocess with a timeout.

  Args:
    args: List of arguments to the program, the program to execute is the first
      element.
    timeout: the timeout in seconds or None to wait forever.
    cwd: If not None, the subprocess's current directory will be changed to
      |cwd| before it's executed.
    shell: Whether to execute args as a shell command. Must be True if args
      is a string and False if args is a sequence.
    logfile: Optional file-like object that will receive output from the
      command as it is running.
    env: If not None, a mapping that defines environment variables for the
      subprocess.

  Returns:
    The 2-tuple (exit code, output).

  Raises:
    TimeoutError on timeout.
  """
  _ValidateAndLogCommand(args, cwd, shell)
  output = six.StringIO()
  process = Popen(
      args,
      cwd=cwd,
      shell=shell,
      stdout=subprocess.PIPE,
      stderr=subprocess.STDOUT,
      env=env)
  try:
    for data in _IterProcessStdout(process, timeout=timeout):
      if logfile:
        logfile.write(data)
      output.write(data)
  except TimeoutError:
    raise TimeoutError(output.getvalue())
  str_output = output.getvalue()
  logger.debug('STDOUT+STDERR: %s%s', str_output[:4096].rstrip(),
               '<truncated>' if len(str_output) > 4096 else '')
  return process.returncode, str_output


def IterCmdOutputLines(args,
                       iter_timeout=None,
                       timeout=None,
                       cwd=None,
                       shell=False,
                       env=None,
                       check_status=True):
  """Executes a subprocess and continuously yields lines from its output.

  Args:
    args: List of arguments to the program, the program to execute is the first
      element.
    iter_timeout: Timeout for each iteration, in seconds.
    timeout: Timeout for the entire command, in seconds.
    cwd: If not None, the subprocess's current directory will be changed to
      |cwd| before it's executed.
    shell: Whether to execute args as a shell command. Must be True if args
      is a string and False if args is a sequence.
    env: If not None, a mapping that defines environment variables for the
      subprocess.
    check_status: A boolean indicating whether to check the exit status of the
      process after all output has been read.

  Yields:
    The output of the subprocess, line by line.

  Raises:
    CalledProcessError if check_status is True and the process exited with a
      non-zero exit status.
  """
  cmd = _ValidateAndLogCommand(args, cwd, shell)
  process = Popen(
      args,
      cwd=cwd,
      shell=shell,
      env=env,
      stdout=subprocess.PIPE,
      stderr=subprocess.STDOUT)
  # NOTE: the _IterCmdOutputLines generator is defined elsewhere in the full
  # file.
  return _IterCmdOutputLines(
      process,
      cmd,
      iter_timeout=iter_timeout,
      timeout=timeout,
      check_status=check_status)
avg_line_length: 31.10596 | max_line_length: 80 | alphanum_fraction: 0.64951
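A brief usage sketch of the devil helpers above; the command is illustrative, and the `Call`/`Popen` wrappers the module references live elsewhere in devil:

```python
from devil.utils import cmd_helper

status, out = cmd_helper.GetCmdStatusAndOutput(['ls', '-l'])
if status == 0:
    print(out)

# Stream lines as they are produced, raising if the command exits non-zero.
for line in cmd_helper.IterCmdOutputLines(['ping', '-c', '3', 'example.com']):
    print(line)
```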
hexsha: ed5b25db8eee2bdd6eb22e7c4a9c331775d6cf05 | size: 1,651 | ext: py | lang: Python
path: services/server/server/apps/checkout/migrations/0001_initial.py | repo: AyanSamanta23/moni-moni @ 8e8aa4edf4cd2e2b005f6dbe8c885ecc791e6a2b | licenses: ["MIT"] | stars: null | issues: null | forks: null
# Generated by Django 4.0.2 on 2022-02-26 15:52
from django.db import migrations, models
avg_line_length: 40.268293 | max_line_length: 142 | alphanum_fraction: 0.59358
hexsha: ed5b5860c856a4418e7eeb1cf777cb4c10722142 | size: 2,845 | ext: py | lang: Python
path: api/to_astm.py | repo: urchinpro/L2-forms @ 37f33386984efbb2d1e92c73d915256247801109 | licenses: ["MIT"] | stars: null | issues: null | forks: null
import itertools
from astm import codec
from collections import defaultdict
from django.utils import timezone
import directions.models as directions
import directory.models as directory
import api.models as api
import simplejson as json
avg_line_length: 34.277108 | max_line_length: 138 | alphanum_fraction: 0.636555
hexsha: ed5bad05b400253943df833d896315c0be535899 | size: 19,288 | ext: py | lang: Python
path: test/unit/test_som_rom_parser.py | repo: CospanDesign/nysa @ ffe07f0b8fe2f6217e7a862d89b80f1b17163be9 | licenses: ["MIT"]
stars: 15 (2015-08-31T20:50:39.000Z to 2022-03-13T08:56:39.000Z) | issues: 5 (2015-05-02T16:48:57.000Z to 2017-06-15T16:25:34.000Z) | forks: 6 (2016-09-02T16:02:13.000Z to 2021-06-29T22:29:45.000Z)
#!/usr/bin/python
import unittest
import json
import sys
import os
import string
sys.path.append(os.path.join(os.path.dirname(__file__),
os.pardir,
os.pardir))
from nysa.cbuilder import sdb_component as sdbc
from nysa.cbuilder import sdb_object_model as som
from nysa.cbuilder.som_rom_parser import parse_rom_image
from nysa.cbuilder.som_rom_generator import generate_rom_image
from nysa.cbuilder.sdb import SDBInfo
from nysa.cbuilder.sdb import SDBWarning
from nysa.cbuilder.sdb import SDBError
from nysa.common.status import StatusLevel
from nysa.common.status import Status
ROM1 = "5344422D\n"\
"00010100\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000100\n"\
"80000000\n"\
"0000C594\n"\
"00000001\n"\
"00000001\n"\
"140F0105\n"\
"746F7000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000207\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000100\n"\
"80000000\n"\
"00000000\n"\
"00000000\n"\
"00000001\n"\
"140F0105\n"\
"64657669\n"\
"63652031\n"\
"00000000\n"\
"00000000\n"\
"00000001\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"000000FF"
ROM2 = "5344422D\n"\
"00020100\n"\
"00000000\n"\
"00000000\n"\
"03000000\n"\
"00000000\n"\
"80000000\n"\
"0000C594\n"\
"00000001\n"\
"00000001\n"\
"140F0105\n"\
"746F7000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000020\n"\
"00000000\n"\
"00000000\n"\
"00000100\n"\
"00000000\n"\
"80000000\n"\
"0000C594\n"\
"00000001\n"\
"00000001\n"\
"140F0105\n"\
"70657269\n"\
"70686572\n"\
"616C0000\n"\
"00000000\n"\
"00000002\n"\
"00000000\n"\
"00000040\n"\
"00000100\n"\
"00000000\n"\
"00000200\n"\
"00030000\n"\
"80000000\n"\
"0000C594\n"\
"00000001\n"\
"00000001\n"\
"140F0105\n"\
"6D656D6F\n"\
"72790000\n"\
"00000000\n"\
"00000000\n"\
"00000002\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"000000FF\n"\
"5344422D\n"\
"00020100\n"\
"00000000\n"\
"00000000\n"\
"00000100\n"\
"00000000\n"\
"80000000\n"\
"0000C594\n"\
"00000001\n"\
"00000001\n"\
"140F0105\n"\
"70657269\n"\
"70686572\n"\
"616C0000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000207\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000100\n"\
"80000000\n"\
"00000000\n"\
"00000000\n"\
"00000001\n"\
"140F0105\n"\
"64657669\n"\
"63652031\n"\
"00000000\n"\
"00000000\n"\
"00000001\n"\
"00000000\n"\
"00000207\n"\
"00000001\n"\
"00000000\n"\
"00000003\n"\
"00000100\n"\
"80000000\n"\
"00000000\n"\
"00000000\n"\
"00000001\n"\
"140F0105\n"\
"64657669\n"\
"63652032\n"\
"00000000\n"\
"00000000\n"\
"00000001\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"000000FF\n"\
"5344422D\n"\
"00020100\n"\
"00000100\n"\
"00000000\n"\
"00000200\n"\
"00030000\n"\
"80000000\n"\
"0000C594\n"\
"00000001\n"\
"00000001\n"\
"140F0105\n"\
"6D656D6F\n"\
"72790000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000207\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00010000\n"\
"80000000\n"\
"00000000\n"\
"00000000\n"\
"00000001\n"\
"140F0105\n"\
"6D656D6F\n"\
"72792031\n"\
"00000000\n"\
"00000000\n"\
"00000001\n"\
"00000000\n"\
"00000207\n"\
"00000000\n"\
"00010000\n"\
"00000000\n"\
"00030000\n"\
"80000000\n"\
"00000000\n"\
"00000000\n"\
"00000001\n"\
"140F0105\n"\
"6D656D6F\n"\
"72792032\n"\
"00000000\n"\
"00000000\n"\
"00000001\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"000000FF"
ROMD = "5344422D\n"\
"00020100\n"\
"00000000\n"\
"00000000\n"\
"00000002\n"\
"00000000\n"\
"80000000\n"\
"0000C594\n"\
"00000001\n"\
"00000001\n"\
"140F0106\n"\
"746F7000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000020\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"20000000\n"\
"80000000\n"\
"0000C594\n"\
"00000001\n"\
"00000001\n"\
"140F0106\n"\
"70657269\n"\
"70686572\n"\
"616C0000\n"\
"00000000\n"\
"00000002\n"\
"00000000\n"\
"00000040\n"\
"00000001\n"\
"00000000\n"\
"00000001\n"\
"00800000\n"\
"80000000\n"\
"0000C594\n"\
"00000001\n"\
"00000001\n"\
"140F0106\n"\
"6D656D6F\n"\
"72790000\n"\
"00000000\n"\
"00000000\n"\
"00000002\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"000000FF\n"\
"5344422D\n"\
"00020100\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"20000000\n"\
"80000000\n"\
"0000C594\n"\
"00000001\n"\
"00000001\n"\
"140F0106\n"\
"70657269\n"\
"70686572\n"\
"616C0000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000207\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000340\n"\
"80000000\n"\
"00000000\n"\
"00000000\n"\
"00000001\n"\
"140F0106\n"\
"53444200\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000001\n"\
"00000101\n"\
"00000207\n"\
"00000000\n"\
"10000000\n"\
"00000000\n"\
"10000008\n"\
"80000000\n"\
"0000C594\n"\
"00000000\n"\
"00000001\n"\
"140F0107\n"\
"77625F67\n"\
"70696F00\n"\
"00000000\n"\
"00000000\n"\
"00000001\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"000000FF\n"\
"5344422D\n"\
"00010100\n"\
"00000001\n"\
"00000000\n"\
"00000001\n"\
"00800000\n"\
"80000000\n"\
"0000C594\n"\
"00000001\n"\
"00000001\n"\
"140F0106\n"\
"6D656D6F\n"\
"72790000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000502\n"\
"00000207\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00800000\n"\
"80000000\n"\
"0000C594\n"\
"00000000\n"\
"00000001\n"\
"140F0107\n"\
"77625F73\n"\
"6472616D\n"\
"00000000\n"\
"00000000\n"\
"00000001\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"000000FF"
avg_line_length: 27.792507 | max_line_length: 169 | alphanum_fraction: 0.506999
hexsha: ed5bb200d9597641b3d366c18b6bda01b9a7883d | size: 6,119 | ext: py | lang: Python
path: src/TF-gui/tftrain.py | repo: jeetsagar/turbojet @ 9b17edde0a7e01d0fa320261fbc2734ce53577d2 | licenses: ["MIT"] | stars: null | issues: null | forks: 2 (2021-05-20T05:47:59.000Z to 2021-08-24T07:44:37.000Z)
#!python3
import os
import pandas as pd
import tensorflow as tf
from tensorflow.keras import layers
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
# gpu_devices = tf.config.experimental.list_physical_devices("GPU")
# for device in gpu_devices:
# tf.config.experimental.set_memory_growth(device, True)
avg_line_length: 42.493056 | max_line_length: 120 | alphanum_fraction: 0.57346
hexsha: ed5bcaf7cb360ac7f0af74528df0eb589224f1a5 | size: 5,434 | ext: py | lang: Python
path: library/kong_api.py | repo: sebastienc/ansible-kong-module @ c1e7b471a517d1ec99c5629f3729ebc34088bd64 | licenses: ["MIT"]
stars: 34 (2016-03-09T17:10:52.000Z to 2019-12-25T08:31:49.000Z) | issues: 6 (2016-05-16T14:09:05.000Z to 2018-07-23T21:09:33.000Z) | forks: 23 (2016-02-17T12:18:16.000Z to 2021-05-06T09:39:35.000Z)
#!/usr/bin/python

DOCUMENTATION = '''
---
module: kong
short_description: Configure a Kong API Gateway
'''

EXAMPLES = '''
- name: Register a site
  kong:
    kong_admin_uri: http://127.0.0.1:8001/apis/
    name: "Mockbin"
    target_url: "http://mockbin.com"
    request_host: "mockbin.com"
    state: present

- name: Delete a site
  kong:
    kong_admin_uri: http://127.0.0.1:8001/apis/
    name: "Mockbin"
    state: absent
'''

import json, requests, os

# NOTE: the KongAPI and ModuleHelper classes used below are defined in the
# full module; they are not part of this excerpt.

def main():
    fields = [
        'name',
        'upstream_url',
        'request_host',
        'request_path',
        'strip_request_path',
        'preserve_host'
    ]

    helper = ModuleHelper(fields)

    global module  # might not need this
    module = helper.get_module()
    base_url, data, state, auth_user, auth_password = helper.prepare_inputs(module)

    api = KongAPI(base_url, auth_user, auth_password)
    if state == "present":
        response = api.add_or_update(**data)
    if state == "absent":
        response = api.delete_by_name(data.get("name"))
    if state == "list":
        response = api.list()

    if response.status_code == 401:
        module.fail_json(msg="Please specify kong_admin_username and kong_admin_password", meta=response.json())
    elif response.status_code == 403:
        module.fail_json(msg="Please check kong_admin_username and kong_admin_password", meta=response.json())
    else:
        has_changed, meta = helper.get_response(response, state)
        module.exit_json(changed=has_changed, meta=meta)

from ansible.module_utils.basic import *
from ansible.module_utils.urls import *

if __name__ == '__main__':
    main()
avg_line_length: 30.188889 | max_line_length: 133 | alphanum_fraction: 0.597902
hexsha: ed5c8a4473db3e1f846fdf5ddd27546849b2b2e4 | size: 3,091 | ext: py | lang: Python
path: src/compas_plotters/artists/lineartist.py | repo: XingxinHE/compas @ d2901dbbacdaf4694e5adae78ba8f093f10532bf | licenses: ["MIT"] | stars: null | issues: null | forks: null
from compas_plotters.artists import Artist
from matplotlib.lines import Line2D
from compas.geometry import intersection_line_box_xy
__all__ = ['LineArtist']
avg_line_length: 36.364706 | max_line_length: 122 | alphanum_fraction: 0.530573
hexsha: ed5d3821ab68704ffac0f126c20afbf8dae239de | size: 1,018 | ext: py | lang: Python
path: plot2d_artificial_dataset1_silvq.py | repo: manome/python-silvq @ b50d7486e970fbe9a5b66dd3fc5beb8b5de8ca2f | licenses: ["BSD-3-Clause"] | stars: null | issues: null | forks: null
# -*- encoding: utf8 -*-

import numpy as np
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split

from lvq import SilvqModel
from lvq.utils import plot2d

# NOTE: main() is defined in the full script; its body is not included in
# this excerpt.
if __name__ == '__main__':
    main()
avg_line_length: 30.848485 | max_line_length: 118 | alphanum_fraction: 0.697446
hexsha: ed5d69e17539392ab832fd82b04ce64e261c7b31 | size: 7,727 | ext: py | lang: Python
max_stars: classification_experiments/Fine-Tuned-ResNet-50/Fine-Tuned-ResNet-50.py | ifr1m/hyper-kvasir @ 21cc366e78c0cb4e180a26a0e441d6c0d5171da9 | ["CC-BY-4.0"] | 38 (2019-12-20T13:17:09.000Z to 2022-03-20T08:39:40.000Z)
max_issues: classification_experiments/Fine-Tuned-ResNet-50/Fine-Tuned-ResNet-50.py | smaranjitghose/hyper-kvasir @ b4815d151ef90cffa1bbc8fbf97cd091a20ce600 | ["CC-BY-4.0"] | 2 (2021-01-12T10:45:13.000Z to 2021-01-28T06:14:45.000Z)
max_forks: classification_experiments/Fine-Tuned-ResNet-50/Fine-Tuned-ResNet-50.py | smaranjitghose/hyper-kvasir @ b4815d151ef90cffa1bbc8fbf97cd091a20ce600 | ["CC-BY-4.0"] | 11 (2020-03-24T17:58:04.000Z to 2021-12-09T16:12:16.000Z)
#!/usr/bin/env python
# coding: utf-8
# In[ ]:
#Importing all required libraries
# In[ ]:
from __future__ import absolute_import, division, print_function, unicode_literals
# In[ ]:
#Checking for correct cuda and tf versions
from tensorflow.python.platform import build_info as tf_build_info
print(tf_build_info.cuda_version_number)
# 9.0 in v1.10.0
print(tf_build_info.cudnn_version_number)
# 7 in v1.10.0
# In[ ]:
import tensorflow as tf
import pathlib
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Conv2D, Flatten, Dropout, MaxPooling2D
from tensorflow.keras.preprocessing.image import ImageDataGenerator
import os
import numpy as np
import matplotlib.pyplot as plt
# In[ ]:
AUTOTUNE = tf.data.experimental.AUTOTUNE
# In[ ]:
import IPython.display as display
from PIL import Image
import numpy as np
import matplotlib.pyplot as plt
import os
# In[ ]:
tf.__version__
# In[ ]:
#Train and test data folder
train_data_dir = "\\hyper-kvasir\\splits\\all\\1"
test_data_dir = "\\hyper-kvasir\\splits\\all\\0"
# In[ ]:
train_data_dir = pathlib.Path(train_data_dir)
test_data_dir = pathlib.Path(test_data_dir)
# In[ ]:
#count how many images are there
image_count = len(list(train_data_dir.glob('*/*.jpg')))
image_count
# In[ ]:
total_train = len(list(train_data_dir.glob('*/*.jpg')))
total_val = len(list(test_data_dir.glob('*/*.jpg')))
# In[ ]:
#get the class names
CLASS_NAMES = np.array([item.name for item in train_data_dir.glob('*') if item.name != "LICENSE.txt"])
CLASS_NAMES
# In[ ]:
#Define parameter for training
batch_size = 32
IMG_HEIGHT = 224
IMG_WIDTH = 224
STEPS_PER_EPOCH = np.ceil(image_count/batch_size)
epochs = 8
num_classes = len(CLASS_NAMES) #23
# In[ ]:
#We use image data generators to load the images and prepare them for the training
train_image_generator = ImageDataGenerator() # Generator for our training data
validation_image_generator = ImageDataGenerator() # Generator for our validation data
train_data_gen = train_image_generator.flow_from_directory(directory=str(train_data_dir),
batch_size=batch_size,
shuffle=True,
target_size=(IMG_HEIGHT, IMG_WIDTH),
classes = list(CLASS_NAMES),
class_mode='categorical'
)
val_data_gen = validation_image_generator.flow_from_directory(directory=str(test_data_dir),
batch_size=batch_size,
shuffle=True,
target_size=(IMG_HEIGHT, IMG_WIDTH),
class_mode='categorical',
classes = list(CLASS_NAMES)
)
#get class order from directories
print(train_data_gen.class_indices.keys())
print(val_data_gen.class_indices.keys())
# In[ ]:
IMG_SIZE = 224
IMG_SHAPE = (IMG_SIZE, IMG_SIZE, 3)
# base model from the pre-trained model. Resnet 50 in this case
base_model = tf.keras.applications.ResNet50(input_shape=IMG_SHAPE,
include_top=False,
weights='imagenet')
base_model.trainable = False
# In[ ]:
#add new classification layer
x = base_model.output
x = tf.keras.layers.GlobalAveragePooling2D()(x)
x = tf.keras.layers.Dense(num_classes,activation='softmax')(x)
model = tf.keras.models.Model(inputs=base_model.input, outputs=x)
base_learning_rate = 0.001
model.compile(optimizer=tf.keras.optimizers.Adam(lr=base_learning_rate),
loss='categorical_crossentropy',
metrics=['accuracy'])
# In[ ]:
#fit the model
history = model.fit_generator(
    train_data_gen,
    steps_per_epoch=total_train // batch_size,
    epochs=epochs,
    validation_data=val_data_gen,
    validation_steps=total_val // batch_size
)
# In[ ]:
#create training plots
history
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs_range = range(epochs)
plt.figure(figsize=(8, 8))
plt.subplot(1, 2, 1)
plt.plot(epochs_range, acc, label='Training Accuracy')
plt.plot(epochs_range, val_acc, label='Validation Accuracy')
plt.legend(loc='lower right')
plt.title('Training and Validation Accuracy')
plt.subplot(1, 2, 2)
plt.plot(epochs_range, loss, label='Training Loss')
plt.plot(epochs_range, val_loss, label='Validation Loss')
plt.legend(loc='upper right')
plt.title('Training and Validation Loss')
plt.show()
# In[ ]:
base_model.trainable = True #now we want to train the base model
# In[ ]:
# How many layers are in the base model
print("Layers base model: ", len(base_model.layers))
# Fine tune from layer x
fine_tune_at = 100
# Freeze all the layers before the fine tune starting layer
for layer in base_model.layers[:fine_tune_at]:
layer.trainable = False
# In[ ]:
model.compile(loss='categorical_crossentropy',
optimizer = tf.keras.optimizers.RMSprop(lr=base_learning_rate/10),
metrics=['accuracy'])
# In[ ]:
model.summary()
# In[ ]:
#Fine tune step
initial_epochs = 7
fine_tune_epochs = 3
total_epochs = initial_epochs + fine_tune_epochs
train_batches = total_train // batch_size
print(total_val // batch_size)
validation_batches = total_val // batch_size
history_fine = model.fit_generator(
train_data_gen,
steps_per_epoch=total_train // batch_size,
epochs=total_epochs,
initial_epoch = history.epoch[-1],
validation_data=val_data_gen,
validation_steps=total_val // batch_size
)
# In[ ]:
acc += history_fine.history['accuracy']
val_acc += history_fine.history['val_accuracy']
loss += history_fine.history['loss']
val_loss += history_fine.history['val_loss']
# In[ ]:
#Plot fine tuning
plt.figure(figsize=(8, 8))
plt.subplot(2, 1, 1)
plt.plot(acc, label='Training Accuracy')
plt.plot(val_acc, label='Validation Accuracy')
plt.ylim([0.8, 1])
plt.plot([initial_epochs-1,initial_epochs-1],
plt.ylim(), label='Start Fine Tuning')
plt.legend(loc='lower right')
plt.title('Training and Validation Accuracy')
plt.subplot(2, 1, 2)
plt.plot(loss, label='Training Loss')
plt.plot(val_loss, label='Validation Loss')
plt.ylim([0, 1.0])
plt.plot([initial_epochs-1,initial_epochs-1],
plt.ylim(), label='Start Fine Tuning')
plt.legend(loc='upper right')
plt.title('Training and Validation Loss')
plt.xlabel('epoch')
plt.show()
# In[ ]:
#model save and load
import os
# In[ ]:
#some time stamp
from datetime import datetime
# current date and time.
now = datetime.now()
timestamp = datetime.timestamp(now)
print("timestamp =", timestamp)
# In[ ]:
model_filename = str(timestamp)+'mymodel.h5'
model.save(model_filename)
# In[ ]:
#To apply the model on new data
new_model = tf.keras.models.load_model(model_filename)
# Show the model architecture
new_model.summary()
# In[ ]:
from tensorflow.keras.preprocessing import image
#image directory containing images to test
img_dir="\\polyps"
for i,img in enumerate(os.listdir(img_dir)):
tmpimage = image.load_img(os.path.join(img_dir,img), target_size=(IMG_SIZE,IMG_SIZE))
tmpimage = np.expand_dims(tmpimage, axis=0).astype('float32')
result_class=new_model.predict(tmpimage)
print(img,";",CLASS_NAMES[result_class.argmax(axis=-1)])
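# In[ ]:
# Illustrative extra check: report the top-3 predicted classes for a single
# image, reusing new_model, CLASS_NAMES, IMG_SIZE and img_dir defined above.
sample_files = os.listdir(img_dir)
if sample_files:
    sample = image.load_img(os.path.join(img_dir, sample_files[0]),
                            target_size=(IMG_SIZE, IMG_SIZE))
    sample = np.expand_dims(image.img_to_array(sample), axis=0)
    probs = new_model.predict(sample)[0]
    for idx in probs.argsort()[-3:][::-1]:  # three highest-scoring classes
        print(CLASS_NAMES[idx], ";", probs[idx])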
| 21.889518 | 102 | 0.666235 |
ed5e905c814c4d72273c16c39c47e06ae62fc1f0
| 897 |
gyp
|
Python
|
tools/android/android_tools.gyp
|
SlimKatLegacy/android_external_chromium_org
|
ee480ef5039d7c561fc66ccf52169ead186f1bea
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 2 |
2015-03-04T02:36:53.000Z
|
2016-06-25T11:22:17.000Z
|
tools/android/android_tools.gyp
|
j4ckfrost/android_external_chromium_org
|
a1a3dad8b08d1fcf6b6b36c267158ed63217c780
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
tools/android/android_tools.gyp
|
j4ckfrost/android_external_chromium_org
|
a1a3dad8b08d1fcf6b6b36c267158ed63217c780
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 4 |
2015-02-09T08:49:30.000Z
|
2017-08-26T02:03:34.000Z
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'targets': [
# Intermediate target grouping the android tools needed to run native
# unittests and instrumentation test apks.
{
'target_name': 'android_tools',
'type': 'none',
'dependencies': [
'adb_reboot/adb_reboot.gyp:adb_reboot',
'forwarder2/forwarder.gyp:forwarder2',
'md5sum/md5sum.gyp:md5sum',
'purge_ashmem/purge_ashmem.gyp:purge_ashmem',
],
},
{
'target_name': 'memdump',
'type': 'none',
'dependencies': [
'memdump/memdump.gyp:memdump',
],
},
{
'target_name': 'memconsumer',
'type': 'none',
'dependencies': [
'memconsumer/memconsumer.gyp:memconsumer',
],
},
],
}
| 25.628571 | 73 | 0.596433 |
ed5f32dd2cd9143c086d6a609f05220bf9f92fde
| 12,380 |
py
|
Python
|
test/functional/fantasygold_opcall.py
|
FantasyGold/FantasyGold-Core
|
afff8871e770045e468e2f536ede9db0dff889d5
|
[
"MIT"
] | 13 |
2018-04-30T21:43:40.000Z
|
2020-12-07T11:06:47.000Z
|
test/functional/fantasygold_opcall.py
|
donoel2/FantasyGold-Core
|
afff8871e770045e468e2f536ede9db0dff889d5
|
[
"MIT"
] | 4 |
2018-05-10T00:18:18.000Z
|
2019-07-08T23:12:54.000Z
|
test/functional/fantasygold_opcall.py
|
donoel2/FantasyGold-Core
|
afff8871e770045e468e2f536ede9db0dff889d5
|
[
"MIT"
] | 13 |
2018-04-30T17:41:54.000Z
|
2020-12-08T18:24:06.000Z
|
#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.script import *
from test_framework.mininode import *
from test_framework.fantasygold import *
from test_framework.fantasygoldconfig import *
import sys
if __name__ == '__main__':
OpCallTest().main()
| 57.581395 | 689 | 0.714943 |
ed5fdee808e9a889711f8e8007e05b2a81263072
| 1,883 |
py
|
Python
|
intake_sklearn/source.py
|
AlbertDeFusco/intake-sklearn
|
6cd0e11b26703712eb338032518e5c55b725c48f
|
[
"BSD-3-Clause"
] | 1 |
2022-02-23T09:00:38.000Z
|
2022-02-23T09:00:38.000Z
|
intake_sklearn/source.py
|
AlbertDeFusco/intake-sklearn
|
6cd0e11b26703712eb338032518e5c55b725c48f
|
[
"BSD-3-Clause"
] | 1 |
2019-10-14T12:25:26.000Z
|
2019-10-25T13:55:59.000Z
|
intake_sklearn/source.py
|
AlbertDeFusco/intake-sklearn
|
6cd0e11b26703712eb338032518e5c55b725c48f
|
[
"BSD-3-Clause"
] | 1 |
2021-07-28T17:49:36.000Z
|
2021-07-28T17:49:36.000Z
|
from intake.source.base import DataSource, Schema
import joblib
import fsspec
import sklearn
import re
from . import __version__
| 27.691176 | 89 | 0.591609 |
ed607bad1d48fdf5da41de44d6ec206f2716afe4
| 4,915 |
py
|
Python
|
jedi/evaluate/dynamic.py
|
hatamov/jedi
|
10df0f933f931a8e0e70304d823f6df0dc3000bd
|
[
"MIT"
] | null | null | null |
jedi/evaluate/dynamic.py
|
hatamov/jedi
|
10df0f933f931a8e0e70304d823f6df0dc3000bd
|
[
"MIT"
] | null | null | null |
jedi/evaluate/dynamic.py
|
hatamov/jedi
|
10df0f933f931a8e0e70304d823f6df0dc3000bd
|
[
"MIT"
] | null | null | null |
"""
One of the really important features of |jedi| is to have an option to
understand code like this::
def foo(bar):
bar. # completion here
foo(1)
There's no doubt whether bar is an ``int`` or not, but if there's also a call
like ``foo('str')``, what would happen? Well, we'll just show both. Because
that's what a human would expect.
It works as follows:
- |Jedi| sees a param
- search for function calls named ``foo``
- execute these calls and check the input. This work with a ``ParamListener``.
"""
from itertools import chain
from jedi._compatibility import unicode
from jedi.parser import tree as pr
from jedi import settings
from jedi import debug
from jedi.evaluate.cache import memoize_default
from jedi.evaluate import imports
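

if __name__ == '__main__':
    # Hedged demo of the behaviour described in the module docstring. The
    # Script signature shown matches older jedi releases
    # (source, line, column, path); newer releases use
    # Script(code).complete(line, column) instead.
    import jedi
    source = "def foo(bar):\n    bar.\nfoo(1)\nfoo('str')\n"
    completions = jedi.Script(source, 2, 8, 'example.py').completions()
    print(sorted(c.name for c in completions)[:10])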
| 33.435374 | 95 | 0.584334 |
ed614dd2f553e42b3e9876c261fcf0d4bfb4705a
| 2,245 |
py
|
Python
|
steamcheck/views.py
|
moird/linux-game-report
|
8c3204d857134b0685bc3c213cd9d9e9f9a5f2fd
|
[
"MIT"
] | null | null | null |
steamcheck/views.py
|
moird/linux-game-report
|
8c3204d857134b0685bc3c213cd9d9e9f9a5f2fd
|
[
"MIT"
] | null | null | null |
steamcheck/views.py
|
moird/linux-game-report
|
8c3204d857134b0685bc3c213cd9d9e9f9a5f2fd
|
[
"MIT"
] | null | null | null |
from steamcheck import app
from flask import jsonify, render_template
import os
import steamapi
import json
| 35.634921 | 111 | 0.623163 |
ed6236b34ab65a1e059ca45441d455cec6bd4e90
| 516 |
py
|
Python
|
validator/delphi_validator/run.py
|
benjaminysmith/covidcast-indicators
|
b1474cd68a1497166fefe4beffd4d5ff867b9a61
|
[
"MIT"
] | null | null | null |
validator/delphi_validator/run.py
|
benjaminysmith/covidcast-indicators
|
b1474cd68a1497166fefe4beffd4d5ff867b9a61
|
[
"MIT"
] | null | null | null |
validator/delphi_validator/run.py
|
benjaminysmith/covidcast-indicators
|
b1474cd68a1497166fefe4beffd4d5ff867b9a61
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Functions to call when running the tool.
This module should contain a function called `run_module`, that is executed
when the module is run with `python -m delphi_validator`.
"""
from delphi_utils import read_params
from .validate import Validator
def run_module():
"""Run the validator as a module."""
parent_params = read_params()
params = parent_params['validation']
validator = Validator(params)
validator.validate(parent_params["export_dir"]).print_and_exit()
| 28.666667 | 75 | 0.732558 |
ed62a0f3bd61d82280e96fe9d14711d5df97f622
| 1,876 |
py
|
Python
|
datasets/validation_folders.py
|
zenithfang/supervised_dispnet
|
f81dfccfdc944e015d8fae17e24b3e664bec14d6
|
[
"MIT"
] | 39 |
2020-01-17T18:33:42.000Z
|
2021-11-14T02:36:32.000Z
|
datasets/validation_folders.py
|
zenithfang/supervised_dispnet
|
f81dfccfdc944e015d8fae17e24b3e664bec14d6
|
[
"MIT"
] | 7 |
2020-01-10T14:52:44.000Z
|
2021-03-15T18:55:35.000Z
|
datasets/validation_folders.py
|
zenithfang/supervised_dispnet
|
f81dfccfdc944e015d8fae17e24b3e664bec14d6
|
[
"MIT"
] | 10 |
2020-03-01T11:35:50.000Z
|
2022-01-18T10:54:04.000Z
|
import torch.utils.data as data
import numpy as np
from imageio import imread
from path import Path
import pdb
| 32.912281 | 153 | 0.615672 |
ed62a4dc340088a21f482019440f649f39123cfc
| 1,293 |
py
|
Python
|
secretpy/ciphers/rot18.py
|
tigertv/crypt
|
e464f998e5540f52e269fe360ec9d3a08e976b2e
|
[
"MIT"
] | 51 |
2019-01-09T02:38:03.000Z
|
2022-03-19T19:08:02.000Z
|
secretpy/ciphers/rot18.py
|
tigertv/crypt
|
e464f998e5540f52e269fe360ec9d3a08e976b2e
|
[
"MIT"
] | 7 |
2019-10-03T19:50:32.000Z
|
2021-05-28T22:52:09.000Z
|
secretpy/ciphers/rot18.py
|
tigertv/crypt
|
e464f998e5540f52e269fe360ec9d3a08e976b2e
|
[
"MIT"
] | 13 |
2019-04-16T16:39:44.000Z
|
2021-06-01T21:50:12.000Z
|
#!/usr/bin/python
from .rot13 import Rot13
import secretpy.alphabets as al
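

if __name__ == '__main__':
    # Hedged illustration of the ROT18 idea this module implements (ROT13 on
    # letters combined with ROT5 on digits); a standalone demo, independent
    # of the Rot18 class and secretpy's API.
    def _rot18_demo(text):
        out = []
        for ch in text:
            if ch.isalpha():
                base = ord('a') if ch.islower() else ord('A')
                out.append(chr(base + (ord(ch) - base + 13) % 26))
            elif ch.isdigit():
                out.append(chr(ord('0') + (ord(ch) - ord('0') + 5) % 10))
            else:
                out.append(ch)
        return ''.join(out)

    print(_rot18_demo('attack at 5pm'))  # -> nggnpx ng 0cz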
| 24.396226 | 93 | 0.58314 |
ed6310e1d8d83cf871e0d32a527ca7f1529b58ca
| 1,302 |
py
|
Python
|
pysaurus/database/special_properties.py
|
notoraptor/pysaurus
|
3bf5fe8c15e0e0e580e5edaea05b4a1298641367
|
[
"MIT"
] | null | null | null |
pysaurus/database/special_properties.py
|
notoraptor/pysaurus
|
3bf5fe8c15e0e0e580e5edaea05b4a1298641367
|
[
"MIT"
] | 4 |
2021-08-13T14:03:02.000Z
|
2022-03-05T16:02:45.000Z
|
pysaurus/database/special_properties.py
|
notoraptor/pysaurus
|
3bf5fe8c15e0e0e580e5edaea05b4a1298641367
|
[
"MIT"
] | null | null | null |
from abc import abstractmethod
from pysaurus.database.properties import PropType
from pysaurus.database.video import Video
| 26.04 | 83 | 0.627496 |
ed65a740d0a6c0e521ed5a04db6b899535f0bcde
| 19,613 |
py
|
Python
|
patrole_tempest_plugin/rbac_utils.py
|
openstack/patrole
|
fa0ee135121a5e86301ad5ee1854b3a0bd70b69b
|
[
"Apache-2.0"
] | 14 |
2017-01-03T15:07:18.000Z
|
2020-09-17T18:07:39.000Z
|
patrole_tempest_plugin/rbac_utils.py
|
openstack/patrole
|
fa0ee135121a5e86301ad5ee1854b3a0bd70b69b
|
[
"Apache-2.0"
] | null | null | null |
patrole_tempest_plugin/rbac_utils.py
|
openstack/patrole
|
fa0ee135121a5e86301ad5ee1854b3a0bd70b69b
|
[
"Apache-2.0"
] | 12 |
2017-02-28T20:08:48.000Z
|
2020-12-30T09:31:51.000Z
|
# Copyright 2017 AT&T Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import sys
import time
from oslo_log import log as logging
from oslo_utils import excutils
from tempest import config
from tempest.lib import exceptions as lib_exc
from patrole_tempest_plugin import rbac_exceptions
CONF = config.CONF
LOG = logging.getLogger(__name__)
def _set_override_role_called(self):
"""Helper for tracking whether ``override_role`` was called."""
self.__override_role_called = True
def _set_override_role_caught_exc(self):
"""Helper for tracking whether exception was thrown inside
``override_role``.
"""
self.__override_role_caught_exc = True
def _validate_override_role_called(self):
"""Idempotently validate that ``override_role`` is called and reset
its value to False for sequential tests.
"""
was_called = self.__override_role_called
self.__override_role_called = False
return was_called
def _validate_override_role_caught_exc(self):
"""Idempotently validate that exception was caught inside
``override_role``, so that, by process of elimination, it can be
determined whether one was thrown outside (which is invalid).
"""
caught_exception = self.__override_role_caught_exc
self.__override_role_caught_exc = False
return caught_exception
def is_admin():
"""Verifies whether the current test role equals the admin role.
:returns: True if ``rbac_test_roles`` contain the admin role.
"""
roles = CONF.patrole.rbac_test_roles
# TODO(vegasq) drop once CONF.patrole.rbac_test_role is removed
if CONF.patrole.rbac_test_role:
roles.append(CONF.patrole.rbac_test_role)
roles = list(set(roles))
# TODO(felipemonteiro): Make this more robust via a context is admin
# lookup.
return CONF.identity.admin_role in roles
| 39.069721 | 120 | 0.629175 |
ed66f473c8ee9e1a4cbf088bc3dc94834ee24ff9
| 6,029 |
py
|
Python
|
core/my_widgets/drug_picker.py
|
kimera1999/pmpktn
|
5307b6684a08bac4b88617f097017b5ea4192ab2
|
[
"MIT"
] | null | null | null |
core/my_widgets/drug_picker.py
|
kimera1999/pmpktn
|
5307b6684a08bac4b88617f097017b5ea4192ab2
|
[
"MIT"
] | null | null | null |
core/my_widgets/drug_picker.py
|
kimera1999/pmpktn
|
5307b6684a08bac4b88617f097017b5ea4192ab2
|
[
"MIT"
] | 1 |
2020-05-16T14:28:59.000Z
|
2020-05-16T14:28:59.000Z
|
from initialize import *
from core.db.db_func import query_linedrug_list
import os
import wx
def onTextChange(self, e):
if os.name == "nt":
if e.String == "":
self.Clear()
elif len(e.String) >= 1:
if not self.IsPopupShown():
self.Popup()
self.SetInsertionPointEnd()
if os.name == "posix":
if e.String == "":
self.Clear()
| 31.238342 | 96 | 0.543208 |
ed6710b9dafd0dadb8b0c6608676f1c2e79ad2c8
| 615 |
py
|
Python
|
em Python/Roteiro4/Roteiro4__grafos.py
|
GuilhermeEsdras/Grafos
|
b6556c3d679496d576f65b798a1a584cd73e40f4
|
[
"MIT"
] | null | null | null |
em Python/Roteiro4/Roteiro4__grafos.py
|
GuilhermeEsdras/Grafos
|
b6556c3d679496d576f65b798a1a584cd73e40f4
|
[
"MIT"
] | null | null | null |
em Python/Roteiro4/Roteiro4__grafos.py
|
GuilhermeEsdras/Grafos
|
b6556c3d679496d576f65b798a1a584cd73e40f4
|
[
"MIT"
] | null | null | null |
from Roteiro4.Roteiro4__funcoes import Grafo
| 23.653846 | 82 | 0.461789 |
ed6739ff5f8ea8f36a47b3ca0134c0c015b0b7c7
| 3,499 |
py
|
Python
|
fuzzybee/joboard/views.py
|
youtaya/knight
|
6899e18ca6b1ef01daaae7d7fd14b50a26aa0aee
|
[
"MIT"
] | null | null | null |
fuzzybee/joboard/views.py
|
youtaya/knight
|
6899e18ca6b1ef01daaae7d7fd14b50a26aa0aee
|
[
"MIT"
] | null | null | null |
fuzzybee/joboard/views.py
|
youtaya/knight
|
6899e18ca6b1ef01daaae7d7fd14b50a26aa0aee
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from django.shortcuts import get_object_or_404, render_to_response, render
from django.http import HttpResponseRedirect, HttpResponse
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from joboard.models import Factory
from joboard.forms import FactoryForm
from django.template import RequestContext
from django.core.exceptions import ObjectDoesNotExist
from urllib import urlopen, urlencode
import urllib2
from fuzzybee.conf import b_url, b_ak, geo_table, l_url, app_id, app_key
from utils.pack_json import toJSON, fromJSON
from django.contrib.auth.decorators import login_required
from people.models import People
import logging
logger = logging.getLogger(__name__)
def save_factory_cloud(fact_info, fact_id):
title = fact_info['fact_name']
address = fact_info['fact_addr']
lat = fact_info['fact_lat']
lng = fact_info['fact_lng']
num = fact_info['hire_num']
data = {
'title': title.encode("utf-8"),
'address': address.encode("utf-8"),
'latitude': lat,
'longitude': lng,
'job_num': num,
'factory_id': fact_id,
}
head = {
'X-AVOSCloud-Application-Id': app_id,
'X-AVOSCloud-Application-Key': app_key,
'Content-Type': 'application/json',
}
req = urllib2.Request(l_url, toJSON(data), head)
print str(req)
response = urllib2.urlopen(req)
    #print response.read()
lean_response = fromJSON(response.read())
print lean_response
lean_objectId = lean_response['objectId']
# save in Baidu Map
params = urlencode({
'title': title.encode("utf-8"),
'address': address.encode("utf-8"),
'latitude': lat,
'longitude': lng,
'coord_type': 3,
'geotable_id': geo_table,
'ak': b_ak,
'job_num': num,
'lean_id': lean_objectId,
})
req = urllib2.Request(b_url, params)
#print str(req)
response = urllib2.urlopen(req)
    #print response.read()
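

if __name__ == "__main__":
    # Hedged usage sketch: the keys below are exactly the ones
    # save_factory_cloud() reads; every value is a placeholder. The call is
    # left commented out because it would POST to the live LeanCloud/Baidu
    # endpoints configured above.
    demo_fact_info = {
        'fact_name': u'Example Factory',
        'fact_addr': u'Example Address',
        'fact_lat': 39.9,
        'fact_lng': 116.4,
        'hire_num': 10,
    }
    # save_factory_cloud(demo_fact_info, 1)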
| 33.009434 | 105 | 0.658474 |
ed67b3786dc5aa973280b427220b99a230def591
| 464 |
py
|
Python
|
flask/app.py
|
yatsu/react-flask-graphql-example
|
18a38b7602c81a85a3cc38c74440ce34d63fc32a
|
[
"MIT"
] | 21 |
2017-06-24T15:29:30.000Z
|
2021-03-03T06:58:41.000Z
|
flask/app.py
|
yatsu/react-flask-graphql-example
|
18a38b7602c81a85a3cc38c74440ce34d63fc32a
|
[
"MIT"
] | null | null | null |
flask/app.py
|
yatsu/react-flask-graphql-example
|
18a38b7602c81a85a3cc38c74440ce34d63fc32a
|
[
"MIT"
] | 6 |
2018-01-15T06:36:11.000Z
|
2022-03-18T07:57:39.000Z
|
from flask import Flask
from flask_cors import CORS
from flask_graphql import GraphQLView
from schema import Schema
def create_app(graphiql=False):
    # Factory inferred from the call below; the view wiring follows the
    # standard flask_graphql pattern for serving a graphene Schema.
    app = Flask(__name__)
    app.add_url_rule('/graphql', view_func=GraphQLView.as_view(
        'graphql', schema=Schema, graphiql=graphiql))
    return app


if __name__ == '__main__':
app = create_app(graphiql=True)
CORS(app, resources={r'/graphql': {'origins': '*'}})
app.run()
| 22.095238 | 73 | 0.668103 |
ed689bd57d1b5e295bd3bb253d9bfc772e080d82
| 5,431 |
py
|
Python
|
mortgagetvm/mortgageOptions.py
|
AndrewChap/mortgagetvm
|
4ec39707d61bcb3224bdcddce84bf237f02352d1
|
[
"MIT"
] | null | null | null |
mortgagetvm/mortgageOptions.py
|
AndrewChap/mortgagetvm
|
4ec39707d61bcb3224bdcddce84bf237f02352d1
|
[
"MIT"
] | null | null | null |
mortgagetvm/mortgageOptions.py
|
AndrewChap/mortgagetvm
|
4ec39707d61bcb3224bdcddce84bf237f02352d1
|
[
"MIT"
] | null | null | null |
# Factory-like class for mortgage options
| 58.397849 | 88 | 0.606518 |
ed693e39d7414ae26d14dc6568bc549d2c30f321
| 1,452 |
py
|
Python
|
DD/Terrain.py
|
CodingBullywug/DDreshape
|
393e5ea336eb6cb78f31345731ccf52baf19bfac
|
[
"MIT"
] | 2 |
2020-04-13T04:47:26.000Z
|
2022-02-19T06:10:04.000Z
|
DD/Terrain.py
|
CodingBullywug/DDreshape
|
393e5ea336eb6cb78f31345731ccf52baf19bfac
|
[
"MIT"
] | null | null | null |
DD/Terrain.py
|
CodingBullywug/DDreshape
|
393e5ea336eb6cb78f31345731ccf52baf19bfac
|
[
"MIT"
] | 1 |
2020-04-13T04:47:30.000Z
|
2020-04-13T04:47:30.000Z
|
from DD.utils import PoolByteArray2NumpyArray, NumpyArray2PoolByteArray
from DD.Entity import Entity
import numpy as np
| 37.230769 | 144 | 0.644628 |
ed69c7e1252a3ec3f75d6d65d353de14affd6d0c
| 1,628 |
py
|
Python
|
bluesky/tests/utils.py
|
AbbyGi/bluesky
|
759f9c55dce97dc47513cca749a69dd861bdf58d
|
[
"BSD-3-Clause"
] | 43 |
2015-08-04T20:13:41.000Z
|
2019-04-12T17:21:36.000Z
|
bluesky/tests/utils.py
|
AbbyGi/bluesky
|
759f9c55dce97dc47513cca749a69dd861bdf58d
|
[
"BSD-3-Clause"
] | 966 |
2015-07-29T16:43:21.000Z
|
2019-05-09T21:02:28.000Z
|
bluesky/tests/utils.py
|
AbbyGi/bluesky
|
759f9c55dce97dc47513cca749a69dd861bdf58d
|
[
"BSD-3-Clause"
] | 48 |
2019-05-15T18:01:06.000Z
|
2022-03-03T18:53:43.000Z
|
from collections import defaultdict
import contextlib
import tempfile
import sys
import threading
import asyncio
def _fabricate_asycio_event(loop):
th_ev = threading.Event()
    aio_event = None

    def really_make_the_event():
        # Create the asyncio.Event on the loop's own thread, then let the
        # waiting caller know through the threading.Event.
        nonlocal aio_event
        aio_event = asyncio.Event()
        th_ev.set()

    h = loop.call_soon_threadsafe(really_make_the_event)
if not th_ev.wait(0.1):
h.cancel()
raise Exception("failed to make asyncio event")
return aio_event
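

if __name__ == "__main__":
    # Hedged sketch: run a loop on a daemon thread and fabricate an
    # asyncio.Event on it from the main thread.
    loop = asyncio.new_event_loop()
    threading.Thread(target=loop.run_forever, daemon=True).start()
    print(type(_fabricate_asycio_event(loop)))
    loop.call_soon_threadsafe(loop.stop)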
| 23.941176 | 66 | 0.594595 |
ed69da856e9dae34d6443933a8a9df258e7f8e95
| 1,116 |
py
|
Python
|
cli/check_json.py
|
MJJojo97/openslides-backend
|
af0d1edb0070e352d46f285a1ba0bbe3702d49ae
|
[
"MIT"
] | null | null | null |
cli/check_json.py
|
MJJojo97/openslides-backend
|
af0d1edb0070e352d46f285a1ba0bbe3702d49ae
|
[
"MIT"
] | null | null | null |
cli/check_json.py
|
MJJojo97/openslides-backend
|
af0d1edb0070e352d46f285a1ba0bbe3702d49ae
|
[
"MIT"
] | null | null | null |
import json
import sys
from openslides_backend.models.checker import Checker, CheckException
if __name__ == "__main__":
sys.exit(main())
| 24.8 | 78 | 0.53405 |
ed6a3ab7b10d68f5a936266b4ec7064d1ae865c6
| 313 |
py
|
Python
|
utils/mgmt.py
|
robinagist/manic
|
b1bdefbb11c7489164a0c08bb092ffecb5900261
|
[
"MIT"
] | 2 |
2018-09-06T05:39:11.000Z
|
2020-10-13T21:40:06.000Z
|
utils/mgmt.py
|
robinagist/manic
|
b1bdefbb11c7489164a0c08bb092ffecb5900261
|
[
"MIT"
] | 20 |
2018-01-20T05:00:23.000Z
|
2018-01-25T17:55:00.000Z
|
utils/mgmt.py
|
robinagist/Manic-Mapped-Memory-Server
|
b1bdefbb11c7489164a0c08bb092ffecb5900261
|
[
"MIT"
] | null | null | null |
from utils.data import load_memfile_configs
from utils.server import plain_response
from sanic import response
| 19.5625 | 61 | 0.776358 |
ed6ad5b625da50e0023d94d78806dbcd8acd64a1
| 28,127 |
py
|
Python
|
datasets/medicalImage.py
|
UpCoder/YNe
|
2f932456eda29b1e04f4c7e212e2ab0dacfe831b
|
[
"MIT"
] | null | null | null |
datasets/medicalImage.py
|
UpCoder/YNe
|
2f932456eda29b1e04f4c7e212e2ab0dacfe831b
|
[
"MIT"
] | null | null | null |
datasets/medicalImage.py
|
UpCoder/YNe
|
2f932456eda29b1e04f4c7e212e2ab0dacfe831b
|
[
"MIT"
] | null | null | null |
# -*- coding=utf-8 -*-
import SimpleITK as itk
import pydicom
import numpy as np
from PIL import Image, ImageDraw
import gc
from skimage.morphology import disk, dilation
import nipy
import os
from glob import glob
import scipy
import cv2
from xml.dom.minidom import Document
typenames = ['CYST', 'FNH', 'HCC', 'HEM', 'METS']
typeids = [0, 1, 2, 3, 4]
#
# Convert a DICOM series to an MHD volume:
#   read the DICOM series, write it out as mhd, read the mhd back
#
# Look up lesion type by case ID:
#   one dict keyed by typename with typeid values,
#   one dict keyed by typeid with typename values;
#   map a case name to its id
#
def get_kernel_filters(kernel_size):
    '''
    Build five dilation kernels of the given size: the full square plus
    left/right/top/bottom half-masked variants.
    :param kernel_size: side length of the square kernel
    :return: array of shape [5, kernel_size, kernel_size]
    '''
kernel_whole = np.ones([kernel_size, kernel_size], np.uint8)
half_size = kernel_size // 2
kernel_left = np.copy(kernel_whole)
kernel_left[:, half_size + 1:] = 0
kernel_right = np.copy(kernel_whole)
kernel_right[:, :half_size] = 0
kernel_top = np.copy(kernel_whole)
kernel_top[half_size + 1:, :] = 0
kernel_bottom = np.copy(kernel_whole)
kernel_bottom[:half_size, :] = 0
return np.concatenate([
np.expand_dims(kernel_whole, axis=0),
np.expand_dims(kernel_left, axis=0),
np.expand_dims(kernel_right, axis=0),
np.expand_dims(kernel_top, axis=0),
np.expand_dims(kernel_bottom, axis=0),
], axis=0)
#
# def image_expand(image, size):
#
def find_significant_layer(mask_image):
    '''
    Find the slice with the largest mask area.
    :param mask_image: [depth, width, height]
    :return: index of the slice whose mask covers the most pixels
    '''
sum_res = np.sum(np.sum(mask_image, axis=1), axis=1)
return np.argmax(sum_res)
#
#
def compress22dim(image):
    '''
    Squeeze a singleton dimension so a [1, H, W] image becomes [H, W].
    '''
shape = list(np.shape(image))
if len(shape) == 3:
return np.squeeze(image)
return image
def extract_ROI(image, mask_image):
    '''
    Crop the bounding box of the mask's positive region out of the image.
    '''
xs, ys = np.where(mask_image == 1)
xs_min = np.min(xs)
xs_max = np.max(xs)
ys_min = np.min(ys)
ys_max = np.max(ys)
return image[xs_min: xs_max + 1, ys_min: ys_max + 1]
# def image_expand(mask_image, r):
# return dilation(mask_image, disk(r))
# Shape helpers:
#   expand a (512, 512) slice to (1, 512, 512)
#   reshape [w, h, d] to [d, w, h], and [d, w, h] back to [w, h, d]
def MICCAI2018_Iterator(image_dir, execute_func, *parameters):
    '''
    Walk the MICCAI2018 slice directories (train/val/test) and apply
    execute_func to every case directory.
    :param execute_func: callable invoked as execute_func(slice_dir, *parameters)
    :return: None
    '''
for sub_name in ['train', 'val', 'test']:
names = os.listdir(os.path.join(image_dir, sub_name))
for name in names:
cur_slice_dir = os.path.join(image_dir, sub_name, name)
execute_func(cur_slice_dir, *parameters)
def convertCase2PNGs(volume_path, seg_path, save_dir=None, z_axis=5.0, short_edge=64):
    '''
    Slice a nii volume into PNG-ready arrays: clip and normalize intensities,
    stack each slice with its neighbours, and build the matching masks.
    :param volume_path: path to the image nii file
    :param seg_path: path to the segmentation nii file
    :return: (images, tumor masks, liver masks, weak tumor box masks)
    '''
from skimage.measure import label
volume, header = read_nii_with_header(volume_path)
# volume = np.transpose(volume, [1, 0, 2])
volume = np.asarray(volume, np.float32)
max_v = 250.
min_v = -200.
# max_v = 180
# min_v = -70
volume[volume > max_v] = max_v
volume[volume < min_v] = min_v
volume -= np.mean(volume)
min_v = np.min(volume)
max_v = np.max(volume)
interv = max_v - min_v
volume = (volume - min_v) / interv
z_axis_case = header['voxel_spacing'][-1]
slice_num = int(z_axis / z_axis_case)
if slice_num == 0:
slice_num = 1
seg = read_nii(seg_path)
# print np.shape(volume), np.shape(seg)
[_, _, channel] = np.shape(volume)
imgs = []
names = []
masks = []
tumor_weakly_masks = []
liver_masks = []
i = slice_num + 1
pos_slice_num = np.sum(np.sum(np.sum(seg == 2, axis=0), axis=0) != 0)
total_slice_num = np.shape(seg)[-1]
print('pos_slice_num is ', pos_slice_num, total_slice_num)
neg_rate = (3.0 * pos_slice_num) / total_slice_num #
if neg_rate > 1.0:
neg_rate = 1.0
for i in range(channel):
seg_slice = seg[:, :, i]
mid_slice = np.expand_dims(volume[:, :, i], axis=0)
pre_slice = []
# pre_end = i - slice_num / 2
# pre_end = i
# for j in range(1, slice_num + 1):
# z = pre_end - j
# if z < 0:
# z = 0
# pre_slice.append(volume[:, :, z])
if (i - 1) < 0:
pre_slice = np.expand_dims(volume[:, :, i], axis=0)
else:
pre_slice = np.expand_dims(volume[:, :, i-1], axis=0)
next_slice = []
# next_start = i + slice_num / 2
# next_start = i
# for j in range(1, slice_num + 1):
# z = next_start + j
# if z >= channel:
# z = channel - 1
# next_slice.append(volume[:, :, z])
if (i + 1) >= channel:
next_slice = np.expand_dims(volume[:, :, i], axis=0)
else:
next_slice = np.expand_dims(volume[:, :, i+1], axis=0)
# pre_slice = np.mean(pre_slice, axis=0, keepdims=True)
# next_slice = np.mean(next_slice, axis=0, keepdims=True)
imgs.append(
np.transpose(np.concatenate([pre_slice, mid_slice, next_slice], axis=0),
axes=[1, 2, 0]))
names.append(os.path.basename(volume_path).split('.')[0].split('-')[1] + '-' + str(i))
binary_seg_slice = np.asarray(seg_slice == 2, np.uint8)
# print np.max(binary_seg_slice)
masks.append(binary_seg_slice)
labeled_mask = label(binary_seg_slice)
weakly_label_mask = np.zeros_like(binary_seg_slice, np.uint8)
for idx in range(1, np.max(labeled_mask) + 1):
xs, ys = np.where(labeled_mask == idx)
min_xs = np.min(xs)
max_xs = np.max(xs)
min_ys = np.min(ys)
max_ys = np.max(ys)
weakly_label_mask[min_xs: max_xs, min_ys: max_ys] = 1
liver_masks.append(np.asarray(seg_slice == 1, np.uint8))
tumor_weakly_masks.append(weakly_label_mask)
# i += 1
return np.asarray(imgs, np.float32), np.asarray(masks, np.uint8), np.asarray(liver_masks, np.uint8), np.asarray(
tumor_weakly_masks, np.uint8)
def statics_num_slices_lesion(nii_dir):
    '''
    Print, for every case, how many slices contain a lesion.
    :param nii_dir: directory holding segmentation-*.nii files
    :return: None
    '''
mask_nii_paths = glob(os.path.join(nii_dir, 'segmentation-*.nii'))
for mask_nii_path in mask_nii_paths:
mask_img = read_nii(mask_nii_path)
has_lesion = np.asarray(np.sum(np.sum(mask_img == 2, axis=0), axis=0)>0, np.bool)
num_lesion_slices = np.sum(has_lesion)
        print(os.path.basename(mask_nii_path), num_lesion_slices, np.shape(mask_img)[-1])
if __name__ == '__main__':
# for phasename in ['NC', 'ART', 'PV']:
# convert_dicomseries2mhd(
# '/home/give/github/Cascaded-FCN-Tensorflow/Cascaded-FCN/tensorflow-unet/z_testdata/304176-2802027/' + phasename,
# '/home/give/github/Cascaded-FCN-Tensorflow/Cascaded-FCN/tensorflow-unet/z_testdata/304176-2802027/MHD/' + phasename + '.mhd'
# )
# names = os.listdir('/home/give/Documents/dataset/ISBI2017/media/nas/01_Datasets/CT/LITS/Training_Batch_2')
# for name in names:
# path = os.path.join('/home/give/Documents/dataset/ISBI2017/media/nas/01_Datasets/CT/LITS/Training_Batch_2', name)
# image = read_nil(path)
# print(np.shape(image))
# conver2JPG single phase
# image_dir = '/home/give/Documents/dataset/MICCAI2018/Slices/crossvalidation/0'
# save_dir = '/home/give/Documents/dataset/MICCAI2018_Detection/SinglePhase'
# phase_name = 'NC'
# MICCAI2018_Iterator(image_dir, dicom2jpg_singlephase, save_dir, phase_name)
# conver2JPG multi phase
# image_dir = '/home/give/Documents/dataset/LiverLesionDetection_Splited/0'
# static_pixel_num(image_dir, 'PV')
statics_num_slices_lesion('/media/give/CBMIR/ld/dataset/ISBI2017/media/nas/01_Datasets/CT/LITS/Training_Batch_2')
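    # Hedged local sanity checks on the small helpers above; the toy 5x5 mask
    # and the 3x3 kernel size are illustrative values only.
    demo_mask = np.zeros((5, 5), np.uint8)
    demo_mask[1:4, 2:5] = 1
    print(extract_ROI(np.arange(25).reshape(5, 5), demo_mask))  # 3x3 crop
    for k in get_kernel_filters(3):
        # each directional kernel grows the mask toward a different side
        print(np.sum(cv2.dilate(demo_mask, k, iterations=1)))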
| 33.484524 | 138 | 0.657731 |
ed6aff1082796c2046965ddce3d39f2087944e89
| 925 |
py
|
Python
|
setup.py
|
marcus-luck/zohoreader
|
e832f076a8a87bf27607980fb85a1d2bc8339743
|
[
"MIT"
] | 1 |
2020-11-11T02:19:50.000Z
|
2020-11-11T02:19:50.000Z
|
setup.py
|
marcus-luck/zohoreader
|
e832f076a8a87bf27607980fb85a1d2bc8339743
|
[
"MIT"
] | null | null | null |
setup.py
|
marcus-luck/zohoreader
|
e832f076a8a87bf27607980fb85a1d2bc8339743
|
[
"MIT"
] | null | null | null |
from setuptools import setup


def readme():
    # Conventional helper assumed for long_description below; the exact
    # README filename is a guess.
    with open('README.rst') as f:
        return f.read()

setup(name='zohoreader',
version='0.1',
description='A simple reader for zoho projects API to get all projects, users and timereports',
long_description=readme(),
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
],
keywords='zoho, API, zoho project',
url='https://github.com/marcus-luck/zohoreader',
author='Marcus Luck',
author_email='[email protected]',
license='MIT',
packages=['zohoreader'],
zip_safe=False,
install_requires=[
'requests>=2.12.4',
'python-dateutil>=2.7.2'
],
test_suite='nose.collector',
tests_require=['nose', 'nose-cover3'],
include_package_data=True
)
| 28.030303 | 101 | 0.596757 |
ed6b5de7ad69456fafac8a04559f11ef56300d5e
| 24,607 |
bzl
|
Python
|
web/repositories.bzl
|
Ubehebe/rules_webtesting
|
c231866a3bccc0f27b31050a57dc2b4a700ad64e
|
[
"Apache-2.0"
] | null | null | null |
web/repositories.bzl
|
Ubehebe/rules_webtesting
|
c231866a3bccc0f27b31050a57dc2b4a700ad64e
|
[
"Apache-2.0"
] | null | null | null |
web/repositories.bzl
|
Ubehebe/rules_webtesting
|
c231866a3bccc0f27b31050a57dc2b4a700ad64e
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines external repositories needed by rules_webtesting."""
load("//web/internal:platform_http_file.bzl", "platform_http_file")
load("@bazel_gazelle//:deps.bzl", "go_repository")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
load("@bazel_tools//tools/build_defs/repo:java.bzl", "java_import_external")
# NOTE: URLs are mirrored by an asynchronous review process. They must
# be greppable for that to happen. It's OK to submit broken mirror
# URLs, so long as they're correctly formatted. Bazel's downloader
# has fast failover.
def web_test_repositories(**kwargs):
"""Defines external repositories required by Webtesting Rules.
This function exists for other Bazel projects to call from their WORKSPACE
file when depending on rules_webtesting using http_archive. This function
makes it easy to import these transitive dependencies into the parent
workspace. This will check to see if a repository has been previously defined
before defining a new repository.
Alternatively, individual dependencies may be excluded with an
"omit_" + name parameter. This is useful for users who want to be rigorous
about declaring their own direct dependencies, or when another Bazel project
is depended upon (e.g. rules_closure) that defines the same dependencies as
this one (e.g. com_google_guava.) Alternatively, a whitelist model may be
used by calling the individual functions this method references.
Please note that while these dependencies are defined, they are not actually
downloaded, unless a target is built that depends on them.
Args:
**kwargs: omit_... parameters used to prevent importing specific
dependencies.
"""
if should_create_repository("bazel_skylib", kwargs):
bazel_skylib()
if should_create_repository("com_github_blang_semver", kwargs):
com_github_blang_semver()
if should_create_repository("com_github_gorilla_context", kwargs):
com_github_gorilla_context()
if should_create_repository("com_github_gorilla_mux", kwargs):
com_github_gorilla_mux()
if should_create_repository("com_github_tebeka_selenium", kwargs):
com_github_tebeka_selenium()
if should_create_repository("com_github_urllib3", kwargs):
com_github_urllib3()
if should_create_repository("com_google_code_findbugs_jsr305", kwargs):
com_google_code_findbugs_jsr305()
if should_create_repository("com_google_code_gson", kwargs):
com_google_code_gson()
if should_create_repository(
"com_google_errorprone_error_prone_annotations",
kwargs,
):
com_google_errorprone_error_prone_annotations()
if should_create_repository("com_google_guava", kwargs):
com_google_guava()
if should_create_repository("com_squareup_okhttp3_okhttp", kwargs):
com_squareup_okhttp3_okhttp()
if should_create_repository("com_squareup_okio", kwargs):
com_squareup_okio()
if should_create_repository("commons_codec", kwargs):
commons_codec()
if should_create_repository("commons_logging", kwargs):
commons_logging()
if should_create_repository("junit", kwargs):
junit()
if should_create_repository("net_bytebuddy", kwargs):
net_bytebuddy()
if should_create_repository("org_apache_commons_exec", kwargs):
org_apache_commons_exec()
if should_create_repository("org_apache_httpcomponents_httpclient", kwargs):
org_apache_httpcomponents_httpclient()
if should_create_repository("org_apache_httpcomponents_httpcore", kwargs):
org_apache_httpcomponents_httpcore()
if should_create_repository("org_hamcrest_core", kwargs):
org_hamcrest_core()
if should_create_repository("org_jetbrains_kotlin_stdlib", kwargs):
org_jetbrains_kotlin_stdlib()
if should_create_repository("org_json", kwargs):
org_json()
if should_create_repository("org_seleniumhq_py", kwargs):
org_seleniumhq_py()
if should_create_repository("org_seleniumhq_selenium_api", kwargs):
org_seleniumhq_selenium_api()
if should_create_repository("org_seleniumhq_selenium_remote_driver", kwargs):
org_seleniumhq_selenium_remote_driver()
if kwargs.keys():
print("The following parameters are unknown: " + str(kwargs.keys()))
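# Hedged usage sketch (WORKSPACE context; the repository label is illustrative):
#   load("@io_bazel_rules_webtesting//web:repositories.bzl", "web_test_repositories")
#   web_test_repositories(omit_com_google_guava = True)  # caller supplies guava itself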
def should_create_repository(name, args):
"""Returns whether the name repository should be created.
This allows creation of a repository to be disabled by either an
"omit_" _+ name parameter or by previously defining a rule for the repository.
The args dict will be mutated to remove "omit_" + name.
Args:
name: The name of the repository that should be checked.
args: A dictionary that contains "omit_...": bool pairs.
Returns:
boolean indicating whether the repository should be created.
"""
key = "omit_" + name
if key in args:
val = args.pop(key)
if val:
return False
if native.existing_rule(name):
return False
return True
def browser_repositories(firefox = False, chromium = False, sauce = False):
"""Sets up repositories for browsers defined in //browsers/....
This should only be used on an experimental basis; projects should define
their own browsers.
Args:
firefox: Configure repositories for //browsers:firefox-native.
chromium: Configure repositories for //browsers:chromium-native.
sauce: Configure repositories for //browser/sauce:chrome-win10.
"""
if chromium:
org_chromium_chromedriver()
org_chromium_chromium()
if firefox:
org_mozilla_firefox()
org_mozilla_geckodriver()
if sauce:
com_saucelabs_sauce_connect()
| 43.019231 | 152 | 0.675011 |
ed6b5f33a003c3ef902a30bdc2ac23b77d488f11
| 8,045 |
py
|
Python
|
code/tools/run_viz_single_task.py
|
santomon/taskonomy
|
4b22087a2686172b21b61589831061e7a386fe36
|
[
"MIT"
] | 789 |
2018-03-21T05:28:38.000Z
|
2022-03-29T19:32:47.000Z
|
code/tools/run_viz_single_task.py
|
santomon/taskonomy
|
4b22087a2686172b21b61589831061e7a386fe36
|
[
"MIT"
] | 46 |
2018-05-03T07:11:10.000Z
|
2022-03-11T23:26:03.000Z
|
code/tools/run_viz_single_task.py
|
santomon/taskonomy
|
4b22087a2686172b21b61589831061e7a386fe36
|
[
"MIT"
] | 152 |
2018-03-24T10:20:44.000Z
|
2022-02-09T02:38:10.000Z
|
from __future__ import absolute_import, division, print_function
import argparse
import importlib
import itertools
import time
from multiprocessing import Pool
import numpy as np
import os
import pdb
import pickle
import subprocess
import sys
import tensorflow as tf
import tensorflow.contrib.slim as slim
import threading
import init_paths
from models.sample_models import *
target_tasks = "autoencoder colorization curvature denoise edge2d edge3d ego_motion fix_pose impainting_whole jigsaw keypoint2d keypoint3d non_fixated_pose point_match reshade rgb2depth rgb2mist rgb2sfnorm room_layout segment25d segment2d vanishing_point_well_defined segmentsemantic_rb class_selected class_1000"
list_of_tasks = target_tasks.split(" ")
ON_TEST_SET = True
IN_TRAIN_MODE = False
parser = argparse.ArgumentParser(description='Viz Single Task')
parser.add_argument('--idx', dest='idx',
help='Task to run', type=int)
parser.add_argument('--hs', dest='hs',
help='Hidden size to use', type=int)
parser.add_argument('--n-parallel', dest='n_parallel',
help='Number of models to run in parallel', type=int)
parser.set_defaults(n_parallel=1)
tf.logging.set_verbosity(tf.logging.ERROR)
ipython_std_out = sys.stdout
# Disable
# Restore
# Force Print
pairs = list(itertools.product(list_of_tasks, list_of_tasks))
args = parser.parse_args()
idx_to_run = args.idx
if idx_to_run == -1:
pairs_to_run = pairs
else:
pairs_to_run = pairs[idx_to_run:idx_to_run+1]
if __name__ == '__main__':
run_to_task(None)
# with Pool(args.n_parallel) as p:
# p.map(run_to_task, list_of_tasks)
| 35.131004 | 313 | 0.605842 |
ed6bae7a17f418cda8c2e6d4ee817869bb64ec62
| 35,884 |
bzl
|
Python
|
stratum/portage/build_defs.bzl
|
cholve/stratum
|
09ddb5acb604f7e694a6b7d2fe93fea79f801794
|
[
"Apache-2.0"
] | 267 |
2019-09-11T15:01:37.000Z
|
2022-03-28T11:14:29.000Z
|
stratum/portage/build_defs.bzl
|
cholve/stratum
|
09ddb5acb604f7e694a6b7d2fe93fea79f801794
|
[
"Apache-2.0"
] | 906 |
2019-09-18T03:37:08.000Z
|
2022-03-30T00:59:53.000Z
|
stratum/portage/build_defs.bzl
|
cholve/stratum
|
09ddb5acb604f7e694a6b7d2fe93fea79f801794
|
[
"Apache-2.0"
] | 107 |
2019-09-16T07:30:53.000Z
|
2022-03-18T09:53:03.000Z
|
# Copyright 2018 Google LLC
# Copyright 2018-present Open Networking Foundation
# SPDX-License-Identifier: Apache-2.0
"""A portable build system for Stratum P4 switch stack.
To use this, load() this file in a BUILD file, specifying the symbols needed.
The public symbols are the macros:
decorate(path)
sc_cc_lib Declare a portable Library.
sc_proto_lib Declare a portable .proto Library.
sc_cc_bin Declare a portable Binary.
sc_package Declare a portable tarball package.
and the variables/lists:
ALL_ARCHES All known arches.
EMBEDDED_ARCHES All embedded arches.
EMBEDDED_PPC Name of PowerPC arch - "ppc".
EMBEDDED_X86 Name of "x86" arch.
HOST_ARCH Name of default "host" arch.
HOST_ARCHES All host arches.
STRATUM_INTERNAL For declaring Stratum internal visibility.
The macros are like cc_library(), proto_library(), and cc_binary(), but with
different options and some restrictions. The key difference: you can
supply lists of architectures for which they should be compiled - defaults
to all if left unstated. Internally, libraries and binaries are generated
for every listed architecture. The names are decorated to keep them different
and allow all to be generated and addressed independently.
This aspect of the system is suboptimal - something along the lines of
augmenting context with a user defined configuration fragment would be a
much cleaner solution.
Currently supported architectures:
ppc
x86
"""
load("//tools/build_defs/label:def.bzl", "parse_label")
load(
"//devtools/build_cleaner/skylark:build_defs.bzl",
"register_extension_info",
)
load("@rules_proto//proto:defs.bzl", "proto_library")
load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_library", "cc_test")
# Generic path & label helpers. ============================================
def _normpath(path):
"""Normalize a path.
Normalizes a path by removing unnecessary path-up segments and its
corresponding directories. Providing own implementation because import os
is not allowed in build defs.
For example
../../dir/to/deeply/nested/path/../../../other/path
will become
../../dir/to/other/path
Args:
path: A valid absolute or relative path to normalize.
Returns:
A path equivalent to the input path with minimal use of path-up segments.
Invalid input paths will stay invalid.
"""
sep = "/"
level = 0
result = []
for d in path.split(sep):
if d in ("", "."):
if result:
continue
elif d == "..":
if level > 0:
result.pop()
level += -1
continue
else:
level += 1
result.append(d)
return sep.join(result)
# Adds a suffix to a label, expanding implicit targets if needed.
# Creates a relative filename from a label, replacing "//" and ":".
# Adds dquotes around a string.
# Adds squotes around a string.
# Emulate Python 2.5+ str.startswith((prefix, ...))
def sc_platform_select(host = None, ppc = None, x86 = None, default = None):
"""Public macro to alter blaze rules based on the platform architecture.
Generates a blaze select(...) statement that can be used in most contexts to
alter a blaze rule based on the target platform architecture. If no selection
is provided for a given platform, {default} is used instead. A specific value
or default must be provided for every target platform.
Args:
host: The value to use for host builds.
ppc: The value to use for ppc builds.
x86: The value to use for x86 builds.
default: The value to use for any of {host,ppc,x86} that isn't specified.
Returns:
The requested selector.
"""
if default == None and (host == None or ppc == None or x86 == None):
fail("Missing a select value for at least one platform in " +
"sc_platform_select. Please add.")
config_label_prefix = "//stratum:stratum_"
return select({
"//conditions:default": (host or default),
config_label_prefix + "ppc": (ppc or default),
config_label_prefix + "x86": (x86 or default),
})
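
# Hedged usage sketch for sc_platform_select: arch-specific defines with a
# shared fallback; the flag values are illustrative placeholders only.
_EXAMPLE_PLATFORM_COPTS = sc_platform_select(
    ppc = ["-DEXAMPLE_PPC"],
    x86 = ["-DEXAMPLE_X86"],
    default = [],
)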
# Generates an sc_platform_select based on a textual list of arches.
def sc_platform_alias(
name,
host = None,
ppc = None,
x86 = None,
default = None,
visibility = None):
"""Public macro to create an alias that changes based on target arch.
Generates a blaze alias that will select the appropriate target. If no
selection is provided for a given platform and no default is set, a
dummy default target is used instead.
Args:
name: The name of the alias target.
host: The result of the alias for host builds.
ppc: The result of the alias for ppc builds.
x86: The result of the alias for x86 builds.
default: The result of the alias for any of {host,ppc,x86} that isn't
specified.
visibility: The visibility of the alias target.
"""
native.alias(
name = name,
actual = sc_platform_select(
default = default or "//stratum/portage:dummy",
host = host,
ppc = ppc,
x86 = x86,
),
visibility = visibility,
)
# Embedded build definitions. ==============================================
EMBEDDED_PPC = "ppc"
EMBEDDED_X86 = "x86"
EMBEDDED_ARCHES = [
EMBEDDED_PPC,
EMBEDDED_X86,
]
HOST_ARCH = "host"
HOST_ARCHES = [HOST_ARCH]
ALL_ARCHES = EMBEDDED_ARCHES + HOST_ARCHES
# Identify Stratum platform arch for .pb.h shims and other portability hacks.
_ARCH_DEFINES = sc_platform_select(
default = ["STRATUM_ARCH_HOST"],
ppc = ["STRATUM_ARCH_PPC"],
x86 = ["STRATUM_ARCH_X86"],
)
STRATUM_INTERNAL = [
"//stratum:__subpackages__",
]
#
# Build options for all embedded architectures
#
# Set _TRACE_SRCS to show sources in embedded sc_cc_lib compile steps.
# This is more general than it may seem: genrule doesn't have hdrs or deps
# attributes, so all embedded dependencies appear as a `src'.
# TODO(unknown): if useful again then inject from cmdline else kill feature.
_TRACE_SRCS = False
# Used for all gcc invocations.
_EMBEDDED_FLAGS = [
"-O0", # Don't use this for program-sizing build
#-- "-Os", # Use this for program-sizing build
"-g", # Don't use this for program-sizing build
"-Wall",
"-Werror", # Warn lots, and force fixing warnings.
"-no-canonical-prefixes", # Don't mangle paths and confuse blaze.
"-fno-builtin-malloc", # We'll use tcmalloc
"-fno-builtin-calloc",
"-fno-builtin-realloc",
"-fno-builtin-free",
"-D__STDC_FORMAT_MACROS=1",
# TODO(unknown): Figure out how we can use $(CC_FLAGS) instead of this.
"-D__GOOGLE_STL_LEGACY_COMPATIBILITY",
]
# Used for C and C++ compiler invocations.
_EMBEDDED_CFLAGS = [
"-I$(GENDIR)",
]
# Used for C++ compiler invocations.
_EMBEDDED_CXXFLAGS = [
"-std=gnu++11", # Allow C++11 features _and_ GNU extensions.
]
# Used for linking binaries.
_EMBEDDED_LDFLAGS = [
# "-static", # Use this for program-sizing build
# "-Wl,--gc-sections,--no-wchar-size-warning", # Use this for program-sizing build
]
# PPC ======================================================================
_PPC_GRTE = "//unsupported_toolchains/crosstoolng_powerpc32_8540/sysroot"
# X86 ======================================================================
_X86_GRTE = "//grte/v4_x86/release/usr/grte/v4"
# Portability definitions ===================================================
def sc_cc_test(
name,
size = None,
srcs = None,
deps = None,
data = None,
defines = None,
copts = None,
linkopts = None,
visibility = None):
"""Creates a cc_test rule that interacts safely with Stratum builds.
Generates a cc_test rule that doesn't break the build when an embedded arch
is selected. During embedded builds this target will generate a dummy binary
and will not attempt to build any dependencies.
Args:
name: Analogous to cc_test name argument.
size: Analogous to cc_test size argument.
srcs: Analogous to cc_test srcs argument.
deps: Analogous to cc_test deps argument.
data: Analogous to cc_test data argument.
defines: Analogous to cc_test defines argument.
copts: Analogous to cc_test copts argument.
linkopts: Analogous to cc_test linkopts argument.
visibility: Analogous to cc_test visibility argument.
"""
cc_test(
name = name,
size = size or "small",
srcs = sc_platform_select(host = srcs or [], default = []),
deps = sc_platform_select(
host = deps or [],
default = ["//stratum/portage:dummy_with_main"],
),
data = data or [],
defines = defines,
copts = copts,
linkopts = linkopts,
visibility = visibility,
)
register_extension_info(
extension_name = "sc_cc_test",
label_regex_for_dep = "{extension_name}",
)
def sc_cc_lib(
name,
deps = None,
srcs = None,
hdrs = None,
arches = None,
copts = None,
defines = None,
includes = None,
include_prefix = None,
strip_include_prefix = None,
data = None,
testonly = None,
textual_hdrs = None,
visibility = None,
xdeps = None):
"""Creates rules for the given portable library and arches.
Args:
name: Analogous to cc_library name argument.
deps: Analogous to cc_library deps argument.
srcs: Analogous to cc_library srcs argument.
hdrs: Analogous to cc_library hdrs argument.
arches: List of architectures to generate this way.
copts: Analogous to cc_library copts argument.
defines: Symbols added as "-D" compilation options.
includes: Paths to add as "-I" compilation options.
include_prefix: Analogous to cc_library include_prefix argument.
strip_include_prefix: Analogous to cc_library strip_include_prefix argument.
data: Files to provide as data at runtime (host builds only).
testonly: Standard blaze testonly parameter.
textual_hdrs: Analogous to cc_library.
visibility: Standard blaze visibility parameter.
xdeps: External (file) dependencies of this library - no decorations
assumed, used and exported as header, not for flags, libs, etc.
"""
alwayslink = 0
deps = depset(deps or [])
srcs = depset(srcs or [])
hdrs = depset(hdrs or [])
xdeps = depset(xdeps or [])
copts = depset(copts or [])
includes = depset(includes or [])
data = depset(data or [])
textual_hdrs = depset(textual_hdrs or [])
if srcs:
if [s for s in srcs.to_list() if not s.endswith(".h")]:
alwayslink = 1
if not arches:
arches = ALL_ARCHES
defs_plus = (defines or []) + _ARCH_DEFINES
textual_plus = textual_hdrs | depset(deps.to_list())
cc_library(
name = name,
deps = sc_platform_filter(deps, [], arches),
srcs = sc_platform_filter(srcs, [], arches),
hdrs = sc_platform_filter(hdrs, [], arches),
alwayslink = alwayslink,
copts = sc_platform_filter(copts, [], arches),
defines = defs_plus,
includes = sc_platform_filter(includes, [], arches),
include_prefix = include_prefix,
strip_include_prefix = strip_include_prefix,
testonly = testonly,
textual_hdrs = sc_platform_filter(
textual_plus | xdeps,
[],
arches,
),
data = sc_platform_filter(data, [], arches),
visibility = visibility,
)
register_extension_info(
extension_name = "sc_cc_lib",
label_regex_for_dep = "{extension_name}",
)
def sc_cc_bin(
name,
deps = None,
srcs = None,
arches = None,
copts = None,
defines = None,
includes = None,
testonly = None,
visibility = None):
"""Creates rules for the given portable binary and arches.
Args:
name: Analogous to cc_binary name argument.
deps: Analogous to cc_binary deps argument.
srcs: Analogous to cc_binary srcs argument.
arches: List of architectures to generate this way.
copts: Analogous to cc_binary copts argument.
defines: Symbols added as "-D" compilation options.
includes: Paths to add as "-I" compilation options.
testonly: Standard blaze testonly parameter.
visibility: Standard blaze visibility parameter.
"""
deps = depset(deps or [])
srcs = depset(srcs or [])
if not arches:
arches = ALL_ARCHES
defs_plus = (defines or []) + _ARCH_DEFINES
cc_binary(
name = name,
deps = sc_platform_filter(
deps,
["//stratum/portage:dummy_with_main"],
arches,
),
srcs = sc_platform_filter(srcs, [], arches),
copts = copts,
defines = defs_plus,
includes = includes,
linkopts = ["-ldl", "-lutil"],
testonly = testonly,
visibility = visibility,
)
register_extension_info(
extension_name = "sc_cc_bin",
label_regex_for_dep = "{extension_name}",
)
# Protobuf =================================================================
_SC_GRPC_DEPS = [
"//sandblaze/prebuilt/grpc",
"//sandblaze/prebuilt/grpc:grpc++_codegen_base",
"//sandblaze/prebuilt/grpc:grpc++_codegen_proto_lib",
]
_PROTOC = "@com_google_protobuf//:protobuf:protoc"
_PROTOBUF = "@com_google_protobuf//:protobuf"
_SC_GRPC_PLUGIN = "//sandblaze/prebuilt/protobuf:grpc_cpp_plugin"
_GRPC_PLUGIN = "//grpc:grpc_cpp_plugin"
def _loc(target):
"""Return target location for constructing commands.
Args:
target: Blaze target name available to this build.
Returns:
$(location target)
"""
return "$(location %s)" % target
def _gen_proto_lib(
name,
srcs,
hdrs,
deps,
arch,
visibility,
testonly,
proto_include,
grpc_shim_rule):
"""Creates rules and filegroups for embedded protobuf library.
For every given ${src}.proto, generate:
:${src}_${arch}.pb rule to run protoc
${src}.proto => ${src}.${arch}.pb.{h,cc}
:${src}_${arch}.grpc.pb rule to run protoc w/ erpc plugin:
${src}.proto => ${src}.${arch}.grpc.pb.{h,cc}
:${src}_${arch}_proto_rollup collects include options for protoc:
${src}_${arch}_proto_rollup.flags
Feed each set into sc_cc_lib to wrap them them up into a usable library;
note that ${src}_${arch}_erpc_proto depends on ${src}_${arch}_proto.
Args:
name: Base name for this library.
srcs: List of proto files
hdrs: More files to build into this library, but also exported for
dependent rules to utilize.
deps: List of deps for this library
arch: Which architecture to build this library for.
visibility: Standard blaze visibility parameter, passed through to
subsequent rules.
testonly: Standard blaze testonly parameter.
proto_include: Include path for generated sc_cc_libs.
grpc_shim_rule: If needed, the name of the grpc shim for this proto lib.
"""
bash_vars = ["g3=$${PWD}"]
# TODO(unknown): Switch protobuf to using the proto_include mechanism
protoc_label = _PROTOC
protobuf_label = _PROTOBUF
protobuf_hdrs = "%s:well_known_types_srcs" % protobuf_label
protobuf_srcs = [protobuf_hdrs]
protobuf_include = "$${g3}/protobuf/src"
if arch in EMBEDDED_ARCHES:
grpc_plugin = _SC_GRPC_PLUGIN
else:
grpc_plugin = _GRPC_PLUGIN
protoc_deps = []
for dep in deps:
if dep.endswith("_proto"):
protoc_deps.append("%s_%s_headers" % (dep, arch))
name_arch = decorate(name, arch)
# We use this filegroup to accumulate the set of .proto files needed to
# compile this proto.
native.filegroup(
name = decorate(name_arch, "headers"),
srcs = hdrs + protoc_deps,
visibility = visibility,
)
my_proto_rollup = decorate(name_arch, "proto_rollup.flags")
protoc_srcs_set = (srcs + hdrs + protoc_deps +
protobuf_srcs + [my_proto_rollup])
gen_srcs = []
gen_hdrs = []
grpc_gen_hdrs = []
grpc_gen_srcs = []
tools = [protoc_label]
grpc_tools = [protoc_label, grpc_plugin]
protoc = "$${g3}/%s" % _loc(protoc_label)
grpc_plugin = "$${g3}/%s" % _loc(grpc_plugin)
cpp_out = "$${g3}/$(GENDIR)/%s/%s" % (native.package_name(), arch)
accum_flags = []
full_proto_include = None
if proto_include == ".":
full_proto_include = native.package_name()
elif proto_include:
full_proto_include = "%s/%s" % (native.package_name(), proto_include)
if full_proto_include:
temp_prefix = "%s/%s" % (cpp_out, native.package_name()[len(full_proto_include):])
# We do a bit of extra work with these include flags to avoid generating
# warnings.
accum_flags.append(
"$$(if [[ -e $(GENDIR)/%s ]]; then echo -IG3LOC/$(GENDIR)/%s; fi)" %
(full_proto_include, full_proto_include),
)
accum_flags.append(
"$$(if [[ -e %s ]]; then echo -IG3LOC/%s; fi)" %
(full_proto_include, full_proto_include),
)
else:
temp_prefix = "%s/%s" % (cpp_out, native.package_name())
proto_rollups = [
decorate(decorate(dep, arch), "proto_rollup.flags")
for dep in deps
if dep.endswith("_proto")
]
    proto_rollup_cmds = ["printf '%%s\\n' %s" % flag for flag in accum_flags]
proto_rollup_cmds.append("cat $(SRCS)")
proto_rollup_cmd = "{ %s; } | sort -u -o $(@)" % "; ".join(proto_rollup_cmds)
native.genrule(
name = decorate(name_arch, "proto_rollup"),
srcs = proto_rollups,
outs = [my_proto_rollup],
cmd = proto_rollup_cmd,
visibility = visibility,
testonly = testonly,
)
for src in srcs + hdrs:
if src.endswith(".proto"):
src_stem = src[0:-6]
src_arch = "%s_%s" % (src_stem, arch)
temp_stem = "%s/%s" % (temp_prefix, src_stem)
gen_stem = "%s.%s" % (src_stem, arch)
# We can't use $${PWD} until this step, because our rollup command
# might be generated on another forge server.
proto_path_cmds = ["rollup=$$(sed \"s,G3LOC,$${PWD},g\" %s)" %
_loc(my_proto_rollup)]
proto_rollup_flags = ["$${rollup}"]
if proto_include:
# We'll be cd-ing to another directory before protoc, so
# adjust our .proto path accordingly.
proto_src_loc = "%s/%s" % (native.package_name(), src)
if proto_src_loc.startswith(full_proto_include + "/"):
proto_src_loc = proto_src_loc[len(full_proto_include) + 1:]
else:
print("Invalid proto include '%s' doesn't match src %s" %
(full_proto_include, proto_src_loc))
# By cd-ing to another directory, we force protoc to produce
# different symbols. Careful, our proto might be in GENDIR!
proto_path_cmds.append("; ".join([
"if [[ -e %s ]]" % ("%s/%s" % (full_proto_include, proto_src_loc)),
"then cd %s" % full_proto_include,
"else cd $(GENDIR)/%s" % full_proto_include,
"fi",
]))
gendir_include = ["-I$${g3}/$(GENDIR)", "-I$${g3}", "-I."]
else:
proto_src_loc = "%s/%s" % (native.package_name(), src)
proto_path_cmds.append("[[ -e %s ]] || cd $(GENDIR)" % proto_src_loc)
gendir_include = ["-I$(GENDIR)", "-I."]
# Generate messages
gen_pb_h = gen_stem + ".pb.h"
gen_pb_cc = gen_stem + ".pb.cc"
gen_hdrs.append(gen_pb_h)
gen_srcs.append(gen_pb_cc)
cmds = bash_vars + [
"mkdir -p %s" % temp_prefix,
] + proto_path_cmds + [
" ".join([protoc] +
gendir_include +
proto_rollup_flags +
[
"-I%s" % protobuf_include,
"--cpp_out=%s" % cpp_out,
proto_src_loc,
]),
"cd $${g3}",
"cp %s.pb.h %s" % (temp_stem, _loc(gen_pb_h)),
"cp %s.pb.cc %s" % (temp_stem, _loc(gen_pb_cc)),
]
pb_outs = [gen_pb_h, gen_pb_cc]
native.genrule(
name = src_arch + ".pb",
srcs = protoc_srcs_set,
outs = pb_outs,
tools = tools,
cmd = " && ".join(cmds),
heuristic_label_expansion = 0,
visibility = visibility,
)
# Generate GRPC
if grpc_shim_rule:
gen_grpc_pb_h = gen_stem + ".grpc.pb.h"
gen_grpc_pb_cc = gen_stem + ".grpc.pb.cc"
grpc_gen_hdrs.append(gen_grpc_pb_h)
grpc_gen_srcs.append(gen_grpc_pb_cc)
cmds = bash_vars + [
"mkdir -p %s" % temp_prefix,
] + proto_path_cmds + [
" ".join([
protoc,
"--plugin=protoc-gen-grpc-cpp=%s" % grpc_plugin,
] +
gendir_include +
proto_rollup_flags +
[
"-I%s" % protobuf_include,
"--grpc-cpp_out=%s" % cpp_out,
proto_src_loc,
]),
"cd $${g3}",
"cp %s.grpc.pb.h %s" % (temp_stem, _loc(gen_grpc_pb_h)),
"cp %s.grpc.pb.cc %s" % (temp_stem, _loc(gen_grpc_pb_cc)),
]
grpc_pb_outs = [gen_grpc_pb_h, gen_grpc_pb_cc]
native.genrule(
name = src_arch + ".grpc.pb",
srcs = protoc_srcs_set,
outs = grpc_pb_outs,
tools = grpc_tools,
cmd = " && ".join(cmds),
heuristic_label_expansion = 0,
visibility = visibility,
)
dep_set = depset(deps) | [protobuf_label]
includes = []
if proto_include:
includes = [proto_include]
# Note: Public sc_proto_lib invokes this once per (listed) arch;
# which then calls sc_cc_lib with same name for each arch;
# multiple such calls are OK as long as the arches are disjoint.
sc_cc_lib(
name = decorate(name, arch),
deps = dep_set,
srcs = gen_srcs,
hdrs = hdrs + gen_hdrs,
arches = [arch],
copts = [],
includes = includes,
testonly = testonly,
textual_hdrs = gen_hdrs,
visibility = visibility,
)
if grpc_shim_rule:
grpc_name = name[:-6] + "_grpc_proto"
grpc_dep_set = dep_set | [name] | _SC_GRPC_DEPS
grpc_gen_hdrs_plus = grpc_gen_hdrs + gen_hdrs
sc_cc_lib(
name = decorate(grpc_name, arch),
deps = grpc_dep_set,
srcs = grpc_gen_srcs,
hdrs = hdrs + grpc_gen_hdrs_plus + [grpc_shim_rule],
arches = [arch],
copts = [],
includes = includes,
testonly = testonly,
textual_hdrs = grpc_gen_hdrs_plus,
visibility = visibility,
)
def _gen_proto_shims(name, pb_modifier, srcs, arches, visibility):
"""Macro to build .pb.h multi-arch master switch for sc_proto_lib.
For each src path.proto, generates path.pb.h consisting of:
#ifdef logic to select path.${arch}.pb.h
Also generates an alias that will select the appropriate proto target
based on the currently selected platform architecture.
Args:
name: Base name for this library.
pb_modifier: protoc plugin-dependent file extension (e.g.: .pb)
srcs: List of proto files.
arches: List of arches this shim should support.
visibility: The blaze visibility of the generated alias.
Returns:
Name of shim rule for use in follow-on hdrs and/or src lists.
"""
outs = []
cmds = []
hdr_ext = pb_modifier + ".h"
for src in srcs:
pkg, filename = parse_label(src)
if not filename.endswith(".proto"):
continue
hdr_stem = filename[0:-6]
new_hdr_name = hdr_stem + hdr_ext
outs.append(new_hdr_name)
# Generate lines for shim switch file.
# Lines expand inside squotes, so quote accordingly.
include_fmt = "#include " + dquote(pkg + "/" + hdr_stem + ".%s" + hdr_ext)
lines = [
"#if defined(STRATUM_ARCH_%s)" % "PPC",
include_fmt % "ppc",
"#elif defined(STRATUM_ARCH_%s)" % "X86",
include_fmt % "x86",
"#elif defined(STRATUM_ARCH_%s)" % "HOST",
include_fmt % "host",
"#else",
"#error Unknown STRATUM_ARCH",
"#endif",
]
gen_cmds = [("printf '%%s\\n' '%s'" % line) for line in lines]
new_hdr_loc = "$(location %s)" % new_hdr_name
cmds.append("{ %s; } > %s" % (" && ".join(gen_cmds), new_hdr_loc))
shim_rule = decorate(name, "shims")
native.genrule(
name = shim_rule,
srcs = srcs,
outs = outs,
cmd = " && ".join(cmds) or "true",
)
sc_platform_alias(
name = name,
host = decorate(name, "host") if "host" in arches else None,
ppc = decorate(name, "ppc") if "ppc" in arches else None,
x86 = decorate(name, "x86") if "x86" in arches else None,
visibility = visibility,
)
return shim_rule
def _gen_py_proto_lib(name, srcs, deps, visibility, testonly):
"""Creates a py_proto_library from the given srcs.
There's no clean way to make python protos work with sc_proto_lib's
proto_include field, so we keep this simple.
For library "name", generates:
* ${name}_default_pb, a regular proto library.
* ${name}_py, a py_proto_library based on ${name}_default_pb.
Args:
name: Standard blaze name argument.
srcs: Standard blaze srcs argument.
deps: Standard blaze deps argument.
visibility: Standard blaze visibility argument.
testonly: Standard blaze testonly argument.
"""
regular_proto_name = decorate(name, "default_pb")
py_name = decorate(name, "py")
proto_library(
name = regular_proto_name,
srcs = srcs,
deps = [decorate(dep, "default_pb") for dep in deps],
visibility = visibility,
testonly = testonly,
)
native.py_proto_library(
name = py_name,
api_version = 2,
deps = [regular_proto_name],
visibility = visibility,
testonly = testonly,
)
# TODO(unknown): Add support for depending on normal proto_library rules.
def sc_proto_lib(
name = None,
srcs = [],
hdrs = [],
deps = [],
arches = [],
visibility = None,
testonly = None,
proto_include = None,
python_support = False,
services = []):
"""Public macro to build multi-arch library from Message protobuf(s).
For library "name", generates:
* ${name}_shim aka .pb.h master switch - see _gen_proto_shims, above.
* ${name}_${arch}_pb protobuf compile rules - one for each arch.
* sc_cc_lib(name) with those as input.
* ${name}_py a py_proto_library version of this library. Only generated
if python_support == True.
Args:
name: Base name for this library.
srcs: List of .proto files - private to this library.
hdrs: As above, but also exported for dependent rules to utilize.
deps: List of deps for this library
arches: Which architectures to build this library for, None => ALL.
visibility: Standard blaze visibility parameter, passed through to
subsequent rules.
testonly: Standard blaze testonly parameter.
proto_include: Path to add to include path. This will affect the
symbols generated by protoc, as well as the include
paths used for both sc_cc_lib and sc_proto_lib rules
that depend on this rule. Typically "."
python_support: Defaults to False. If True, generate a python proto library
from this rule. Any sc_proto_lib with python support may
only depend on sc_proto_libs that also have python support,
and may not use the proto_include field in this rule.
services: List of services to enable {"grpc", "rpc"};
Only "grpc" is supported. So "rpc" and "grpc" are equivalent.
"""
if not arches:
if testonly:
arches = HOST_ARCHES
else:
arches = ALL_ARCHES
service_enable = {
"grpc": 0,
}
for service in services or []:
if service == "grpc":
service_enable["grpc"] = 1
elif service == "rpc":
service_enable["grpc"] = 1
else:
fail("service='%s' not in (grpc, rpc)" % service)
deps = depset(deps or [])
shim_rule = _gen_proto_shims(
name = name,
pb_modifier = ".pb",
srcs = srcs + hdrs,
arches = arches,
visibility = visibility,
)
grpc_shim_rule = None
if (service_enable["grpc"]):
grpc_shim_rule = _gen_proto_shims(
name = decorate(name[:-6], "grpc_proto"),
pb_modifier = ".grpc.pb",
srcs = srcs + hdrs,
arches = arches,
visibility = visibility,
)
for arch in arches:
_gen_proto_lib(
name = name,
srcs = srcs,
hdrs = [shim_rule] + hdrs,
deps = deps,
arch = arch,
visibility = visibility,
testonly = testonly,
proto_include = proto_include,
grpc_shim_rule = grpc_shim_rule,
)
if python_support:
if proto_include:
fail("Cannot use proto_include on an sc_proto_lib with python support.")
_gen_py_proto_lib(
name = name,
srcs = depset(srcs + hdrs),
deps = deps,
visibility = visibility,
testonly = testonly,
)
register_extension_info(
extension_name = "sc_proto_lib",
label_regex_for_dep = "{extension_name}",
)
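# Example usage of sc_proto_lib (hypothetical target names; a minimal sketch
# of the macro API, not a target from this repository):
#
# sc_proto_lib(
#     name = "table_proto",
#     hdrs = ["table.proto"],
#     deps = ["//some/pkg:dep_proto"],
#     services = ["grpc"],
#     python_support = True,
# )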
def sc_package(
name = None,
bins = None,
data = None,
deps = None,
arches = None,
visibility = None):
"""Public macro to package binaries and data for deployment.
For package "name", generates:
* ${name}_${arch}_bin and ${name}_${arch}_data filesets containing
respectively all of the binaries and all of the data needed for this
package and all dependency packages.
* ${name}_${arch} fileset containing the corresponding bin and data
filesets, mapped to bin/ and share/ respectively.
* ${name}_${arch}_tarball rule builds that .tar.gz package.
Args:
name: Base name for this package.
bins: List of sc_cc_bin rules to be packaged.
data: List of files (and file producing rules) to be packaged.
deps: List of other sc_packages to add to this package.
arches: Which architectures to build this library for,
None => EMBEDDED_ARCHES (HOST_ARCHES not generally supported).
visibility: Standard blaze visibility parameter, passed through to
all filesets.
"""
bins = depset(bins or [])
data = depset(data or [])
deps = depset(deps or [])
if not arches:
arches = EMBEDDED_ARCHES
fileset_name = decorate(name, "fs")
for extension, inputs in [
("bin", ["%s.stripped" % b for b in bins.to_list()]),
("data", data),
]:
native.Fileset(
name = decorate(fileset_name, extension),
out = decorate(name, extension),
entries = [
native.FilesetEntry(
files = inputs,
),
] + [
native.FilesetEntry(srcdir = decorate(dep, extension))
for dep in deps.to_list()
],
visibility = visibility,
)
# Add any platform specific files to the final tarball.
platform_entries = sc_platform_select(
# We use a different ppc toolchain for Stratum.
# This means that we must provide portable shared libs for our ppc
# executables.
ppc = [native.FilesetEntry(
srcdir = "%s:BUILD" % _PPC_GRTE,
files = [":libs"],
destdir = "lib/stratum",
symlinks = "dereference",
)],
default = [],
)
native.Fileset(
name = fileset_name,
out = name,
entries = [
native.FilesetEntry(
srcdir = decorate(name, "bin"),
destdir = "bin",
),
native.FilesetEntry(
srcdir = decorate(name, "data"),
destdir = "share",
),
] + platform_entries,
visibility = visibility,
)
outs = ["%s.tar.gz" % name]
# Copy our files into a temporary directory and make any necessary changes
# before tarballing.
cmds = [
"TEMP_DIR=$(@D)/stratum_packaging_temp",
"mkdir $${TEMP_DIR}",
"cp -r %s $${TEMP_DIR}/tarball" % _loc(fileset_name),
"if [[ -e $${TEMP_DIR}/tarball/bin ]]",
"then for f in $${TEMP_DIR}/tarball/bin/*.stripped",
" do mv $${f} $${f%.stripped}", # rename not available.
"done",
"fi",
"tar czf %s -h -C $${TEMP_DIR}/tarball ." % _loc(name + ".tar.gz"),
"rm -rf $${TEMP_DIR}",
]
native.genrule(
name = decorate(name, "tarball"),
srcs = [":%s" % fileset_name],
outs = outs,
cmd = "; ".join(cmds),
visibility = visibility,
)
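# Example usage of sc_package (hypothetical target names; a minimal sketch of
# the macro API, not a target from this repository):
#
# sc_package(
#     name = "stratum_pkg",
#     bins = [":stratum_bin"],
#     data = ["stratum.flags"],
#     deps = ["//some/other:pkg"],
# )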
| 35.042969 | 90 | 0.582767 |
ed6c12390ca654e898450e0424a1c59a124edd59
| 96,578 |
py
|
Python
|
src/genie/libs/parser/ios/tests/test_show_platform.py
|
miuvlad/genieparser
|
60b1151e3c67c6b55d75e30359d0bf52825efad8
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/ios/tests/test_show_platform.py
|
miuvlad/genieparser
|
60b1151e3c67c6b55d75e30359d0bf52825efad8
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/ios/tests/test_show_platform.py
|
miuvlad/genieparser
|
60b1151e3c67c6b55d75e30359d0bf52825efad8
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
import unittest
from unittest.mock import Mock
from pyats.topology import Device
from genie.metaparser.util.exceptions import SchemaEmptyParserError,\
SchemaMissingKeyError
from genie.libs.parser.ios.show_platform import ShowVersion,\
Dir,\
ShowRedundancy,\
ShowInventory,\
ShowBootvar, \
ShowProcessesCpuSorted,\
ShowProcessesCpu,\
ShowVersionRp,\
ShowPlatform,\
ShowPlatformPower,\
ShowProcessesCpuHistory,\
ShowProcessesCpuPlatform,\
ShowPlatformSoftwareStatusControl,\
ShowPlatformSoftwareSlotActiveMonitorMem,\
ShowPlatformHardware,\
ShowPlatformHardwarePlim,\
ShowPlatformHardwareQfpBqsOpmMapping,\
ShowPlatformHardwareQfpBqsIpmMapping,\
ShowPlatformHardwareSerdes,\
ShowPlatformHardwareSerdesInternal,\
ShowPlatformHardwareQfpBqsStatisticsChannelAll,\
ShowPlatformHardwareQfpInterfaceIfnameStatistics,\
ShowPlatformHardwareQfpStatisticsDrop,\
ShowEnvironment,\
ShowModule,\
ShowSwitch, ShowSwitchDetail
from genie.libs.parser.iosxe.tests.test_show_platform import TestShowPlatform as test_show_platform_iosxe,\
TestShowPlatformPower as test_show_platform_power_iosxe,\
TestShowVersionRp as test_show_version_rp_iosxe,\
TestShowProcessesCpu as test_show_processes_cpu_iosxe,\
TestShowProcessesCpuHistory as test_show_processes_cpu_history_iosxe,\
TestShowProcessesCpuPlatform as test_show_processes_cpu_platform_iosxe,\
TestShowPlatformSoftwareStatusControlProcessorBrief as test_show_platform_software_status_control_processor_brief_iosxe,\
TestShowPlatformSoftwareSlotActiveMonitorMemSwap as test_show_platform_software_slot_active_monitor_Mem_iosxe,\
TestShowPlatformHardware as test_show_platform_hardware_iosxe,\
TestShowPlatformHardwarePlim as test_show_platform_hardware_plim_iosxe,\
TestShowPlatformHardwareQfpBqsOpmMapping as test_show_platform_hardware_qfp_bqs_opm_mapping_iosxe,\
TestShowPlatformHardwareQfpBqsIpmMapping as test_show_platform_hardware_qfp_bqs_ipm_mapping_iosxe,\
TestShowPlatformHardwareSerdesStatistics as test_show_platform_hardware_serdes_statistics_iosxe,\
TestShowPlatformHardwareSerdesStatisticsInternal as test_show_platform_hardware_serdes_statistics_internal_iosxe,\
ShowPlatformHardwareQfpBqsStatisticsChannelAll as show_platform_hardware_qfp_bqs_statistics_channel_all_iosxe,\
ShowPlatformHardwareQfpInterface as show_platform_hardware_qfp_interface_iosxe,\
TestShowPlatformHardwareQfpStatisticsDrop as test_show_platform_hardware_qfp_statistics_drop_iosxe,\
TestShowEnv as test_show_env_iosxe,\
TestShowModule as test_show_module_iosxe,\
TestShowSwitch as test_show_switch_iosxe,\
TestShowSwitchDetail as test_show_switch_detail_iosxe
if __name__ == '__main__':
unittest.main()
| 41.307956 | 153 | 0.510582 |
ed6c19de3061a6952b4f83f10500239e87852cc5
| 2,883 |
py
|
Python
|
autumn/projects/covid_19/sri_lanka/sri_lanka/project.py
|
emmamcbryde/AuTuMN-1
|
b1e7de15ac6ef6bed95a80efab17f0780ec9ff6f
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
autumn/projects/covid_19/sri_lanka/sri_lanka/project.py
|
emmamcbryde/AuTuMN-1
|
b1e7de15ac6ef6bed95a80efab17f0780ec9ff6f
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
autumn/projects/covid_19/sri_lanka/sri_lanka/project.py
|
emmamcbryde/AuTuMN-1
|
b1e7de15ac6ef6bed95a80efab17f0780ec9ff6f
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
import numpy as np
from autumn.calibration.proposal_tuning import perform_all_params_proposal_tuning
from autumn.core.project import Project, ParameterSet, load_timeseries, build_rel_path, get_all_available_scenario_paths, \
use_tuned_proposal_sds
from autumn.calibration import Calibration
from autumn.calibration.priors import UniformPrior, BetaPrior,TruncNormalPrior
from autumn.calibration.targets import (
NormalTarget,
get_dispersion_priors_for_gaussian_targets,
)
from autumn.models.covid_19 import base_params, build_model
from autumn.settings import Region, Models
from autumn.projects.covid_19.sri_lanka.sri_lanka.scenario_builder import get_all_scenario_dicts
# Load and configure model parameters.
default_path = build_rel_path("params/default.yml")
#scenario_paths = [build_rel_path(f"params/scenario-{i}.yml") for i in range(7, 9)]
mle_path = build_rel_path("params/mle-params.yml")
baseline_params = base_params.update(default_path).update(mle_path, calibration_format=True)
all_scenario_dicts = get_all_scenario_dicts("LKA")
#scenario_params = [baseline_params.update(p) for p in scenario_paths]
scenario_params = [baseline_params.update(sc_dict) for sc_dict in all_scenario_dicts]
param_set = ParameterSet(baseline=baseline_params, scenarios=scenario_params)
ts_set = load_timeseries(build_rel_path("timeseries.json"))
notifications_ts = ts_set["notifications"].rolling(7).mean().loc[350::7]
death_ts = ts_set["infection_deaths"].loc[350:]
targets = [
NormalTarget(notifications_ts),
NormalTarget(death_ts),
]
priors = [
# Dispersion parameters based on targets
    *get_dispersion_priors_for_gaussian_targets(targets),
# Regional parameters
UniformPrior("contact_rate", [0.024, 0.027]),
UniformPrior("infectious_seed", [275.0, 450.0]),
# Detection
UniformPrior("testing_to_detection.assumed_cdr_parameter", [0.009, 0.025]),
UniformPrior("infection_fatality.multiplier", [0.09, 0.13]),
#VoC
UniformPrior("voc_emergence.alpha_beta.start_time", [370, 410]),
UniformPrior("voc_emergence.alpha_beta.contact_rate_multiplier", [3.2, 4.5]),
UniformPrior("voc_emergence.delta.start_time", [475, 530]),
UniformPrior("voc_emergence.delta.contact_rate_multiplier", [8.5, 11.5]),
]
# Load proposal sds from yml file
# use_tuned_proposal_sds(priors, build_rel_path("proposal_sds.yml"))
calibration = Calibration(priors, targets)
# FIXME: Replace with flexible Python plot request API.
import json
plot_spec_filepath = build_rel_path("timeseries.json")
with open(plot_spec_filepath) as f:
plot_spec = json.load(f)
project = Project(
Region.SRI_LANKA, Models.COVID_19, build_model, param_set, calibration, plots=plot_spec
)
#perform_all_params_proposal_tuning(project, calibration, priors, n_points=50, relative_likelihood_reduction=0.2)
| 43.029851 | 123 | 0.794658 |
ed6c1e66e6a96e129aa7826692f68edb943e0fae
| 9,245 |
py
|
Python
|
Analytics/resources/themes/test_subthemes.py
|
thanosbnt/SharingCitiesDashboard
|
5d123691d1f25d0b85e20e4e8293266bf23c9f8a
|
[
"Apache-2.0"
] | 4 |
2018-11-21T14:42:18.000Z
|
2020-05-11T10:52:59.000Z
|
Analytics/resources/themes/test_subthemes.py
|
thanosbnt/SharingCitiesDashboard
|
5d123691d1f25d0b85e20e4e8293266bf23c9f8a
|
[
"Apache-2.0"
] | 60 |
2018-11-21T15:11:59.000Z
|
2019-12-02T10:46:44.000Z
|
Analytics/resources/themes/test_subthemes.py
|
thanosbnt/SharingCitiesDashboard
|
5d123691d1f25d0b85e20e4e8293266bf23c9f8a
|
[
"Apache-2.0"
] | 7 |
2018-11-21T14:42:44.000Z
|
2019-11-28T16:24:14.000Z
|
import unittest
from http import HTTPStatus
from unittest import TestCase
import bcrypt
from flask.ctx import AppContext
from flask.testing import FlaskClient
from app import create_app
from models.theme import Theme, SubTheme
from models.users import Users
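# The test-case classes themselves were not captured in this dump. The sketch
# below is an assumption that only illustrates the Flask test scaffolding
# implied by the imports above (create_app's exact signature is assumed).
class SubThemeTestScaffold(TestCase):
    def setUp(self) -> None:
        self.app = create_app()
        self.ctx: AppContext = self.app.app_context()
        self.ctx.push()
        self.client: FlaskClient = self.app.test_client()

    def tearDown(self) -> None:
        self.ctx.pop()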
if __name__ == '__main__':
unittest.main()
| 42.800926 | 119 | 0.589508 |
ed6c49af1afdf5e937dac3ecb68b0de9cb7816d4
| 11,421 |
py
|
Python
|
selfdrive/sensord/rawgps/structs.py
|
TC921/openpilot
|
d5d91e6e3be02e2525ed8d6137e5fdca2b81657c
|
[
"MIT"
] | null | null | null |
selfdrive/sensord/rawgps/structs.py
|
TC921/openpilot
|
d5d91e6e3be02e2525ed8d6137e5fdca2b81657c
|
[
"MIT"
] | null | null | null |
selfdrive/sensord/rawgps/structs.py
|
TC921/openpilot
|
d5d91e6e3be02e2525ed8d6137e5fdca2b81657c
|
[
"MIT"
] | null | null | null |
from struct import unpack_from, calcsize
LOG_GNSS_POSITION_REPORT = 0x1476
LOG_GNSS_GPS_MEASUREMENT_REPORT = 0x1477
LOG_GNSS_CLOCK_REPORT = 0x1478
LOG_GNSS_GLONASS_MEASUREMENT_REPORT = 0x1480
LOG_GNSS_BDS_MEASUREMENT_REPORT = 0x1756
LOG_GNSS_GAL_MEASUREMENT_REPORT = 0x1886
LOG_GNSS_OEMDRE_MEASUREMENT_REPORT = 0x14DE
LOG_GNSS_OEMDRE_SVPOLY_REPORT = 0x14E1
LOG_GNSS_ME_DPO_STATUS = 0x1838
LOG_GNSS_CD_DB_REPORT = 0x147B
LOG_GNSS_PRX_RF_HW_STATUS_REPORT = 0x147E
LOG_CGPS_SLOW_CLOCK_CLIB_REPORT = 0x1488
LOG_GNSS_CONFIGURATION_STATE = 0x1516
glonass_measurement_report = """
uint8_t version;
uint32_t f_count;
uint8_t glonass_cycle_number;
uint16_t glonass_number_of_days;
uint32_t milliseconds;
float time_bias;
float clock_time_uncertainty;
float clock_frequency_bias;
float clock_frequency_uncertainty;
uint8_t sv_count;
"""
glonass_measurement_report_sv = """
uint8_t sv_id;
int8_t frequency_index;
uint8_t observation_state; // SVObservationStates
uint8_t observations;
uint8_t good_observations;
uint8_t hemming_error_count;
uint8_t filter_stages;
uint16_t carrier_noise;
int16_t latency;
uint8_t predetect_interval;
uint16_t postdetections;
uint32_t unfiltered_measurement_integral;
float unfiltered_measurement_fraction;
float unfiltered_time_uncertainty;
float unfiltered_speed;
float unfiltered_speed_uncertainty;
uint32_t measurement_status;
uint8_t misc_status;
uint32_t multipath_estimate;
float azimuth;
float elevation;
int32_t carrier_phase_cycles_integral;
uint16_t carrier_phase_cycles_fraction;
float fine_speed;
float fine_speed_uncertainty;
uint8_t cycle_slip_count;
uint32_t pad;
"""
gps_measurement_report = """
uint8_t version;
uint32_t f_count;
uint16_t week;
uint32_t milliseconds;
float time_bias;
float clock_time_uncertainty;
float clock_frequency_bias;
float clock_frequency_uncertainty;
uint8_t sv_count;
"""
gps_measurement_report_sv = """
uint8_t sv_id;
uint8_t observation_state; // SVObservationStates
uint8_t observations;
uint8_t good_observations;
uint16_t parity_error_count;
uint8_t filter_stages;
uint16_t carrier_noise;
int16_t latency;
uint8_t predetect_interval;
uint16_t postdetections;
uint32_t unfiltered_measurement_integral;
float unfiltered_measurement_fraction;
float unfiltered_time_uncertainty;
float unfiltered_speed;
float unfiltered_speed_uncertainty;
uint32_t measurement_status;
uint8_t misc_status;
uint32_t multipath_estimate;
float azimuth;
float elevation;
int32_t carrier_phase_cycles_integral;
uint16_t carrier_phase_cycles_fraction;
float fine_speed;
float fine_speed_uncertainty;
uint8_t cycle_slip_count;
uint32_t pad;
"""
position_report = """
uint8 u_Version; /* Version number of DM log */
uint32 q_Fcount; /* Local millisecond counter */
uint8 u_PosSource; /* Source of position information */ /* 0: None 1: Weighted least-squares 2: Kalman filter 3: Externally injected 4: Internal database */
uint32 q_Reserved1; /* Reserved memory field */
uint16 w_PosVelFlag; /* Position velocity bit field: (see DM log 0x1476 documentation) */
uint32 q_PosVelFlag2; /* Position velocity 2 bit field: (see DM log 0x1476 documentation) */
uint8 u_FailureCode; /* Failure code: (see DM log 0x1476 documentation) */
uint16 w_FixEvents; /* Fix events bit field: (see DM log 0x1476 documentation) */
uint32 _fake_align_week_number;
uint16 w_GpsWeekNumber; /* GPS week number of position */
uint32 q_GpsFixTimeMs; /* GPS fix time of week of in milliseconds */
uint8 u_GloNumFourYear; /* Number of Glonass four year cycles */
uint16 w_GloNumDaysInFourYear; /* Glonass calendar day in four year cycle */
uint32 q_GloFixTimeMs; /* Glonass fix time of day in milliseconds */
uint32 q_PosCount; /* Integer count of the number of unique positions reported */
uint64 t_DblFinalPosLatLon[2]; /* Final latitude and longitude of position in radians */
uint32 q_FltFinalPosAlt; /* Final height-above-ellipsoid altitude of position */
uint32 q_FltHeadingRad; /* User heading in radians */
uint32 q_FltHeadingUncRad; /* User heading uncertainty in radians */
uint32 q_FltVelEnuMps[3]; /* User velocity in east, north, up coordinate frame. In meters per second. */
uint32 q_FltVelSigmaMps[3]; /* Gaussian 1-sigma value for east, north, up components of user velocity */
uint32 q_FltClockBiasMeters; /* Receiver clock bias in meters */
uint32 q_FltClockBiasSigmaMeters; /* Gaussian 1-sigma value for receiver clock bias in meters */
uint32 q_FltGGTBMeters; /* GPS to Glonass time bias in meters */
uint32 q_FltGGTBSigmaMeters; /* Gaussian 1-sigma value for GPS to Glonass time bias uncertainty in meters */
uint32 q_FltGBTBMeters; /* GPS to BeiDou time bias in meters */
uint32 q_FltGBTBSigmaMeters; /* Gaussian 1-sigma value for GPS to BeiDou time bias uncertainty in meters */
uint32 q_FltBGTBMeters; /* BeiDou to Glonass time bias in meters */
uint32 q_FltBGTBSigmaMeters; /* Gaussian 1-sigma value for BeiDou to Glonass time bias uncertainty in meters */
uint32 q_FltFiltGGTBMeters; /* Filtered GPS to Glonass time bias in meters */
uint32 q_FltFiltGGTBSigmaMeters; /* Filtered Gaussian 1-sigma value for GPS to Glonass time bias uncertainty in meters */
uint32 q_FltFiltGBTBMeters; /* Filtered GPS to BeiDou time bias in meters */
uint32 q_FltFiltGBTBSigmaMeters; /* Filtered Gaussian 1-sigma value for GPS to BeiDou time bias uncertainty in meters */
uint32 q_FltFiltBGTBMeters; /* Filtered BeiDou to Glonass time bias in meters */
uint32 q_FltFiltBGTBSigmaMeters; /* Filtered Gaussian 1-sigma value for BeiDou to Glonass time bias uncertainty in meters */
uint32 q_FltSftOffsetSec; /* SFT offset as computed by WLS in seconds */
uint32 q_FltSftOffsetSigmaSec; /* Gaussian 1-sigma value for SFT offset in seconds */
uint32 q_FltClockDriftMps; /* Clock drift (clock frequency bias) in meters per second */
uint32 q_FltClockDriftSigmaMps; /* Gaussian 1-sigma value for clock drift in meters per second */
uint32 q_FltFilteredAlt; /* Filtered height-above-ellipsoid altitude in meters as computed by WLS */
uint32 q_FltFilteredAltSigma; /* Gaussian 1-sigma value for filtered height-above-ellipsoid altitude in meters */
uint32 q_FltRawAlt; /* Raw height-above-ellipsoid altitude in meters as computed by WLS */
uint32 q_FltRawAltSigma; /* Gaussian 1-sigma value for raw height-above-ellipsoid altitude in meters */
uint32 align_Flt[14];
uint32 q_FltPdop; /* 3D position dilution of precision as computed from the unweighted least-squares covariance matrix */
uint32 q_FltHdop; /* Horizontal position dilution of precision as computed from the unweighted least-squares covariance matrix */
uint32 q_FltVdop; /* Vertical position dilution of precision as computed from the unweighted least-squares covariance matrix */
uint8 u_EllipseConfidence; /* Statistical measure of the confidence (percentage) associated with the uncertainty ellipse values */
uint32 q_FltEllipseAngle; /* Angle of semimajor axis with respect to true North, with increasing angles moving clockwise from North. In units of degrees. */
uint32 q_FltEllipseSemimajorAxis; /* Semimajor axis of final horizontal position uncertainty error ellipse. In units of meters. */
uint32 q_FltEllipseSemiminorAxis; /* Semiminor axis of final horizontal position uncertainty error ellipse. In units of meters. */
uint32 q_FltPosSigmaVertical; /* Gaussian 1-sigma value for final position height-above-ellipsoid altitude in meters */
uint8 u_HorizontalReliability; /* Horizontal position reliability 0: Not set 1: Very Low 2: Low 3: Medium 4: High */
uint8 u_VerticalReliability; /* Vertical position reliability */
uint16 w_Reserved2; /* Reserved memory field */
uint32 q_FltGnssHeadingRad; /* User heading in radians derived from GNSS only solution */
uint32 q_FltGnssHeadingUncRad; /* User heading uncertainty in radians derived from GNSS only solution */
uint32 q_SensorDataUsageMask; /* Denotes which additional sensor data were used to compute this position fix. BIT[0] 0x00000001 <96> Accelerometer BIT[1] 0x00000002 <96> Gyro 0x0000FFFC - Reserved A bit set to 1 indicates that certain fields as defined by the SENSOR_AIDING_MASK were aided with sensor data*/
uint32 q_SensorAidMask; /* Denotes which component of the position report was assisted with additional sensors defined in SENSOR_DATA_USAGE_MASK BIT[0] 0x00000001 <96> Heading aided with sensor data BIT[1] 0x00000002 <96> Speed aided with sensor data BIT[2] 0x00000004 <96> Position aided with sensor data BIT[3] 0x00000008 <96> Velocity aided with sensor data 0xFFFFFFF0 <96> Reserved */
uint8 u_NumGpsSvsUsed; /* The number of GPS SVs used in the fix */
uint8 u_TotalGpsSvs; /* Total number of GPS SVs detected by searcher, including ones not used in position calculation */
uint8 u_NumGloSvsUsed; /* The number of Glonass SVs used in the fix */
uint8 u_TotalGloSvs; /* Total number of Glonass SVs detected by searcher, including ones not used in position calculation */
uint8 u_NumBdsSvsUsed; /* The number of BeiDou SVs used in the fix */
uint8 u_TotalBdsSvs; /* Total number of BeiDou SVs detected by searcher, including ones not used in position calculation */
"""
| 50.76 | 403 | 0.698976 |
ed6ced72ed9bc083484bd7a8ca32221ff538be8a
| 12,541 |
py
|
Python
|
python2.7libs/hammer_tools/content_browser.py
|
anvdev/Hammer-Tools
|
0211ec837da6754e537c98624ecd07c23abab28e
|
[
"Apache-2.0"
] | 19 |
2019-10-09T13:48:11.000Z
|
2021-06-14T01:25:23.000Z
|
python2.7libs/hammer_tools/content_browser.py
|
anvdev/Hammer-Tools
|
0211ec837da6754e537c98624ecd07c23abab28e
|
[
"Apache-2.0"
] | 219 |
2019-10-08T14:44:48.000Z
|
2021-06-19T06:27:46.000Z
|
python2.7libs/hammer_tools/content_browser.py
|
anvdev/Hammer-Tools
|
0211ec837da6754e537c98624ecd07c23abab28e
|
[
"Apache-2.0"
] | 3 |
2020-02-14T06:18:06.000Z
|
2020-11-25T20:47:06.000Z
|
from __future__ import print_function
try:
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5.QtCore import *
except ImportError:
from PySide2.QtWidgets import *
from PySide2.QtGui import *
from PySide2.QtCore import *
import hou
from hammer_tools.utils import createAction
link_or_state_icon = 'BUTTONS_link'
embedded_icon = 'BUTTONS_pinned'
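# The ContentBrowser widget body was not captured in this dump; the placeholder
# below (an assumption) only keeps the __main__ demo at the bottom runnable.
class ContentBrowser(QWidget):
    def __init__(self, parent=None):
        super(ContentBrowser, self).__init__(parent)
        self.setWindowTitle('Content Browser')
        self.resize(800, 500)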
if __name__ == '__main__':
app = QApplication([])
window = ContentBrowser()
window.show()
app.exec_()
| 37.21365 | 106 | 0.660633 |
ed6cf42a0947849b7e11a5ffae5ba378599d9f7e
| 1,106 |
py
|
Python
|
rt-thread/applications/server/udp_sender.py
|
luhuadong/stm32f769-disco-demo
|
c7fb0d627b02c3f87959f43f1447bc79f62a7099
|
[
"Apache-2.0"
] | null | null | null |
rt-thread/applications/server/udp_sender.py
|
luhuadong/stm32f769-disco-demo
|
c7fb0d627b02c3f87959f43f1447bc79f62a7099
|
[
"Apache-2.0"
] | null | null | null |
rt-thread/applications/server/udp_sender.py
|
luhuadong/stm32f769-disco-demo
|
c7fb0d627b02c3f87959f43f1447bc79f62a7099
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python3
"""
UDP sender
"""
import socket
import time
import sys
smsg = b'\xaa\x08\xfe\x00\xc9\xe6\x5f\xee'
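def main():
    # The original main() body was not captured; this is a minimal sketch.
    # The target host/port and the one-second send interval are assumptions.
    host = sys.argv[1] if len(sys.argv) > 1 else '127.0.0.1'
    port = int(sys.argv[2]) if len(sys.argv) > 2 else 8888
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    print('sending %d-byte datagrams to %s:%d' % (len(smsg), host, port))
    while True:
        sock.sendto(smsg, (host, port))
        time.sleep(1)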
if __name__ == '__main__':
main()
| 17.83871 | 65 | 0.513562 |
ed6da0909e41c7d13eeccbce6856b66ed6774782
| 1,273 |
py
|
Python
|
yudzuki/role.py
|
LunaProject-Discord/yudzuki.py
|
7ff2d1f9e39ed5300a46c48fb95df50046814ede
|
[
"MIT"
] | 6 |
2021-04-29T12:48:55.000Z
|
2021-06-25T06:54:37.000Z
|
yudzuki/role.py
|
LunaProject-Discord/yudzuki.py
|
7ff2d1f9e39ed5300a46c48fb95df50046814ede
|
[
"MIT"
] | null | null | null |
yudzuki/role.py
|
LunaProject-Discord/yudzuki.py
|
7ff2d1f9e39ed5300a46c48fb95df50046814ede
|
[
"MIT"
] | null | null | null |
__all__ = (
"Role",
)
| 19.584615 | 51 | 0.531815 |
ed6e652c3847138189ca7b951889b9b3a32aa8ce
| 1,702 |
py
|
Python
|
jassen/django/project/project/urls.py
|
cabilangan112/intern-drf-blog
|
b2d6c7a4af1316b2c7ce38547bd9df99b4f3e8b9
|
[
"MIT"
] | null | null | null |
jassen/django/project/project/urls.py
|
cabilangan112/intern-drf-blog
|
b2d6c7a4af1316b2c7ce38547bd9df99b4f3e8b9
|
[
"MIT"
] | null | null | null |
jassen/django/project/project/urls.py
|
cabilangan112/intern-drf-blog
|
b2d6c7a4af1316b2c7ce38547bd9df99b4f3e8b9
|
[
"MIT"
] | null | null | null |
"""project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from rest_framework import routers
from blog import views
from blog.views import PostViewSet,CommentViewSet,CategoryViewSet,TagViewSet,DraftViewSet,HideViewSet
from django.conf import settings
from django.conf.urls.static import static
router = routers.DefaultRouter()
router.register(r'hide',HideViewSet, base_name='hiddinn')
router.register(r'draft',DraftViewSet, base_name='draft')
router.register(r'post', PostViewSet, base_name='post')
router.register(r'comment', CommentViewSet, base_name='comment')
router.register(r'tags', TagViewSet, base_name='tags')
router.register(r'category', CategoryViewSet, base_name='category')
from django.contrib import admin
from django.urls import path
urlpatterns = [
path('admin/', admin.site.urls),
url(r'^', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
urlpatterns.extend(
static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) +
static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
)
| 37.822222 | 101 | 0.756757 |
ed6e6b0df61cc3926c2f1e1ffc6195fcb5a7b2f1
| 13 |
py
|
Python
|
deep-learning-app/src/models/__init__.py
|
everbrez/Deep-Learning-based-Chemical-Graphics-Analysis-Platform
|
5ecaedadd74e96891c28d9f73384e07c1526916b
|
[
"Apache-2.0"
] | 1 |
2021-04-30T10:44:32.000Z
|
2021-04-30T10:44:32.000Z
|
deep-learning-app/src/models/__init__.py
|
everbrez/Deep-Learning-based-Chemical-Graphics-Analysis-Platform
|
5ecaedadd74e96891c28d9f73384e07c1526916b
|
[
"Apache-2.0"
] | null | null | null |
deep-learning-app/src/models/__init__.py
|
everbrez/Deep-Learning-based-Chemical-Graphics-Analysis-Platform
|
5ecaedadd74e96891c28d9f73384e07c1526916b
|
[
"Apache-2.0"
] | null | null | null |
print('init')
| 13 | 13 | 0.692308 |
ed6e6d96a4c0121238dbb61b6a4a506e75d9c0bd
| 1,007 |
py
|
Python
|
chemmltoolkit/tensorflow/callbacks/variableScheduler.py
|
Andy-Wilkinson/ChemMLToolk
|
83efc7ea66d2def860a3e04ccd70d77fb689fddc
|
[
"MIT"
] | 1 |
2019-10-30T03:43:24.000Z
|
2019-10-30T03:43:24.000Z
|
chemmltoolkit/tensorflow/callbacks/variableScheduler.py
|
Andy-Wilkinson/ChemMLToolk
|
83efc7ea66d2def860a3e04ccd70d77fb689fddc
|
[
"MIT"
] | 2 |
2021-11-28T21:09:30.000Z
|
2021-11-28T21:09:39.000Z
|
chemmltoolkit/tensorflow/callbacks/variableScheduler.py
|
Andy-Wilkinson/ChemMLToolkit
|
83efc7ea66d2def860a3e04ccd70d77fb689fddc
|
[
"MIT"
] | null | null | null |
import tensorflow as tf
| 37.296296 | 73 | 0.646475 |
ed6f5b3794c25687738dfe6c60b7b8d1ed6647b2
| 14,621 |
py
|
Python
|
join_peaks.py
|
nijibabulu/chip_tools
|
04def22059a6018b3b49247d69d7b04eee1dcd89
|
[
"MIT"
] | null | null | null |
join_peaks.py
|
nijibabulu/chip_tools
|
04def22059a6018b3b49247d69d7b04eee1dcd89
|
[
"MIT"
] | null | null | null |
join_peaks.py
|
nijibabulu/chip_tools
|
04def22059a6018b3b49247d69d7b04eee1dcd89
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python
import os
import sys
import math
import csv
import collections
import docopt
import peakzilla_qnorm_mapq_patched as pz
__doc__ = '''
Usage: join_peaks.py [options] PEAKS CHIP INPUT [ (PEAKS CHIP INPUT) ... ]
This script finds peaks in common between multiple ChIP experiments determined
by peakzilla. For each ChIP experiment, input a PEAKS file as output by
peakzilla, and 2 BED files (CHIP and INPUT) as input to peakzilla.
This will output a table whose leading columns identify the peaks (Chromosome,
Start, End, Name, 'NPeaks', 'Spread', 'ChipSE', 'EnrichSE'). NPeaks signifies the
number of peaks that were called among all the ChIP experiments, Spread is the
difference between the biggest and smallest ChIP peak, ChipSE and EnrichSE are
the standard error on the mean among the ChIP and Enrich values for the peaks.
For each experinent "X", information about the peaks are output: 'XPZName','XPZScore',
'XPZChip','XPZInput','XPZEnrich','XPZFDR','XChip','XInput','XEnrich','XMapq'.
All 'PZ' columns are the original output from peakzilla and the remaining
columns are re-calculated in this script (also output regardless of the presence
of a peak).
Options:
--max-distance=DIST maximum summit distance to join peaks [default: 10]
'''
args = docopt.docopt(__doc__)
#np.set_printoptions(precision=1,suppress=True)
maxdist = int(args['--max-distance'])
peaksets = {}
filesets = {}
for peakfile,chipfile,controlfile in zip(args['PEAKS'],args['CHIP'],args['INPUT']):
set_name = os.path.basename(peakfile).split('.')[0]
peaksets[set_name] = collections.defaultdict(list)
filesets[set_name] = FileSet(peakfile,chipfile,controlfile)
r = csv.reader(open(peakfile),delimiter='\t')
r.next() # header
'''
#XXX: limit peaks
maxpeaks = 20
peakcounter = 0
for row in r:
if float(row[5]) >= 100 and float(row[8]) >= 10:
peakcounter += 1
if peakcounter > maxpeaks:
break
peaksets[set_name][row[0]].append(PZPeak(set_name,*row))
'''
for row in r:
peaksets[set_name][row[0]].append(PZPeak(set_name,*row))
JoinedPeak.WIDTH += peaksets[set_name].itervalues().next()[0].width()
JoinedPeak.WIDTH /= len(peaksets)
# find closest peak to each peak in the new set
# make new peaks when there's no qualifying one
npeaks = 0
joined_peaks = collections.defaultdict(list)
for set_name,peakset in peaksets.items():
for chrom,peaks in peakset.items():
for peak in peaks:
closest = None
for jp in joined_peaks[chrom]:
dist = jp.dist(peak)
if dist >= 0 and dist <= maxdist:
if closest is None or closest.dist(peak) > dist:
closest = jp
if closest is None or not closest.can_add(peak):
npeaks += 1
joined_peaks[chrom].append(JoinedPeak(peak))
else:
closest.add(peak)
plus_model,minus_model = pz.generate_ideal_model(JoinedPeak.WIDTH)
for set_name,fileset in filesets.items():
scorer = PeakScorer(fileset.chip_tags,fileset.control_tags,
JoinedPeak.WIDTH,plus_model,minus_model)
peaks_to_score = collections.defaultdict(list)
for chrom,peaks in joined_peaks.items():
for jp in peaks:
if set_name not in jp.peaks:
jp.peaks[set_name] = SlavePeak(set_name,jp.center)
peaks_to_score[chrom].append(jp.peaks[set_name])
scorer.score_peaks(peaks_to_score)
print JoinedPeak.header()
for chrom,peaks in joined_peaks.items():
for peak in peaks:
print peak
#plus_model,minus_model = pz.generate_ideal_model(JoinedPeak.WIDTH)
#def get_coverage(fileset,type,jp,pseudocount=0):
#score = 0
#start = max(0,jp.center-JoinedPeak.WIDTH/2)
#for aln in fileset.get_file(type).fetch(
#reference = jp.chrom, start = start,
#end = jp.center+JoinedPeak.WIDTH/2):
#if aln.is_reverse:
#score += minus_model[aln.pos-start]
#else:
#score += plus_model[aln.pos-start]
#return (score+pseudocount)*10.**6/fileset.get_tagcount(type)
#return 10.**6*fileset.get_file(type).count(
#reference = jp.chrom,
#start = max(0,jp.center-JoinedPeak.WIDTH/2),
#end = jp.center+JoinedPeak.WIDTH/2)/fileset.get_tagcount(type)
#start = jp.center,
#end = jp.center+1)
#matrix = np.zeros((npeaks,len(peaksets)*2))
#i = 0
#for chrom,peaks in joined_peaks.items():
#for jp in peaks:
#for j,set_name in enumerate(peaksets.keys()):
#control_coverage = get_coverage(filesets[set_name],'control',jp,pseudocount=1)
#chip_coverage = get_coverage(filesets[set_name],'chip',jp)
#matrix[i][j] = float(chip_coverage)
#matrix[i][j+len(peaksets)] = float(control_coverage)
#i += 1
#quantile_normalize.quantile_norm(matrix)
#i = 0
#for chrom,peaks in joined_peaks.items():
#for jp in peaks:
#for j,set_name in enumerate(peaksets.keys()):
#if set_name not in jp.peaks:
#jp.peaks[set_name] = SlavePeak(
#set_name,matrix[i][j],matrix[i][j + len(peaksets)])
#else:
#jp.peaks[set_name].computed_chip = matrix[i][j]
#jp.peaks[set_name].computed_control = matrix[i][j+len(peaksets)]
#jp.peaks[set_name].compute_fold_enrichment()
#print jp
#i += 1
'''
i = 0
for chrom,peaks in joined_peaks.items():
for jp in peaks:
for j,set_name in enumerate(filesets.keys()):
matrix[i][j] = float(jp.peaks[set_name].computed_chip)
matrix[i][j+len(peaksets)] = float(jp.peaks[set_name].computed_control)
i += 1
'''
| 39.730978 | 122 | 0.603584 |
ed6ff0df42bec5dfbd4d71634bb7ab44a9c003d2
| 9,473 |
py
|
Python
|
django_town/rest_swagger/views.py
|
uptown/django-town
|
4c3b078a8ce5dcc275d65faa4a1cdfb7ebc74a50
|
[
"MIT"
] | null | null | null |
django_town/rest_swagger/views.py
|
uptown/django-town
|
4c3b078a8ce5dcc275d65faa4a1cdfb7ebc74a50
|
[
"MIT"
] | null | null | null |
django_town/rest_swagger/views.py
|
uptown/django-town
|
4c3b078a8ce5dcc275d65faa4a1cdfb7ebc74a50
|
[
"MIT"
] | null | null | null |
from django_town.rest import RestApiView, rest_api_manager
from django_town.http import http_json_response
from django_town.cache.utlis import SimpleCache
from django_town.oauth2.swagger import swagger_authorizations_data
from django_town.social.oauth2.permissions import OAuth2Authenticated, OAuth2AuthenticatedOrReadOnly
from django_town.social.permissions import Authenticated, AuthenticatedOrReadOnly
| 43.059091 | 108 | 0.331468 |
ed71593db0e5552171798bc1852cca8f7c4d9f3e
| 2,285 |
py
|
Python
|
components/dash-core-components/tests/integration/dropdown/test_dynamic_options.py
|
mastermind88/dash
|
760af721980e18d91bdbc4e204d1d063c7ed325c
|
[
"MIT"
] | null | null | null |
components/dash-core-components/tests/integration/dropdown/test_dynamic_options.py
|
mastermind88/dash
|
760af721980e18d91bdbc4e204d1d063c7ed325c
|
[
"MIT"
] | null | null | null |
components/dash-core-components/tests/integration/dropdown/test_dynamic_options.py
|
mastermind88/dash
|
760af721980e18d91bdbc4e204d1d063c7ed325c
|
[
"MIT"
] | null | null | null |
from dash import Dash, Input, Output, dcc, html
from dash.exceptions import PreventUpdate
| 27.202381 | 87 | 0.647265 |
ed725c3c070133c88aad862a90d3bfcbc58edf09
| 768 |
py
|
Python
|
Server.py
|
dipghoshraj/live-video-streming-with-web-socket
|
dda924e22a4c40d225ec39dd94ee1e489233c403
|
[
"BSD-2-Clause"
] | 3 |
2020-06-30T03:49:46.000Z
|
2021-07-17T16:15:55.000Z
|
Server.py
|
dipghoshraj/live-video-streming-with-web-socket
|
dda924e22a4c40d225ec39dd94ee1e489233c403
|
[
"BSD-2-Clause"
] | null | null | null |
Server.py
|
dipghoshraj/live-video-streming-with-web-socket
|
dda924e22a4c40d225ec39dd94ee1e489233c403
|
[
"BSD-2-Clause"
] | null | null | null |
import cv2
import io
import socket
import struct
import time
import pickle
import zlib
client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client_socket.connect(('127.0.0.1', 8485))
connection = client_socket.makefile('wb')
cam = cv2.VideoCapture("E:/songs/Attention Charlie Puth(GabbarWorld.com) 1080p.mp4")
cam.set(3, 320)
cam.set(4, 240)
img_counter = 0
encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 90]
while True:
    ret, frame = cam.read()
    if not ret:
        # Stop cleanly once the capture runs out of frames.
        break
    result, frame = cv2.imencode('.jpg', frame, encode_param)
# data = zlib.compress(pickle.dumps(frame, 0))
data = pickle.dumps(frame, 0)
size = len(data)
print("{}: {}".format(img_counter, size))
client_socket.sendall(struct.pack(">L", size) + data)
img_counter += 1
cam.release()
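# Receiver counterpart (a minimal sketch, not part of this file): read the
# 4-byte big-endian length prefix, then the pickled JPEG payload, mirroring
# the sendall() framing above. `conn` is an accepted TCP socket (assumed).
#
#   payload_size = struct.calcsize(">L")
#   data = b""
#   while True:
#       while len(data) < payload_size:
#           data += conn.recv(4096)
#       msg_size = struct.unpack(">L", data[:payload_size])[0]
#       data = data[payload_size:]
#       while len(data) < msg_size:
#           data += conn.recv(4096)
#       frame = cv2.imdecode(pickle.loads(data[:msg_size]), cv2.IMREAD_COLOR)
#       data = data[msg_size:]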
| 29.538462 | 84 | 0.71224 |
ed74d0762a12ab84a6b4c685f57a0a532e003b99
| 7,059 |
py
|
Python
|
hal/agent/tf2_utils.py
|
gunpowder78/google-research
|
d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5
|
[
"Apache-2.0"
] | 1 |
2022-03-13T21:48:52.000Z
|
2022-03-13T21:48:52.000Z
|
hal/agent/tf2_utils.py
|
gunpowder78/google-research
|
d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5
|
[
"Apache-2.0"
] | null | null | null |
hal/agent/tf2_utils.py
|
gunpowder78/google-research
|
d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5
|
[
"Apache-2.0"
] | 1 |
2022-03-30T07:20:29.000Z
|
2022-03-30T07:20:29.000Z
|
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for Tensorflow 2.0.
Partially adapted from:
https://www.tensorflow.org/tutorials/text/image_captioning
"""
# Lint as: python3
# pylint: disable=invalid-name
from __future__ import absolute_import
from __future__ import division
import tensorflow as tf
def film_params(sentence_embedding, n_layer_channel):
"""Generate FiLM parameters from a sentence embedding.
Generate FiLM parameters from a sentence embedding. This method assumes a
batch dimension exists.
Args:
    sentence_embedding: a tensor containing the batched sentence embeddings to
      be transformed
n_layer_channel: a list of integers specifying how many channels are at
each hidden layer to be FiLM'ed
Returns:
a tuple of tensors the same length as n_layer_channel. Each element
contains all gamma_i and beta_i for a single hidden layer.
"""
n_total = sum(n_layer_channel) * 2
  # Apply a single Keras dense projection producing one (gamma, beta) pair per
  # channel. (The original mixed a TF1 tf.layers.dense call with a Keras layer
  # that was created but never applied to the input.)
  all_params = tf.keras.layers.Dense(n_total, activation=tf.nn.relu)(
      sentence_embedding)
return tf.split(all_params, [c * 2 for c in n_layer_channel], 1)
def stack_conv_layer(layer_cfg, padding='same'):
"""Stack convolution layers per layer_cfg.
Args:
layer_cfg: list of integer tuples specifying the parameter each layer;
each tuple should be (channel, kernel size, strides)
padding: what kind of padding the conv layers use
Returns:
the keras model with stacked conv layers
"""
layers = []
for cfg in layer_cfg[:-1]:
layers.append(
tf.keras.layers.Conv2D(
filters=cfg[0],
kernel_size=cfg[1],
strides=cfg[2],
activation=tf.nn.relu,
padding=padding))
final_cfg = layer_cfg[-1]
layers.append(
tf.keras.layers.Conv2D(
final_cfg[0], final_cfg[1], final_cfg[2], padding=padding))
return tf.keras.Sequential(layers)
def stack_dense_layer(layer_cfg):
"""Stack Dense layers.
Args:
layer_cfg: list of integer specifying the number of units at each layer
Returns:
the keras model with stacked dense layers
"""
layers = []
for cfg in layer_cfg[:-1]:
layers.append(tf.keras.layers.Dense(cfg, activation=tf.nn.relu))
layers.append(tf.keras.layers.Dense(layer_cfg[-1]))
return tf.keras.Sequential(layers)
def soft_variables_update(source_variables, target_variables, polyak_rate=1.0):
"""Update the target variables using exponential moving average.
Specifically, v_s' = v_s * polyak_rate + (1-polyak_rate) * v_t
Args:
source_variables: the moving average variables
target_variables: the new observations
polyak_rate: rate of moving average
Returns:
Operation that does the update
"""
updates = []
for (v_s, v_t) in zip(source_variables, target_variables):
v_t.shape.assert_is_compatible_with(v_s.shape)
def update_fn(v1, v2):
"""Update variables."""
# For not trainable variables do hard updates.
return v1.assign(polyak_rate * v1 + (1 - polyak_rate) * v2)
update = update_fn(v_t, v_s)
updates.append(update)
return updates
def vector_tensor_product(a, b):
""""Returns keras layer that perfrom a outer product between a and b."""
# a shape: [B, ?, d], b shape: [B, ?, d]
shape_layer = tf.keras.layers.Lambda(tf.shape)
shape = shape_layer(b)
shape_numpy = b.get_shape()
variable_length = shape[1] # variable_len = ?
expand_dims_layer_1 = tf.keras.layers.Reshape((-1, 1, shape_numpy[-1]))
expand_dims_layer_2 = tf.keras.layers.Reshape((-1, 1, shape_numpy[-1]))
a = expand_dims_layer_1(a) # a shape: [B, ?, 1, d]
b = expand_dims_layer_2(b) # a shape: [B, ?, 1, d]
tile_layer = tf.keras.layers.Lambda(
lambda inputs: tf.tile(inputs[0], multiples=inputs[1]))
a = tile_layer((a, [1, 1, variable_length, 1])) # a shape: [B, ?, ?, d]
b = tile_layer((b, [1, 1, variable_length, 1])) # b shape: [B, ?, ?, d]
b = tf.keras.layers.Permute((2, 1, 3))(b) # b shape: [B, ?, ?, d]
return tf.keras.layers.concatenate([a, b]) # shape: [B, ?, ?, 2*d]
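# Example usage (a minimal sketch; layer sizes and shapes are illustrative
# assumptions, not values from the original project):
#
#   mlp = stack_dense_layer([256, 128, 10])                  # logits head
#   cnn = stack_conv_layer([(32, 3, 2), (64, 3, 2), (8, 1, 1)])
#   gamma_betas = film_params(embedding, [32, 64])           # one tensor per layer
#   updates = soft_variables_update(online_vars, target_vars, polyak_rate=0.995)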
| 32.380734 | 79 | 0.696841 |
ed753328e567a24c6d1169588942c86a984af1ee
| 4,437 |
py
|
Python
|
wolk/logger_factory.py
|
Wolkabout/WolkConnect-Python-
|
11412e3f88911170f587b5e857d07ab41c8f52b5
|
[
"Apache-2.0"
] | 6 |
2016-12-19T13:36:44.000Z
|
2018-05-10T15:08:15.000Z
|
wolk/logger_factory.py
|
Wolkabout/WolkConnect-Python
|
11412e3f88911170f587b5e857d07ab41c8f52b5
|
[
"Apache-2.0"
] | 5 |
2019-02-23T09:37:12.000Z
|
2021-09-17T13:54:58.000Z
|
wolk/logger_factory.py
|
Wolkabout/WolkConnect-Python-
|
11412e3f88911170f587b5e857d07ab41c8f52b5
|
[
"Apache-2.0"
] | 3 |
2016-08-15T22:19:00.000Z
|
2017-12-28T09:48:37.000Z
|
"""LoggerFactory Module."""
# Copyright 2020 WolkAbout Technology s.r.o.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import List
from typing import Optional
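# The LoggerFactory class itself was not captured in this dump. The sketch
# below is an assumption reconstructed from its uses in logging_config()
# further down (which reads/writes .level and .log_file and iterates
# .loggers); it is not the original implementation.
class LoggerFactory:
    """Track a shared log level, an optional log file and issued loggers."""

    def __init__(self, level: int = logging.INFO) -> None:
        self.level = level
        self.log_file: Optional[str] = None
        self.loggers: List[logging.Logger] = []

    def get_logger(self, name: str) -> logging.Logger:
        """Return a logger configured with the factory's level and handler."""
        logger = logging.getLogger(name)
        logger.setLevel(self.level)
        handler: logging.Handler = (
            logging.FileHandler(self.log_file)
            if self.log_file
            else logging.StreamHandler()
        )
        handler.setLevel(self.level)
        logger.addHandler(handler)
        self.loggers.append(logger)
        return logger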
# Logging levels available: NOTSET, INFO, DEBUG
logger_factory = LoggerFactory(level=logging.INFO)
LEVELS = {
"debug": logging.DEBUG,
"info": logging.INFO,
"warning": logging.WARNING,
"error": logging.ERROR,
"critical": logging.CRITICAL,
"notset": logging.NOTSET,
}
def logging_config(level: str, log_file: Optional[str] = None) -> None:
"""
Set desired log level and designate a log file.
:param level: Available levels : debug, info, notset
:type level: str
:param log_file: path to log file
:type log_file: str or None
"""
if log_file is not None:
logger_factory.log_file = log_file
if level not in LEVELS:
print(f"Invalid level '{level}'")
return
if LEVELS[level] == logger_factory.level:
return
logger_factory.level = LEVELS[level]
for logger in logger_factory.loggers:
logger.setLevel(logger_factory.level)
for handler in logger.handlers:
handler.setLevel(logger_factory.level)
| 30.8125 | 88 | 0.610773 |
ed7563752fb4afab443eb59eb4484ffff4182b40
| 1,830 |
py
|
Python
|
raw.py
|
andre-marcos-perez/data-pipeline-demo
|
2647cce6e90d39798eda352608dc0f6d6ab5255a
|
[
"MIT"
] | 3 |
2021-05-20T23:24:13.000Z
|
2021-08-20T12:23:18.000Z
|
raw.py
|
andre-marcos-perez/data-pipeline-demo
|
2647cce6e90d39798eda352608dc0f6d6ab5255a
|
[
"MIT"
] | null | null | null |
raw.py
|
andre-marcos-perez/data-pipeline-demo
|
2647cce6e90d39798eda352608dc0f6d6ab5255a
|
[
"MIT"
] | 3 |
2021-05-26T14:49:20.000Z
|
2022-03-21T23:17:54.000Z
|
import json
import gzip
import requests
from datetime import datetime
import pendulum
import boto3
from botocore.exceptions import ClientError
from util.log import Log
from settings.aws_settings import AWSSettings
from settings.telegram_settings import TelegramSettings
| 31.016949 | 103 | 0.62623 |
ed75b8a825782f227e671daaa305387cdcbcd9d0
| 2,688 |
py
|
Python
|
v2_hier/site_stat.py
|
ruslan-ok/ruslan
|
fc402e53d2683581e13f4d6c69a6f21e5c2ca1f8
|
[
"MIT"
] | null | null | null |
v2_hier/site_stat.py
|
ruslan-ok/ruslan
|
fc402e53d2683581e13f4d6c69a6f21e5c2ca1f8
|
[
"MIT"
] | null | null | null |
v2_hier/site_stat.py
|
ruslan-ok/ruslan
|
fc402e53d2683581e13f4d6c69a6f21e5c2ca1f8
|
[
"MIT"
] | null | null | null |
"""Collecting statistics of site visits."""
import collections
from datetime import datetime
from functools import reduce
from django.utils.translation import gettext_lazy as _
from hier.models import IPInfo, AccessLog, SiteStat
from v2_hier.utils import APPS
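def valid_uri(rec):
    """Minimal sketch of the elided helper (an assumption): return the request
    URI for meaningful page hits, or None for static assets and technical
    requests. The AccessLog field name `uri` is assumed."""
    uri = getattr(rec, 'uri', '') or ''
    if not uri or uri.startswith('/static/') or uri.startswith('/favicon'):
        return None
    return uri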
def get_site_stat(user):
"""Processing a new portion of log file records.
The site applications that users have visited and information about their IP addresses will be shown.
"""
TOTAL_IP = _('total different').capitalize() + ' IP'
TOTAL_LOG = _('total log records').capitalize()
NEW_LOG = _('new log records').capitalize()
cnt = collections.Counter()
cnt[TOTAL_IP] = len(IPInfo.objects.all())
cnt[TOTAL_LOG] = len(AccessLog.objects.all())
#Determining the last previously processed log file entry
last = datetime.min
site_stat = None
if SiteStat.objects.filter(user=user.id).exists():
site_stat = SiteStat.objects.filter(user = user.id).get()
if site_stat.record and site_stat.record.event:
last = site_stat.record.event
# New records
records = AccessLog.objects.filter(event__gt=last).order_by('-event')
cnt[NEW_LOG] += len(records)
# Save last processed log record
last_rec = None
if (len(records) > 0):
last_rec = records[0]
if site_stat:
site_stat.record = last_rec
site_stat.save()
else:
SiteStat.objects.create(user=user, record=last_rec)
#raise Exception(last_rec.event)
apps = {}
for rec in records:
uri = valid_uri(rec)
if not uri:
continue
# Determining the access to the site application
a_app = list(filter(lambda x: '/{}/'.format(x) in uri, APPS))
if not a_app:
continue
app = a_app[0]
if not app in apps:
apps[app] = {}
host = str(rec.host.info())
#raise Exception('aaa = ', aaa)
if not host in apps[app]:
apps[app][host] = []
page = '{} {}'.format(rec.method, uri)
if not page in apps[app][host]:
apps[app][host].append(page)
return cnt.most_common(), apps
| 31.623529 | 115 | 0.599702 |
ed75ce190b9f65a6716720968d522d43762ebdb0
| 16,643 |
py
|
Python
|
cli/pcluster/utils.py
|
mkosmo/cfncluster
|
f1817cc187f2b92127d48f16debb4b7ea4f4a80f
|
[
"Apache-2.0"
] | 1 |
2021-04-08T05:08:07.000Z
|
2021-04-08T05:08:07.000Z
|
cli/pcluster/utils.py
|
mkosmo/cfncluster
|
f1817cc187f2b92127d48f16debb4b7ea4f4a80f
|
[
"Apache-2.0"
] | null | null | null |
cli/pcluster/utils.py
|
mkosmo/cfncluster
|
f1817cc187f2b92127d48f16debb4b7ea4f4a80f
|
[
"Apache-2.0"
] | 1 |
2019-05-10T16:03:19.000Z
|
2019-05-10T16:03:19.000Z
|
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
# the License. A copy of the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "LICENSE.txt" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions and
# limitations under the License.
# fmt: off
from __future__ import absolute_import, print_function # isort:skip
from future import standard_library # isort:skip
standard_library.install_aliases()
# fmt: on
import json
import logging
import os
import sys
import time
import urllib.request
import zipfile
from io import BytesIO
import boto3
import pkg_resources
from botocore.exceptions import ClientError
LOGGER = logging.getLogger(__name__)
PCLUSTER_STACK_PREFIX = "parallelcluster-"
PCLUSTER_ISSUES_LINK = "https://github.com/aws/aws-parallelcluster/issues"
def get_region():
"""Get AWS_DEFAULT_REGION from the environment."""
return os.environ.get("AWS_DEFAULT_REGION")
def get_partition():
"""Get partition for the AWS_DEFAULT_REGION set in the environment."""
return "aws-us-gov" if get_region().startswith("us-gov") else "aws"
def create_s3_bucket(bucket_name, region):
"""
Create a new S3 bucket.
:param bucket_name: name of the S3 bucket to create
:param region: aws region
"""
s3_client = boto3.client("s3")
""" :type : pyboto3.s3 """
try:
if region != "us-east-1":
s3_client.create_bucket(Bucket=bucket_name, CreateBucketConfiguration={"LocationConstraint": region})
else:
s3_client.create_bucket(Bucket=bucket_name)
except s3_client.exceptions.BucketAlreadyOwnedByYou:
print("Bucket already exists")
def delete_s3_bucket(bucket_name):
"""
Delete an S3 bucket together with all stored objects.
:param bucket_name: name of the S3 bucket to delete
"""
try:
bucket = boto3.resource("s3").Bucket(bucket_name)
bucket.objects.all().delete()
bucket.delete()
except boto3.client("s3").exceptions.NoSuchBucket:
pass
except ClientError:
print("Failed to delete bucket %s. Please delete it manually." % bucket_name)
def zip_dir(path):
"""
Create a zip archive containing all files and dirs rooted in path.
The archive is created in memory and a file handler is returned by the function.
:param path: directory containing the resources to archive.
:return file handler pointing to the compressed archive.
"""
file_out = BytesIO()
with zipfile.ZipFile(file_out, "w", zipfile.ZIP_DEFLATED) as ziph:
for root, _, files in os.walk(path):
for file in files:
ziph.write(os.path.join(root, file), os.path.relpath(os.path.join(root, file), start=path))
file_out.seek(0)
return file_out
def upload_resources_artifacts(bucket_name, root):
"""
Upload to the specified S3 bucket the content of the directory rooted in root path.
All dirs contained in root dir will be uploaded as zip files to $bucket_name/$dir_name/artifacts.zip.
All files contained in root dir will be uploaded to $bucket_name.
:param bucket_name: name of the S3 bucket where files are uploaded
:param root: root directory containing the resources to upload.
"""
bucket = boto3.resource("s3").Bucket(bucket_name)
for res in os.listdir(root):
if os.path.isdir(os.path.join(root, res)):
bucket.upload_fileobj(zip_dir(os.path.join(root, res)), "%s/artifacts.zip" % res)
elif os.path.isfile(os.path.join(root, res)):
bucket.upload_file(os.path.join(root, res), res)
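# Example (hypothetical bucket name and path; a minimal sketch of how the
# helpers above combine when bootstrapping cluster resources):
#
#   bucket = "parallelcluster-example-resources"
#   create_s3_bucket(bucket, get_region())
#   upload_resources_artifacts(bucket, root="cluster_resources/")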
def _get_json_from_s3(region, file_name):
"""
    Download the given file from the region's ParallelCluster S3 bucket and parse its content as json.
:param region: AWS Region
:param file_name the object name to get
:return: a json object representing the file content
:raises ClientError if unable to download the file
:raises ValueError if unable to decode the file content
"""
bucket_name = "{0}-aws-parallelcluster".format(region)
file_contents = boto3.resource("s3").Object(bucket_name, file_name).get()["Body"].read().decode("utf-8")
return json.loads(file_contents)
def get_supported_features(region, feature):
"""
Get a json object containing the attributes supported by a feature, for example.
{
"Features": {
"efa": {
"instances": ["c5n.18xlarge", "p3dn.24xlarge", "i3en.24xlarge"],
"baseos": ["alinux", "centos7"],
"schedulers": ["sge", "slurm", "torque"]
},
"batch": {
"instances": ["r3.8xlarge", ..., "m5.4xlarge"]
}
}
}
:param region: AWS Region
    :param feature: the feature to search for, e.g. "efa", "awsbatch"
:return: json object containing all the attributes supported by feature
"""
try:
features = _get_json_from_s3(region, "features/feature_whitelist.json")
supported_features = features.get("Features").get(feature)
except (ValueError, ClientError, KeyError) as e:
if isinstance(e, ClientError):
code = e.response.get("Error").get("Code")
if code == "InvalidAccessKeyId":
error(e.response.get("Error").get("Message"))
error(
"Failed validate {0}. This is probably a bug on our end. "
"Please submit an issue {1}".format(feature, PCLUSTER_ISSUES_LINK)
)
return supported_features
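# Sketch built on get_supported_features(): check whether an instance type is
# EFA-capable in a region. Keys mirror the example JSON in the docstring above;
# actual values come from the region's feature_whitelist.json.
def _efa_supports_instance(region, instance_type):
    efa = get_supported_features(region, "efa") or {}
    return instance_type in efa.get("instances", [])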
def get_instance_vcpus(region, instance_type):
"""
Get number of vcpus for the given instance type.
:param region: AWS Region
:param instance_type: the instance type to search for.
:return: the number of vcpus or -1 if the instance type cannot be found
or the pricing file cannot be retrieved/parsed
"""
try:
instances = _get_json_from_s3(region, "instances/instances.json")
vcpus = int(instances[instance_type]["vcpus"])
except (KeyError, ValueError, ClientError):
vcpus = -1
return vcpus
def get_supported_os(scheduler):
"""
Return a tuple of the os supported by parallelcluster for the specific scheduler.
:param scheduler: the scheduler for which we want to know the supported os
:return: a tuple of strings of the supported os
"""
return "alinux" if scheduler == "awsbatch" else "alinux", "centos6", "centos7", "ubuntu1604", "ubuntu1804"
def get_supported_schedulers():
"""
    Return a tuple of the schedulers supported by parallelcluster.
    :return: a tuple of strings of the supported schedulers
"""
return "sge", "torque", "slurm", "awsbatch"
def get_stack_output_value(stack_outputs, output_key):
"""
Get output value from Cloudformation Stack Output.
:param stack_outputs: Cloudformation Stack Outputs
:param output_key: Output Key
:return: OutputValue if that output exists, otherwise None
"""
return next((o.get("OutputValue") for o in stack_outputs if o.get("OutputKey") == output_key), None)
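# Sketch combining get_stack() with get_stack_output_value(); the output key is
# whatever the cluster template exports (hypothetical here).
def _get_cluster_output(stack_name, output_key):
    outputs = get_stack(stack_name).get("Outputs", [])
    return get_stack_output_value(outputs, output_key)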
def get_stack(stack_name, cfn_client=None):
"""
Get the output for a DescribeStacks action for the given Stack.
:param stack_name: the CFN Stack name
:param cfn_client: boto3 cloudformation client
:return: the Stack data type
"""
try:
if not cfn_client:
cfn_client = boto3.client("cloudformation")
return cfn_client.describe_stacks(StackName=stack_name).get("Stacks")[0]
except (ClientError, IndexError) as e:
error(e.response.get("Error").get("Message"))
def verify_stack_creation(stack_name, cfn_client):
"""
Wait for the stack creation to be completed and notify if the stack creation fails.
:param stack_name: the stack name that we should verify
:param cfn_client: the CloudFormation client to use to verify stack status
:return: True if the creation was successful, false otherwise.
"""
status = get_stack(stack_name, cfn_client).get("StackStatus")
resource_status = ""
while status == "CREATE_IN_PROGRESS":
status = get_stack(stack_name, cfn_client).get("StackStatus")
events = cfn_client.describe_stack_events(StackName=stack_name).get("StackEvents")[0]
resource_status = ("Status: %s - %s" % (events.get("LogicalResourceId"), events.get("ResourceStatus"))).ljust(
80
)
sys.stdout.write("\r%s" % resource_status)
sys.stdout.flush()
time.sleep(5)
# print the last status update in the logs
if resource_status != "":
LOGGER.debug(resource_status)
if status != "CREATE_COMPLETE":
LOGGER.critical("\nCluster creation failed. Failed events:")
events = cfn_client.describe_stack_events(StackName=stack_name).get("StackEvents")
for event in events:
if event.get("ResourceStatus") == "CREATE_FAILED":
LOGGER.info(
" - %s %s %s",
event.get("ResourceType"),
event.get("LogicalResourceId"),
event.get("ResourceStatusReason"),
)
return False
return True
def get_templates_bucket_path():
"""Return a string containing the path of bucket."""
region = get_region()
s3_suffix = ".cn" if region.startswith("cn") else ""
return "https://s3.{REGION}.amazonaws.com{S3_SUFFIX}/{REGION}-aws-parallelcluster/templates/".format(
REGION=region, S3_SUFFIX=s3_suffix
)
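# For example, with AWS_DEFAULT_REGION=us-east-1 the function above returns
# "https://s3.us-east-1.amazonaws.com/us-east-1-aws-parallelcluster/templates/";
# for cn-north-1 the ".cn" suffix lands after the amazonaws.com hostname.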
def get_installed_version():
"""Get the version of the installed aws-parallelcluster package."""
return pkg_resources.get_distribution("aws-parallelcluster").version
def check_if_latest_version():
"""Check if the current package version is the latest one."""
try:
latest = json.loads(urllib.request.urlopen("https://pypi.python.org/pypi/aws-parallelcluster/json").read())[
"info"
]["version"]
if get_installed_version() < latest:
print("Info: There is a newer version %s of AWS ParallelCluster available." % latest)
except Exception:
pass
def warn(message):
"""Print a warning message."""
print("WARNING: {0}".format(message))
def error(message, fail_on_error=True):
"""Print an error message and Raise SystemExit exception to the stderr if fail_on_error is true."""
if fail_on_error:
sys.exit("ERROR: {0}".format(message))
else:
print("ERROR: {0}".format(message))
def get_cfn_param(params, key_name):
"""
Get parameter value from Cloudformation Stack Parameters.
:param params: Cloudformation Stack Parameters
:param key_name: Parameter Key
:return: ParameterValue if that parameter exists, otherwise None
"""
param_value = next((i.get("ParameterValue") for i in params if i.get("ParameterKey") == key_name), "NONE")
return param_value.strip()
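# Usage sketch: read a parameter back from an existing cluster stack. The
# "Scheduler" key is an assumption about the template, not guaranteed here.
def _get_cluster_scheduler(stack_name):
    params = get_stack(stack_name).get("Parameters", [])
    return get_cfn_param(params, "Scheduler")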
def get_efs_mount_target_id(efs_fs_id, avail_zone):
"""
Search for a Mount Target Id in given availability zone for the given EFS file system id.
:param efs_fs_id: EFS file system Id
:param avail_zone: Availability zone to verify
:return: the mount_target_id or None
"""
mount_target_id = None
if efs_fs_id:
mount_targets = boto3.client("efs").describe_mount_targets(FileSystemId=efs_fs_id)
for mount_target in mount_targets.get("MountTargets"):
            # Check whether an existing mount target is already in the stack's availability zone
mount_target_subnet = mount_target.get("SubnetId")
if avail_zone == get_avail_zone(mount_target_subnet):
mount_target_id = mount_target.get("MountTargetId")
return mount_target_id
def get_latest_alinux_ami_id():
"""Get latest alinux ami id."""
try:
alinux_ami_id = (
boto3.client("ssm")
.get_parameters_by_path(Path="/aws/service/ami-amazon-linux-latest")
.get("Parameters")[0]
.get("Value")
)
except ClientError as e:
error("Unable to retrieve Amazon Linux AMI id.\n{0}".format(e.response.get("Error").get("Message")))
return alinux_ami_id
def list_ec2_instance_types():
"""Return a list of all the instance types available on EC2, independent by the region."""
return boto3.client("ec2").meta.service_model.shape_for("InstanceType").enum
def get_master_server_id(stack_name):
"""Return the physical id of the master server, or [] if no master server."""
try:
resources = boto3.client("cloudformation").describe_stack_resource(
StackName=stack_name, LogicalResourceId="MasterServer"
)
return resources.get("StackResourceDetail").get("PhysicalResourceId")
except ClientError as e:
error(e.response.get("Error").get("Message"))
def _get_master_server_ip(stack_name):
"""
Get the IP Address of the MasterServer.
    :param stack_name: The name of the cloudformation stack
    :return: private/public ip address
"""
ec2 = boto3.client("ec2")
master_id = get_master_server_id(stack_name)
if not master_id:
error("MasterServer not running. Can't SSH")
instance = ec2.describe_instances(InstanceIds=[master_id]).get("Reservations")[0].get("Instances")[0]
ip_address = instance.get("PublicIpAddress")
if ip_address is None:
ip_address = instance.get("PrivateIpAddress")
state = instance.get("State").get("Name")
if state != "running" or ip_address is None:
error("MasterServer: %s\nCannot get ip address.", state.upper())
return ip_address
| 35.714592 | 119 | 0.672715 |
ed75ef3dbcd90991f3b2e3a5c73442983622bbb5
| 452 |
py
|
Python
|
thinkutils_plus/eventbus/sample/myeventbus.py
|
ThinkmanWang/thinkutils_plus
|
65d56a1a0cfce22dff08a4f0baea6b4eb08a2e35
|
[
"MIT"
] | null | null | null |
thinkutils_plus/eventbus/sample/myeventbus.py
|
ThinkmanWang/thinkutils_plus
|
65d56a1a0cfce22dff08a4f0baea6b4eb08a2e35
|
[
"MIT"
] | null | null | null |
thinkutils_plus/eventbus/sample/myeventbus.py
|
ThinkmanWang/thinkutils_plus
|
65d56a1a0cfce22dff08a4f0baea6b4eb08a2e35
|
[
"MIT"
] | null | null | null |
__author__ = 'Xsank'
import time
from thinkutils_plus.eventbus.eventbus import EventBus
from myevent import GreetEvent
from myevent import ByeEvent
from mylistener import MyListener
if __name__=="__main__":
eventbus=EventBus()
eventbus.register(MyListener())
ge=GreetEvent('world')
be=ByeEvent('world')
eventbus.async_post(be)
eventbus.post(ge)
time.sleep(0.1)
eventbus.unregister(MyListener())
eventbus.destroy()
| 23.789474 | 54 | 0.743363 |
ed7707a9a93d2eb459c06d85459c2db5718ad3cc
| 3,963 |
py
|
Python
|
tools/telemetry/telemetry/core/platform/android_device_unittest.py
|
kjthegod/chromium
|
cf940f7f418436b77e15b1ea23e6fa100ca1c91a
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1 |
2019-11-28T10:46:52.000Z
|
2019-11-28T10:46:52.000Z
|
tools/telemetry/telemetry/core/platform/android_device_unittest.py
|
kjthegod/chromium
|
cf940f7f418436b77e15b1ea23e6fa100ca1c91a
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
tools/telemetry/telemetry/core/platform/android_device_unittest.py
|
kjthegod/chromium
|
cf940f7f418436b77e15b1ea23e6fa100ca1c91a
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 2 |
2015-03-27T11:15:39.000Z
|
2016-08-17T14:19:56.000Z
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from telemetry import benchmark
from telemetry.core import browser_options
from telemetry.core.platform import android_device
from telemetry.core.platform import android_platform_backend
from telemetry.unittest_util import system_stub
| 39.63 | 78 | 0.7459 |
ed7911d27c0fa532add30880dc5c7b6aaf924408
| 1,265 |
py
|
Python
|
logger.py
|
bekaaa/xgboost_tuner
|
2d93f6cc751b3a8778420a88caf73fd1dc8ef2ce
|
[
"MIT"
] | null | null | null |
logger.py
|
bekaaa/xgboost_tuner
|
2d93f6cc751b3a8778420a88caf73fd1dc8ef2ce
|
[
"MIT"
] | null | null | null |
logger.py
|
bekaaa/xgboost_tuner
|
2d93f6cc751b3a8778420a88caf73fd1dc8ef2ce
|
[
"MIT"
] | 1 |
2019-03-16T14:30:07.000Z
|
2019-03-16T14:30:07.000Z
|
#! /usr/bin/env python
import logging
#---------------------------------------
| 31.625 | 93 | 0.622925 |
ed79b8872d0353b944045d77a3b550a09342bdbf
| 5,536 |
py
|
Python
|
baselines/prep_baseline.py
|
lessleslie/slm-code-generation
|
017ac0828faf3467e9f85883e27be09ec3898b14
|
[
"MIT"
] | 64 |
2020-06-23T06:27:42.000Z
|
2022-03-30T07:44:52.000Z
|
baselines/prep_baseline.py
|
lessleslie/slm-code-generation
|
017ac0828faf3467e9f85883e27be09ec3898b14
|
[
"MIT"
] | 11 |
2020-07-14T23:29:31.000Z
|
2021-09-17T15:17:49.000Z
|
baselines/prep_baseline.py
|
tech-srl/slm-code-generation
|
15fe4e1df82e49587f725577f870ca12dc42903a
|
[
"MIT"
] | 6 |
2020-07-09T08:42:04.000Z
|
2021-03-02T14:35:31.000Z
|
import json
import multiprocessing as mp
import re
from argparse import ArgumentParser
from enum import Enum, auto
import javalang
from functools import partial
PRED_TOKEN = 'PRED'
modifiers = ['public', 'private', 'protected', 'static']
target_type = TargetType.seq
RE_WORDS = re.compile(r'''
# Find words in a string. Order matters!
[A-Z]+(?=[A-Z][a-z]) | # All upper case before a capitalized word
[A-Z]?[a-z]+ | # Capitalized words / all lower case
[A-Z]+ | # All upper case
\d+ | # Numbers
_ |
\" |
.+
''', re.VERBOSE)
TREE_SPLIT = re.compile(r'([(),])')
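# Behaviour of the two patterns above (hand-checked; the rest of the pipeline is
# elided in this file):
#   RE_WORDS.findall("getHTMLParser") -> ['get', 'HTML', 'Parser']
#   TREE_SPLIT.split("f(a,b)")        -> ['f', '(', 'a', ',', 'b', ')', '']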
if __name__ == '__main__':
parser = ArgumentParser()
parser.add_argument("-trd", "--train_data", dest="train_data_path",
help="path to training data file", required=True)
parser.add_argument("-ted", "--test_data", dest="test_data_path",
help="path to test data file", required=True)
parser.add_argument("-vd", "--val_data", dest="val_data_path",
help="path to validation data file", required=True)
parser.add_argument("-o", "--output_name", dest="output_name",
help="output name - the base name for the created dataset", metavar="FILE", required=True,
default='data')
parser.add_argument("--target_type", dest="target_type", type=TargetType.from_string, choices=list(TargetType), required=True)
parser.add_argument("--max_targets", dest="max_targets", type=int, required=False, default=40)
parser.add_argument("--max_nodes", dest="max_nodes", type=int, required=False, default=None)
parser.add_argument('--local', action='store_true')
args = parser.parse_args()
train_data_path = args.train_data_path
test_data_path = args.test_data_path
val_data_path = args.val_data_path
for data_file_path, data_role in zip([train_data_path, test_data_path, val_data_path], ['train', 'test', 'val']):
process_file(file_path=data_file_path, data_file_role=data_role, dataset_name=args.output_name,
target_type=args.target_type, max_targets=args.max_targets, max_nodes=args.max_nodes)
| 40.408759 | 146 | 0.629516 |
ed79d1c413293473a8ee91b105de6df7218762d5
| 1,865 |
py
|
Python
|
var/spack/repos/builtin/packages/r-multicool/package.py
|
varioustoxins/spack
|
cab0e4cb240f34891a6d753f3393e512f9a99e9a
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null |
var/spack/repos/builtin/packages/r-multicool/package.py
|
varioustoxins/spack
|
cab0e4cb240f34891a6d753f3393e512f9a99e9a
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 6 |
2022-01-08T08:41:11.000Z
|
2022-03-14T19:28:07.000Z
|
var/spack/repos/builtin/packages/r-multicool/package.py
|
foeroyingur/spack
|
5300cbbb2e569190015c72d0970d25425ea38647
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null |
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
| 51.805556 | 97 | 0.759249 |
ed7af640d287226ceb10c1b2fceda155d15712f4
| 7,426 |
py
|
Python
|
updatetranslations.py
|
erincerys/ergo
|
0aeedcdcccb5348d8eedb5faa6a0536d93ca3ae3
|
[
"MIT"
] | 1,122 |
2017-06-15T05:44:52.000Z
|
2021-05-26T16:27:43.000Z
|
updatetranslations.py
|
erincerys/ergo
|
0aeedcdcccb5348d8eedb5faa6a0536d93ca3ae3
|
[
"MIT"
] | 1,031 |
2017-06-18T13:57:51.000Z
|
2021-05-26T19:51:37.000Z
|
updatetranslations.py
|
erincerys/ergo
|
0aeedcdcccb5348d8eedb5faa6a0536d93ca3ae3
|
[
"MIT"
] | 113 |
2017-06-21T18:32:53.000Z
|
2021-05-26T13:12:46.000Z
|
#!/usr/bin/env python3
# updatetranslations.py
#
# tl;dr this script updates our translation file with the newest, coolest strings we've added!
# it manually searches the source code, extracts strings and then updates the language files.
# Written in 2018 by Daniel Oaks <[email protected]>
#
# To the extent possible under law, the author(s) have dedicated all copyright
# and related and neighboring rights to this software to the public domain
# worldwide. This software is distributed without any warranty.
#
# You should have received a copy of the CC0 Public Domain Dedication along
# with this software. If not, see
# <http://creativecommons.org/publicdomain/zero/1.0/>.
"""updatetranslations.py
Usage:
updatetranslations.py run <irc-dir> <languages-dir>
updatetranslations.py --version
updatetranslations.py (-h | --help)
Options:
<irc-dir> Oragono's irc subdirectory where the Go code is kept.
<languages-dir> Languages directory."""
import os
import re
import json
from docopt import docopt
import yaml
ignored_strings = [
'none', 'saset'
]
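def extract_strings(root, filename_filter, pattern):
    """Illustrative sketch (not wired into the script): the five extraction
    blocks under __main__ all share this shape -- walk root, regex out strings
    from matching files, and drop anything in ignored_strings."""
    found = []
    for subdir, dirs, files in os.walk(root):
        for fname in files:
            if filename_filter(fname):
                content = open(subdir + os.sep + fname, 'r', encoding='UTF-8').read()
                for match in re.findall(pattern, content):
                    if match not in found and match not in ignored_strings:
                        found.append(match)
    return found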
if __name__ == '__main__':
arguments = docopt(__doc__, version="0.1.0")
if arguments['run']:
# general IRC strings
irc_strings = []
for subdir, dirs, files in os.walk(arguments['<irc-dir>']):
for fname in files:
filepath = subdir + os.sep + fname
if filepath.endswith('.go'):
content = open(filepath, 'r', encoding='UTF-8').read()
matches = re.findall(r'\.t\("((?:[^"]|\\")+)"\)', content)
for match in matches:
if match not in irc_strings:
irc_strings.append(match)
matches = re.findall(r'\.t\(\`([^\`]+)\`\)', content)
for match in matches:
if match not in irc_strings:
irc_strings.append(match)
for s in ignored_strings:
try:
irc_strings.remove(s)
except ValueError:
# ignore any that don't exist
...
print("irc strings:", len(irc_strings))
with open(os.path.join(arguments['<languages-dir>'], 'example', 'irc.lang.json'), 'w') as f:
f.write(json.dumps({k:k for k in irc_strings}, sort_keys=True, indent=2, separators=(',', ': ')))
f.write('\n')
for string in irc_strings:
if 1 < string.count('%s') + string.count('%d') + string.count('%f'):
print(' confirm:', string)
# help entries
help_strings = []
for subdir, dirs, files in os.walk(arguments['<irc-dir>']):
for fname in files:
filepath = subdir + os.sep + fname
if fname == 'help.go':
content = open(filepath, 'r', encoding='UTF-8').read()
matches = re.findall(r'\`([^\`]+)\`', content)
for match in matches:
if '\n' in match and match not in help_strings:
help_strings.append(match)
for s in ignored_strings:
try:
help_strings.remove(s)
except ValueError:
# ignore any that don't exist
...
print("help strings:", len(help_strings))
with open(os.path.join(arguments['<languages-dir>'], 'example', 'help.lang.json'), 'w') as f:
f.write(json.dumps({k:k for k in help_strings}, sort_keys=True, indent=2, separators=(',', ': ')))
f.write('\n')
for string in help_strings:
if 1 < string.count('%s') + string.count('%d') + string.count('%f'):
print(' confirm:', string.split('\n')[0])
# nickserv help entries
help_strings = []
for subdir, dirs, files in os.walk(arguments['<irc-dir>']):
for fname in files:
filepath = subdir + os.sep + fname
if fname == 'nickserv.go':
content = open(filepath, 'r', encoding='UTF-8').read()
matches = re.findall(r'\`([^\`]+)\`', content)
for match in matches:
if match not in help_strings:
help_strings.append(match)
for s in ignored_strings:
try:
help_strings.remove(s)
except ValueError:
# ignore any that don't exist
...
print("nickserv help strings:", len(help_strings))
with open(os.path.join(arguments['<languages-dir>'], 'example', 'nickserv.lang.json'), 'w') as f:
f.write(json.dumps({k:k for k in help_strings}, sort_keys=True, indent=2, separators=(',', ': ')))
f.write('\n')
for string in help_strings:
if 1 < string.count('%s') + string.count('%d') + string.count('%f'):
print(' confirm:', string)
# chanserv help entries
help_strings = []
for subdir, dirs, files in os.walk(arguments['<irc-dir>']):
for fname in files:
filepath = subdir + os.sep + fname
if fname == 'chanserv.go':
content = open(filepath, 'r', encoding='UTF-8').read()
matches = re.findall(r'\`([^\`]+)\`', content)
for match in matches:
if match not in help_strings:
help_strings.append(match)
for s in ignored_strings:
try:
help_strings.remove(s)
except ValueError:
# ignore any that don't exist
...
print("chanserv help strings:", len(help_strings))
with open(os.path.join(arguments['<languages-dir>'], 'example', 'chanserv.lang.json'), 'w') as f:
f.write(json.dumps({k:k for k in help_strings}, sort_keys=True, indent=2, separators=(',', ': ')))
f.write('\n')
for string in help_strings:
if 1 < string.count('%s') + string.count('%d') + string.count('%f'):
print(' confirm:', string)
# hostserv help entries
help_strings = []
for subdir, dirs, files in os.walk(arguments['<irc-dir>']):
for fname in files:
filepath = subdir + os.sep + fname
if fname == 'hostserv.go':
content = open(filepath, 'r', encoding='UTF-8').read()
matches = re.findall(r'\`([^\`]+)\`', content)
for match in matches:
if match not in help_strings:
help_strings.append(match)
for s in ignored_strings:
try:
help_strings.remove(s)
except ValueError:
# ignore any that don't exist
...
print("hostserv help strings:", len(help_strings))
with open(os.path.join(arguments['<languages-dir>'], 'example', 'hostserv.lang.json'), 'w') as f:
f.write(json.dumps({k:k for k in help_strings}, sort_keys=True, indent=2, separators=(',', ': ')))
f.write('\n')
for string in help_strings:
if 1 < string.count('%s') + string.count('%d') + string.count('%f'):
print(' confirm:', string)
| 37.887755 | 110 | 0.524239 |
ed7b8022569fdf95c3598fcd38e2d1c4182f053f
| 1,437 |
py
|
Python
|
processing_tools/number_of_tenants.py
|
apanda/modeling
|
e032abd413bb3325ad6e5995abadeef74314f383
|
[
"BSD-3-Clause"
] | 3 |
2017-08-30T05:24:11.000Z
|
2021-02-25T12:17:19.000Z
|
processing_tools/number_of_tenants.py
|
apanda/modeling
|
e032abd413bb3325ad6e5995abadeef74314f383
|
[
"BSD-3-Clause"
] | null | null | null |
processing_tools/number_of_tenants.py
|
apanda/modeling
|
e032abd413bb3325ad6e5995abadeef74314f383
|
[
"BSD-3-Clause"
] | 2 |
2017-11-15T07:00:48.000Z
|
2020-12-13T17:29:03.000Z
|
import sys
from collections import defaultdict
if __name__ == "__main__":
Process(sys.argv[1:])
| 35.04878 | 68 | 0.526792 |
ed7d1c9bb5710045f4cb95dccf219d3b5c6faaa9
| 2,564 |
py
|
Python
|
pyfisher/mpi.py
|
borisbolliet/pyfisher
|
715e192baa4fadbff754416d2b001c3708c9276c
|
[
"BSD-3-Clause"
] | 7 |
2017-12-06T18:16:13.000Z
|
2021-02-09T19:25:26.000Z
|
pyfisher/mpi.py
|
borisbolliet/pyfisher
|
715e192baa4fadbff754416d2b001c3708c9276c
|
[
"BSD-3-Clause"
] | 34 |
2016-01-25T19:48:07.000Z
|
2021-02-03T22:34:09.000Z
|
pyfisher/mpi.py
|
borisbolliet/pyfisher
|
715e192baa4fadbff754416d2b001c3708c9276c
|
[
"BSD-3-Clause"
] | 10 |
2017-02-01T15:14:22.000Z
|
2021-02-16T01:34:16.000Z
|
from __future__ import print_function
import numpy as np
import os,sys,time
"""
Copied from orphics.mpi
"""
try:
disable_mpi_env = os.environ['DISABLE_MPI']
disable_mpi = True if disable_mpi_env.lower().strip() == "true" else False
except:
disable_mpi = False
"""
Use the below cleanup stuff only for intel-mpi!
If you use it on openmpi, you will have no traceback for errors
causing hours of endless confusion and frustration! - Sincerely, past frustrated Mat
"""
# From Sigurd's enlib.mpi:
# Uncaught exceptions don't cause mpi to abort. This can lead to thousands of
# wasted CPU hours
# def cleanup(type, value, traceback):
# sys.__excepthook__(type, value, traceback)
# MPI.COMM_WORLD.Abort(1)
# sys.excepthook = cleanup
try:
if disable_mpi: raise
from mpi4py import MPI
except:
if not(disable_mpi): print("WARNING: mpi4py could not be loaded. Falling back to fake MPI. This means that if you submitted multiple processes, they will all be assigned the same rank of 0, and they are potentially doing the same thing.")
MPI = template()
MPI.COMM_WORLD = fakeMpiComm()
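# template/fakeMpiComm are defined earlier in the original module (elided here);
# per the warning above, the fake comm pins every process to rank 0, so a
# minimal stand-in only needs Get_rank() -> 0 and Get_size() -> 1.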
| 29.813953 | 242 | 0.697738 |
ed7d572858561992a56ab8312f08925dad1d2745
| 6,260 |
py
|
Python
|
ebay.py
|
SpironoZeppeli/Magic-The-Scannening
|
93c595a4c98fb725a79eeddfaba99cb0409d41fb
|
[
"MIT"
] | null | null | null |
ebay.py
|
SpironoZeppeli/Magic-The-Scannening
|
93c595a4c98fb725a79eeddfaba99cb0409d41fb
|
[
"MIT"
] | null | null | null |
ebay.py
|
SpironoZeppeli/Magic-The-Scannening
|
93c595a4c98fb725a79eeddfaba99cb0409d41fb
|
[
"MIT"
] | null | null | null |
import requests
import urllib.request
import urllib.parse
import PIL
import re
import configparser
import json
from PIL import Image
from ebaysdk.trading import Connection as Trading
from ebaysdk.exception import ConnectionError
from yaml import load
from PyQt5.QtWidgets import QMessageBox
| 42.013423 | 119 | 0.546486 |
ed7de0c98f16f1e656d840a2c9ad1e60a28cfa7f
| 3,175 |
py
|
Python
|
bot/exts/github/github.py
|
v1nam/gurkbot
|
a0f5e05a5f65e6169accc90271fca58f4df211fe
|
[
"MIT"
] | 24 |
2020-12-18T07:26:14.000Z
|
2022-03-30T22:56:49.000Z
|
bot/exts/github/github.py
|
v1nam/gurkbot
|
a0f5e05a5f65e6169accc90271fca58f4df211fe
|
[
"MIT"
] | 143 |
2020-12-18T09:13:51.000Z
|
2022-03-02T19:27:44.000Z
|
bot/exts/github/github.py
|
v1nam/gurkbot
|
a0f5e05a5f65e6169accc90271fca58f4df211fe
|
[
"MIT"
] | 44 |
2020-12-18T09:05:29.000Z
|
2022-03-02T20:06:23.000Z
|
import typing
from bot.constants import BOT_REPO_URL
from discord import Embed
from discord.ext import commands
from discord.ext.commands.cooldowns import BucketType
from . import _issues, _profile, _source
def setup(bot: commands.Bot) -> None:
"""Load the Github cog."""
bot.add_cog(Github(bot))
| 34.89011 | 88 | 0.640945 |
ed7de7f5235ff8dd0c5f7e122b59415ab3622dc6
| 1,154 |
py
|
Python
|
log/slack_sender.py
|
SmashKs/BarBarian
|
b308dcb9e24ec621abbbc121847923e14e5b6a4b
|
[
"MIT"
] | null | null | null |
log/slack_sender.py
|
SmashKs/BarBarian
|
b308dcb9e24ec621abbbc121847923e14e5b6a4b
|
[
"MIT"
] | 2 |
2020-06-05T19:25:24.000Z
|
2021-06-10T20:56:57.000Z
|
log/slack_sender.py
|
SmashKs/BarBarian
|
b308dcb9e24ec621abbbc121847923e14e5b6a4b
|
[
"MIT"
] | null | null | null |
from slackclient import SlackClient
from external import SLACK_API_KEY
if __name__ == '__main__':
SlackBot().send_msg_to('hello world!!')
| 32.971429 | 70 | 0.559792 |
ed7e4d3da4d7bdad5eca61e8c5160dfe0d14608f
| 2,379 |
py
|
Python
|
src/pytezos/block/forge.py
|
miracle2k/pytezos
|
e6b99f00f342d9a05b0c36a9883040961fd6d58e
|
[
"MIT"
] | 98 |
2019-02-07T16:33:38.000Z
|
2022-03-31T15:53:41.000Z
|
src/pytezos/block/forge.py
|
miracle2k/pytezos
|
e6b99f00f342d9a05b0c36a9883040961fd6d58e
|
[
"MIT"
] | 152 |
2019-05-20T16:38:56.000Z
|
2022-03-30T14:24:38.000Z
|
src/pytezos/block/forge.py
|
miracle2k/pytezos
|
e6b99f00f342d9a05b0c36a9883040961fd6d58e
|
[
"MIT"
] | 34 |
2019-07-25T12:03:51.000Z
|
2021-11-11T22:23:38.000Z
|
from typing import Any, Dict, List, Tuple
from pytezos.michelson.forge import forge_array, forge_base58, optimize_timestamp
| 33.985714 | 87 | 0.666246 |
ed7f467835f32242a9650f226b4a5ad9d6d87af4
| 5,321 |
py
|
Python
|
python/paddle/fluid/tests/unittests/test_roi_pool_op.py
|
jichangjichang/Paddle
|
4fa3cee5499c6df0ad6043b0cfa220d09f2034e8
|
[
"Apache-2.0"
] | 9 |
2017-12-04T02:58:01.000Z
|
2020-12-03T14:46:30.000Z
|
python/paddle/fluid/tests/unittests/test_roi_pool_op.py
|
jichangjichang/Paddle
|
4fa3cee5499c6df0ad6043b0cfa220d09f2034e8
|
[
"Apache-2.0"
] | 7 |
2017-12-05T20:29:08.000Z
|
2018-10-15T08:57:40.000Z
|
python/paddle/fluid/tests/unittests/test_roi_pool_op.py
|
jichangjichang/Paddle
|
4fa3cee5499c6df0ad6043b0cfa220d09f2034e8
|
[
"Apache-2.0"
] | 6 |
2018-03-19T22:38:46.000Z
|
2019-11-01T22:28:27.000Z
|
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
import math
import sys
import paddle.compat as cpt
from op_test import OpTest
if __name__ == '__main__':
unittest.main()
| 37.20979 | 80 | 0.543131 |
ed7fc7d6fb252e1b75bf1b904b18ffd861a8c42f
| 1,162 |
py
|
Python
|
testproject/testapp/tests/__init__.py
|
movermeyer/django-firestone
|
e045089f6ff4a6686633f9c5909c314a010bd4a0
|
[
"WTFPL"
] | 1 |
2017-03-08T22:58:35.000Z
|
2017-03-08T22:58:35.000Z
|
testproject/testapp/tests/__init__.py
|
movermeyer/django-firestone
|
e045089f6ff4a6686633f9c5909c314a010bd4a0
|
[
"WTFPL"
] | null | null | null |
testproject/testapp/tests/__init__.py
|
movermeyer/django-firestone
|
e045089f6ff4a6686633f9c5909c314a010bd4a0
|
[
"WTFPL"
] | 1 |
2018-03-05T17:40:55.000Z
|
2018-03-05T17:40:55.000Z
|
from test_proxy import *
from test_serializers import *
from test_deserializers import *
from test_exceptions import *
from test_authentication import *
from test_whole_flow import *
from test_handlers_metaclass_magic import *
from test_handlers_serialize_to_python import *
from test_handlers_is_method_allowed import *
from test_handlers_data_control import *
from test_handlers_package import *
from test_handlers_finalize_pending import *
from test_handlers_cleanse_body import *
from test_handlers_validate import *
from test_handlers_clean_models import *
from test_handlers_get import *
from test_handlers_is_catastrophic import *
from test_handlers_post import *
from test_handlers_put import *
from test_handlers_delete import *
from test_handlers_patch_response import *
from test_handlers_authentication_hook import *
from test_handlers_filter_data import *
from test_handlers_order import *
from test_handlers_order_data import *
from test_handlers_paginate import *
from test_handlers_paginate_data import *
from test_handlers_inject_data_hook import *
from test_handlers_handle_exception import *
from test_handlers_deserialize_body import *
| 31.405405 | 47 | 0.865749 |
ed801190784fa836d2752af1e6b10b54a93fa361
| 2,518 |
py
|
Python
|
Day20.py
|
SheepiCagio/Advent-of-Code-2021
|
52f0035da2cb258810d8947cbf56b51b65a9fe8b
|
[
"MIT"
] | null | null | null |
Day20.py
|
SheepiCagio/Advent-of-Code-2021
|
52f0035da2cb258810d8947cbf56b51b65a9fe8b
|
[
"MIT"
] | null | null | null |
Day20.py
|
SheepiCagio/Advent-of-Code-2021
|
52f0035da2cb258810d8947cbf56b51b65a9fe8b
|
[
"MIT"
] | null | null | null |
import numpy as np
raw = open("inputs/20.txt","r").readlines()
input_array= [(i.replace('\n', '').replace('.','0').replace('#', '1')) for i in raw]
test_raw = open("inputs/20_test.txt","r").readlines()
test_array= [(i.replace('\n', '').replace('.','0').replace('#', '1')) for i in test_raw]
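# The replace chains above turn the puzzle's '.'/'#' pixels into '0'/'1' so each
# row reads as a binary string for the (elided) pictureEnhancer routine.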
#pictureEnhancer(test_array,2)
#pictureEnhancer(input_array,2)
pictureEnhancer(test_array,50)
pictureEnhancer(input_array,50)
| 35.464789 | 110 | 0.635822 |
ed8134024179e7e4607f23c5ef95e9da1da3820b
| 1,674 |
py
|
Python
|
questions/53349623/main.py
|
sesu089/stackoverflow
|
6fae69be6fa74fba9d554e6b5f387e5d3c1aad73
|
[
"MIT"
] | 302 |
2017-03-04T00:05:23.000Z
|
2022-03-28T22:51:29.000Z
|
questions/53349623/main.py
|
sesu089/stackoverflow
|
6fae69be6fa74fba9d554e6b5f387e5d3c1aad73
|
[
"MIT"
] | 30 |
2017-12-02T19:26:43.000Z
|
2022-03-28T07:40:36.000Z
|
questions/53349623/main.py
|
sesu089/stackoverflow
|
6fae69be6fa74fba9d554e6b5f387e5d3c1aad73
|
[
"MIT"
] | 388 |
2017-07-04T16:53:12.000Z
|
2022-03-18T22:20:19.000Z
|
import sys
from PyQt5 import QtCore, QtGui, QtWidgets
if __name__ == '__main__':
app = QtWidgets.QApplication(sys.argv)
demo = Demo()
demo.show()
sys.exit(app.exec_())
| 32.823529 | 84 | 0.642772 |
ed81492b65a1f232ede7d038b4670a415f3f191c
| 1,638 |
py
|
Python
|
tests/test_error_descriptions_from_raises.py
|
iterait/apistrap
|
e83460fa97f13a95a928971b0d2defe0ac611911
|
[
"MIT"
] | 6 |
2018-09-06T18:32:48.000Z
|
2021-05-28T01:03:32.000Z
|
tests/test_error_descriptions_from_raises.py
|
iterait/apistrap
|
e83460fa97f13a95a928971b0d2defe0ac611911
|
[
"MIT"
] | 53 |
2018-09-06T16:16:53.000Z
|
2021-05-19T14:36:58.000Z
|
tests/test_error_descriptions_from_raises.py
|
iterait/apistrap
|
e83460fa97f13a95a928971b0d2defe0ac611911
|
[
"MIT"
] | null | null | null |
import pytest
from apistrap.flask import FlaskApistrap
from apistrap.schemas import ErrorResponse
def test_error_descriptions_from_raises(app_with_raises, client):
response = client.get("/spec.json")
assert response.json["paths"]["/"]["get"]["responses"] == {
"500": {
"description": "KeyError description",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
}
}
def test_http_code_from_handler(app_with_raises_and_handler, client):
response = client.get("/spec.json")
assert response.json["paths"]["/"]["get"]["responses"] == {
"515": {
"description": "KeyError description",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
}
}
| 23.4 | 69 | 0.527473 |
ed82dc9fed173aeada3cbab76076165a4c9b3932
| 1,126 |
py
|
Python
|
projects/api/UsersApi.py
|
chamathshashika/projects-python-wrappers
|
33e9f6bccba16a581b115c582033a93d43bb159c
|
[
"MIT"
] | null | null | null |
projects/api/UsersApi.py
|
chamathshashika/projects-python-wrappers
|
33e9f6bccba16a581b115c582033a93d43bb159c
|
[
"MIT"
] | null | null | null |
projects/api/UsersApi.py
|
chamathshashika/projects-python-wrappers
|
33e9f6bccba16a581b115c582033a93d43bb159c
|
[
"MIT"
] | null | null | null |
#$Id$
from projects.util.ZohoHttpClient import ZohoHttpClient
from projects.api.Api import Api
from projects.parser.UsersParser import UsersParser
base_url = Api().base_url
zoho_http_client = ZohoHttpClient()
parser = UsersParser()
| 25.590909 | 101 | 0.617229 |
ed82e608ff9e5d51a3d3e7cab08afa27210afbdb
| 11,340 |
py
|
Python
|
useless/tuck_arms.py
|
leader1313/Baxter_teleoperation_system
|
856d999acd73e6c1dc15a342cb6c4fcd1a482863
|
[
"Apache-2.0"
] | null | null | null |
useless/tuck_arms.py
|
leader1313/Baxter_teleoperation_system
|
856d999acd73e6c1dc15a342cb6c4fcd1a482863
|
[
"Apache-2.0"
] | 2 |
2019-10-15T07:24:24.000Z
|
2019-10-15T07:28:19.000Z
|
useless/tuck_arms.py
|
leader1313/Baxter_teleoperation_system
|
856d999acd73e6c1dc15a342cb6c4fcd1a482863
|
[
"Apache-2.0"
] | 1 |
2020-09-15T12:37:13.000Z
|
2020-09-15T12:37:13.000Z
|
#!/usr/bin/env python
# Copyright (c) 2013-2015, Rethink Robotics
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the Rethink Robotics nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
Tool to tuck/untuck Baxter's arms to/from the shipping pose
"""
import argparse
from copy import deepcopy
import rospy
from std_msgs.msg import (
Empty,
Bool,
)
import baxter_interface
from baxter_core_msgs.msg import (
CollisionAvoidanceState,
)
from baxter_interface import CHECK_VERSION
if __name__ == "__main__":
main()
| 42.47191 | 79 | 0.568078 |
ed8397333a807416a6c46033a45a38d2847fa564
| 203 |
py
|
Python
|
django-system/src/tsm_api/serializers.py
|
Deepak-Kharah/ioe-project
|
6f83ddcfced25130e0f05c3380dde97429d1f224
|
[
"MIT"
] | null | null | null |
django-system/src/tsm_api/serializers.py
|
Deepak-Kharah/ioe-project
|
6f83ddcfced25130e0f05c3380dde97429d1f224
|
[
"MIT"
] | null | null | null |
django-system/src/tsm_api/serializers.py
|
Deepak-Kharah/ioe-project
|
6f83ddcfced25130e0f05c3380dde97429d1f224
|
[
"MIT"
] | null | null | null |
from rest_framework import serializers
from .models import Measurement
| 20.3 | 57 | 0.748768 |
ed83e8c908ff960c5bf16835dd114bff6b5f51a1
| 123 |
py
|
Python
|
src/GalaxyDynamicsFromVc/units.py
|
pabferde/galaxy_dynamics_from_Vc
|
7232f7bfd6d2338bcec3bbf87478f7a482c749ef
|
[
"MIT"
] | null | null | null |
src/GalaxyDynamicsFromVc/units.py
|
pabferde/galaxy_dynamics_from_Vc
|
7232f7bfd6d2338bcec3bbf87478f7a482c749ef
|
[
"MIT"
] | null | null | null |
src/GalaxyDynamicsFromVc/units.py
|
pabferde/galaxy_dynamics_from_Vc
|
7232f7bfd6d2338bcec3bbf87478f7a482c749ef
|
[
"MIT"
] | null | null | null |
_Msun_kpc3_to_GeV_cm3_factor = 0.3/8.0e6
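# 0.3 GeV/cm^3 (a conventional local dark-matter density) is roughly
# 8.0e6 Msun/kpc^3, so multiplying a density in Msun/kpc^3 by this factor
# converts it to GeV/cm^3.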
| 17.571429 | 45 | 0.821138 |
ed84e43c822be0079982b64c82f1405fcf82f103
| 1,114 |
py
|
Python
|
poll/models/telemetry_models.py
|
mirokrastev/poll-website
|
4f26cce3f838ab05de91f0e1dba34d9bc59927b4
|
[
"MIT"
] | 3 |
2021-07-03T19:05:56.000Z
|
2022-02-02T17:22:17.000Z
|
poll/models/telemetry_models.py
|
mirokrastev/poll-website
|
4f26cce3f838ab05de91f0e1dba34d9bc59927b4
|
[
"MIT"
] | null | null | null |
poll/models/telemetry_models.py
|
mirokrastev/poll-website
|
4f26cce3f838ab05de91f0e1dba34d9bc59927b4
|
[
"MIT"
] | null | null | null |
from django.db import models
from django.contrib.auth import get_user_model
from poll.models.poll_models import Poll
| 27.85 | 91 | 0.72711 |
ed84fba26398a78d3cfbc33d60c7ec3256e7da8a
| 208 |
py
|
Python
|
pcdet/models/backbones_2d/__init__.py
|
HenryLittle/OpenPCDet-HL
|
7dba01750e10d170849314723ec0665782236a70
|
[
"Apache-2.0"
] | null | null | null |
pcdet/models/backbones_2d/__init__.py
|
HenryLittle/OpenPCDet-HL
|
7dba01750e10d170849314723ec0665782236a70
|
[
"Apache-2.0"
] | null | null | null |
pcdet/models/backbones_2d/__init__.py
|
HenryLittle/OpenPCDet-HL
|
7dba01750e10d170849314723ec0665782236a70
|
[
"Apache-2.0"
] | null | null | null |
from .base_bev_backbone import BaseBEVBackbone
from .decouple_bev_backbone import DecoupledBEVBackbone
__all__ = {
'BaseBEVBackbone': BaseBEVBackbone,
'DecoupledBEVBackbone': DecoupledBEVBackbone,
}
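# Typical OpenPCDet-style lookup (sketch; the constructor signature is an
# assumption based on BaseBEVBackbone):
#   backbone_cls = __all__[model_cfg.BACKBONE_2D.NAME]
#   backbone = backbone_cls(model_cfg.BACKBONE_2D, input_channels)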
| 26 | 55 | 0.817308 |
ed86102b88fe53e5292e7840680746dc239293e9
| 4,883 |
py
|
Python
|
test/unit/app/tools/test_select_parameters.py
|
beatrizserrano/galaxy
|
e149d9d32e1bca6c07c38b1a9cdabfee60323610
|
[
"CC-BY-3.0"
] | null | null | null |
test/unit/app/tools/test_select_parameters.py
|
beatrizserrano/galaxy
|
e149d9d32e1bca6c07c38b1a9cdabfee60323610
|
[
"CC-BY-3.0"
] | 6 |
2021-11-11T20:57:49.000Z
|
2021-12-10T15:30:33.000Z
|
test/unit/app/tools/test_select_parameters.py
|
beatrizserrano/galaxy
|
e149d9d32e1bca6c07c38b1a9cdabfee60323610
|
[
"CC-BY-3.0"
] | null | null | null |
from unittest.mock import Mock
import pytest
from galaxy import model
from galaxy.tools.parameters import basic
from .util import BaseParameterTestCase
| 45.635514 | 137 | 0.643662 |
ed874fc7d5498d0b2a5744718551a9a50f16e262
| 141 |
py
|
Python
|
recumpiler/__init__.py
|
Toasterstein/recumpiler
|
390957cfaa8f60ffeb24adb43b91981dd445c6b9
|
[
"MIT"
] | null | null | null |
recumpiler/__init__.py
|
Toasterstein/recumpiler
|
390957cfaa8f60ffeb24adb43b91981dd445c6b9
|
[
"MIT"
] | 8 |
2021-03-23T21:54:29.000Z
|
2021-03-30T23:17:06.000Z
|
recumpiler/__init__.py
|
nklapste/recumpiler
|
80efcfad995211c010f4d962221ab576ca50229f
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""recumpiler
Recompile text to be semi-readable memey garbage.
"""
__version__ = (0, 0, 0)
| 14.1 | 49 | 0.638298 |
ed87f843694726a8943e8ca3120951277107fd9a
| 108,939 |
gyp
|
Python
|
net/net.gyp
|
codenote/chromium-test
|
0637af0080f7e80bf7d20b29ce94c5edc817f390
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
net/net.gyp
|
codenote/chromium-test
|
0637af0080f7e80bf7d20b29ce94c5edc817f390
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
net/net.gyp
|
codenote/chromium-test
|
0637af0080f7e80bf7d20b29ce94c5edc817f390
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1 |
2020-11-04T07:25:45.000Z
|
2020-11-04T07:25:45.000Z
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
'linux_link_kerberos%': 0,
'conditions': [
['chromeos==1 or OS=="android" or OS=="ios"', {
# Disable Kerberos on ChromeOS, Android and iOS, at least for now.
# It needs configuration (krb5.conf and so on).
'use_kerberos%': 0,
}, { # chromeos == 0
'use_kerberos%': 1,
}],
['OS=="android" and target_arch != "ia32"', {
# The way the cache uses mmap() is inefficient on some Android devices.
# If this flag is set, we hackily avoid using mmap() in the disk cache.
# We are pretty confident that mmap-ing the index would not hurt any
# existing x86 android devices, but we cannot be so sure about the
# variety of ARM devices. So enable it for x86 only for now.
'posix_avoid_mmap%': 1,
}, {
'posix_avoid_mmap%': 0,
}],
['OS=="ios"', {
# Websockets and socket stream are not used on iOS.
'enable_websockets%': 0,
# iOS does not use V8.
'use_v8_in_net%': 0,
'enable_built_in_dns%': 0,
}, {
'enable_websockets%': 1,
'use_v8_in_net%': 1,
'enable_built_in_dns%': 1,
}],
],
},
'includes': [
'../build/win_precompile.gypi',
],
'targets': [
{
'target_name': 'net',
'type': '<(component)',
'variables': { 'enable_wexit_time_destructors': 1, },
'dependencies': [
'../base/base.gyp:base',
'../base/base.gyp:base_i18n',
'../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
'../build/temp_gyp/googleurl.gyp:googleurl',
'../crypto/crypto.gyp:crypto',
'../sdch/sdch.gyp:sdch',
'../third_party/icu/icu.gyp:icui18n',
'../third_party/icu/icu.gyp:icuuc',
'../third_party/zlib/zlib.gyp:zlib',
'net_resources',
],
'sources': [
'android/cert_verify_result_android.h',
'android/cert_verify_result_android_list.h',
'android/gurl_utils.cc',
'android/gurl_utils.h',
'android/keystore.cc',
'android/keystore.h',
'android/keystore_openssl.cc',
'android/keystore_openssl.h',
'android/net_jni_registrar.cc',
'android/net_jni_registrar.h',
'android/network_change_notifier_android.cc',
'android/network_change_notifier_android.h',
'android/network_change_notifier_delegate_android.cc',
'android/network_change_notifier_delegate_android.h',
'android/network_change_notifier_factory_android.cc',
'android/network_change_notifier_factory_android.h',
'android/network_library.cc',
'android/network_library.h',
'base/address_family.h',
'base/address_list.cc',
'base/address_list.h',
'base/address_tracker_linux.cc',
'base/address_tracker_linux.h',
'base/auth.cc',
'base/auth.h',
'base/backoff_entry.cc',
'base/backoff_entry.h',
'base/bandwidth_metrics.cc',
'base/bandwidth_metrics.h',
'base/big_endian.cc',
'base/big_endian.h',
'base/cache_type.h',
'base/completion_callback.h',
'base/connection_type_histograms.cc',
'base/connection_type_histograms.h',
'base/crypto_module.h',
'base/crypto_module_nss.cc',
'base/crypto_module_openssl.cc',
'base/data_url.cc',
'base/data_url.h',
'base/directory_lister.cc',
'base/directory_lister.h',
'base/dns_reloader.cc',
'base/dns_reloader.h',
'base/dns_util.cc',
'base/dns_util.h',
'base/escape.cc',
'base/escape.h',
'base/expiring_cache.h',
'base/file_stream.cc',
'base/file_stream.h',
'base/file_stream_context.cc',
'base/file_stream_context.h',
'base/file_stream_context_posix.cc',
'base/file_stream_context_win.cc',
'base/file_stream_metrics.cc',
'base/file_stream_metrics.h',
'base/file_stream_metrics_posix.cc',
'base/file_stream_metrics_win.cc',
'base/file_stream_net_log_parameters.cc',
'base/file_stream_net_log_parameters.h',
'base/file_stream_whence.h',
'base/filter.cc',
'base/filter.h',
'base/int128.cc',
'base/int128.h',
'base/gzip_filter.cc',
'base/gzip_filter.h',
'base/gzip_header.cc',
'base/gzip_header.h',
'base/hash_value.cc',
'base/hash_value.h',
'base/host_mapping_rules.cc',
'base/host_mapping_rules.h',
'base/host_port_pair.cc',
'base/host_port_pair.h',
'base/io_buffer.cc',
'base/io_buffer.h',
'base/ip_endpoint.cc',
'base/ip_endpoint.h',
'base/keygen_handler.cc',
'base/keygen_handler.h',
'base/keygen_handler_mac.cc',
'base/keygen_handler_nss.cc',
'base/keygen_handler_openssl.cc',
'base/keygen_handler_win.cc',
'base/linked_hash_map.h',
'base/load_flags.h',
'base/load_flags_list.h',
'base/load_states.h',
'base/load_states_list.h',
'base/load_timing_info.cc',
'base/load_timing_info.h',
'base/mime_sniffer.cc',
'base/mime_sniffer.h',
'base/mime_util.cc',
'base/mime_util.h',
'base/net_error_list.h',
'base/net_errors.cc',
'base/net_errors.h',
'base/net_errors_posix.cc',
'base/net_errors_win.cc',
'base/net_export.h',
'base/net_log.cc',
'base/net_log.h',
'base/net_log_event_type_list.h',
'base/net_log_source_type_list.h',
'base/net_module.cc',
'base/net_module.h',
'base/net_util.cc',
'base/net_util.h',
'base/net_util_posix.cc',
'base/net_util_win.cc',
'base/network_change_notifier.cc',
'base/network_change_notifier.h',
'base/network_change_notifier_factory.h',
'base/network_change_notifier_linux.cc',
'base/network_change_notifier_linux.h',
'base/network_change_notifier_mac.cc',
'base/network_change_notifier_mac.h',
'base/network_change_notifier_win.cc',
'base/network_change_notifier_win.h',
'base/network_config_watcher_mac.cc',
'base/network_config_watcher_mac.h',
'base/network_delegate.cc',
'base/network_delegate.h',
'base/nss_memio.c',
'base/nss_memio.h',
'base/openssl_private_key_store.h',
'base/openssl_private_key_store_android.cc',
'base/openssl_private_key_store_memory.cc',
'base/platform_mime_util.h',
# TODO(tc): gnome-vfs? xdgmime? /etc/mime.types?
'base/platform_mime_util_linux.cc',
'base/platform_mime_util_mac.mm',
'base/platform_mime_util_win.cc',
'base/prioritized_dispatcher.cc',
'base/prioritized_dispatcher.h',
'base/priority_queue.h',
'base/rand_callback.h',
'base/registry_controlled_domains/registry_controlled_domain.cc',
'base/registry_controlled_domains/registry_controlled_domain.h',
'base/request_priority.h',
'base/sdch_filter.cc',
'base/sdch_filter.h',
'base/sdch_manager.cc',
'base/sdch_manager.h',
'base/static_cookie_policy.cc',
'base/static_cookie_policy.h',
'base/sys_addrinfo.h',
'base/test_data_stream.cc',
'base/test_data_stream.h',
'base/upload_bytes_element_reader.cc',
'base/upload_bytes_element_reader.h',
'base/upload_data.cc',
'base/upload_data.h',
'base/upload_data_stream.cc',
'base/upload_data_stream.h',
'base/upload_element.cc',
'base/upload_element.h',
'base/upload_element_reader.cc',
'base/upload_element_reader.h',
'base/upload_file_element_reader.cc',
'base/upload_file_element_reader.h',
'base/upload_progress.h',
'base/url_util.cc',
'base/url_util.h',
'base/winsock_init.cc',
'base/winsock_init.h',
'base/winsock_util.cc',
'base/winsock_util.h',
'base/zap.cc',
'base/zap.h',
'cert/asn1_util.cc',
'cert/asn1_util.h',
'cert/cert_database.cc',
'cert/cert_database.h',
'cert/cert_database_android.cc',
'cert/cert_database_ios.cc',
'cert/cert_database_mac.cc',
'cert/cert_database_nss.cc',
'cert/cert_database_openssl.cc',
'cert/cert_database_win.cc',
'cert/cert_status_flags.cc',
'cert/cert_status_flags.h',
'cert/cert_trust_anchor_provider.h',
'cert/cert_verifier.cc',
'cert/cert_verifier.h',
'cert/cert_verify_proc.cc',
'cert/cert_verify_proc.h',
'cert/cert_verify_proc_android.cc',
'cert/cert_verify_proc_android.h',
'cert/cert_verify_proc_mac.cc',
'cert/cert_verify_proc_mac.h',
'cert/cert_verify_proc_nss.cc',
'cert/cert_verify_proc_nss.h',
'cert/cert_verify_proc_openssl.cc',
'cert/cert_verify_proc_openssl.h',
'cert/cert_verify_proc_win.cc',
'cert/cert_verify_proc_win.h',
'cert/cert_verify_result.cc',
'cert/cert_verify_result.h',
'cert/crl_set.cc',
'cert/crl_set.h',
'cert/ev_root_ca_metadata.cc',
'cert/ev_root_ca_metadata.h',
'cert/multi_threaded_cert_verifier.cc',
'cert/multi_threaded_cert_verifier.h',
'cert/nss_cert_database.cc',
'cert/nss_cert_database.h',
'cert/pem_tokenizer.cc',
'cert/pem_tokenizer.h',
'cert/single_request_cert_verifier.cc',
'cert/single_request_cert_verifier.h',
'cert/test_root_certs.cc',
'cert/test_root_certs.h',
'cert/test_root_certs_mac.cc',
'cert/test_root_certs_nss.cc',
'cert/test_root_certs_openssl.cc',
'cert/test_root_certs_android.cc',
'cert/test_root_certs_win.cc',
'cert/x509_cert_types.cc',
'cert/x509_cert_types.h',
'cert/x509_cert_types_mac.cc',
'cert/x509_cert_types_win.cc',
'cert/x509_certificate.cc',
'cert/x509_certificate.h',
'cert/x509_certificate_ios.cc',
'cert/x509_certificate_mac.cc',
'cert/x509_certificate_net_log_param.cc',
'cert/x509_certificate_net_log_param.h',
'cert/x509_certificate_nss.cc',
'cert/x509_certificate_openssl.cc',
'cert/x509_certificate_win.cc',
'cert/x509_util.h',
'cert/x509_util.cc',
'cert/x509_util_ios.cc',
'cert/x509_util_ios.h',
'cert/x509_util_mac.cc',
'cert/x509_util_mac.h',
'cert/x509_util_nss.cc',
'cert/x509_util_nss.h',
'cert/x509_util_openssl.cc',
'cert/x509_util_openssl.h',
'cookies/canonical_cookie.cc',
'cookies/canonical_cookie.h',
'cookies/cookie_monster.cc',
'cookies/cookie_monster.h',
'cookies/cookie_options.h',
'cookies/cookie_store.cc',
'cookies/cookie_store.h',
'cookies/cookie_util.cc',
'cookies/cookie_util.h',
'cookies/parsed_cookie.cc',
'cookies/parsed_cookie.h',
'disk_cache/addr.cc',
'disk_cache/addr.h',
'disk_cache/backend_impl.cc',
'disk_cache/backend_impl.h',
'disk_cache/bitmap.cc',
'disk_cache/bitmap.h',
'disk_cache/block_files.cc',
'disk_cache/block_files.h',
'disk_cache/cache_creator.cc',
'disk_cache/cache_util.h',
'disk_cache/cache_util.cc',
'disk_cache/cache_util_posix.cc',
'disk_cache/cache_util_win.cc',
'disk_cache/disk_cache.h',
'disk_cache/disk_format.cc',
'disk_cache/disk_format.h',
'disk_cache/entry_impl.cc',
'disk_cache/entry_impl.h',
'disk_cache/errors.h',
'disk_cache/eviction.cc',
'disk_cache/eviction.h',
'disk_cache/experiments.h',
'disk_cache/file.cc',
'disk_cache/file.h',
'disk_cache/file_block.h',
'disk_cache/file_lock.cc',
'disk_cache/file_lock.h',
'disk_cache/file_posix.cc',
'disk_cache/file_win.cc',
'disk_cache/histogram_macros.h',
'disk_cache/in_flight_backend_io.cc',
'disk_cache/in_flight_backend_io.h',
'disk_cache/in_flight_io.cc',
'disk_cache/in_flight_io.h',
'disk_cache/mapped_file.h',
'disk_cache/mapped_file_posix.cc',
'disk_cache/mapped_file_avoid_mmap_posix.cc',
'disk_cache/mapped_file_win.cc',
'disk_cache/mem_backend_impl.cc',
'disk_cache/mem_backend_impl.h',
'disk_cache/mem_entry_impl.cc',
'disk_cache/mem_entry_impl.h',
'disk_cache/mem_rankings.cc',
'disk_cache/mem_rankings.h',
'disk_cache/net_log_parameters.cc',
'disk_cache/net_log_parameters.h',
'disk_cache/rankings.cc',
'disk_cache/rankings.h',
'disk_cache/sparse_control.cc',
'disk_cache/sparse_control.h',
'disk_cache/stats.cc',
'disk_cache/stats.h',
'disk_cache/stats_histogram.cc',
'disk_cache/stats_histogram.h',
'disk_cache/storage_block-inl.h',
'disk_cache/storage_block.h',
'disk_cache/stress_support.h',
'disk_cache/trace.cc',
'disk_cache/trace.h',
'disk_cache/simple/simple_backend_impl.cc',
'disk_cache/simple/simple_backend_impl.h',
'disk_cache/simple/simple_disk_format.cc',
'disk_cache/simple/simple_disk_format.h',
'disk_cache/simple/simple_entry_impl.cc',
'disk_cache/simple/simple_entry_impl.h',
'disk_cache/simple/simple_index.cc',
'disk_cache/simple/simple_index.h',
'disk_cache/simple/simple_synchronous_entry.cc',
'disk_cache/simple/simple_synchronous_entry.h',
'disk_cache/flash/flash_entry_impl.cc',
'disk_cache/flash/flash_entry_impl.h',
'disk_cache/flash/format.h',
'disk_cache/flash/internal_entry.cc',
'disk_cache/flash/internal_entry.h',
'disk_cache/flash/log_store.cc',
'disk_cache/flash/log_store.h',
'disk_cache/flash/log_store_entry.cc',
'disk_cache/flash/log_store_entry.h',
'disk_cache/flash/segment.cc',
'disk_cache/flash/segment.h',
'disk_cache/flash/storage.cc',
'disk_cache/flash/storage.h',
'dns/address_sorter.h',
'dns/address_sorter_posix.cc',
'dns/address_sorter_posix.h',
'dns/address_sorter_win.cc',
'dns/dns_client.cc',
'dns/dns_client.h',
'dns/dns_config_service.cc',
'dns/dns_config_service.h',
'dns/dns_config_service_posix.cc',
'dns/dns_config_service_posix.h',
'dns/dns_config_service_win.cc',
'dns/dns_config_service_win.h',
'dns/dns_hosts.cc',
'dns/dns_hosts.h',
'dns/dns_protocol.h',
'dns/dns_query.cc',
'dns/dns_query.h',
'dns/dns_response.cc',
'dns/dns_response.h',
'dns/dns_session.cc',
'dns/dns_session.h',
'dns/dns_socket_pool.cc',
'dns/dns_socket_pool.h',
'dns/dns_transaction.cc',
'dns/dns_transaction.h',
'dns/host_cache.cc',
'dns/host_cache.h',
'dns/host_resolver.cc',
'dns/host_resolver.h',
'dns/host_resolver_impl.cc',
'dns/host_resolver_impl.h',
'dns/host_resolver_proc.cc',
'dns/host_resolver_proc.h',
'dns/mapped_host_resolver.cc',
'dns/mapped_host_resolver.h',
'dns/notify_watcher_mac.cc',
'dns/notify_watcher_mac.h',
'dns/serial_worker.cc',
'dns/serial_worker.h',
'dns/single_request_host_resolver.cc',
'dns/single_request_host_resolver.h',
'ftp/ftp_auth_cache.cc',
'ftp/ftp_auth_cache.h',
'ftp/ftp_ctrl_response_buffer.cc',
'ftp/ftp_ctrl_response_buffer.h',
'ftp/ftp_directory_listing_parser.cc',
'ftp/ftp_directory_listing_parser.h',
'ftp/ftp_directory_listing_parser_ls.cc',
'ftp/ftp_directory_listing_parser_ls.h',
'ftp/ftp_directory_listing_parser_netware.cc',
'ftp/ftp_directory_listing_parser_netware.h',
'ftp/ftp_directory_listing_parser_os2.cc',
'ftp/ftp_directory_listing_parser_os2.h',
'ftp/ftp_directory_listing_parser_vms.cc',
'ftp/ftp_directory_listing_parser_vms.h',
'ftp/ftp_directory_listing_parser_windows.cc',
'ftp/ftp_directory_listing_parser_windows.h',
'ftp/ftp_network_layer.cc',
'ftp/ftp_network_layer.h',
'ftp/ftp_network_session.cc',
'ftp/ftp_network_session.h',
'ftp/ftp_network_transaction.cc',
'ftp/ftp_network_transaction.h',
'ftp/ftp_request_info.h',
'ftp/ftp_response_info.cc',
'ftp/ftp_response_info.h',
'ftp/ftp_server_type_histograms.cc',
'ftp/ftp_server_type_histograms.h',
'ftp/ftp_transaction.h',
'ftp/ftp_transaction_factory.h',
'ftp/ftp_util.cc',
'ftp/ftp_util.h',
'http/des.cc',
'http/des.h',
'http/http_atom_list.h',
'http/http_auth.cc',
'http/http_auth.h',
'http/http_auth_cache.cc',
'http/http_auth_cache.h',
'http/http_auth_controller.cc',
'http/http_auth_controller.h',
'http/http_auth_filter.cc',
'http/http_auth_filter.h',
'http/http_auth_filter_win.h',
'http/http_auth_gssapi_posix.cc',
'http/http_auth_gssapi_posix.h',
'http/http_auth_handler.cc',
'http/http_auth_handler.h',
'http/http_auth_handler_basic.cc',
'http/http_auth_handler_basic.h',
'http/http_auth_handler_digest.cc',
'http/http_auth_handler_digest.h',
'http/http_auth_handler_factory.cc',
'http/http_auth_handler_factory.h',
'http/http_auth_handler_negotiate.cc',
'http/http_auth_handler_negotiate.h',
'http/http_auth_handler_ntlm.cc',
'http/http_auth_handler_ntlm.h',
'http/http_auth_handler_ntlm_portable.cc',
'http/http_auth_handler_ntlm_win.cc',
'http/http_auth_sspi_win.cc',
'http/http_auth_sspi_win.h',
'http/http_basic_stream.cc',
'http/http_basic_stream.h',
'http/http_byte_range.cc',
'http/http_byte_range.h',
'http/http_cache.cc',
'http/http_cache.h',
'http/http_cache_transaction.cc',
'http/http_cache_transaction.h',
'http/http_content_disposition.cc',
'http/http_content_disposition.h',
'http/http_chunked_decoder.cc',
'http/http_chunked_decoder.h',
'http/http_network_layer.cc',
'http/http_network_layer.h',
'http/http_network_session.cc',
'http/http_network_session.h',
'http/http_network_session_peer.cc',
'http/http_network_session_peer.h',
'http/http_network_transaction.cc',
'http/http_network_transaction.h',
'http/http_pipelined_connection.h',
'http/http_pipelined_connection_impl.cc',
'http/http_pipelined_connection_impl.h',
'http/http_pipelined_host.cc',
'http/http_pipelined_host.h',
'http/http_pipelined_host_capability.h',
'http/http_pipelined_host_forced.cc',
'http/http_pipelined_host_forced.h',
'http/http_pipelined_host_impl.cc',
'http/http_pipelined_host_impl.h',
'http/http_pipelined_host_pool.cc',
'http/http_pipelined_host_pool.h',
'http/http_pipelined_stream.cc',
'http/http_pipelined_stream.h',
'http/http_proxy_client_socket.cc',
'http/http_proxy_client_socket.h',
'http/http_proxy_client_socket_pool.cc',
'http/http_proxy_client_socket_pool.h',
'http/http_request_headers.cc',
'http/http_request_headers.h',
'http/http_request_info.cc',
'http/http_request_info.h',
'http/http_response_body_drainer.cc',
'http/http_response_body_drainer.h',
'http/http_response_headers.cc',
'http/http_response_headers.h',
'http/http_response_info.cc',
'http/http_response_info.h',
'http/http_security_headers.cc',
'http/http_security_headers.h',
'http/http_server_properties.cc',
'http/http_server_properties.h',
'http/http_server_properties_impl.cc',
'http/http_server_properties_impl.h',
'http/http_status_code.h',
'http/http_stream.h',
'http/http_stream_base.h',
'http/http_stream_factory.cc',
'http/http_stream_factory.h',
'http/http_stream_factory_impl.cc',
'http/http_stream_factory_impl.h',
'http/http_stream_factory_impl_job.cc',
'http/http_stream_factory_impl_job.h',
'http/http_stream_factory_impl_request.cc',
'http/http_stream_factory_impl_request.h',
'http/http_stream_parser.cc',
'http/http_stream_parser.h',
'http/http_transaction.h',
'http/http_transaction_delegate.h',
'http/http_transaction_factory.h',
'http/http_util.cc',
'http/http_util.h',
'http/http_util_icu.cc',
'http/http_vary_data.cc',
'http/http_vary_data.h',
'http/http_version.h',
'http/md4.cc',
'http/md4.h',
'http/partial_data.cc',
'http/partial_data.h',
        'http/proxy_client_socket.cc',
        'http/proxy_client_socket.h',
'http/transport_security_state.cc',
'http/transport_security_state.h',
'http/transport_security_state_static.h',
'http/url_security_manager.cc',
'http/url_security_manager.h',
'http/url_security_manager_posix.cc',
'http/url_security_manager_win.cc',
'ocsp/nss_ocsp.cc',
'ocsp/nss_ocsp.h',
'proxy/dhcp_proxy_script_adapter_fetcher_win.cc',
'proxy/dhcp_proxy_script_adapter_fetcher_win.h',
'proxy/dhcp_proxy_script_fetcher.cc',
'proxy/dhcp_proxy_script_fetcher.h',
'proxy/dhcp_proxy_script_fetcher_factory.cc',
'proxy/dhcp_proxy_script_fetcher_factory.h',
'proxy/dhcp_proxy_script_fetcher_win.cc',
'proxy/dhcp_proxy_script_fetcher_win.h',
'proxy/dhcpcsvc_init_win.cc',
'proxy/dhcpcsvc_init_win.h',
'proxy/multi_threaded_proxy_resolver.cc',
'proxy/multi_threaded_proxy_resolver.h',
'proxy/network_delegate_error_observer.cc',
'proxy/network_delegate_error_observer.h',
'proxy/polling_proxy_config_service.cc',
'proxy/polling_proxy_config_service.h',
'proxy/proxy_bypass_rules.cc',
'proxy/proxy_bypass_rules.h',
'proxy/proxy_config.cc',
'proxy/proxy_config.h',
'proxy/proxy_config_service.h',
'proxy/proxy_config_service_android.cc',
'proxy/proxy_config_service_android.h',
'proxy/proxy_config_service_fixed.cc',
'proxy/proxy_config_service_fixed.h',
'proxy/proxy_config_service_ios.cc',
'proxy/proxy_config_service_ios.h',
'proxy/proxy_config_service_linux.cc',
'proxy/proxy_config_service_linux.h',
'proxy/proxy_config_service_mac.cc',
'proxy/proxy_config_service_mac.h',
'proxy/proxy_config_service_win.cc',
'proxy/proxy_config_service_win.h',
'proxy/proxy_config_source.cc',
'proxy/proxy_config_source.h',
'proxy/proxy_info.cc',
'proxy/proxy_info.h',
'proxy/proxy_list.cc',
'proxy/proxy_list.h',
'proxy/proxy_resolver.h',
'proxy/proxy_resolver_error_observer.h',
'proxy/proxy_resolver_mac.cc',
'proxy/proxy_resolver_mac.h',
'proxy/proxy_resolver_script.h',
'proxy/proxy_resolver_script_data.cc',
'proxy/proxy_resolver_script_data.h',
'proxy/proxy_resolver_winhttp.cc',
'proxy/proxy_resolver_winhttp.h',
'proxy/proxy_retry_info.h',
'proxy/proxy_script_decider.cc',
'proxy/proxy_script_decider.h',
'proxy/proxy_script_fetcher.h',
'proxy/proxy_script_fetcher_impl.cc',
'proxy/proxy_script_fetcher_impl.h',
'proxy/proxy_server.cc',
'proxy/proxy_server.h',
'proxy/proxy_server_mac.cc',
'proxy/proxy_service.cc',
'proxy/proxy_service.h',
'quic/blocked_list.h',
'quic/congestion_control/available_channel_estimator.cc',
'quic/congestion_control/available_channel_estimator.h',
'quic/congestion_control/channel_estimator.cc',
'quic/congestion_control/channel_estimator.h',
'quic/congestion_control/cube_root.cc',
'quic/congestion_control/cube_root.h',
'quic/congestion_control/cubic.cc',
'quic/congestion_control/cubic.h',
'quic/congestion_control/fix_rate_receiver.cc',
'quic/congestion_control/fix_rate_receiver.h',
'quic/congestion_control/fix_rate_sender.cc',
'quic/congestion_control/fix_rate_sender.h',
'quic/congestion_control/hybrid_slow_start.cc',
'quic/congestion_control/hybrid_slow_start.h',
'quic/congestion_control/inter_arrival_bitrate_ramp_up.cc',
'quic/congestion_control/inter_arrival_bitrate_ramp_up.h',
'quic/congestion_control/inter_arrival_overuse_detector.cc',
'quic/congestion_control/inter_arrival_overuse_detector.h',
'quic/congestion_control/inter_arrival_probe.cc',
'quic/congestion_control/inter_arrival_probe.h',
'quic/congestion_control/inter_arrival_receiver.cc',
'quic/congestion_control/inter_arrival_receiver.h',
'quic/congestion_control/inter_arrival_sender.cc',
'quic/congestion_control/inter_arrival_sender.h',
'quic/congestion_control/inter_arrival_state_machine.cc',
'quic/congestion_control/inter_arrival_state_machine.h',
'quic/congestion_control/leaky_bucket.cc',
'quic/congestion_control/leaky_bucket.h',
'quic/congestion_control/paced_sender.cc',
'quic/congestion_control/paced_sender.h',
'quic/congestion_control/quic_congestion_manager.cc',
'quic/congestion_control/quic_congestion_manager.h',
'quic/congestion_control/quic_max_sized_map.h',
'quic/congestion_control/receive_algorithm_interface.cc',
'quic/congestion_control/receive_algorithm_interface.h',
'quic/congestion_control/send_algorithm_interface.cc',
'quic/congestion_control/send_algorithm_interface.h',
'quic/congestion_control/tcp_cubic_sender.cc',
'quic/congestion_control/tcp_cubic_sender.h',
'quic/congestion_control/tcp_receiver.cc',
'quic/congestion_control/tcp_receiver.h',
'quic/crypto/aes_128_gcm_decrypter.h',
'quic/crypto/aes_128_gcm_decrypter_nss.cc',
'quic/crypto/aes_128_gcm_decrypter_openssl.cc',
'quic/crypto/aes_128_gcm_encrypter.h',
'quic/crypto/aes_128_gcm_encrypter_nss.cc',
'quic/crypto/aes_128_gcm_encrypter_openssl.cc',
'quic/crypto/crypto_framer.cc',
'quic/crypto/crypto_framer.h',
'quic/crypto/crypto_handshake.cc',
'quic/crypto/crypto_handshake.h',
'quic/crypto/crypto_protocol.h',
'quic/crypto/crypto_utils.cc',
'quic/crypto/crypto_utils.h',
'quic/crypto/curve25519_key_exchange.cc',
'quic/crypto/curve25519_key_exchange.h',
'quic/crypto/key_exchange.h',
'quic/crypto/null_decrypter.cc',
'quic/crypto/null_decrypter.h',
'quic/crypto/null_encrypter.cc',
'quic/crypto/null_encrypter.h',
'quic/crypto/p256_key_exchange.h',
'quic/crypto/p256_key_exchange_nss.cc',
'quic/crypto/p256_key_exchange_openssl.cc',
'quic/crypto/quic_decrypter.cc',
'quic/crypto/quic_decrypter.h',
'quic/crypto/quic_encrypter.cc',
'quic/crypto/quic_encrypter.h',
'quic/crypto/quic_random.cc',
'quic/crypto/quic_random.h',
'quic/crypto/scoped_evp_cipher_ctx.h',
'quic/crypto/strike_register.cc',
'quic/crypto/strike_register.h',
'quic/quic_bandwidth.cc',
'quic/quic_bandwidth.h',
'quic/quic_blocked_writer_interface.h',
'quic/quic_client_session.cc',
'quic/quic_client_session.h',
        'quic/quic_clock.cc',
        'quic/quic_clock.h',
        'quic/quic_connection.cc',
        'quic/quic_connection.h',
        'quic/quic_connection_helper.cc',
        'quic/quic_connection_helper.h',
        'quic/quic_connection_logger.cc',
        'quic/quic_connection_logger.h',
        'quic/quic_crypto_client_stream.cc',
        'quic/quic_crypto_client_stream.h',
        'quic/quic_crypto_client_stream_factory.h',
        'quic/quic_crypto_server_stream.cc',
        'quic/quic_crypto_server_stream.h',
        'quic/quic_crypto_stream.cc',
        'quic/quic_crypto_stream.h',
'quic/quic_data_reader.cc',
'quic/quic_data_reader.h',
'quic/quic_data_writer.cc',
'quic/quic_data_writer.h',
'quic/quic_fec_group.cc',
'quic/quic_fec_group.h',
'quic/quic_framer.cc',
'quic/quic_framer.h',
'quic/quic_http_stream.cc',
'quic/quic_http_stream.h',
'quic/quic_packet_creator.cc',
'quic/quic_packet_creator.h',
'quic/quic_packet_entropy_manager.cc',
'quic/quic_packet_entropy_manager.h',
'quic/quic_packet_generator.cc',
'quic/quic_packet_generator.h',
'quic/quic_protocol.cc',
'quic/quic_protocol.h',
'quic/quic_reliable_client_stream.cc',
'quic/quic_reliable_client_stream.h',
'quic/quic_session.cc',
'quic/quic_session.h',
'quic/quic_stats.cc',
'quic/quic_stats.h',
'quic/quic_stream_factory.cc',
'quic/quic_stream_factory.h',
'quic/quic_stream_sequencer.cc',
'quic/quic_stream_sequencer.h',
'quic/quic_time.cc',
'quic/quic_time.h',
'quic/quic_utils.cc',
'quic/quic_utils.h',
'quic/reliable_quic_stream.cc',
'quic/reliable_quic_stream.h',
'socket/buffered_write_stream_socket.cc',
'socket/buffered_write_stream_socket.h',
'socket/client_socket_factory.cc',
'socket/client_socket_factory.h',
'socket/client_socket_handle.cc',
'socket/client_socket_handle.h',
'socket/client_socket_pool.cc',
'socket/client_socket_pool.h',
'socket/client_socket_pool_base.cc',
'socket/client_socket_pool_base.h',
'socket/client_socket_pool_histograms.cc',
'socket/client_socket_pool_histograms.h',
'socket/client_socket_pool_manager.cc',
'socket/client_socket_pool_manager.h',
'socket/client_socket_pool_manager_impl.cc',
'socket/client_socket_pool_manager_impl.h',
'socket/next_proto.h',
'socket/nss_ssl_util.cc',
'socket/nss_ssl_util.h',
'socket/server_socket.h',
'socket/socket_net_log_params.cc',
'socket/socket_net_log_params.h',
'socket/socket.h',
'socket/socks5_client_socket.cc',
'socket/socks5_client_socket.h',
'socket/socks_client_socket.cc',
'socket/socks_client_socket.h',
'socket/socks_client_socket_pool.cc',
'socket/socks_client_socket_pool.h',
'socket/ssl_client_socket.cc',
'socket/ssl_client_socket.h',
'socket/ssl_client_socket_nss.cc',
'socket/ssl_client_socket_nss.h',
'socket/ssl_client_socket_openssl.cc',
'socket/ssl_client_socket_openssl.h',
'socket/ssl_client_socket_pool.cc',
'socket/ssl_client_socket_pool.h',
'socket/ssl_error_params.cc',
'socket/ssl_error_params.h',
'socket/ssl_server_socket.h',
'socket/ssl_server_socket_nss.cc',
'socket/ssl_server_socket_nss.h',
'socket/ssl_server_socket_openssl.cc',
'socket/ssl_socket.h',
'socket/stream_listen_socket.cc',
'socket/stream_listen_socket.h',
'socket/stream_socket.cc',
'socket/stream_socket.h',
'socket/tcp_client_socket.cc',
'socket/tcp_client_socket.h',
'socket/tcp_client_socket_libevent.cc',
'socket/tcp_client_socket_libevent.h',
'socket/tcp_client_socket_win.cc',
'socket/tcp_client_socket_win.h',
'socket/tcp_listen_socket.cc',
'socket/tcp_listen_socket.h',
'socket/tcp_server_socket.h',
'socket/tcp_server_socket_libevent.cc',
'socket/tcp_server_socket_libevent.h',
'socket/tcp_server_socket_win.cc',
'socket/tcp_server_socket_win.h',
'socket/transport_client_socket_pool.cc',
'socket/transport_client_socket_pool.h',
'socket/unix_domain_socket_posix.cc',
'socket/unix_domain_socket_posix.h',
'socket_stream/socket_stream.cc',
'socket_stream/socket_stream.h',
'socket_stream/socket_stream_job.cc',
'socket_stream/socket_stream_job.h',
'socket_stream/socket_stream_job_manager.cc',
'socket_stream/socket_stream_job_manager.h',
'socket_stream/socket_stream_metrics.cc',
'socket_stream/socket_stream_metrics.h',
'spdy/buffered_spdy_framer.cc',
'spdy/buffered_spdy_framer.h',
'spdy/spdy_bitmasks.h',
'spdy/spdy_credential_builder.cc',
'spdy/spdy_credential_builder.h',
'spdy/spdy_credential_state.cc',
'spdy/spdy_credential_state.h',
'spdy/spdy_frame_builder.cc',
'spdy/spdy_frame_builder.h',
'spdy/spdy_frame_reader.cc',
'spdy/spdy_frame_reader.h',
'spdy/spdy_framer.cc',
'spdy/spdy_framer.h',
'spdy/spdy_header_block.cc',
'spdy/spdy_header_block.h',
'spdy/spdy_http_stream.cc',
'spdy/spdy_http_stream.h',
'spdy/spdy_http_utils.cc',
'spdy/spdy_http_utils.h',
'spdy/spdy_io_buffer.cc',
'spdy/spdy_io_buffer.h',
'spdy/spdy_priority_forest.h',
'spdy/spdy_protocol.cc',
'spdy/spdy_protocol.h',
'spdy/spdy_proxy_client_socket.cc',
'spdy/spdy_proxy_client_socket.h',
'spdy/spdy_session.cc',
'spdy/spdy_session.h',
'spdy/spdy_session_pool.cc',
'spdy/spdy_session_pool.h',
'spdy/spdy_stream.cc',
'spdy/spdy_stream.h',
'spdy/spdy_websocket_stream.cc',
'spdy/spdy_websocket_stream.h',
'ssl/client_cert_store.h',
'ssl/client_cert_store_impl.h',
'ssl/client_cert_store_impl_mac.cc',
'ssl/client_cert_store_impl_nss.cc',
'ssl/client_cert_store_impl_win.cc',
'ssl/default_server_bound_cert_store.cc',
'ssl/default_server_bound_cert_store.h',
'ssl/openssl_client_key_store.cc',
'ssl/openssl_client_key_store.h',
'ssl/server_bound_cert_service.cc',
'ssl/server_bound_cert_service.h',
'ssl/server_bound_cert_store.cc',
'ssl/server_bound_cert_store.h',
'ssl/ssl_cert_request_info.cc',
'ssl/ssl_cert_request_info.h',
'ssl/ssl_cipher_suite_names.cc',
'ssl/ssl_cipher_suite_names.h',
'ssl/ssl_client_auth_cache.cc',
'ssl/ssl_client_auth_cache.h',
'ssl/ssl_client_cert_type.h',
'ssl/ssl_config_service.cc',
'ssl/ssl_config_service.h',
'ssl/ssl_config_service_defaults.cc',
'ssl/ssl_config_service_defaults.h',
'ssl/ssl_info.cc',
'ssl/ssl_info.h',
'third_party/mozilla_security_manager/nsKeygenHandler.cpp',
'third_party/mozilla_security_manager/nsKeygenHandler.h',
'third_party/mozilla_security_manager/nsNSSCertificateDB.cpp',
'third_party/mozilla_security_manager/nsNSSCertificateDB.h',
'third_party/mozilla_security_manager/nsPKCS12Blob.cpp',
'third_party/mozilla_security_manager/nsPKCS12Blob.h',
'udp/datagram_client_socket.h',
'udp/datagram_server_socket.h',
'udp/datagram_socket.h',
'udp/udp_client_socket.cc',
'udp/udp_client_socket.h',
'udp/udp_net_log_parameters.cc',
'udp/udp_net_log_parameters.h',
'udp/udp_server_socket.cc',
'udp/udp_server_socket.h',
'udp/udp_socket.h',
'udp/udp_socket_libevent.cc',
'udp/udp_socket_libevent.h',
'udp/udp_socket_win.cc',
'udp/udp_socket_win.h',
'url_request/data_protocol_handler.cc',
'url_request/data_protocol_handler.h',
'url_request/file_protocol_handler.cc',
'url_request/file_protocol_handler.h',
'url_request/fraudulent_certificate_reporter.h',
'url_request/ftp_protocol_handler.cc',
'url_request/ftp_protocol_handler.h',
'url_request/http_user_agent_settings.h',
'url_request/protocol_intercept_job_factory.cc',
'url_request/protocol_intercept_job_factory.h',
'url_request/static_http_user_agent_settings.cc',
'url_request/static_http_user_agent_settings.h',
'url_request/url_fetcher.cc',
'url_request/url_fetcher.h',
'url_request/url_fetcher_core.cc',
'url_request/url_fetcher_core.h',
'url_request/url_fetcher_delegate.cc',
'url_request/url_fetcher_delegate.h',
'url_request/url_fetcher_factory.h',
'url_request/url_fetcher_impl.cc',
'url_request/url_fetcher_impl.h',
'url_request/url_fetcher_response_writer.cc',
'url_request/url_fetcher_response_writer.h',
'url_request/url_request.cc',
'url_request/url_request.h',
'url_request/url_request_about_job.cc',
'url_request/url_request_about_job.h',
'url_request/url_request_context.cc',
'url_request/url_request_context.h',
'url_request/url_request_context_builder.cc',
'url_request/url_request_context_builder.h',
'url_request/url_request_context_getter.cc',
'url_request/url_request_context_getter.h',
'url_request/url_request_context_storage.cc',
'url_request/url_request_context_storage.h',
'url_request/url_request_data_job.cc',
'url_request/url_request_data_job.h',
'url_request/url_request_error_job.cc',
'url_request/url_request_error_job.h',
'url_request/url_request_file_dir_job.cc',
'url_request/url_request_file_dir_job.h',
'url_request/url_request_file_job.cc',
'url_request/url_request_file_job.h',
'url_request/url_request_filter.cc',
'url_request/url_request_filter.h',
'url_request/url_request_ftp_job.cc',
'url_request/url_request_ftp_job.h',
'url_request/url_request_http_job.cc',
'url_request/url_request_http_job.h',
'url_request/url_request_job.cc',
'url_request/url_request_job.h',
'url_request/url_request_job_factory.cc',
'url_request/url_request_job_factory.h',
'url_request/url_request_job_factory_impl.cc',
'url_request/url_request_job_factory_impl.h',
'url_request/url_request_job_manager.cc',
'url_request/url_request_job_manager.h',
'url_request/url_request_netlog_params.cc',
'url_request/url_request_netlog_params.h',
'url_request/url_request_redirect_job.cc',
'url_request/url_request_redirect_job.h',
'url_request/url_request_simple_job.cc',
'url_request/url_request_simple_job.h',
'url_request/url_request_status.h',
'url_request/url_request_test_job.cc',
'url_request/url_request_test_job.h',
'url_request/url_request_throttler_entry.cc',
'url_request/url_request_throttler_entry.h',
'url_request/url_request_throttler_entry_interface.h',
'url_request/url_request_throttler_header_adapter.cc',
'url_request/url_request_throttler_header_adapter.h',
'url_request/url_request_throttler_header_interface.h',
'url_request/url_request_throttler_manager.cc',
'url_request/url_request_throttler_manager.h',
'url_request/view_cache_helper.cc',
'url_request/view_cache_helper.h',
'websockets/websocket_errors.cc',
'websockets/websocket_errors.h',
'websockets/websocket_frame.cc',
'websockets/websocket_frame.h',
'websockets/websocket_frame_parser.cc',
'websockets/websocket_frame_parser.h',
'websockets/websocket_handshake_handler.cc',
'websockets/websocket_handshake_handler.h',
'websockets/websocket_job.cc',
'websockets/websocket_job.h',
'websockets/websocket_net_log_params.cc',
'websockets/websocket_net_log_params.h',
'websockets/websocket_stream.h',
'websockets/websocket_throttle.cc',
'websockets/websocket_throttle.h',
],
'defines': [
'NET_IMPLEMENTATION',
],
'export_dependent_settings': [
'../base/base.gyp:base',
],
'conditions': [
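        # Chrome OS ships its own network change notifier and proxy config
        # service implementations elsewhere in the tree, so the generic
        # Linux versions are excluded from the build there.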
['chromeos==1', {
'sources!': [
'base/network_change_notifier_linux.cc',
'base/network_change_notifier_linux.h',
'base/network_change_notifier_netlink_linux.cc',
'base/network_change_notifier_netlink_linux.h',
'proxy/proxy_config_service_linux.cc',
'proxy/proxy_config_service_linux.h',
],
}],
['use_kerberos==1', {
'defines': [
'USE_KERBEROS',
],
'conditions': [
['OS=="openbsd"', {
'include_dirs': [
'/usr/include/kerberosV'
],
}],
['linux_link_kerberos==1', {
'link_settings': {
'ldflags': [
'<!@(krb5-config --libs gssapi)',
],
},
}, { # linux_link_kerberos==0
'defines': [
'DLOPEN_KERBEROS',
],
}],
],
}, { # use_kerberos == 0
'sources!': [
'http/http_auth_gssapi_posix.cc',
'http/http_auth_gssapi_posix.h',
            'http/http_auth_handler_negotiate.cc',
            'http/http_auth_handler_negotiate.h',
],
}],
['posix_avoid_mmap==1', {
'defines': [
'POSIX_AVOID_MMAP',
],
'direct_dependent_settings': {
'defines': [
'POSIX_AVOID_MMAP',
],
},
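          # Exactly one of the two mapped_file implementations is kept: the
          # mmap()-based one by default, or the avoid-mmap variant when
          # posix_avoid_mmap==1.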
'sources!': [
'disk_cache/mapped_file_posix.cc',
],
        }, { # else: posix_avoid_mmap == 0
'sources!': [
'disk_cache/mapped_file_avoid_mmap_posix.cc',
],
}],
['disable_ftp_support==1', {
'sources/': [
['exclude', '^ftp/'],
],
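          # 'sources/' filters by pattern; 'sources!' below is still needed
          # for the FTP-dependent files that live outside the ftp/ directory.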
'sources!': [
'url_request/ftp_protocol_handler.cc',
'url_request/ftp_protocol_handler.h',
'url_request/url_request_ftp_job.cc',
'url_request/url_request_ftp_job.h',
],
}],
['enable_built_in_dns==1', {
'defines': [
'ENABLE_BUILT_IN_DNS',
]
        }, { # else: enable_built_in_dns == 0
'sources!': [
'dns/address_sorter_posix.cc',
'dns/address_sorter_posix.h',
'dns/dns_client.cc',
],
}],
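        # The following condition selects the SSL/crypto backend: with
        # use_openssl==1 the NSS-backed sources are stripped from the target,
        # otherwise the OpenSSL-backed ones are, so only one backend is ever
        # compiled in.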
['use_openssl==1', {
'sources!': [
'base/crypto_module_nss.cc',
'base/keygen_handler_nss.cc',
'base/nss_memio.c',
'base/nss_memio.h',
'cert/cert_database_nss.cc',
'cert/cert_verify_proc_nss.cc',
'cert/cert_verify_proc_nss.h',
'cert/nss_cert_database.cc',
'cert/nss_cert_database.h',
'cert/test_root_certs_nss.cc',
'cert/x509_certificate_nss.cc',
'cert/x509_util_nss.cc',
'cert/x509_util_nss.h',
'ocsp/nss_ocsp.cc',
'ocsp/nss_ocsp.h',
'quic/crypto/aes_128_gcm_decrypter_nss.cc',
'quic/crypto/aes_128_gcm_encrypter_nss.cc',
'quic/crypto/p256_key_exchange_nss.cc',
'socket/nss_ssl_util.cc',
'socket/nss_ssl_util.h',
'socket/ssl_client_socket_nss.cc',
'socket/ssl_client_socket_nss.h',
'socket/ssl_server_socket_nss.cc',
'socket/ssl_server_socket_nss.h',
'ssl/client_cert_store_impl_nss.cc',
'third_party/mozilla_security_manager/nsKeygenHandler.cpp',
'third_party/mozilla_security_manager/nsKeygenHandler.h',
'third_party/mozilla_security_manager/nsNSSCertificateDB.cpp',
'third_party/mozilla_security_manager/nsNSSCertificateDB.h',
'third_party/mozilla_security_manager/nsPKCS12Blob.cpp',
'third_party/mozilla_security_manager/nsPKCS12Blob.h',
],
},
{ # else !use_openssl: remove the unneeded files
'sources!': [
'base/crypto_module_openssl.cc',
'base/keygen_handler_openssl.cc',
'base/openssl_private_key_store.h',
'base/openssl_private_key_store_android.cc',
'base/openssl_private_key_store_memory.cc',
'cert/cert_database_openssl.cc',
'cert/cert_verify_proc_openssl.cc',
'cert/cert_verify_proc_openssl.h',
'cert/test_root_certs_openssl.cc',
'cert/x509_certificate_openssl.cc',
'cert/x509_util_openssl.cc',
'cert/x509_util_openssl.h',
'quic/crypto/aes_128_gcm_decrypter_openssl.cc',
'quic/crypto/aes_128_gcm_encrypter_openssl.cc',
'quic/crypto/p256_key_exchange_openssl.cc',
'quic/crypto/scoped_evp_cipher_ctx.h',
'socket/ssl_client_socket_openssl.cc',
'socket/ssl_client_socket_openssl.h',
'socket/ssl_server_socket_openssl.cc',
'ssl/openssl_client_key_store.cc',
'ssl/openssl_client_key_store.h',
],
},
],
[ 'use_glib == 1', {
'dependencies': [
'../build/linux/system.gyp:gconf',
'../build/linux/system.gyp:gio',
],
'conditions': [
['use_openssl==1', {
'dependencies': [
'../third_party/openssl/openssl.gyp:openssl',
],
},
{ # else use_openssl==0, use NSS
'dependencies': [
'../build/linux/system.gyp:ssl',
],
}],
['os_bsd==1', {
'sources!': [
'base/network_change_notifier_linux.cc',
'base/network_change_notifier_netlink_linux.cc',
'proxy/proxy_config_service_linux.cc',
],
},{
'dependencies': [
'../build/linux/system.gyp:libresolv',
],
}],
['OS=="solaris"', {
'link_settings': {
'ldflags': [
'-R/usr/lib/mps',
],
},
}],
],
},
        { # else: use_glib == 0, so the NSS-dependent sources are not needed
'sources!': [
'base/crypto_module_nss.cc',
'base/keygen_handler_nss.cc',
'cert/cert_database_nss.cc',
'cert/nss_cert_database.cc',
'cert/nss_cert_database.h',
'cert/test_root_certs_nss.cc',
'cert/x509_certificate_nss.cc',
'ocsp/nss_ocsp.cc',
'ocsp/nss_ocsp.h',
'third_party/mozilla_security_manager/nsKeygenHandler.cpp',
'third_party/mozilla_security_manager/nsKeygenHandler.h',
'third_party/mozilla_security_manager/nsNSSCertificateDB.cpp',
'third_party/mozilla_security_manager/nsNSSCertificateDB.h',
'third_party/mozilla_security_manager/nsPKCS12Blob.cpp',
'third_party/mozilla_security_manager/nsPKCS12Blob.h',
],
},
],
[ 'toolkit_uses_gtk == 1', {
'dependencies': [
'../build/linux/system.gyp:gdk',
],
}],
[ 'use_nss != 1', {
'sources!': [
'cert/cert_verify_proc_nss.cc',
'cert/cert_verify_proc_nss.h',
'ssl/client_cert_store_impl_nss.cc',
],
}],
[ 'enable_websockets != 1', {
'sources/': [
['exclude', '^socket_stream/'],
['exclude', '^websockets/'],
],
'sources!': [
'spdy/spdy_websocket_stream.cc',
'spdy/spdy_websocket_stream.h',
],
}],
[ 'OS == "win"', {
'sources!': [
'http/http_auth_handler_ntlm_portable.cc',
'socket/tcp_client_socket_libevent.cc',
'socket/tcp_client_socket_libevent.h',
'socket/tcp_server_socket_libevent.cc',
'socket/tcp_server_socket_libevent.h',
'ssl/client_cert_store_impl_nss.cc',
'udp/udp_socket_libevent.cc',
'udp/udp_socket_libevent.h',
],
'dependencies': [
'../third_party/nss/nss.gyp:nspr',
'../third_party/nss/nss.gyp:nss',
'third_party/nss/ssl.gyp:libssl',
'tld_cleanup',
],
# TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
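          # (C4267 is MSVC's "conversion from 'size_t' to 'int', possible
          # loss of data" warning; e.g. "int len = str.size();" trips it on
          # 64-bit builds.)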
'msvs_disabled_warnings': [4267, ],
}, { # else: OS != "win"
'sources!': [
'base/winsock_init.cc',
'base/winsock_init.h',
'base/winsock_util.cc',
'base/winsock_util.h',
'proxy/proxy_resolver_winhttp.cc',
'proxy/proxy_resolver_winhttp.h',
],
},
],
[ 'OS == "mac"', {
'sources!': [
'ssl/client_cert_store_impl_nss.cc',
],
'dependencies': [
'../third_party/nss/nss.gyp:nspr',
'../third_party/nss/nss.gyp:nss',
'third_party/nss/ssl.gyp:libssl',
],
'link_settings': {
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
'$(SDKROOT)/System/Library/Frameworks/Security.framework',
'$(SDKROOT)/System/Library/Frameworks/SystemConfiguration.framework',
'$(SDKROOT)/usr/lib/libresolv.dylib',
]
},
},
],
[ 'OS == "ios"', {
'dependencies': [
'../third_party/nss/nss.gyp:nss',
'third_party/nss/ssl.gyp:libssl',
],
'link_settings': {
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/CFNetwork.framework',
'$(SDKROOT)/System/Library/Frameworks/MobileCoreServices.framework',
'$(SDKROOT)/System/Library/Frameworks/Security.framework',
'$(SDKROOT)/System/Library/Frameworks/SystemConfiguration.framework',
'$(SDKROOT)/usr/lib/libresolv.dylib',
],
},
},
],
['OS=="android" and _toolset=="target" and android_webview_build == 0', {
'dependencies': [
'net_java',
],
}],
[ 'OS == "android"', {
'dependencies': [
'../third_party/openssl/openssl.gyp:openssl',
'net_jni_headers',
],
'sources!': [
'base/openssl_private_key_store_memory.cc',
'cert/cert_database_openssl.cc',
'cert/cert_verify_proc_openssl.cc',
'cert/test_root_certs_openssl.cc',
],
# The net/android/keystore_openssl.cc source file needs to
# access an OpenSSL-internal header.
'include_dirs': [
'../third_party/openssl',
],
}, { # else OS != "android"
'defines': [
# These are the features Android doesn't support.
'ENABLE_MEDIA_CODEC_THEORA',
],
},
],
[ 'OS == "linux"', {
'dependencies': [
'../build/linux/system.gyp:dbus',
'../dbus/dbus.gyp:dbus',
],
},
],
],
'target_conditions': [
      # These source files are excluded by the default platform rules, but
      # they are needed in specific cases on other platforms. Re-including
      # them can only be done in 'target_conditions', which is evaluated
      # after the platform rules.
['OS == "android"', {
'sources/': [
['include', '^base/platform_mime_util_linux\\.cc$'],
],
}],
['OS == "ios"', {
'sources/': [
['include', '^base/network_change_notifier_mac\\.cc$'],
['include', '^base/network_config_watcher_mac\\.cc$'],
['include', '^base/platform_mime_util_mac\\.mm$'],
          # The iOS implementation only partially uses NSS and thus does not
          # define |use_nss|. In particular, the |USE_NSS| preprocessor
          # definition is not used. The following files are needed, though:
['include', '^cert/cert_verify_proc_nss\\.cc$'],
['include', '^cert/cert_verify_proc_nss\\.h$'],
['include', '^cert/test_root_certs_nss\\.cc$'],
['include', '^cert/x509_util_nss\\.cc$'],
['include', '^cert/x509_util_nss\\.h$'],
['include', '^dns/notify_watcher_mac\\.cc$'],
['include', '^proxy/proxy_resolver_mac\\.cc$'],
['include', '^proxy/proxy_server_mac\\.cc$'],
['include', '^ocsp/nss_ocsp\\.cc$'],
['include', '^ocsp/nss_ocsp\\.h$'],
],
}],
],
},
{
'target_name': 'net_unittests',
'type': '<(gtest_target_type)',
'dependencies': [
'../base/base.gyp:base',
'../base/base.gyp:base_i18n',
'../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
'../build/temp_gyp/googleurl.gyp:googleurl',
'../crypto/crypto.gyp:crypto',
'../testing/gmock.gyp:gmock',
'../testing/gtest.gyp:gtest',
'../third_party/zlib/zlib.gyp:zlib',
'net',
'net_test_support',
],
'sources': [
'android/keystore_unittest.cc',
'android/network_change_notifier_android_unittest.cc',
'base/address_list_unittest.cc',
'base/address_tracker_linux_unittest.cc',
'base/backoff_entry_unittest.cc',
'base/big_endian_unittest.cc',
'base/data_url_unittest.cc',
'base/directory_lister_unittest.cc',
'base/dns_util_unittest.cc',
'base/escape_unittest.cc',
'base/expiring_cache_unittest.cc',
'base/file_stream_unittest.cc',
'base/filter_unittest.cc',
        'base/gzip_filter_unittest.cc',
        'base/host_mapping_rules_unittest.cc',
        'base/host_port_pair_unittest.cc',
        'base/int128_unittest.cc',
        'base/ip_endpoint_unittest.cc',
'base/keygen_handler_unittest.cc',
'base/mime_sniffer_unittest.cc',
'base/mime_util_unittest.cc',
'base/mock_filter_context.cc',
'base/mock_filter_context.h',
'base/net_log_unittest.cc',
'base/net_log_unittest.h',
'base/net_util_unittest.cc',
'base/network_change_notifier_win_unittest.cc',
'base/prioritized_dispatcher_unittest.cc',
'base/priority_queue_unittest.cc',
'base/registry_controlled_domains/registry_controlled_domain_unittest.cc',
'base/sdch_filter_unittest.cc',
'base/static_cookie_policy_unittest.cc',
'base/test_completion_callback_unittest.cc',
'base/upload_bytes_element_reader_unittest.cc',
'base/upload_data_stream_unittest.cc',
'base/upload_file_element_reader_unittest.cc',
'base/url_util_unittest.cc',
'cert/cert_verify_proc_unittest.cc',
'cert/crl_set_unittest.cc',
'cert/ev_root_ca_metadata_unittest.cc',
'cert/multi_threaded_cert_verifier_unittest.cc',
'cert/nss_cert_database_unittest.cc',
'cert/pem_tokenizer_unittest.cc',
        'cert/x509_cert_types_unittest.cc',
        'cert/x509_certificate_unittest.cc',
        'cert/x509_util_nss_unittest.cc',
        'cert/x509_util_openssl_unittest.cc',
        'cert/x509_util_unittest.cc',
'cookies/canonical_cookie_unittest.cc',
'cookies/cookie_monster_unittest.cc',
'cookies/cookie_store_unittest.h',
'cookies/cookie_util_unittest.cc',
'cookies/parsed_cookie_unittest.cc',
'disk_cache/addr_unittest.cc',
'disk_cache/backend_unittest.cc',
'disk_cache/bitmap_unittest.cc',
'disk_cache/block_files_unittest.cc',
'disk_cache/cache_util_unittest.cc',
'disk_cache/entry_unittest.cc',
'disk_cache/mapped_file_unittest.cc',
'disk_cache/storage_block_unittest.cc',
'disk_cache/flash/flash_entry_unittest.cc',
'disk_cache/flash/log_store_entry_unittest.cc',
'disk_cache/flash/log_store_unittest.cc',
'disk_cache/flash/segment_unittest.cc',
'disk_cache/flash/storage_unittest.cc',
'dns/address_sorter_posix_unittest.cc',
'dns/address_sorter_unittest.cc',
'dns/dns_config_service_posix_unittest.cc',
'dns/dns_config_service_unittest.cc',
'dns/dns_config_service_win_unittest.cc',
'dns/dns_hosts_unittest.cc',
'dns/dns_query_unittest.cc',
'dns/dns_response_unittest.cc',
'dns/dns_session_unittest.cc',
'dns/dns_transaction_unittest.cc',
'dns/host_cache_unittest.cc',
'dns/host_resolver_impl_unittest.cc',
'dns/mapped_host_resolver_unittest.cc',
'dns/serial_worker_unittest.cc',
'dns/single_request_host_resolver_unittest.cc',
'ftp/ftp_auth_cache_unittest.cc',
'ftp/ftp_ctrl_response_buffer_unittest.cc',
'ftp/ftp_directory_listing_parser_ls_unittest.cc',
'ftp/ftp_directory_listing_parser_netware_unittest.cc',
'ftp/ftp_directory_listing_parser_os2_unittest.cc',
'ftp/ftp_directory_listing_parser_unittest.cc',
'ftp/ftp_directory_listing_parser_unittest.h',
'ftp/ftp_directory_listing_parser_vms_unittest.cc',
'ftp/ftp_directory_listing_parser_windows_unittest.cc',
'ftp/ftp_network_transaction_unittest.cc',
'ftp/ftp_util_unittest.cc',
'http/des_unittest.cc',
'http/http_auth_cache_unittest.cc',
'http/http_auth_controller_unittest.cc',
'http/http_auth_filter_unittest.cc',
'http/http_auth_gssapi_posix_unittest.cc',
'http/http_auth_handler_basic_unittest.cc',
'http/http_auth_handler_digest_unittest.cc',
'http/http_auth_handler_factory_unittest.cc',
'http/http_auth_handler_mock.cc',
'http/http_auth_handler_mock.h',
'http/http_auth_handler_negotiate_unittest.cc',
'http/http_auth_handler_unittest.cc',
'http/http_auth_sspi_win_unittest.cc',
'http/http_auth_unittest.cc',
'http/http_byte_range_unittest.cc',
'http/http_cache_unittest.cc',
'http/http_chunked_decoder_unittest.cc',
'http/http_content_disposition_unittest.cc',
'http/http_network_layer_unittest.cc',
'http/http_network_transaction_spdy3_unittest.cc',
'http/http_network_transaction_spdy2_unittest.cc',
'http/http_pipelined_connection_impl_unittest.cc',
'http/http_pipelined_host_forced_unittest.cc',
'http/http_pipelined_host_impl_unittest.cc',
'http/http_pipelined_host_pool_unittest.cc',
'http/http_pipelined_host_test_util.cc',
'http/http_pipelined_host_test_util.h',
'http/http_pipelined_network_transaction_unittest.cc',
'http/http_proxy_client_socket_pool_spdy2_unittest.cc',
'http/http_proxy_client_socket_pool_spdy3_unittest.cc',
'http/http_request_headers_unittest.cc',
'http/http_response_body_drainer_unittest.cc',
'http/http_response_headers_unittest.cc',
'http/http_security_headers_unittest.cc',
'http/http_server_properties_impl_unittest.cc',
'http/http_stream_factory_impl_unittest.cc',
'http/http_stream_parser_unittest.cc',
'http/http_transaction_unittest.cc',
'http/http_transaction_unittest.h',
'http/http_util_unittest.cc',
'http/http_vary_data_unittest.cc',
'http/mock_allow_url_security_manager.cc',
'http/mock_allow_url_security_manager.h',
'http/mock_gssapi_library_posix.cc',
'http/mock_gssapi_library_posix.h',
'http/mock_http_cache.cc',
'http/mock_http_cache.h',
'http/mock_sspi_library_win.cc',
'http/mock_sspi_library_win.h',
'http/transport_security_state_unittest.cc',
'http/url_security_manager_unittest.cc',
'proxy/dhcp_proxy_script_adapter_fetcher_win_unittest.cc',
'proxy/dhcp_proxy_script_fetcher_factory_unittest.cc',
'proxy/dhcp_proxy_script_fetcher_win_unittest.cc',
'proxy/multi_threaded_proxy_resolver_unittest.cc',
'proxy/network_delegate_error_observer_unittest.cc',
'proxy/proxy_bypass_rules_unittest.cc',
'proxy/proxy_config_service_android_unittest.cc',
'proxy/proxy_config_service_linux_unittest.cc',
'proxy/proxy_config_service_win_unittest.cc',
'proxy/proxy_config_unittest.cc',
'proxy/proxy_info_unittest.cc',
'proxy/proxy_list_unittest.cc',
'proxy/proxy_resolver_v8_tracing_unittest.cc',
'proxy/proxy_resolver_v8_unittest.cc',
'proxy/proxy_script_decider_unittest.cc',
'proxy/proxy_script_fetcher_impl_unittest.cc',
'proxy/proxy_server_unittest.cc',
'proxy/proxy_service_unittest.cc',
'quic/blocked_list_test.cc',
'quic/congestion_control/available_channel_estimator_test.cc',
'quic/congestion_control/channel_estimator_test.cc',
'quic/congestion_control/cube_root_test.cc',
'quic/congestion_control/cubic_test.cc',
'quic/congestion_control/fix_rate_test.cc',
'quic/congestion_control/hybrid_slow_start_test.cc',
'quic/congestion_control/inter_arrival_bitrate_ramp_up_test.cc',
'quic/congestion_control/inter_arrival_overuse_detector_test.cc',
'quic/congestion_control/inter_arrival_probe_test.cc',
'quic/congestion_control/inter_arrival_receiver_test.cc',
'quic/congestion_control/inter_arrival_state_machine_test.cc',
'quic/congestion_control/inter_arrival_sender_test.cc',
'quic/congestion_control/leaky_bucket_test.cc',
'quic/congestion_control/paced_sender_test.cc',
'quic/congestion_control/quic_congestion_control_test.cc',
'quic/congestion_control/quic_congestion_manager_test.cc',
'quic/congestion_control/quic_max_sized_map_test.cc',
'quic/congestion_control/tcp_cubic_sender_test.cc',
'quic/congestion_control/tcp_receiver_test.cc',
'quic/crypto/aes_128_gcm_decrypter_test.cc',
'quic/crypto/aes_128_gcm_encrypter_test.cc',
'quic/crypto/crypto_framer_test.cc',
'quic/crypto/crypto_handshake_test.cc',
'quic/crypto/curve25519_key_exchange_test.cc',
'quic/crypto/null_decrypter_test.cc',
'quic/crypto/null_encrypter_test.cc',
'quic/crypto/p256_key_exchange_test.cc',
'quic/crypto/quic_random_test.cc',
'quic/crypto/strike_register_test.cc',
'quic/test_tools/crypto_test_utils.cc',
'quic/test_tools/crypto_test_utils.h',
'quic/test_tools/mock_clock.cc',
'quic/test_tools/mock_clock.h',
'quic/test_tools/mock_crypto_client_stream.cc',
'quic/test_tools/mock_crypto_client_stream.h',
'quic/test_tools/mock_crypto_client_stream_factory.cc',
'quic/test_tools/mock_crypto_client_stream_factory.h',
'quic/test_tools/mock_random.cc',
'quic/test_tools/mock_random.h',
'quic/test_tools/quic_connection_peer.cc',
'quic/test_tools/quic_connection_peer.h',
'quic/test_tools/quic_framer_peer.cc',
'quic/test_tools/quic_framer_peer.h',
'quic/test_tools/quic_packet_creator_peer.cc',
'quic/test_tools/quic_packet_creator_peer.h',
'quic/test_tools/quic_session_peer.cc',
'quic/test_tools/quic_session_peer.h',
'quic/test_tools/quic_test_utils.cc',
'quic/test_tools/quic_test_utils.h',
'quic/test_tools/reliable_quic_stream_peer.cc',
'quic/test_tools/reliable_quic_stream_peer.h',
'quic/test_tools/simple_quic_framer.cc',
'quic/test_tools/simple_quic_framer.h',
'quic/test_tools/test_task_runner.cc',
'quic/test_tools/test_task_runner.h',
'quic/quic_bandwidth_test.cc',
'quic/quic_client_session_test.cc',
'quic/quic_clock_test.cc',
'quic/quic_connection_helper_test.cc',
'quic/quic_connection_test.cc',
'quic/quic_crypto_client_stream_test.cc',
'quic/quic_crypto_server_stream_test.cc',
'quic/quic_crypto_stream_test.cc',
'quic/quic_data_writer_test.cc',
'quic/quic_fec_group_test.cc',
'quic/quic_framer_test.cc',
'quic/quic_http_stream_test.cc',
'quic/quic_network_transaction_unittest.cc',
'quic/quic_packet_creator_test.cc',
'quic/quic_packet_entropy_manager_test.cc',
'quic/quic_packet_generator_test.cc',
'quic/quic_protocol_test.cc',
'quic/quic_reliable_client_stream_test.cc',
'quic/quic_session_test.cc',
'quic/quic_stream_factory_test.cc',
'quic/quic_stream_sequencer_test.cc',
'quic/quic_time_test.cc',
'quic/quic_utils_test.cc',
'quic/reliable_quic_stream_test.cc',
'socket/buffered_write_stream_socket_unittest.cc',
'socket/client_socket_pool_base_unittest.cc',
'socket/deterministic_socket_data_unittest.cc',
'socket/mock_client_socket_pool_manager.cc',
'socket/mock_client_socket_pool_manager.h',
'socket/socks5_client_socket_unittest.cc',
'socket/socks_client_socket_pool_unittest.cc',
'socket/socks_client_socket_unittest.cc',
'socket/ssl_client_socket_openssl_unittest.cc',
'socket/ssl_client_socket_pool_unittest.cc',
'socket/ssl_client_socket_unittest.cc',
'socket/ssl_server_socket_unittest.cc',
'socket/tcp_client_socket_unittest.cc',
'socket/tcp_listen_socket_unittest.cc',
'socket/tcp_listen_socket_unittest.h',
'socket/tcp_server_socket_unittest.cc',
'socket/transport_client_socket_pool_unittest.cc',
'socket/transport_client_socket_unittest.cc',
'socket/unix_domain_socket_posix_unittest.cc',
'socket_stream/socket_stream_metrics_unittest.cc',
'socket_stream/socket_stream_unittest.cc',
'spdy/buffered_spdy_framer_spdy3_unittest.cc',
'spdy/buffered_spdy_framer_spdy2_unittest.cc',
'spdy/spdy_credential_builder_unittest.cc',
'spdy/spdy_credential_state_unittest.cc',
'spdy/spdy_frame_builder_test.cc',
'spdy/spdy_frame_reader_test.cc',
'spdy/spdy_framer_test.cc',
'spdy/spdy_header_block_unittest.cc',
'spdy/spdy_http_stream_spdy3_unittest.cc',
'spdy/spdy_http_stream_spdy2_unittest.cc',
'spdy/spdy_http_utils_unittest.cc',
'spdy/spdy_network_transaction_spdy3_unittest.cc',
'spdy/spdy_network_transaction_spdy2_unittest.cc',
'spdy/spdy_priority_forest_test.cc',
'spdy/spdy_protocol_test.cc',
'spdy/spdy_proxy_client_socket_spdy3_unittest.cc',
'spdy/spdy_proxy_client_socket_spdy2_unittest.cc',
'spdy/spdy_session_spdy3_unittest.cc',
'spdy/spdy_session_spdy2_unittest.cc',
'spdy/spdy_stream_spdy3_unittest.cc',
'spdy/spdy_stream_spdy2_unittest.cc',
'spdy/spdy_stream_test_util.cc',
'spdy/spdy_stream_test_util.h',
'spdy/spdy_test_util_common.cc',
'spdy/spdy_test_util_common.h',
'spdy/spdy_test_util_spdy3.cc',
'spdy/spdy_test_util_spdy3.h',
'spdy/spdy_test_util_spdy2.cc',
'spdy/spdy_test_util_spdy2.h',
'spdy/spdy_test_utils.cc',
'spdy/spdy_test_utils.h',
'spdy/spdy_websocket_stream_spdy2_unittest.cc',
'spdy/spdy_websocket_stream_spdy3_unittest.cc',
'spdy/spdy_websocket_test_util_spdy2.cc',
'spdy/spdy_websocket_test_util_spdy2.h',
'spdy/spdy_websocket_test_util_spdy3.cc',
'spdy/spdy_websocket_test_util_spdy3.h',
'ssl/client_cert_store_impl_unittest.cc',
'ssl/default_server_bound_cert_store_unittest.cc',
'ssl/openssl_client_key_store_unittest.cc',
'ssl/server_bound_cert_service_unittest.cc',
'ssl/ssl_cipher_suite_names_unittest.cc',
'ssl/ssl_client_auth_cache_unittest.cc',
'ssl/ssl_config_service_unittest.cc',
'test/python_utils_unittest.cc',
'test/run_all_unittests.cc',
'test/test_certificate_data.h',
'tools/dump_cache/url_to_filename_encoder.cc',
'tools/dump_cache/url_to_filename_encoder.h',
'tools/dump_cache/url_to_filename_encoder_unittest.cc',
        'tools/dump_cache/url_utilities.cc',
        'tools/dump_cache/url_utilities.h',
'tools/dump_cache/url_utilities_unittest.cc',
'udp/udp_socket_unittest.cc',
'url_request/url_fetcher_impl_unittest.cc',
'url_request/url_request_context_builder_unittest.cc',
'url_request/url_request_filter_unittest.cc',
'url_request/url_request_ftp_job_unittest.cc',
'url_request/url_request_http_job_unittest.cc',
'url_request/url_request_job_factory_impl_unittest.cc',
'url_request/url_request_job_unittest.cc',
'url_request/url_request_throttler_simulation_unittest.cc',
'url_request/url_request_throttler_test_support.cc',
'url_request/url_request_throttler_test_support.h',
'url_request/url_request_throttler_unittest.cc',
'url_request/url_request_unittest.cc',
'url_request/view_cache_helper_unittest.cc',
'websockets/websocket_errors_unittest.cc',
'websockets/websocket_frame_parser_unittest.cc',
'websockets/websocket_frame_unittest.cc',
'websockets/websocket_handshake_handler_unittest.cc',
'websockets/websocket_handshake_handler_spdy2_unittest.cc',
'websockets/websocket_handshake_handler_spdy3_unittest.cc',
'websockets/websocket_job_spdy2_unittest.cc',
'websockets/websocket_job_spdy3_unittest.cc',
'websockets/websocket_net_log_params_unittest.cc',
'websockets/websocket_throttle_unittest.cc',
],
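      # The conditions below mirror the feature toggles on the 'net' target
      # above, dropping tests whose corresponding sources are excluded there.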
'conditions': [
['chromeos==1', {
'sources!': [
'base/network_change_notifier_linux_unittest.cc',
'proxy/proxy_config_service_linux_unittest.cc',
],
}],
[ 'OS == "android"', {
'sources!': [
            # There is no res_ninit() etc. on Android, so this test does not
            # apply there.
'dns/dns_config_service_posix_unittest.cc',
'ssl/client_cert_store_impl_unittest.cc',
],
'dependencies': [
'net_javatests',
'net_test_jni_headers',
],
}],
[ 'use_glib == 1', {
'dependencies': [
'../build/linux/system.gyp:ssl',
],
}, { # else use_glib == 0: !posix || mac
'sources!': [
'cert/nss_cert_database_unittest.cc',
],
},
],
[ 'toolkit_uses_gtk == 1', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
},
],
[ 'os_posix == 1 and OS != "mac" and OS != "android" and OS != "ios"', {
'conditions': [
['linux_use_tcmalloc==1', {
'dependencies': [
'../base/allocator/allocator.gyp:allocator',
],
}],
],
}],
[ 'use_kerberos==1', {
'defines': [
'USE_KERBEROS',
],
}, { # use_kerberos == 0
'sources!': [
'http/http_auth_gssapi_posix_unittest.cc',
'http/http_auth_handler_negotiate_unittest.cc',
'http/mock_gssapi_library_posix.cc',
'http/mock_gssapi_library_posix.h',
],
}],
[ 'use_openssl==1', {
          # When building for OpenSSL, we need to exclude NSS-specific tests.
# TODO(bulach): Add equivalent tests when the underlying
# functionality is ported to OpenSSL.
'sources!': [
'cert/nss_cert_database_unittest.cc',
'cert/x509_util_nss_unittest.cc',
'ssl/client_cert_store_impl_unittest.cc',
],
}, { # else !use_openssl: remove the unneeded files
'sources!': [
'cert/x509_util_openssl_unittest.cc',
'socket/ssl_client_socket_openssl_unittest.cc',
'ssl/openssl_client_key_store_unittest.cc',
],
},
],
[ 'enable_websockets != 1', {
'sources/': [
['exclude', '^socket_stream/'],
['exclude', '^websockets/'],
['exclude', '^spdy/spdy_websocket_stream_spdy._unittest\\.cc$'],
],
}],
[ 'disable_ftp_support==1', {
'sources/': [
['exclude', '^ftp/'],
],
'sources!': [
'url_request/url_request_ftp_job_unittest.cc',
],
},
],
[ 'enable_built_in_dns!=1', {
'sources!': [
'dns/address_sorter_posix_unittest.cc',
'dns/address_sorter_unittest.cc',
],
},
],
[ 'use_v8_in_net==1', {
'dependencies': [
'net_with_v8',
],
}, { # else: !use_v8_in_net
'sources!': [
'proxy/proxy_resolver_v8_unittest.cc',
'proxy/proxy_resolver_v8_tracing_unittest.cc',
],
},
],
[ 'OS == "win"', {
'sources!': [
'dns/dns_config_service_posix_unittest.cc',
'http/http_auth_gssapi_posix_unittest.cc',
],
          # This is needed to trigger the DLL copy step on Windows.
# TODO(mark): Specifying this here shouldn't be necessary.
'dependencies': [
'../third_party/icu/icu.gyp:icudata',
'../third_party/nss/nss.gyp:nspr',
'../third_party/nss/nss.gyp:nss',
'third_party/nss/ssl.gyp:libssl',
],
# TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
'msvs_disabled_warnings': [4267, ],
},
],
[ 'OS == "mac"', {
'dependencies': [
'../third_party/nss/nss.gyp:nspr',
'../third_party/nss/nss.gyp:nss',
'third_party/nss/ssl.gyp:libssl',
],
},
],
[ 'OS == "ios"', {
'dependencies': [
'../third_party/nss/nss.gyp:nss',
],
'actions': [
{
'action_name': 'copy_test_data',
'variables': {
'test_data_files': [
'data/ssl/certificates/',
'data/url_request_unittest/',
],
'test_data_prefix': 'net',
},
'includes': [ '../build/copy_test_data_ios.gypi' ],
},
],
'sources!': [
# TODO(droger): The following tests are disabled because the
# implementation is missing or incomplete.
# KeygenHandler::GenKeyAndSignChallenge() is not ported to iOS.
'base/keygen_handler_unittest.cc',
# Need to read input data files.
'base/gzip_filter_unittest.cc',
'disk_cache/backend_unittest.cc',
'disk_cache/block_files_unittest.cc',
'socket/ssl_server_socket_unittest.cc',
# Need TestServer.
'proxy/proxy_script_fetcher_impl_unittest.cc',
'socket/ssl_client_socket_unittest.cc',
'ssl/client_cert_store_impl_unittest.cc',
'url_request/url_fetcher_impl_unittest.cc',
'url_request/url_request_context_builder_unittest.cc',
# Needs GetAppOutput().
'test/python_utils_unittest.cc',
# The following tests are disabled because they don't apply to
# iOS.
# OS is not "linux" or "freebsd" or "openbsd".
'socket/unix_domain_socket_posix_unittest.cc',
],
'conditions': [
['coverage != 0', {
'sources!': [
# These sources can't be built with coverage due to a
# toolchain bug: http://openradar.appspot.com/radar?id=1499403
'http/transport_security_state_unittest.cc',
# These tests crash when run with coverage turned on due to an
# issue with llvm_gcda_increment_indirect_counter:
# http://crbug.com/156058
'cookies/cookie_monster_unittest.cc',
'cookies/cookie_store_unittest.h',
'http/http_auth_controller_unittest.cc',
'http/http_network_layer_unittest.cc',
'http/http_network_transaction_spdy2_unittest.cc',
'http/http_network_transaction_spdy3_unittest.cc',
'spdy/spdy_http_stream_spdy2_unittest.cc',
'spdy/spdy_http_stream_spdy3_unittest.cc',
'spdy/spdy_proxy_client_socket_spdy3_unittest.cc',
'spdy/spdy_session_spdy3_unittest.cc',
# These tests crash when run with coverage turned on:
# http://crbug.com/177203
'proxy/proxy_service_unittest.cc',
],
}],
],
}],
[ 'OS == "linux"', {
'dependencies': [
'../build/linux/system.gyp:dbus',
'../dbus/dbus.gyp:dbus_test_support',
],
},
],
[ 'OS == "android"', {
'dependencies': [
'../third_party/openssl/openssl.gyp:openssl',
],
'sources!': [
'dns/dns_config_service_posix_unittest.cc',
],
},
],
['OS == "android" and gtest_target_type == "shared_library"', {
'dependencies': [
'../testing/android/native_test.gyp:native_test_native_code',
]
}],
[ 'OS != "win" and OS != "mac"', {
'sources!': [
'cert/x509_cert_types_unittest.cc',
],
}],
],
},
{
'target_name': 'net_perftests',
'type': 'executable',
'dependencies': [
'../base/base.gyp:base',
'../base/base.gyp:base_i18n',
'../base/base.gyp:test_support_perf',
'../build/temp_gyp/googleurl.gyp:googleurl',
'../testing/gtest.gyp:gtest',
'net',
'net_test_support',
],
'sources': [
'cookies/cookie_monster_perftest.cc',
'disk_cache/disk_cache_perftest.cc',
'proxy/proxy_resolver_perftest.cc',
],
'conditions': [
[ 'use_v8_in_net==1', {
'dependencies': [
'net_with_v8',
],
}, { # else: !use_v8_in_net
'sources!': [
'proxy/proxy_resolver_perftest.cc',
],
},
],
      # This is needed to trigger the DLL copy step on Windows.
# TODO(mark): Specifying this here shouldn't be necessary.
[ 'OS == "win"', {
'dependencies': [
'../third_party/icu/icu.gyp:icudata',
],
# TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
'msvs_disabled_warnings': [4267, ],
},
],
],
},
{
'target_name': 'net_test_support',
'type': 'static_library',
'dependencies': [
'../base/base.gyp:base',
'../base/base.gyp:test_support_base',
'../build/temp_gyp/googleurl.gyp:googleurl',
'../testing/gtest.gyp:gtest',
'net',
],
'export_dependent_settings': [
'../base/base.gyp:base',
'../base/base.gyp:test_support_base',
'../testing/gtest.gyp:gtest',
],
'sources': [
'base/capturing_net_log.cc',
'base/capturing_net_log.h',
'base/load_timing_info_test_util.cc',
'base/load_timing_info_test_util.h',
'base/mock_file_stream.cc',
'base/mock_file_stream.h',
'base/test_completion_callback.cc',
'base/test_completion_callback.h',
'base/test_data_directory.cc',
'base/test_data_directory.h',
'cert/mock_cert_verifier.cc',
'cert/mock_cert_verifier.h',
'cookies/cookie_monster_store_test.cc',
'cookies/cookie_monster_store_test.h',
'cookies/cookie_store_test_callbacks.cc',
'cookies/cookie_store_test_callbacks.h',
'cookies/cookie_store_test_helpers.cc',
'cookies/cookie_store_test_helpers.h',
'disk_cache/disk_cache_test_base.cc',
'disk_cache/disk_cache_test_base.h',
'disk_cache/disk_cache_test_util.cc',
'disk_cache/disk_cache_test_util.h',
'disk_cache/flash/flash_cache_test_base.h',
'disk_cache/flash/flash_cache_test_base.cc',
'dns/dns_test_util.cc',
'dns/dns_test_util.h',
'dns/mock_host_resolver.cc',
'dns/mock_host_resolver.h',
'proxy/mock_proxy_resolver.cc',
'proxy/mock_proxy_resolver.h',
'proxy/mock_proxy_script_fetcher.cc',
'proxy/mock_proxy_script_fetcher.h',
'proxy/proxy_config_service_common_unittest.cc',
'proxy/proxy_config_service_common_unittest.h',
'socket/socket_test_util.cc',
'socket/socket_test_util.h',
'test/base_test_server.cc',
'test/base_test_server.h',
'test/cert_test_util.cc',
'test/cert_test_util.h',
'test/local_test_server_posix.cc',
'test/local_test_server_win.cc',
'test/local_test_server.cc',
'test/local_test_server.h',
'test/net_test_suite.cc',
'test/net_test_suite.h',
'test/python_utils.cc',
'test/python_utils.h',
'test/remote_test_server.cc',
'test/remote_test_server.h',
'test/spawner_communicator.cc',
'test/spawner_communicator.h',
'test/test_server.h',
'url_request/test_url_fetcher_factory.cc',
'url_request/test_url_fetcher_factory.h',
'url_request/url_request_test_util.cc',
'url_request/url_request_test_util.h',
],
'conditions': [
['inside_chromium_build==1 and OS != "ios"', {
'dependencies': [
'../third_party/protobuf/protobuf.gyp:py_proto',
],
}],
['os_posix == 1 and OS != "mac" and OS != "android" and OS != "ios"', {
'conditions': [
['use_openssl==1', {
'dependencies': [
'../third_party/openssl/openssl.gyp:openssl',
],
}, {
'dependencies': [
'../build/linux/system.gyp:ssl',
],
}],
],
}],
['os_posix == 1 and OS != "mac" and OS != "android" and OS != "ios"', {
'conditions': [
['linux_use_tcmalloc==1', {
'dependencies': [
'../base/allocator/allocator.gyp:allocator',
],
}],
],
}],
['OS != "android"', {
'sources!': [
'test/remote_test_server.cc',
'test/remote_test_server.h',
'test/spawner_communicator.cc',
'test/spawner_communicator.h',
],
}],
['OS == "ios"', {
'dependencies': [
'../third_party/nss/nss.gyp:nss',
],
}],
[ 'use_v8_in_net==1', {
'dependencies': [
'net_with_v8',
],
},
],
],
# TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
'msvs_disabled_warnings': [4267, ],
},
{
'target_name': 'net_resources',
'type': 'none',
'variables': {
'grit_out_dir': '<(SHARED_INTERMEDIATE_DIR)/net',
},
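      # grit processes base/net_resources.grd into generated resource
      # headers and data under <(SHARED_INTERMEDIATE_DIR)/net, which other
      # targets can then include.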
'actions': [
{
'action_name': 'net_resources',
'variables': {
'grit_grd_file': 'base/net_resources.grd',
},
'includes': [ '../build/grit_action.gypi' ],
},
],
'includes': [ '../build/grit_target.gypi' ],
},
{
'target_name': 'http_server',
'type': 'static_library',
'variables': { 'enable_wexit_time_destructors': 1, },
'dependencies': [
'../base/base.gyp:base',
'net',
],
'sources': [
'server/http_connection.cc',
'server/http_connection.h',
'server/http_server.cc',
'server/http_server.h',
'server/http_server_request_info.cc',
'server/http_server_request_info.h',
'server/web_socket.cc',
'server/web_socket.h',
],
# TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
'msvs_disabled_warnings': [4267, ],
},
{
'target_name': 'dump_cache',
'type': 'executable',
'dependencies': [
'../base/base.gyp:base',
'net',
'net_test_support',
],
'sources': [
'tools/dump_cache/cache_dumper.cc',
'tools/dump_cache/cache_dumper.h',
'tools/dump_cache/dump_cache.cc',
'tools/dump_cache/dump_files.cc',
'tools/dump_cache/dump_files.h',
'tools/dump_cache/simple_cache_dumper.cc',
'tools/dump_cache/simple_cache_dumper.h',
'tools/dump_cache/upgrade_win.cc',
'tools/dump_cache/upgrade_win.h',
'tools/dump_cache/url_to_filename_encoder.cc',
'tools/dump_cache/url_to_filename_encoder.h',
        'tools/dump_cache/url_utilities.cc',
        'tools/dump_cache/url_utilities.h',
],
# TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
'msvs_disabled_warnings': [4267, ],
},
],
'conditions': [
['use_v8_in_net == 1', {
'targets': [
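        # Building this as a separate target keeps the V8 dependency out of
        # the core 'net' target; the PAC-resolver sources below are only
        # compiled when use_v8_in_net==1.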
{
'target_name': 'net_with_v8',
'type': '<(component)',
'variables': { 'enable_wexit_time_destructors': 1, },
'dependencies': [
'../base/base.gyp:base',
'../build/temp_gyp/googleurl.gyp:googleurl',
'../v8/tools/gyp/v8.gyp:v8',
'net'
],
'defines': [
'NET_IMPLEMENTATION',
],
'sources': [
'proxy/proxy_resolver_v8.cc',
'proxy/proxy_resolver_v8.h',
'proxy/proxy_resolver_v8_tracing.cc',
'proxy/proxy_resolver_v8_tracing.h',
'proxy/proxy_service_v8.cc',
'proxy/proxy_service_v8.h',
],
# TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
'msvs_disabled_warnings': [4267, ],
},
],
}],
['OS != "ios"', {
'targets': [
        # iOS doesn't have the concept of simple executables, so these
        # targets can't be compiled on the platform.
{
'target_name': 'crash_cache',
'type': 'executable',
'dependencies': [
'../base/base.gyp:base',
'net',
'net_test_support',
],
'sources': [
'tools/crash_cache/crash_cache.cc',
],
# TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
'msvs_disabled_warnings': [4267, ],
},
{
'target_name': 'crl_set_dump',
'type': 'executable',
'dependencies': [
'../base/base.gyp:base',
'net',
],
'sources': [
'tools/crl_set_dump/crl_set_dump.cc',
],
# TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
'msvs_disabled_warnings': [4267, ],
},
{
'target_name': 'dns_fuzz_stub',
'type': 'executable',
'dependencies': [
'../base/base.gyp:base',
'net',
],
'sources': [
'tools/dns_fuzz_stub/dns_fuzz_stub.cc',
],
# TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
'msvs_disabled_warnings': [4267, ],
},
{
'target_name': 'fetch_client',
'type': 'executable',
'variables': { 'enable_wexit_time_destructors': 1, },
'dependencies': [
'../base/base.gyp:base',
'../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
'../build/temp_gyp/googleurl.gyp:googleurl',
'../testing/gtest.gyp:gtest',
'net',
'net_with_v8',
],
'sources': [
'tools/fetch/fetch_client.cc',
],
# TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
'msvs_disabled_warnings': [4267, ],
},
{
'target_name': 'fetch_server',
'type': 'executable',
'variables': { 'enable_wexit_time_destructors': 1, },
'dependencies': [
'../base/base.gyp:base',
'../build/temp_gyp/googleurl.gyp:googleurl',
'net',
],
'sources': [
'tools/fetch/fetch_server.cc',
'tools/fetch/http_listen_socket.cc',
'tools/fetch/http_listen_socket.h',
'tools/fetch/http_server.cc',
'tools/fetch/http_server.h',
'tools/fetch/http_server_request_info.cc',
'tools/fetch/http_server_request_info.h',
'tools/fetch/http_server_response_info.cc',
'tools/fetch/http_server_response_info.h',
'tools/fetch/http_session.cc',
'tools/fetch/http_session.h',
],
# TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
'msvs_disabled_warnings': [4267, ],
},
{
'target_name': 'gdig',
'type': 'executable',
'dependencies': [
'../base/base.gyp:base',
'net',
],
'sources': [
'tools/gdig/file_net_log.cc',
'tools/gdig/gdig.cc',
],
},
{
'target_name': 'get_server_time',
'type': 'executable',
'dependencies': [
'../base/base.gyp:base',
'../base/base.gyp:base_i18n',
'../build/temp_gyp/googleurl.gyp:googleurl',
'net',
],
'sources': [
'tools/get_server_time/get_server_time.cc',
],
# TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
'msvs_disabled_warnings': [4267, ],
},
{
'target_name': 'net_watcher',
'type': 'executable',
'dependencies': [
'../base/base.gyp:base',
'net',
'net_with_v8',
],
'conditions': [
[ 'use_glib == 1', {
'dependencies': [
'../build/linux/system.gyp:gconf',
'../build/linux/system.gyp:gio',
],
},
],
],
'sources': [
'tools/net_watcher/net_watcher.cc',
],
},
{
'target_name': 'run_testserver',
'type': 'executable',
'dependencies': [
'../base/base.gyp:base',
'../base/base.gyp:test_support_base',
'../testing/gtest.gyp:gtest',
'net_test_support',
],
'sources': [
'tools/testserver/run_testserver.cc',
],
},
{
'target_name': 'stress_cache',
'type': 'executable',
'dependencies': [
'../base/base.gyp:base',
'net',
'net_test_support',
],
'sources': [
'disk_cache/stress_cache.cc',
],
# TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
'msvs_disabled_warnings': [4267, ],
},
{
'target_name': 'tld_cleanup',
'type': 'executable',
'dependencies': [
'../base/base.gyp:base',
'../base/base.gyp:base_i18n',
'../build/temp_gyp/googleurl.gyp:googleurl',
],
'sources': [
'tools/tld_cleanup/tld_cleanup.cc',
],
# TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
'msvs_disabled_warnings': [4267, ],
},
],
}],
['os_posix == 1 and OS != "mac" and OS != "ios" and OS != "android"', {
'targets': [
{
'target_name': 'flip_balsa_and_epoll_library',
'type': 'static_library',
'dependencies': [
'../base/base.gyp:base',
'net',
],
'sources': [
'tools/flip_server/balsa_enums.h',
'tools/flip_server/balsa_frame.cc',
'tools/flip_server/balsa_frame.h',
'tools/flip_server/balsa_headers.cc',
'tools/flip_server/balsa_headers.h',
'tools/flip_server/balsa_headers_token_utils.cc',
'tools/flip_server/balsa_headers_token_utils.h',
'tools/flip_server/balsa_visitor_interface.h',
'tools/flip_server/constants.h',
'tools/flip_server/epoll_server.cc',
'tools/flip_server/epoll_server.h',
'tools/flip_server/http_message_constants.cc',
'tools/flip_server/http_message_constants.h',
            'tools/flip_server/split.cc',
            'tools/flip_server/split.h',
],
},
{
'target_name': 'flip_in_mem_edsm_server',
'type': 'executable',
'cflags': [
'-Wno-deprecated',
],
'dependencies': [
'../base/base.gyp:base',
'../third_party/openssl/openssl.gyp:openssl',
'flip_balsa_and_epoll_library',
'net',
],
'sources': [
'tools/dump_cache/url_to_filename_encoder.cc',
'tools/dump_cache/url_to_filename_encoder.h',
            'tools/dump_cache/url_utilities.cc',
            'tools/dump_cache/url_utilities.h',
            'tools/flip_server/acceptor_thread.cc',
            'tools/flip_server/acceptor_thread.h',
'tools/flip_server/buffer_interface.h',
'tools/flip_server/create_listener.cc',
'tools/flip_server/create_listener.h',
'tools/flip_server/flip_config.cc',
'tools/flip_server/flip_config.h',
'tools/flip_server/flip_in_mem_edsm_server.cc',
'tools/flip_server/http_interface.cc',
'tools/flip_server/http_interface.h',
'tools/flip_server/loadtime_measurement.h',
            'tools/flip_server/mem_cache.cc',
            'tools/flip_server/mem_cache.h',
'tools/flip_server/output_ordering.cc',
'tools/flip_server/output_ordering.h',
'tools/flip_server/ring_buffer.cc',
'tools/flip_server/ring_buffer.h',
'tools/flip_server/simple_buffer.cc',
'tools/flip_server/simple_buffer.h',
'tools/flip_server/sm_connection.cc',
'tools/flip_server/sm_connection.h',
'tools/flip_server/sm_interface.h',
'tools/flip_server/spdy_ssl.cc',
'tools/flip_server/spdy_ssl.h',
'tools/flip_server/spdy_interface.cc',
'tools/flip_server/spdy_interface.h',
'tools/flip_server/spdy_util.cc',
'tools/flip_server/spdy_util.h',
'tools/flip_server/streamer_interface.cc',
'tools/flip_server/streamer_interface.h',
'tools/flip_server/string_piece_utils.h',
],
},
{
'target_name': 'quic_library',
'type': 'static_library',
'dependencies': [
'../base/base.gyp:base',
'../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
'../build/temp_gyp/googleurl.gyp:googleurl',
'../third_party/openssl/openssl.gyp:openssl',
'flip_balsa_and_epoll_library',
'net',
],
'sources': [
'tools/quic/quic_client.cc',
'tools/quic/quic_client.h',
'tools/quic/quic_client_session.cc',
'tools/quic/quic_client_session.h',
'tools/quic/quic_dispatcher.h',
'tools/quic/quic_dispatcher.cc',
'tools/quic/quic_epoll_clock.cc',
'tools/quic/quic_epoll_clock.h',
'tools/quic/quic_epoll_connection_helper.cc',
'tools/quic/quic_epoll_connection_helper.h',
'tools/quic/quic_in_memory_cache.cc',
'tools/quic/quic_in_memory_cache.h',
'tools/quic/quic_packet_writer.h',
'tools/quic/quic_reliable_client_stream.cc',
'tools/quic/quic_reliable_client_stream.h',
'tools/quic/quic_reliable_server_stream.cc',
'tools/quic/quic_reliable_server_stream.h',
'tools/quic/quic_server.cc',
'tools/quic/quic_server.h',
'tools/quic/quic_server_session.cc',
'tools/quic/quic_server_session.h',
'tools/quic/quic_socket_utils.cc',
'tools/quic/quic_socket_utils.h',
'tools/quic/quic_spdy_client_stream.cc',
'tools/quic/quic_spdy_client_stream.h',
'tools/quic/quic_spdy_server_stream.cc',
'tools/quic/quic_spdy_server_stream.h',
'tools/quic/quic_time_wait_list_manager.h',
'tools/quic/quic_time_wait_list_manager.cc',
'tools/quic/spdy_utils.cc',
'tools/quic/spdy_utils.h',
],
},
{
'target_name': 'quic_client',
'type': 'executable',
'dependencies': [
'../base/base.gyp:base',
'../third_party/openssl/openssl.gyp:openssl',
'net',
'quic_library',
],
'sources': [
'tools/quic/quic_client_bin.cc',
],
},
{
'target_name': 'quic_server',
'type': 'executable',
'dependencies': [
'../base/base.gyp:base',
'../third_party/openssl/openssl.gyp:openssl',
'net',
'quic_library',
],
'sources': [
'tools/quic/quic_server_bin.cc',
],
},
{
'target_name': 'quic_unittests',
'type': '<(gtest_target_type)',
'dependencies': [
'../base/base.gyp:test_support_base',
'../testing/gmock.gyp:gmock',
'../testing/gtest.gyp:gtest',
'net',
'quic_library',
],
'sources': [
'quic/test_tools/crypto_test_utils.cc',
'quic/test_tools/crypto_test_utils.h',
'quic/test_tools/mock_clock.cc',
'quic/test_tools/mock_clock.h',
'quic/test_tools/mock_random.cc',
'quic/test_tools/mock_random.h',
'quic/test_tools/simple_quic_framer.cc',
'quic/test_tools/simple_quic_framer.h',
'quic/test_tools/quic_connection_peer.cc',
'quic/test_tools/quic_connection_peer.h',
'quic/test_tools/quic_framer_peer.cc',
'quic/test_tools/quic_framer_peer.h',
'quic/test_tools/quic_session_peer.cc',
'quic/test_tools/quic_session_peer.h',
'quic/test_tools/quic_test_utils.cc',
'quic/test_tools/quic_test_utils.h',
'quic/test_tools/reliable_quic_stream_peer.cc',
'quic/test_tools/reliable_quic_stream_peer.h',
'tools/flip_server/simple_buffer.cc',
'tools/flip_server/simple_buffer.h',
'tools/quic/end_to_end_test.cc',
'tools/quic/quic_client_session_test.cc',
'tools/quic/quic_dispatcher_test.cc',
'tools/quic/quic_epoll_clock_test.cc',
'tools/quic/quic_epoll_connection_helper_test.cc',
'tools/quic/quic_reliable_client_stream_test.cc',
'tools/quic/quic_reliable_server_stream_test.cc',
'tools/quic/test_tools/http_message_test_utils.cc',
'tools/quic/test_tools/http_message_test_utils.h',
'tools/quic/test_tools/mock_epoll_server.cc',
'tools/quic/test_tools/mock_epoll_server.h',
'tools/quic/test_tools/quic_test_client.cc',
'tools/quic/test_tools/quic_test_client.h',
'tools/quic/test_tools/quic_test_utils.cc',
'tools/quic/test_tools/quic_test_utils.h',
'tools/quic/test_tools/run_all_unittests.cc',
],
}
]
}],
['OS=="android"', {
'targets': [
{
'target_name': 'net_jni_headers',
'type': 'none',
'sources': [
'android/java/src/org/chromium/net/AndroidKeyStore.java',
'android/java/src/org/chromium/net/AndroidNetworkLibrary.java',
'android/java/src/org/chromium/net/GURLUtils.java',
'android/java/src/org/chromium/net/NetworkChangeNotifier.java',
'android/java/src/org/chromium/net/ProxyChangeListener.java',
],
'variables': {
'jni_gen_package': 'net',
},
'direct_dependent_settings': {
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)/net',
],
},
'includes': [ '../build/jni_generator.gypi' ],
},
{
'target_name': 'net_test_jni_headers',
'type': 'none',
'sources': [
'android/javatests/src/org/chromium/net/AndroidKeyStoreTestUtil.java',
],
'variables': {
'jni_gen_package': 'net',
},
'direct_dependent_settings': {
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)/net',
],
},
'includes': [ '../build/jni_generator.gypi' ],
},
{
'target_name': 'net_java',
'type': 'none',
'variables': {
'java_in_dir': '../net/android/java',
},
'dependencies': [
'../base/base.gyp:base',
'cert_verify_result_android_java',
'certificate_mime_types_java',
'net_errors_java',
'private_key_types_java',
],
'includes': [ '../build/java.gypi' ],
},
{
'target_name': 'net_java_test_support',
'type': 'none',
'variables': {
'java_in_dir': '../net/test/android/javatests',
},
'includes': [ '../build/java.gypi' ],
},
{
'target_name': 'net_javatests',
'type': 'none',
'variables': {
'java_in_dir': '../net/android/javatests',
},
'dependencies': [
'../base/base.gyp:base',
'../base/base.gyp:base_java_test_support',
'net_java',
],
'includes': [ '../build/java.gypi' ],
},
{
'target_name': 'net_errors_java',
'type': 'none',
'sources': [
'android/java/NetError.template',
],
'variables': {
'package_name': 'org/chromium/net',
'template_deps': ['base/net_error_list.h'],
},
'includes': [ '../build/android/java_cpp_template.gypi' ],
},
{
'target_name': 'certificate_mime_types_java',
'type': 'none',
'sources': [
'android/java/CertificateMimeType.template',
],
'variables': {
'package_name': 'org/chromium/net',
'template_deps': ['base/mime_util_certificate_type_list.h'],
},
'includes': [ '../build/android/java_cpp_template.gypi' ],
},
{
'target_name': 'cert_verify_result_android_java',
'type': 'none',
'sources': [
'android/java/CertVerifyResultAndroid.template',
],
'variables': {
'package_name': 'org/chromium/net',
'template_deps': ['android/cert_verify_result_android_list.h'],
},
'includes': [ '../build/android/java_cpp_template.gypi' ],
},
{
'target_name': 'private_key_types_java',
'type': 'none',
'sources': [
'android/java/PrivateKeyType.template',
],
'variables': {
'package_name': 'org/chromium/net',
'template_deps': ['android/private_key_type_list.h'],
},
'includes': [ '../build/android/java_cpp_template.gypi' ],
},
],
}],
# Special target to wrap a gtest_target_type==shared_library
# net_unittests into an android apk for execution.
# See base.gyp for TODO(jrg)s about this strategy.
['OS == "android" and gtest_target_type == "shared_library"', {
'targets': [
{
'target_name': 'net_unittests_apk',
'type': 'none',
'dependencies': [
'net_java',
'net_javatests',
'net_unittests',
],
'variables': {
'test_suite_name': 'net_unittests',
'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)net_unittests<(SHARED_LIB_SUFFIX)',
},
'includes': [ '../build/apk_test.gypi' ],
},
],
}],
['test_isolation_mode != "noop"', {
'targets': [
{
'target_name': 'net_unittests_run',
'type': 'none',
'dependencies': [
'net_unittests',
],
'includes': [
'net_unittests.isolate',
],
'actions': [
{
'action_name': 'isolate',
'inputs': [
'net_unittests.isolate',
'<@(isolate_dependency_tracked)',
],
'outputs': [
'<(PRODUCT_DIR)/net_unittests.isolated',
],
'action': [
'python',
'../tools/swarm_client/isolate.py',
'<(test_isolation_mode)',
'--outdir', '<(test_isolation_outdir)',
'--variable', 'PRODUCT_DIR', '<(PRODUCT_DIR)',
'--variable', 'OS', '<(OS)',
'--result', '<@(_outputs)',
'--isolate', 'net_unittests.isolate',
],
},
],
},
],
}],
],
}
| 39.074247 | 106 | 0.589587 |
ed88b978abfea5e45767847b3da18555a2c06112 | 2,268 | py | Python | python/clx/analytics/detector.py | mdemoret-nv/clx | 3737706187d8f5720561e10b85cbd638c77b9267 | ["Apache-2.0"] | null | null | null | python/clx/analytics/detector.py | mdemoret-nv/clx | 3737706187d8f5720561e10b85cbd638c77b9267 | ["Apache-2.0"] | null | null | null | python/clx/analytics/detector.py | mdemoret-nv/clx | 3737706187d8f5720561e10b85cbd638c77b9267 | ["Apache-2.0"] | null | null | null |
import logging
import torch
import torch.nn as nn
from abc import ABC, abstractmethod
log = logging.getLogger(__name__)
class Detector(ABC):
    # NOTE: extraction dropped the class statement and most of the class
    # body from this record; the Detector(ABC) wrapper is reconstructed
    # from the file name and the `abc` import above.

    def save_model(self, file_path):
        """This function saves the model to the given location.

        :param file_path: File path to save the model to.
        :type file_path: string
        """
        torch.save(self.model, file_path)

    def leverage_model(self, model):
        """This function leverages a pre-built model by setting parallelism
        parameters.

        :param model: Model instance.
        :type model: RNNClassifier
        """
        self.__model = model
        self.__set_parallelism()
        self.__set_optimizer()
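    # --- The remainder of the class is elided in this record. The members
    # below are a minimal sketch reconstructed from the calls above (an
    # assumption, not the original CLX implementation).

    @property
    def model(self):
        # Backing attribute populated by leverage_model().
        return self.__model

    @abstractmethod
    def train_model(self, *args, **kwargs):
        # Subclasses implement the actual training loop (hypothetical
        # signature).
        ...

    def __set_parallelism(self):
        # Wrap the model in DataParallel when several GPUs are visible.
        if torch.cuda.device_count() > 1:
            self.__model = nn.DataParallel(self.__model)

    def __set_optimizer(self):
        # Hypothetical default optimizer over the leveraged model's
        # parameters.
        self.__optimizer = torch.optim.Adam(self.__model.parameters())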
| 25.772727 | 76 | 0.618166 |
ed89a6742f6b5bd9686166ceba6ffc217d6833e0 | 5,366 | py | Python | street_score/project/models.py | openplans/streetscore | ea27f70399b070d3199f236d99575c5962943d67 | ["MIT"] | 4 | 2017-01-02T22:30:44.000Z | 2019-10-08T20:50:02.000Z | street_score/project/models.py | openplans/streetscore | ea27f70399b070d3199f236d99575c5962943d67 | ["MIT"] | 3 | 2020-02-11T21:42:47.000Z | 2021-06-10T17:28:28.000Z | street_score/project/models.py | openplans/streetscore | ea27f70399b070d3199f236d99575c5962943d67 | ["MIT"] | 2 | 2017-03-09T12:24:57.000Z | 2019-12-20T20:39:24.000Z |
import math
import random
from django.db import models
# NOTE: the enclosing class is elided in this record -- this method
# references `self` and the `Criterion` model, both defined elsewhere in
# the original file; the `math`/`random` imports above serve code that is
# likewise not shown.
def init_questions(self):
    """
    Load a set of questions at random.
    """
    all_questions = (
        Criterion.objects.all()
        .annotate(num_ratings=models.Count('ratings'))
    )
    self.__questions = all_questions
    return self.__questions
| 28.391534 | 89 | 0.617965 |
ed8c04e174410b92850aae3e034c73bb05a4abae | 4,351 | py | Python | src/selfdroid/appstorage/crud/AppAdder.py | vitlabuda/selfdroid-web-app | 9eac9ee2c34038de13e179b6afb3d530a086e7b2 | ["Apache-2.0", "BSD-3-Clause"] | 1 | 2022-03-13T14:57:04.000Z | 2022-03-13T14:57:04.000Z | src/selfdroid/appstorage/crud/AppAdder.py | vitlabuda/selfdroid-web-app | 9eac9ee2c34038de13e179b6afb3d530a086e7b2 | ["Apache-2.0", "BSD-3-Clause"] | null | null | null | src/selfdroid/appstorage/crud/AppAdder.py | vitlabuda/selfdroid-web-app | 9eac9ee2c34038de13e179b6afb3d530a086e7b2 | ["Apache-2.0", "BSD-3-Clause"] | null | null | null |
# SPDX-License-Identifier: BSD-3-Clause
#
# Copyright (c) 2021 Vít Labuda. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
# following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
# disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import sqlalchemy.exc
from selfdroid.appstorage.AppMetadata import AppMetadata
from selfdroid.appstorage.AppMetadataDBModel import AppMetadataDBModel
from selfdroid.appstorage.AppStorageConsistencyEnsurer import AppStorageConsistencyEnsurer
from selfdroid.appstorage.apk.APKParser import APKParser
from selfdroid.appstorage.apk.ParsedAPK import ParsedAPK
from selfdroid.appstorage.crud.AppAdderException import AppAdderException
from selfdroid.web.WebStatusMessageCollector import WebStatusMessageCollector
from selfdroid import db
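# NOTE: the AppAdder class itself is elided in this record. Judging from
# the imports above (an inference, not the original code): it parses an
# uploaded APK (APKParser -> ParsedAPK), persists the resulting AppMetadata
# through SQLAlchemy (AppMetadataDBModel, db), signals failures with
# AppAdderException / WebStatusMessageCollector, and finishes by running
# AppStorageConsistencyEnsurer to keep disk and database in sync.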
| 44.397959 | 234 | 0.75385 |
ed8c508acbabfde1092353a8acaec8aac8951535 | 47 | py | Python | library/libvirt_filter.py | bkmeneguello/ansible-role-libvirt | e7f82077b1fd4c2ec5afa463973ecde599209549 | ["MIT"] | 1 | 2019-02-19T19:41:36.000Z | 2019-02-19T19:41:36.000Z | library/libvirt_filter.py | bkmeneguello/ansible-role-libvirt | e7f82077b1fd4c2ec5afa463973ecde599209549 | ["MIT"] | null | null | null | library/libvirt_filter.py | bkmeneguello/ansible-role-libvirt | e7f82077b1fd4c2ec5afa463973ecde599209549 | ["MIT"] | null | null | null |
# TODO: https://libvirt.org/formatnwfilter.html
| 47 | 47 | 0.787234 |
71ea1c59255a1948249d1ed69284c07777e83df9 | 669 | py | Python | estafeta/core/__init__.py | Solunest/pyestafeta | cd24cea4973f5184f4cc7e72a653de8b22e32f69 | ["MIT"] | null | null | null | estafeta/core/__init__.py | Solunest/pyestafeta | cd24cea4973f5184f4cc7e72a653de8b22e32f69 | ["MIT"] | null | null | null | estafeta/core/__init__.py | Solunest/pyestafeta | cd24cea4973f5184f4cc7e72a653de8b22e32f69 | ["MIT"] | null | null | null |
from estafeta.core.client import EstafetaClient
user = None
password = None
id = None
account_number = None
production = None
from estafeta.core.error import EstafetaWrongData, EstafetaEmptyField
__url_label__ = [
    'https://labelqa.estafeta.com/EstafetaLabel20/services/EstafetaLabelWS?wsdl',
    'https://label.estafeta.com/EstafetaLabel20/services/EstafetaLabelWS?wsdl',
]

__url_tracking__ = [
    'https://trackingqa.estafeta.com/Service.asmx?wsdl',
    'https://tracking.estafeta.com/Service.asmx?wsdl',
]

__url_quote__ = [
    'http://frecuenciacotizador.estafeta.com/Service.asmx?wsdl',
    'http://frecuenciacotizador.estafeta.com/Service.asmx?wsdl',
]
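# Usage sketch (an assumption -- not part of this record): callers populate
# the module-level credentials above and pick the QA (index 0) or
# production (index 1) endpoint from each URL pair, e.g.
#
#   import estafeta.core as core
#   core.user, core.password = 'demo-user', 'demo-pass'
#   core.production = True
#   label_url = core.__url_label__[1 if core.production else 0]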
| 25.730769 | 81 | 0.762332 |
71eac23074999fc487a373bc24fa920adcc09f2f | 355 | py | Python | yunionclient/api/flavors.py | tb365/mcclient_python | 06647e7496b9e2c3aeb5ade1276c81871063159b | ["Apache-2.0"] | 3 | 2021-09-22T11:34:08.000Z | 2022-03-13T04:55:17.000Z | yunionclient/api/flavors.py | xhw20190116/python_yunionsdk | eb7c8c08300d38dac204ec4980a775abc9c7083a | ["Apache-2.0"] | 13 | 2019-06-06T08:25:41.000Z | 2021-07-16T07:26:10.000Z | yunionclient/api/flavors.py | xhw20190116/python_yunionsdk | eb7c8c08300d38dac204ec4980a775abc9c7083a | ["Apache-2.0"] | 7 | 2019-03-31T05:43:36.000Z | 2021-03-04T09:59:05.000Z |
from yunionclient.common import base
| 35.5 | 78 | 0.659155 |
71eb4d213edb33b0bf4b80a95a7ac788fefb3194 | 892 | py | Python | char_map.py | rakib313/Bangla-End2End-Speech-Recognition | 67e776841bf2bb3f108e94d0567dc707497605ff | ["MIT"] | null | null | null | char_map.py | rakib313/Bangla-End2End-Speech-Recognition | 67e776841bf2bb3f108e94d0567dc707497605ff | ["MIT"] | null | null | null | char_map.py | rakib313/Bangla-End2End-Speech-Recognition | 67e776841bf2bb3f108e94d0567dc707497605ff | ["MIT"] | null | null | null |
"""
Defines two dictionaries for converting
between text and integer sequences.
"""
char_map_str = """
' 0
<SPACE> 1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
v 70
b 71
s 72
73
2 74
0 75
1 76
4 77
f 78
o 79
t 80
a 81
l 82
w 83
r 84
d 85
c 86
u 87
p 88
n 89
g 90
91
i 92
z 93
m 94
e 95
96
h 97
x 98
3 99
5 100
y 101
9 102
103
j 104
105
8 106
107
k 108
109
"""
# the "blank" character is mapped to 28
char_map = {}
index_map = {}
for line in char_map_str.strip().split('\n'):
    ch, index = line.split()
    char_map[ch] = int(index)
    index_map[int(index)+1] = ch
index_map[2] = ' '
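# Usage sketch (an assumption -- this helper is not part of this record,
# but shows how the two dictionaries are typically used):
def text_to_int_sequence(text):
    # Map a transcript onto integer labels; spaces go through <SPACE>.
    return [char_map['<SPACE>'] if c == ' ' else char_map[c] for c in text]
# index_map inverts the mapping with labels shifted by one, matching the
# `int(index) + 1` offset used when it was built above.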
| 7.023622 | 45 | 0.600897 |
71eb86a2745c8b7f4f02262f1b986e3f1a2ff12c | 238 | py | Python | app.py | MaggieChege/New_App | 75a2be55d50cf21305036c875af2120607ac33c0 | ["MIT"] | null | null | null | app.py | MaggieChege/New_App | 75a2be55d50cf21305036c875af2120607ac33c0 | ["MIT"] | null | null | null | app.py | MaggieChege/New_App | 75a2be55d50cf21305036c875af2120607ac33c0 | ["MIT"] | null | null | null |
from flask import Blueprint
from flask_restful import Api
# from restful import Api
from resources.Hello import CategoryResource
api_bp = Blueprint('api', __name__)
api = Api(api_bp)
# Route
api.add_resource(CategoryResource, '/Hello')
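# Usage sketch (an assumption -- the application factory is not part of
# this record): the blueprint is registered on a Flask app, after which
# CategoryResource serves <prefix>/Hello.
#
#   from flask import Flask
#   app = Flask(__name__)
#   app.register_blueprint(api_bp, url_prefix='/api')
#   # GET /api/Hello -> CategoryResource.get()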
| 21.636364 | 44 | 0.794118 |
71ebf7fd79d9cbf3e546f3b0a0480b99be5ed04d | 3,549 | py | Python | websockets.py | ejojmjn/indiana-phone | 5d666ac651d3e02291806f24c265564002912e00 | ["MIT"] | null | null | null | websockets.py | ejojmjn/indiana-phone | 5d666ac651d3e02291806f24c265564002912e00 | ["MIT"] | null | null | null | websockets.py | ejojmjn/indiana-phone | 5d666ac651d3e02291806f24c265564002912e00 | ["MIT"] | null | null | null |
#from gevent import monkey
#monkey.patch_all()
from flask import Flask, render_template  # stdlib json (imported below) is used for serialization
from flask_socketio import SocketIO, emit
from pydbus import SystemBus
from gi.repository import GLib
import threading
import json
app = Flask(__name__)
app.config['SECRET_KEY'] = 'secret!'
socketio = SocketIO(app, async_mode='threading')
#socketio = SocketIO(app)
#Message: (':1.654', '/hfp/org/bluez/hci0/dev_94_65_2D_84_61_99', 'org.ofono.Modem', 'PropertyChanged', ('Powered', False))
#Data: Powered
bus = SystemBus()
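# NOTE: the body of dbus_monitor is elided in this record. The sketch below
# is an assumption based on the imports and the signal format documented in
# the comments above: it subscribes to ofono PropertyChanged signals via
# pydbus and relays them to Socket.IO clients (the 'modem' event name is
# hypothetical).
def dbus_monitor():
    def on_property_changed(sender, obj, iface, signal, params):
        # params arrives as e.g. ('Powered', False) -- see the comment above.
        prop, value = params
        socketio.emit('modem', json.dumps({'property': prop, 'value': value}))

    bus.subscribe(iface='org.ofono.Modem',
                  signal='PropertyChanged',
                  signal_fired=on_property_changed)
    GLib.MainLoop().run()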
if __name__ == '__main__':
    t = threading.Thread(target=dbus_monitor)
    t.daemon = True
    t.start()
    socketio.run(app, host='0.0.0.0', port=5001)
| 32.263636 | 151 | 0.641589 |
71ec7e1ab519fe39c3c2b69f2a497fd39095d1ca | 15,524 | py | Python | tests/pytests/test_tags.py | wayn111/RediSearch | 897b2de35988b84851dd8380c614a21ad8da7c0f | ["BSD-3-Clause", "Ruby", "Apache-2.0", "MIT"] | null | null | null | tests/pytests/test_tags.py | wayn111/RediSearch | 897b2de35988b84851dd8380c614a21ad8da7c0f | ["BSD-3-Clause", "Ruby", "Apache-2.0", "MIT"] | null | null | null | tests/pytests/test_tags.py | wayn111/RediSearch | 897b2de35988b84851dd8380c614a21ad8da7c0f | ["BSD-3-Clause", "Ruby", "Apache-2.0", "MIT"] | null | null | null |
# -*- coding: utf-8 -*-
from includes import *
from common import *
| 42.184783 | 142 | 0.523448 |