Dataset schema (39 columns; each record below lists these fields in this order, with the full source file in the `content` column; ⌀ marks nullable columns):

| column | dtype | range | nulls |
|---|---|---|---|
| hexsha | string | length 40 | |
| size | int64 | 5 to 2.06M | |
| ext | string | 10 classes | |
| lang | string | 1 class | |
| max_stars_repo_path | string | length 3 to 248 | |
| max_stars_repo_name | string | length 5 to 125 | |
| max_stars_repo_head_hexsha | string | length 40 to 78 | |
| max_stars_repo_licenses | list | 1 to 10 items | |
| max_stars_count | int64 | 1 to 191k | ⌀ |
| max_stars_repo_stars_event_min_datetime | string | length 24 | ⌀ |
| max_stars_repo_stars_event_max_datetime | string | length 24 | ⌀ |
| max_issues_repo_path | string | length 3 to 248 | |
| max_issues_repo_name | string | length 5 to 125 | |
| max_issues_repo_head_hexsha | string | length 40 to 78 | |
| max_issues_repo_licenses | list | 1 to 10 items | |
| max_issues_count | int64 | 1 to 67k | ⌀ |
| max_issues_repo_issues_event_min_datetime | string | length 24 | ⌀ |
| max_issues_repo_issues_event_max_datetime | string | length 24 | ⌀ |
| max_forks_repo_path | string | length 3 to 248 | |
| max_forks_repo_name | string | length 5 to 125 | |
| max_forks_repo_head_hexsha | string | length 40 to 78 | |
| max_forks_repo_licenses | list | 1 to 10 items | |
| max_forks_count | int64 | 1 to 105k | ⌀ |
| max_forks_repo_forks_event_min_datetime | string | length 24 | ⌀ |
| max_forks_repo_forks_event_max_datetime | string | length 24 | ⌀ |
| content | string | length 5 to 2.06M | |
| avg_line_length | float64 | 1 to 1.02M | |
| max_line_length | int64 | 3 to 1.03M | |
| alphanum_fraction | float64 | 0 to 1 | |
| count_classes | int64 | 0 to 1.6M | |
| score_classes | float64 | 0 to 1 | |
| count_generators | int64 | 0 to 651k | |
| score_generators | float64 | 0 to 1 | |
| count_decorators | int64 | 0 to 990k | |
| score_decorators | float64 | 0 to 1 | |
| count_async_functions | int64 | 0 to 235k | |
| score_async_functions | float64 | 0 to 1 | |
| count_documentation | int64 | 0 to 1.04M | |
| score_documentation | float64 | 0 to 1 | |
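As a quick illustration of how records with this schema can be consumed, here is a minimal sketch using pandas; the file name and the 0.5 threshold are assumptions for the example, not part of the dataset.

import pandas as pd

# Hypothetical local snapshot of the dataset; the file name is an assumption.
df = pd.read_parquet("sample.parquet")

# score_documentation is a 0-1 fraction per the schema above.
documented = df[df["score_documentation"] > 0.5]

# Nullable columns (marked with ⌀ above) need a null check before use.
starred = documented[documented["max_stars_count"].notna()]
print(starred[["max_stars_repo_name", "max_stars_count"]].head())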
f9b5c8efc58630f49b1fe78511366116237e9554 | 471 | py | Python | tests/conditions/operators/test_set_membership_operator.py | rusintez/flipper-client | cd00ae1a3582c5cb7e661c5aa9b8a7b65b35a9e0 | ["Apache-2.0"] | 82 | 2019-04-03T16:09:04.000Z | 2022-03-29T23:48:31.000Z | tests/conditions/operators/test_set_membership_operator.py | rusintez/flipper-client | cd00ae1a3582c5cb7e661c5aa9b8a7b65b35a9e0 | ["Apache-2.0"] | 17 | 2019-04-16T17:17:36.000Z | 2021-02-25T22:06:01.000Z | tests/conditions/operators/test_set_membership_operator.py | rusintez/flipper-client | cd00ae1a3582c5cb7e661c5aa9b8a7b65b35a9e0 | ["Apache-2.0"] | 12 | 2019-07-29T20:07:28.000Z | 2022-03-29T21:10:15.000Z |
import unittest

from flipper.conditions.operators.set_membership_operator import SetMembershipOperator


class TestCompare(unittest.TestCase):
    def test_returns_true_when_expected_is_in_actual(self):
        operator = SetMembershipOperator()
        self.assertTrue(operator.compare(1, [1, 2, 3]))

    def test_returns_false_when_expected_is_not_in_actual(self):
        operator = SetMembershipOperator()
        self.assertFalse(operator.compare(1, [2, 3, 4]))
| 29.4375 | 86 | 0.760085 | 364 | 0.772824 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
f9b86bf8f49332b5a6eedda3d7d77088bf890eb2 | 19,801 | py | Python | pangtreebuild/pangenome/builders/dagmaf2poagraph.py | meoke/PangTreeBuild | 7cafb76df32c559a76ed1d269699dc0e52313312 | ["MIT"] | 2 | 2019-09-04T20:01:28.000Z | 2019-12-23T22:41:57.000Z | pangtreebuild/pangenome/builders/dagmaf2poagraph.py | meoke/PangTreeBuild | 7cafb76df32c559a76ed1d269699dc0e52313312 | ["MIT"] | 2 | 2019-08-10T16:18:01.000Z | 2019-10-28T21:40:23.000Z | pangtreebuild/pangenome/builders/dagmaf2poagraph.py | meoke/PangTreeBuild | 7cafb76df32c559a76ed1d269699dc0e52313312 | ["MIT"] | 2 | 2020-04-23T23:57:52.000Z | 2020-07-12T17:09:02.000Z |
from collections import namedtuple
from typing import Tuple, List, NewType, Optional, Dict
from pangtreebuild.mafgraph.graph import Block
from pangtreebuild.mafgraph.graph.Arc import Arc
from pangtreebuild.mafgraph.mafreader import start_position
from pangtreebuild.pangenome import graph
from pangtreebuild.pangenome import DAGMaf
from pangtreebuild.pangenome.parameters import missings
from pangtreebuild.pangenome.parameters import msa
from pangtreebuild.tools import logprocess
global_logger = logprocess.get_global_logger()
detailed_logger = logprocess.get_logger("details")
class PoagraphBuildException(Exception):
"""Any exception connected with building poagraph."""
pass
MafSequenceID = NewType('MafSequenceID', str)
SequenceInfo = namedtuple('SequenceInfo', ['block_id',
'start',
'strand',
'size',
'srcSize',
'orient'])
Edge = namedtuple('Edge', ['seq_id',
'from_block_id',
'to_block_id',
'last_node_id'])
class _BuildState:
def __init__(self,
initial_nodes: List[graph.Node],
initial_sequences: Dict[msa.SequenceID, graph.Sequence],
initial_edges: Dict[msa.SequenceID, List[Edge]],
seqs_info: Dict[msa.SequenceID, List[SequenceInfo]],
initial_column_id: graph.ColumnID,
fasta_provider: missings.FastaProvider):
self.nodes: List[graph.Node] = initial_nodes
self.sequences: Dict[msa.SequenceID, graph.Sequence] = initial_sequences
self.free_edges: Dict[msa.SequenceID, List[Edge]] = initial_edges
self.seqs_info: Dict[msa.SequenceID, List[SequenceInfo]] = seqs_info
self.column_id: graph.ColumnID = initial_column_id
self.fasta_provider: missings.FastaProvider = fasta_provider
def get_poagraph(dagmaf: DAGMaf.DAGMaf,
fasta_provider: missings.FastaProvider,
metadata: Optional[msa.MetadataCSV]) -> \
Tuple[List[graph.Node], Dict[msa.SequenceID, graph.Sequence]]:
"""Gets poagraph from given dagmaf using fasta_provider and metadata.
Args:
dagmaf: DagMaf that will be converted to Poagraph.
fasta_provider: Provider of symbols missing in DagMaf.
metadata: MetadataCSV.
Returns:
Tuple of poagraph elements.
"""
sequences_in_dagmaf = _get_sequences_ids(dagmaf)
build_state = _BuildState(initial_nodes=[],
initial_sequences=_init_sequences(sequences_in_dagmaf, metadata),
initial_edges=_init_free_edges(sequences_in_dagmaf),
seqs_info=_get_seqs_info(dagmaf, sequences_in_dagmaf),
initial_column_id=graph.ColumnID(-1),
fasta_provider=fasta_provider)
_complement_starting_nodes(build_state)
for i, mafnode in enumerate(dagmaf.dagmaf_nodes):
_process_block(build_state, mafnode)
return build_state.nodes, build_state.sequences
def _get_sequences_ids(dagmaf: DAGMaf.DAGMaf) -> List[msa.SequenceID]:
return list({msa.SequenceID(seq.id)
for block in dagmaf.dagmaf_nodes
for seq in block.alignment})
def _init_sequences(sequences_in_dagmaf: List[msa.SequenceID],
metadata: Optional[msa.MetadataCSV]) -> \
Dict[msa.SequenceID, graph.Sequence]:
metadata_sequences_ids = metadata.get_all_sequences_ids() if metadata else []
initial_sequences = {seq_id: graph.Sequence(seqid=seq_id,
paths=[],
seqmetadata=metadata.get_sequence_metadata(seq_id)
if metadata else {})
for seq_id in set(sequences_in_dagmaf + metadata_sequences_ids)}
return initial_sequences
def _init_free_edges(maf_sequences_ids: List[msa.SequenceID]) -> \
Dict[msa.SequenceID, List[Edge]]:
return {seq_id: [] for seq_id in maf_sequences_ids}
def _get_seqs_info(dagmaf: DAGMaf.DAGMaf,
sequences_in_dagmaf: List[msa.SequenceID]) -> \
Dict[msa.SequenceID, List[SequenceInfo]]:
seqs_info = {seq_id: [] for seq_id in sequences_in_dagmaf}
for n in dagmaf.dagmaf_nodes:
for seq in n.alignment:
seqs_info[msa.SequenceID(seq.id)].append(SequenceInfo(block_id=graph.BlockID(n.id),
start=start_position(seq),
strand=seq.annotations["strand"],
size=seq.annotations["size"],
srcSize=seq.annotations["srcSize"],
orient=n.orient))
absents_sequences: List[msa.SequenceID] = []
for seq_id, seq_info_list in seqs_info.items():
if seq_info_list:
seqs_info[seq_id] = sorted(seq_info_list, key=lambda si: si.start)
else:
absents_sequences.append(seq_id)
for seq_id in absents_sequences:
del seqs_info[seq_id]
return seqs_info
def _complement_starting_nodes(build_state: _BuildState) -> None:
for seq_id, seq_info_list in build_state.seqs_info.items():
first_block_sinfo = seq_info_list[0]
if first_block_sinfo.start != 0:
_complement_sequence_starting_nodes(build_state,
seq_id,
first_block_sinfo)
def _complement_sequence_starting_nodes(build_state: _BuildState,
seq_id: msa.SequenceID,
first_block_sinfo: SequenceInfo) -> \
None:
current_node_id: graph.NodeID = _get_max_node_id(build_state.nodes)
column_id = -first_block_sinfo.start
join_with = None
for i in range(first_block_sinfo.start):
current_node_id += 1
missing_nucleotide = _get_missing_nucleotide(build_state.fasta_provider, seq_id, i)
build_state.nodes += [graph.Node(node_id=current_node_id,
base=missing_nucleotide,
column_id=column_id)]
_add_node_to_sequence(build_state,
seq_id=seq_id,
join_with=join_with,
node_id=current_node_id)
join_with = current_node_id
column_id += 1
build_state.free_edges[seq_id] += [Edge(seq_id=seq_id,
from_block_id=None,
to_block_id=first_block_sinfo.block_id,
last_node_id=current_node_id)]
def _get_max_node_id(nodes: List[graph.Node]) -> graph.NodeID:
return graph.NodeID(len(nodes) - 1)
def _get_missing_nucleotide(fasta_provider, seq_id: msa.SequenceID, i: int) -> graph.Base:
return fasta_provider.get_base(seq_id, i)
def _add_node_to_sequence(build_state: _BuildState,
seq_id: msa.SequenceID,
join_with: graph.NodeID,
node_id: graph.NodeID) -> None:
if len(build_state.sequences[seq_id].paths) == 0 or join_with is None:
build_state.sequences[seq_id].paths.append(graph.SeqPath([node_id]))
else:
for path in build_state.sequences[seq_id].paths:
if path[-1] == join_with:
path.append(node_id)
return
raise PoagraphBuildException("No path with specified last node id.")
def _process_block(build_state: _BuildState, block: DAGMaf.DAGMafNode):
current_node_id = _get_max_node_id(build_state.nodes)
block_width = len(block.alignment[0].seq)
paths_join_info = _get_paths_join_info(block, build_state.free_edges)
build_state.column_id = _get_max_column_id(build_state.nodes)
for col in range(block_width):
build_state.column_id += 1
sequence_name_to_nucleotide = {MafSequenceID(seq.id): seq[col]
for seq in block.alignment}
nodes_codes = _get_column_nucleotides_sorted_codes(sequence_name_to_nucleotide)
column_nodes_ids = [current_node_id + i + 1 for i, _ in enumerate(nodes_codes)]
for i, nucl in enumerate(nodes_codes):
current_node_id += 1
maf_seqs_id = [seq_id for seq_id, n in sequence_name_to_nucleotide.items() if n == nucl]
build_state.nodes += [graph.Node(node_id=current_node_id,
base=graph.Base(nucl),
aligned_to=_get_next_aligned_node_id(i, column_nodes_ids),
column_id=build_state.column_id,
block_id=block.id)]
for maf_seq_id in maf_seqs_id:
seq_id = msa.SequenceID(maf_seq_id)
_add_node_to_sequence(build_state, seq_id, paths_join_info[seq_id], current_node_id)
paths_join_info[seq_id] = current_node_id
_add_block_out_edges_to_free_edges(build_state, block, paths_join_info)
_manage_endings(build_state, block, paths_join_info)
def _get_paths_join_info(block: Block,
free_edges: Dict[msa.SequenceID, List[Edge]]) -> \
Dict[msa.SequenceID, Optional[graph.NodeID]]:
paths_join_info: Dict[msa.SequenceID, Optional[graph.NodeID]] = dict()
for seq in block.alignment:
seq_id = msa.SequenceID(seq.id)
paths_join_info[seq_id] = None
for i, edge in enumerate(free_edges[seq_id]):
if edge.to_block_id == block.id:
paths_join_info[seq_id] = edge.last_node_id
return paths_join_info
def _get_max_column_id(nodes: List[graph.Node]) -> graph.ColumnID:
current_columns_ids = [node.column_id for node in nodes]
return max(current_columns_ids) if current_columns_ids \
else graph.ColumnID(-1)
def _get_column_nucleotides_sorted_codes(seq_to_nucl: Dict[msa.SequenceID, str]) -> \
List[str]:
return sorted(
set(
[nucleotide
for nucleotide
in seq_to_nucl.values()
if nucleotide != '-']))
def _get_next_aligned_node_id(current_column_i, column_nodes_ids) -> \
Optional[graph.NodeID]:
if len(column_nodes_ids) > 1:
return column_nodes_ids[(current_column_i + 1) % len(column_nodes_ids)]
return None
def _add_block_out_edges_to_free_edges(build_state: _BuildState,
block: Block,
join_info: Dict[msa.SequenceID, graph.NodeID]):
for edge in block.out_edges:
_ = _get_correct_edge_type(edge)
for seq in edge.sequences:
seq_id = msa.SequenceID(seq[0].seq_id)
last_node_id = _complement_sequence_middles_if_needed(build_state=build_state,
block=block,
edge=edge,
seq=seq,
last_node_id=join_info[seq_id])
if last_node_id is not None:
build_state.free_edges[seq_id].append(Edge(seq_id=seq_id,
from_block_id=block.id,
to_block_id=edge.to,
last_node_id=last_node_id))
def _get_correct_edge_type(edge: Arc) -> Tuple[int, int]:
return edge.edge_type
def _complement_sequence_middles_if_needed(build_state: _BuildState,
block: Block,
edge: Arc,
seq,
last_node_id: graph.NodeID):
seq_id = msa.SequenceID(seq[0].seq_id)
left_block_sinfo, right_block_sinfo = _get_edge_sinfos(seqs_info=build_state.seqs_info,
from_block_id=block.id,
edge=edge,
seq_id=seq_id)
if _complementation_not_needed(left_block_sinfo, right_block_sinfo):
if edge.edge_type == (1, -1):
return last_node_id
else:
return None
else:
current_node_id = _get_max_node_id(build_state.nodes)
column_id = build_state.column_id
if left_block_sinfo.start < right_block_sinfo.start:
last_pos = left_block_sinfo.start + left_block_sinfo.size - 1
next_pos = right_block_sinfo.start
else:
last_pos = right_block_sinfo.start + right_block_sinfo.size - 1
next_pos = left_block_sinfo.start
join_with = last_node_id if _should_join_with_last_node(edge.edge_type) else None
for i in range(last_pos + 1, next_pos):
column_id += 1
current_node_id += 1
missing_nucleotide = _get_missing_nucleotide(build_state.fasta_provider, seq_id, i)
build_state.nodes += [graph.Node(node_id=current_node_id,
base=missing_nucleotide,
aligned_to=None,
column_id=column_id,
block_id=None)]
_add_node_to_sequence(build_state,
seq_id=seq_id,
join_with=join_with,
node_id=current_node_id)
join_with = current_node_id
if _should_join_with_next_node(edge.edge_type):
return current_node_id
else:
return None
def _get_edge_sinfos(seqs_info: Dict[msa.SequenceID, List[SequenceInfo]],
from_block_id: graph.BlockID,
edge: Arc,
seq_id: msa.SequenceID) -> \
Tuple[SequenceInfo, SequenceInfo]:
left_seq_info, right_seq_info = None, None
for sinfo in seqs_info[seq_id]:
if sinfo.block_id == from_block_id:
left_seq_info = sinfo
if sinfo.block_id == edge.to:
right_seq_info = sinfo
if left_seq_info is None or right_seq_info is None:
raise PoagraphBuildException(f"""SequenceInfos for edge cannot be None.
Left block is {left_seq_info},
right block is {right_seq_info}.""")
return left_seq_info, right_seq_info
def _complementation_not_needed(left: SequenceInfo, right: SequenceInfo) -> \
bool:
return left.start + left.size == right.start or \
right.start + right.size == left.start
def _should_join_with_last_node(edge_type: Tuple[int, int]) -> bool:
if edge_type == (1, 1) or edge_type == (1, -1):
return True
elif edge_type == (-1, 1) or edge_type == (-1, -1):
return False
else:
raise PoagraphBuildException("""Incorrect edge type.
Cannot decide if sequence should be joined
with complemented nucleotides.""")
def _should_join_with_next_node(edge_type: Tuple[int, int]) -> bool:
if edge_type == (-1, 1) or edge_type == (1, -1) or edge_type == (-1, -1):
return True
elif edge_type == (1, 1):
return False
else:
raise PoagraphBuildException("""Incorrect edge type. Cannot decide if
complemented nucleotides must be joined
with next block.""")
def _manage_endings(build_state: _BuildState,
block: Block,
join_info: Dict[msa.SequenceID, graph.NodeID]):
sequences_ending_in_this_block = _get_ending_sequences(build_state.seqs_info, block)
for seq_id in sequences_ending_in_this_block:
block_sinfo: SequenceInfo = _get_sinfo(build_state.seqs_info[seq_id], block.id)
if _sequence_not_complete(block_sinfo):
last_node_id = _complement_sequence_middle_nodes(build_state,
seq_id=seq_id,
last_pos=block_sinfo.start + block_sinfo.size-1,
next_pos=block_sinfo.srcSize,
last_node_id=join_info[seq_id])
else:
last_node_id = join_info[seq_id]
build_state.free_edges[seq_id].append(Edge(seq_id=seq_id,
from_block_id=block.id,
to_block_id=None,
last_node_id=last_node_id))
def _get_ending_sequences(seqs_info: Dict[msa.SequenceID, List[SequenceInfo]], block: Block) -> List[msa.SequenceID]:
sequences_ending_in_this_block = []
for seq_id, sinfo_list in seqs_info.items():
last_block_sinfo = sinfo_list[-1]
if last_block_sinfo.block_id == block.id:
sequences_ending_in_this_block.append(seq_id)
return sequences_ending_in_this_block
def _get_sinfo(seq_info: List[SequenceInfo], block_id: int) -> SequenceInfo:
for sinfo in seq_info:
if sinfo.block_id == block_id:
return sinfo
raise PoagraphBuildException(f"No sequences info for given block")
def _sequence_not_complete(last_block_sinfo: SequenceInfo) -> bool:
if last_block_sinfo.strand == 1:
return last_block_sinfo.start + last_block_sinfo.size != last_block_sinfo.srcSize
elif last_block_sinfo.strand == -1:
return last_block_sinfo.start != 0
else:
raise Exception("Unexpected strand value")
def _complement_sequence_middle_nodes(build_state: _BuildState,
seq_id: msa.SequenceID,
last_pos,
next_pos,
last_node_id: graph.NodeID) -> \
graph.NodeID:
current_node_id = _get_max_node_id(build_state.nodes)
column_id = build_state.column_id
join_with = last_node_id
for i in range(last_pos+1, next_pos):
column_id += 1
current_node_id += 1
missing_nucleotide = _get_missing_nucleotide(build_state.fasta_provider, seq_id, i)
build_state.nodes += [graph.Node(node_id=current_node_id,
base=missing_nucleotide,
aligned_to=None,
column_id=column_id,
block_id=None)
]
_add_node_to_sequence(build_state,
seq_id=seq_id,
join_with=join_with,
node_id=current_node_id)
join_with = current_node_id
return current_node_id
| 44.596847 | 117 | 0.56982 | 932 | 0.047068 | 0 | 0 | 0 | 0 | 0 | 0 | 1,148 | 0.057977 |
f9b8816aef7a829b307e2a9d13b933fbcd80a3a1 | 353 | py | Python | tests/utils/test_gzipgen.py | takanabe/cli | 7cbf781d7f286fc4e52ef0980712c28f386e8d09 | ["Apache-2.0"] | 19 | 2021-01-07T06:41:51.000Z | 2022-03-05T08:23:57.000Z | tests/utils/test_gzipgen.py | takanabe/cli | 7cbf781d7f286fc4e52ef0980712c28f386e8d09 | ["Apache-2.0"] | 189 | 2020-11-04T15:38:47.000Z | 2022-03-31T05:02:06.000Z | tests/utils/test_gzipgen.py | takanabe/cli | 7cbf781d7f286fc4e52ef0980712c28f386e8d09 | ["Apache-2.0"] | 7 | 2020-11-20T16:55:51.000Z | 2022-02-01T11:17:30.000Z |
from launchable.utils.gzipgen import compress
import gzip
from unittest import TestCase


class GzippenTest(TestCase):
    def test_compress(self):
        """Basic sanity test of compress()"""
        encoded = b''.join(compress([b'Hello', b' ', b'world']))
        msg = gzip.decompress(encoded)
        print(msg)
        self.assertEqual(msg, b'Hello world')
| 27.153846 | 64 | 0.654391 | 262 | 0.74221 | 0 | 0 | 0 | 0 | 0 | 0 | 64 | 0.181303 |
f9ba31abc310c48d834cebff8aeef25ad2407242 | 138 | py | Python | project_data_app/urls.py | chrisba11/kickstarter_projects | 08a341d01428d20c384546773a853fc7d99a6140 | ["MIT"] | null | null | null | project_data_app/urls.py | chrisba11/kickstarter_projects | 08a341d01428d20c384546773a853fc7d99a6140 | ["MIT"] | null | null | null | project_data_app/urls.py | chrisba11/kickstarter_projects | 08a341d01428d20c384546773a853fc7d99a6140 | ["MIT"] | null | null | null |
from django.urls import path

from .views import project_list_view

urlpatterns = [
    path('', project_list_view, name='project_list'),
]
| 23 | 53 | 0.753623 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 16 | 0.115942 |
f9bd66223ae9916274f6536d0f5a42a245f694b0 | 847 | py | Python | tech/CCI.py | A1eXFei/StockMarket3 | 01000d0731395868c4477982c6ed6c632c9a7427 | ["Apache-2.0"] | null | null | null | tech/CCI.py | A1eXFei/StockMarket3 | 01000d0731395868c4477982c6ed6c632c9a7427 | ["Apache-2.0"] | null | null | null | tech/CCI.py | A1eXFei/StockMarket3 | 01000d0731395868c4477982c6ed6c632c9a7427 | ["Apache-2.0"] | null | null | null |
# -*- coding: UTF-8 -*-
import talib as ta
import numpy as np
from util import StockUtil as su
from tech import StockTechIndicator
class CCI(StockTechIndicator):
def __init__(self):
StockTechIndicator.__init__(self)
def calculate(self, stock_code, date, time_period=14):
cci = 0.0
data = su.get_basic_data(stock_code, date, time_period + 1).sort_index(ascending=False)
if data.shape[0] >= time_period + 1:
cci = round(ta.CCI(data['HIGH'].as_matrix(), data['LOW'].as_matrix(), data['CLOSE'].as_matrix(), time_period)[-1], 3)
if np.isnan(cci) or np.isinf(cci) or np.isneginf(cci):
cci = 0.0
self.save_tech_data(stock_code, date, {'CCI':cci})
return cci
if __name__ == "__main__":
    b = CCI()
    # calculate() expects (stock_code, date, time_period=14), so only the two
    # required arguments are passed here.
    print(b.calculate('chbtc', 'btc_cny'))
| 32.576923 | 129 | 0.641086 | 609 | 0.719008 | 0 | 0 | 0 | 0 | 0 | 0 | 78 | 0.09209 |
f9beae7adf6d3836aaf0918cb0eb1c2fed9c5001 | 938 | py | Python | timeit/timeit_np_ufunc.py | Dennis-van-Gils/DvG_Arduino_lock-in_amp | 4576aa529413c93c6e4d6152802349f5f0c3ee16 | ["MIT"] | 9 | 2020-07-02T15:23:18.000Z | 2022-03-04T03:32:04.000Z | timeit/timeit_np_ufunc.py | Dennis-van-Gils/DvG_Arduino_lock-in_amp | 4576aa529413c93c6e4d6152802349f5f0c3ee16 | ["MIT"] | 3 | 2019-04-22T22:53:56.000Z | 2021-11-02T20:13:43.000Z | timeit/timeit_np_ufunc.py | Dennis-van-Gils/DvG_Arduino_lock-in_amp | 4576aa529413c93c6e4d6152802349f5f0c3ee16 | ["MIT"] | 2 | 2021-11-05T14:34:22.000Z | 2022-03-04T03:32:06.000Z |
# -*- coding: utf-8 -*-
"""
Created on Thu Dec 6 21:22:33 2018
@author: vangi
"""
from timeit import timeit
setup = '''
import numpy as np
np.random.seed(0)
N_array = 200000
a_np = np.random.randn(N_array)
b_np = np.random.randn(N_array)
c_np = np.empty(N_array)
def no_ufunc(a_np, b_np, c_np):
c_np = a_np * b_np
#print(c_np[0])
def ufunc_buffered(a_np, b_np, c_np):
c_np = np.multiply(a_np, b_np)
#print(c_np[0])
def ufunc_unbuffered(a_np, b_np, c_np):
np.multiply(a_np, b_np, out=c_np)
#print(c_np[0])
'''
N = 1000
print("Numpy multiply strategies")
print("no ufunc : %.3f ms" %
(timeit('no_ufunc(a_np, b_np, c_np)', setup=setup, number=N)/N*1000))
print("ufunc buf : %.3f ms" %
(timeit('ufunc_buffered(a_np, b_np, c_np)', setup=setup, number=N)/N*1000))
print("ufunc no buf: %.3f ms" %
(timeit('ufunc_unbuffered(a_np, b_np, c_np)', setup=setup, number=N)/N*1000))
| 22.333333 | 83 | 0.635394 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 705 | 0.751599 |
f9bff3fda8ac0169c8ed57237e81f02a4608ca3c | 2,451 | py | Python | conductor/conductor/api/controllers/validator.py | aalsudais/optf-has | c3e070b6ebc713a571c10d7a5cd87e5053047136 | ["Apache-2.0"] | 4 | 2019-02-14T19:18:09.000Z | 2019-10-21T17:17:59.000Z | conductor/conductor/api/controllers/validator.py | aalsudais/optf-has | c3e070b6ebc713a571c10d7a5cd87e5053047136 | ["Apache-2.0"] | null | null | null | conductor/conductor/api/controllers/validator.py | aalsudais/optf-has | c3e070b6ebc713a571c10d7a5cd87e5053047136 | ["Apache-2.0"] | 4 | 2019-05-09T07:05:54.000Z | 2020-11-20T05:56:47.000Z |
#
# -------------------------------------------------------------------------
# Copyright (c) 2015-2017 AT&T Intellectual Property
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# -------------------------------------------------------------------------
#
from yaml.constructor import ConstructorError
from yaml.nodes import MappingNode
try:
from yaml import CLoader as Loader
except ImportError:
from yaml import Loader
class UniqueKeyLoader(Loader):
"""Unique Key Loader for PyYAML
Ensures no duplicate keys on any given level.
https://gist.github.com/pypt/94d747fe5180851196eb#gistcomment-2084028
"""
DUPLICATE_KEY_PROBLEM_MARK = "found duplicate key"
def construct_mapping(self, node, deep=False):
"""Check for duplicate keys while constructing a mapping."""
if not isinstance(node, MappingNode):
raise ConstructorError(
None, None, "expected a mapping node, but found %s" % node.id,
node.start_mark)
mapping = {}
for key_node, value_node in node.value:
key = self.construct_object(key_node, deep=deep)
try:
hash(key)
except (TypeError) as exc:
raise ConstructorError("while constructing a mapping",
node.start_mark,
"found unacceptable key (%s)" % exc,
key_node.start_mark)
# check for duplicate keys
if key in mapping:
raise ConstructorError("while constructing a mapping",
node.start_mark,
self.DUPLICATE_KEY_PROBLEM_MARK,
key_node.start_mark)
value = self.construct_object(value_node, deep=deep)
mapping[key] = value
return mapping
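if __name__ == "__main__":
    # Minimal usage sketch: parsing YAML that repeats a key through this loader
    # raises ConstructorError instead of silently keeping the last value.
    # The inline YAML string is illustrative only.
    import yaml
    try:
        yaml.load("key: 1\nkey: 2", Loader=UniqueKeyLoader)
    except ConstructorError as exc:
        print("duplicate key rejected:", exc)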
| 38.296875 | 78 | 0.572011 | 1,496 | 0.610363 | 0 | 0 | 0 | 0 | 0 | 0 | 1,159 | 0.472868 |
f9c217fdeaa845a8b5a88bdf4815b175a49ccae7 | 32,310 | py | Python | gamejoltapi.py | bgempire/gamejoltapi | 03a77527e00a67e5990dbc1289c54e280954b712 | ["MIT"] | 1 | 2022-01-18T12:08:58.000Z | 2022-01-18T12:08:58.000Z | gamejoltapi.py | bgempire/gamejoltapi | 03a77527e00a67e5990dbc1289c54e280954b712 | ["MIT"] | null | null | null | gamejoltapi.py | bgempire/gamejoltapi | 03a77527e00a67e5990dbc1289c54e280954b712 | ["MIT"] | 1 | 2021-04-30T11:27:39.000Z | 2021-04-30T11:27:39.000Z |
from urllib.parse import urlencode as _urlencode, quote as _quote
from urllib.request import urlopen as _urlopen
from hashlib import md5 as _md5
from ast import literal_eval as _literal_eval
from collections import OrderedDict as _OrderedDict
_DEBUG = False
class GameJoltDataRequired(Exception):
""" Exception raised when not all required data is provided in the request call.
:param key: The data field name which is required.
:type key: str
"""
def __init__(self, key):
self.key = key
self.message = "Value is required, cannot be None: " + repr(key)
super().__init__(self.message)
class GameJoltDataCollision(Exception):
""" Exception raised when a value cannot be provided along with another.
:param keys: The data field names which collided.
:type keys: list
"""
def __init__(self, keys):
self.keys = keys
self.message = "Values cannot be used together: " + ", ".join([repr(k) for k in self.keys])
super().__init__(self.message)
class GameJoltAPI:
""" The main Game Jolt API class. Aside from the required arguments, most of the
optional arguments are provided to avoid asking for them in every single method.
:param gameId: The game ID. Required in all requests.
:type gameId: int
:param privateKey: The API private key. Required in all requests.
:type privateKey: str
:param username: Username used in some requests. Optional.
:type username: str
:param userToken: User access token used in some requests. Optional.
:type userToken: str
:param responseFormat: The response format of the requests. Can be ``"json"``, ``"xml"``, ``"keypair"`` or ``"dump"``. Optional, defaults to ``"json"``.
:type responseFormat: str
:param submitRequests: If submit the requests or just get the generated URLs from the method calls. Useful to generate URLs for batch requests. Optional, defaults to ``True``.
:type submitRequests: bool
.. py:attribute:: gameId
:type: int
The game ID. Required in all requests.
.. py:attribute:: privateKey
:type: str
The API private key. Required in all requests.
.. py:attribute:: username
:type: str
Username used in some requests. Optional.
.. py:attribute:: userToken
:type: str
User access token used in some requests. Optional.
.. py:attribute:: responseFormat
:type: str
The response format of the requests. Can be ``"json"``, ``"xml"``, ``"keypair"`` or ``"dump"``. Optional, defaults to ``"json"``.
.. py:attribute:: submitRequests
:type: bool
If submit the requests or just get the generated URLs from the method calls. Useful to generate URLs for batch requests. Optional, defaults to ``True``."""
def __init__(self, gameId, privateKey, username=None, userToken=None, responseFormat="json", submitRequests=True):
self.__API_URL = "https://api.gamejolt.com/api/game/v1_2"
self.__RETURN_FORMATS = ["json", "keypair", "dump", "xml"]
self.gameId = str(gameId)
self.privateKey = privateKey
self.username = username
self.userToken = userToken
self.responseFormat = responseFormat if responseFormat in self.__RETURN_FORMATS else "json"
self.submitRequests = submitRequests
self.operations = {
"users/fetch" : self.__API_URL + "/users/" + "?",
"users/auth" : self.__API_URL + "/users/auth/" + "?",
"sessions/open" : self.__API_URL + "/sessions/open/" + "?",
"sessions/ping" : self.__API_URL + "/sessions/ping/" + "?",
"sessions/check" : self.__API_URL + "/sessions/check/" + "?",
"sessions/close" : self.__API_URL + "/sessions/close/" + "?",
"scores/fetch" : self.__API_URL + "/scores/" + "?",
"scores/tables" : self.__API_URL + "/scores/tables/" + "?",
"scores/add" : self.__API_URL + "/scores/add/" + "?",
"scores/get-rank" : self.__API_URL + "/scores/get-rank/" + "?",
"trophies/fetch" : self.__API_URL + "/trophies/" + "?",
"trophies/add-achieved" : self.__API_URL + "/trophies/add-achieved/" + "?",
"trophies/remove-achieved" : self.__API_URL + "/trophies/remove-achieved/" + "?",
"data-store/set" : self.__API_URL + "/data-store/set/" + "?",
"data-store/update" : self.__API_URL + "/data-store/update/" + "?",
"data-store/remove" : self.__API_URL + "/data-store/remove/" + "?",
"data-store/fetch" : self.__API_URL + "/data-store/" + "?",
"data-store/get-keys" : self.__API_URL + "/data-store/get-keys/" + "?",
"friends" : self.__API_URL + "/friends/" + "?",
"time" : self.__API_URL + "/time/" + "?",
"batch" : self.__API_URL + "/batch/" + "?",
}
def _submit(self, operationUrl, data):
orderedData = _OrderedDict()
isBatch = "batch" in operationUrl
if not self.submitRequests and "format" in data.keys():
data.pop("format")
for key in sorted(data.keys()):
orderedData[key] = data[key]
data = orderedData
requestUrls = data.pop("requests") if isBatch else []
requestAsParams = "&".join(["requests[]=" + url for url in requestUrls]) if isBatch else ""
urlParams = _urlencode(data)
urlParams += "&" + requestAsParams if isBatch else ""
urlToSignature = operationUrl + urlParams + self.privateKey
signature = _md5(urlToSignature.encode()).hexdigest()
finalUrl = operationUrl + urlParams + "&signature=" + signature
if self.submitRequests:
if _DEBUG: print("Requesting URL:", finalUrl)
response = _urlopen(finalUrl).read().decode()
if self.responseFormat == "json":
return _literal_eval(response)["response"]
else:
return response
else:
if _DEBUG: print("Generated URL:", finalUrl)
return finalUrl
def _validateRequiredData(self, data):
for key in data.keys():
if data[key] is None:
raise GameJoltDataRequired(key)
return True
def _getValidData(self, data):
validatedData = {}
if self.responseFormat != "json":
validatedData["format"] = self.responseFormat
for key in data.keys():
if data[key] is not None:
validatedData[key] = data[key]
return validatedData
def _processBoolean(self, value):
if value is not None:
return str(value).lower()
# Users
def usersFetch(self, username=None, userId=None):
"""Returns a user's data.
:param username: The username of the user whose data you'd like to fetch.
:type username: str
:param userId: The ID of the user whose data you'd like to fetch.
:type userId: str, int or list
.. note::
- Only one parameter, ``username`` or ``userId``, is required.
- You can pass in multiple user ids by providing a list or separating them with commas in a string (example: ``"13,89,35"``)."""
if type(userId) in (list, tuple, set):
userId = ",".join(userId)
# Required data
data = {
"game_id" : self.gameId
}
if username is not None:
data["username"] = username
elif userId is not None:
data["user_id"] = userId
else:
data["username"] = self.username
self._validateRequiredData(data)
return self._submit(self.operations["users/fetch"], data)
def usersAuth(self):
"""Authenticates the user's information. This should be done before you make
any calls for the user, to make sure the user's credentials (username and
token) are valid."""
# Required data
data = {
"game_id" : self.gameId,
"username" : self.username,
"user_token" : self.userToken
}
self._validateRequiredData(data)
return self._submit(self.operations["users/auth"], data)
# Sessions
def sessionsOpen(self):
"""Opens a game session for a particular user and allows you to tell Game Jolt
that a user is playing your game. You must ping the session to keep it active
and you must close it when you're done with it.
.. note::
You can only have one open session for a user at a time. If you try to open a new session while one is running, the system will close out the current one before opening the new one.
"""
# Required data
data = {
"game_id" : self.gameId,
"username" : self.username,
"user_token" : self.userToken
}
self._validateRequiredData(data)
return self._submit(self.operations["sessions/open"], data)
def sessionsPing(self, status=None):
"""Pings an open session to tell the system that it's still active. If the session
hasn't been pinged within 120 seconds, the system will close the session and you
will have to open another one. It's recommended that you ping about every 30
seconds or so to keep the system from clearing out your session.
You can also let the system know whether the player is in an active or idle state
within your game.
:param status: Sets the status of the session.
:type status: str
.. note::
Valid Values for ``status``:
- ``"active"``: Sets the session to the ``"active"`` state.
- ``"idle"``: Sets the session to the ``"idle"`` state.
"""
# Required data
data = {
"game_id" : self.gameId,
"username" : self.username,
"user_token" : self.userToken
}
# Optional data
optionalData = {
"status" : status # active or idle
}
self._validateRequiredData(data)
data.update(self._getValidData(optionalData))
return self._submit(self.operations["sessions/ping"], data)
def sessionsCheck(self):
"""Checks to see if there is an open session for the user. Can be used to see
if a particular user account is active in the game.
.. note::
This endpoint returns ``"false"`` for the ``"success"`` field when no open session exists. That behaviour is different from other endpoints which use this field to indicate an error state.
"""
# Required data
data = {
"game_id" : self.gameId,
"username" : self.username,
"user_token" : self.userToken
}
self._validateRequiredData(data)
return self._submit(self.operations["sessions/check"], data)
def sessionsClose(self):
"""Closes the active session."""
# Required data
data = {
"game_id" : self.gameId,
"username" : self.username,
"user_token" : self.userToken
}
self._validateRequiredData(data)
return self._submit(self.operations["sessions/close"], data)
# Scores
def scoresFetch(self, limit=None, tableId=None, guest=None, betterThan=None, worseThan=None, thisUser=False):
"""Returns a list of scores either for a user or globally for a game.
:param limit: The number of scores you'd like to return.
:type limit: int
:param tableId: The ID of the score table.
:type tableId: int
:param guest: A guest's name.
:type guest: str
:param betterThan: Fetch only scores better than this score sort value.
:type betterThan: int
:param worseThan: Fetch only scores worse than this score sort value.
:type worseThan: int
:param thisUser: If ``True``, fetch only scores of current user. Else, fetch scores of all users.
:type thisUser: bool
.. note::
- The default value for ``limit`` is ``10`` scores. The maximum amount of scores you can retrieve is ``100``.
- If ``tableId`` is left blank, the scores from the primary score table will be returned.
- Only pass in ``thisUser=True`` if you would like to retrieve scores for just the user set in the class constructor. Leave ``thisUser=False`` and ``guest=None`` to retrieve all scores.
- ``guest`` allows you to fetch scores by a specific guest name. Only pass either the ``thisUser=True`` or the ``guest`` (or none), never both.
- Scores are returned in the order of the score table's sorting direction. e.g. for descending tables the bigger scores are returned first.
"""
# Required data
data = {
"game_id" : self.gameId
}
# Optional data
optionalData = {
"username" : self.username if guest is None and thisUser else None,
"user_token" : self.userToken if guest is None and thisUser else None,
"limit" : limit,
"table_id" : tableId,
"guest" : guest if guest is not None and not thisUser else None,
"better_than" : betterThan,
"worse_than" : worseThan,
}
self._validateRequiredData(data)
data.update(self._getValidData(optionalData))
return self._submit(self.operations["scores/fetch"], data)
def scoresTables(self):
"""Returns a list of high score tables for a game."""
# Required data
data = {
"game_id" : self.gameId
}
self._validateRequiredData(data)
return self._submit(self.operations["scores/tables"], data)
def scoresAdd(self, score, sort, tableId=None, guest=None, extraData=None):
"""Adds a score for a user or guest.
:param score: This is a string value associated with the score. Example: ``"500 Points"``
:type score: str
:param sort: This is a numerical sorting value associated with the score. All sorting will be based on this number. Example: ``500``
:type sort: int
:param tableId: The ID of the score table to submit to.
:type table_id: int
:param guest: The guest's name. Overrides the ``username`` set in the constructor.
:type guest: str
:param extraData: If there's any extra data you would like to store as a string, you can use this variable.
:type extra_data: str
.. note::
- You can either store a score for a user or a guest. If you're storing for a user, you must pass in the ``username`` and ``userToken`` parameters in the class constructor and leave ``guest`` as ``None``. If you're storing for a guest, you must pass in the ``guest`` parameter.
- The ``extraData`` value is only retrievable through the API and your game's dashboard. It's never displayed publicly to users on the site. If there is other data associated with the score such as time played, coins collected, etc., you should definitely include it. It will be helpful in cases where you believe a gamer has illegitimately achieved a high score.
- If ``tableId`` is left blank, the score will be submitted to the primary high score table.
"""
# Required data
data = {
"game_id" : self.gameId,
"score" : score,
"sort" : sort
}
# Optional data
optionalData = {
"username" : self.username if guest is None else None,
"user_token" : self.userToken if guest is None else None,
"table_id" : tableId,
"guest" : guest if guest is not None else None,
"extra_data" : extraData,
}
self._validateRequiredData(data)
data.update(self._getValidData(optionalData))
return self._submit(self.operations["scores/add"], data)
def scoresGetRank(self, sort, tableId=None):
"""Returns the rank of a particular score on a score table.
:param sort: This is a numerical sorting value that is represented by a rank on the score table.
:type sort: int
:param tableId: The ID of the score table from which you want to get the rank.
:type tableId: int
.. note::
- If ``tableId`` is left blank, the ranks from the primary high score table will be returned.
- If the score is not represented by any rank on the score table, the request will return the rank that is closest to the requested score.
"""
# Required data
data = {
"game_id" : self.gameId,
"sort" : sort
}
# Optional data
optionalData = {
"table_id" : tableId,
}
self._validateRequiredData(data)
data.update(self._getValidData(optionalData))
return self._submit(self.operations["scores/get-rank"], data)
# Trophies
def trophiesFetch(self, achieved=None, trophyId=None):
"""Returns one trophy or multiple trophies, depending on the parameters passed in.
:param achieved: Pass in ``True`` to return only the achieved trophies for a user. Pass in ``False`` to return only trophies the user hasn't achieved. Leave blank to retrieve all trophies.
:type achieved: bool
:param trophyId: If you would like to return just one trophy, you may pass the trophy ID with this parameter. If you do, only that trophy will be returned in the response. You may also pass multiple trophy IDs here if you want to return a subset of all the trophies. You do this as a list or a string with comma-separated values in the same way you would for retrieving multiple users (example: ``"13,89,35"``). Passing a ``trophyId`` will ignore the ``achieved`` parameter if it is passed.
:type trophyId: str, int or list
"""
if type(trophyId) in (list, tuple, set):
trophyId = ",".join(trophyId)
# Required data
data = {
"game_id" : self.gameId,
"username" : self.username,
"user_token" : self.userToken
}
# Optional data
optionalData = {
"achieved" : self._processBoolean(achieved) if trophyId is None else None,
"trophy_id" : trophyId
}
self._validateRequiredData(data)
data.update(self._getValidData(optionalData))
return self._submit(self.operations["trophies/fetch"], data)
def trophiesAddAchieved(self, trophyId):
"""Sets a trophy as achieved for a particular user.
:param trophyId: The ID of the trophy to add for the user.
:type trophyId: int
"""
# Required data
data = {
"game_id" : self.gameId,
"username" : self.username,
"user_token" : self.userToken,
"trophy_id" : trophyId
}
self._validateRequiredData(data)
return self._submit(self.operations["trophies/add-achieved"], data)
def trophiesRemoveAchieved(self, trophyId):
"""Remove a previously achieved trophy for a particular user.
:param trophyId: The ID of the trophy to remove from the user.
:type trophyId: int
"""
# Required data
data = {
"game_id" : self.gameId,
"username" : self.username,
"user_token" : self.userToken,
"trophy_id" : trophyId
}
self._validateRequiredData(data)
return self._submit(self.operations["trophies/remove-achieved"], data)
# Data Storage
def dataStoreSet(self, key, data, globalData=False):
"""Sets data in the data store.
:param key: The key of the data item you'd like to set.
:type key: str
:param data: The data you'd like to set.
:type data: str
:param globalData: If set to `True`, ignores ``username`` and ``userToken`` set in constructor and processes global data instead of user data.
:type globalData: bool
.. note::
You can create new data store items by passing in a key that doesn't yet exist in the data store.
.. code-block:: python
# Store on the key "some_global_value" the data "500" in the global data store
result = api.dataStoreSet("some_global_value", "500", globalData=True)
"""
# Required data
data = {
"game_id" : self.gameId,
"key" : key,
"data" : data
}
# Optional data
optionalData = {
"username" : self.username,
"user_token" : self.userToken
}
# Process global data instead of user data
if globalData:
optionalData["username"] = None
optionalData["user_token"] = None
self._validateRequiredData(data)
data.update(self._getValidData(optionalData))
return self._submit(self.operations["data-store/set"], data)
def dataStoreUpdate(self, key, operation, value, globalData=False):
"""Updates data in the data store.
:param key: The key of the data item you'd like to update.
:type key: str
:param operation: The operation you'd like to perform.
:type operation: str
:param value: The value you'd like to apply to the data store item. (See values below.)
:type value: str
:param globalData: If set to `True`, ignores ``username`` and ``userToken`` set in constructor and processes global data instead of user data.
:type globalData: bool
.. note::
Valid Values for ``operation``:
- ``"add"``: Adds the ``value`` to the current data store item.
- ``"subtract"``: Substracts the ``value`` from the current data store item.
- ``"multiply"``: Multiplies the ``value`` by the current data store item.
- ``"divide"``: Divides the current data store item by the ``value``.
- ``"append"``: Appends the ``value`` to the current data store item.
- ``"prepend"``: Prepends the ``value`` to the current data store item.
.. note::
You can only perform mathematic operations on numerical data.
.. code-block:: python
# Adds "100" to "some_global_value" in the global data store
result = api.dataStoreUpdate("some_global_value", "add", "100", globalData=True)
"""
# Required data
data = {
"game_id" : self.gameId,
"key" : key,
"operation" : operation,
"value" : value
}
# Optional data
optionalData = {
"username" : self.username,
"user_token" : self.userToken
}
# Process global data instead of user data
if globalData:
optionalData["username"] = None
optionalData["user_token"] = None
self._validateRequiredData(data)
data.update(self._getValidData(optionalData))
return self._submit(self.operations["data-store/update"], data)
def dataStoreRemove(self, key, globalData=False):
"""Removes data from the data store.
:param key: The key of the data item you'd like to remove.
:type key: str
:param globalData: If set to `True`, ignores ``username`` and ``userToken`` set in constructor and processes global data instead of user data.
:type globalData: bool
.. code-block:: python
# Remove "some_global_value" from global data store
result = api.dataStoreRemove("some_global_value", globalData=True)
"""
# Required data
data = {
"game_id" : self.gameId,
"key" : key
}
# Optional data
optionalData = {
"username" : self.username,
"user_token" : self.userToken
}
# Process global data instead of user data
if globalData:
optionalData["username"] = None
optionalData["user_token"] = None
self._validateRequiredData(data)
data.update(self._getValidData(optionalData))
return self._submit(self.operations["data-store/remove"], data)
def dataStoreFetch(self, key, globalData=False):
"""Returns data from the data store.
:param key: The key of the data item you'd like to fetch.
:type key: str
:param globalData: If set to `True`, ignores ``username`` and ``userToken`` set in constructor and processes global data instead of user data.
:type globalData: bool
.. code-block:: python
# Get "some_global_value" from global data store
result = api.dataStoreFetch("some_global_value", globalData=True)
"""
# Required data
data = {
"game_id" : self.gameId,
"key" : key
}
# Optional data
optionalData = {
"username" : self.username,
"user_token" : self.userToken
}
# Process global data instead of user data
if globalData:
optionalData["username"] = None
optionalData["user_token"] = None
self._validateRequiredData(data)
data.update(self._getValidData(optionalData))
return self._submit(self.operations["data-store/fetch"], data)
def dataStoreGetKeys(self, pattern=None, globalData=False):
"""Returns either all the keys in the game's global data store, or all the keys in a user's data store.
:param pattern: The pattern to apply to the key names in the data store.
:type pattern: str
:param globalData: If set to `True`, ignores ``username`` and ``userToken`` set in constructor and processes global data instead of user data.
:type globalData: bool
.. note::
- If you apply a pattern to the request, only keys with applicable key names will be returned. The placeholder character for patterns is ``*``.
- This request will return a list of the ``key`` values. The ``key`` return value can appear more than once.
.. code-block:: python
# Get keys from global data store starting with "some_global"
result = api.dataStoreGetKeys("some_global_*", globalData=True)
"""
# Required data
data = {
"game_id" : self.gameId
}
# Optional data
optionalData = {
"username" : self.username,
"user_token" : self.userToken,
"pattern" : pattern
}
# Process global data instead of user data
if globalData:
optionalData["username"] = None
optionalData["user_token"] = None
self._validateRequiredData(data)
data.update(self._getValidData(optionalData))
return self._submit(self.operations["data-store/get-keys"], data)
# Friends
def friends(self):
"""Returns the list of a user's friends."""
# Required data
data = {
"game_id" : self.gameId,
"username" : self.username,
"user_token" : self.userToken
}
self._validateRequiredData(data)
return self._submit(self.operations["friends"], data)
# Time
def time(self):
"""Returns the time of the Game Jolt server."""
# Required data
data = {
"game_id" : self.gameId
}
self._validateRequiredData(data)
return self._submit(self.operations["time"], data)
# Batch Calls
def batch(self, requests=[], parallel=None, breakOnError=None):
"""A batch request is a collection of sub-requests that enables developers to send multiple API calls with one HTTP request.
:param requests: An list of sub-request URLs. Each request will be executed and the responses of each one will be returned in the payload.
:type requests: list of str
:param parallel: By default, each sub-request is processed on the servers sequentially. If this is set to ``True``, then all sub-requests are processed at the same time, without waiting for the previous sub-request to finish before the next one is started.
:type parallel: bool
:param breakOnError: If this is set to ``True``, one sub-request failure will cause the entire batch to stop processing subsequent sub-requests and return a value of ``"false"`` for success.
:type breakOnError: bool
.. note::
- The maximum amount of sub requests in one batch request is 50.
- Dump format is not supported in batch calls.
- The ``parallel`` and ``breakOnError`` parameters cannot be used in the same request.
.. code-block:: python
# Disable request submitting to get URLs from methods
api.submitRequests = False
# Generate list of request URLs
requests = [
api.usersFetch(),
api.sessionsCheck(),
api.scoresTables(),
api.trophiesFetch(),
api.dataStoreGetKeys("*", globalData=True),
api.friends(),
api.time()
]
# Enable request submitting again
api.submitRequests = True
# Submit batch request and get all results
result = api.batch(requests=requests)
"""
if parallel is not None and breakOnError is not None:
raise GameJoltDataCollision(["parallel", "break_on_error"])
for i in range(len(requests)):
requests[i] = requests[i].replace(self.__API_URL, "")
requests[i] = requests[i].split("&signature=")[0]
requests[i] += "&signature=" + _md5((requests[i] + self.privateKey).encode()).hexdigest()
requests[i] = _quote(requests[i].replace(self.__API_URL, ""), safe="")
# Required data
data = {
"game_id" : self.gameId,
"requests" : requests if len(requests) > 0 else None
}
# Optional data
optionalData = {
"parallel" : self._processBoolean(parallel),
"break_on_error" : self._processBoolean(breakOnError)
}
self._validateRequiredData(data)
data.update(self._getValidData(optionalData))
return self._submit(self.operations["batch"], data)
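if __name__ == "__main__":
    # Minimal usage sketch; the game ID, private key, and credentials below are
    # placeholders, not real values.
    api = GameJoltAPI(
        gameId=12345,
        privateKey="your-private-key",
        username="player",
        userToken="token",
    )
    print(api.time())  # fetching server time needs no user credentials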
| 38.327402 | 498 | 0.572857 | 32,043 | 0.991736 | 0 | 0 | 0 | 0 | 0 | 0 | 18,870 | 0.58403 |
f9c3b4eb59658fb2124d809e4025b7e6912a6d8f | 3,243 | py | Python | poplar/util.py | mortonjt/poplar | 854d1ef819392f54536df386ef034091831802ed | ["BSD-3-Clause"] | null | null | null | poplar/util.py | mortonjt/poplar | 854d1ef819392f54536df386ef034091831802ed | ["BSD-3-Clause"] | null | null | null | poplar/util.py | mortonjt/poplar | 854d1ef819392f54536df386ef034091831802ed | ["BSD-3-Clause"] | null | null | null |
import os
import inspect
import torch
import numpy as np
import numbers
def get_data_path(fn, subfolder='data'):
"""Return path to filename ``fn`` in the data folder.
During testing it is often necessary to load data files. This
function returns the full path to files in the ``data`` subfolder
by default.
Parameters
----------
fn : str
File name.
subfolder : str, defaults to ``data``
Name of the subfolder that contains the data.
Returns
-------
str
Inferred absolute path to the test data for the module where
``get_data_path(fn)`` is called.
Notes
-----
The requested path may not point to an existing file, as its
existence is not checked.
This is the same method as borrowed from scikit-bio
"""
# getouterframes returns a list of tuples: the second tuple
# contains info about the caller, and the second element is its
# filename
callers_filename = inspect.getouterframes(inspect.currentframe())[1][1]
path = os.path.dirname(os.path.abspath(callers_filename))
data_path = os.path.join(path, subfolder, fn)
return data_path
def check_random_state(seed):
""" Turn seed into a np.random.RandomState instance.
Parameters
----------
seed : None | int | instance of RandomState
If seed is None, return the RandomState singleton used by np.random.
If seed is an int, return a new RandomState instance seeded with seed.
If seed is already a RandomState instance, return it.
Otherwise raise ValueError.
Note
----
This is from sklearn
"""
if seed is None or seed is np.random:
return np.random.mtrand._rand
if isinstance(seed, numbers.Integral):
return np.random.RandomState(seed)
if isinstance(seed, np.random.RandomState):
return seed
raise ValueError('%r cannot be used to seed a numpy.random.RandomState'
' instance' % seed)
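# Example: check_random_state(42) returns a RandomState seeded with 42, so
# repeated calls yield the same stream; check_random_state(None) returns
# numpy's shared global RandomState singleton.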
dictionary = {
"A": 1,
"B": 2,
"C": 3,
"D": 4,
"E": 5,
"F": 6,
"G": 7,
"H": 8,
"I": 9,
"J": 10,
"K": 11,
"L": 12,
"M": 13,
"N": 14,
"O": 15,
"P": 16,
"Q": 17,
"R": 18,
"S": 19,
"T": 20,
"U": 21,
"V": 22,
"W": 23,
"X": 24,
"Y": 25,
"Z": 26,
".": 27
}
def encode(x):
""" Convert string to tokens. """
tokens = list(map(lambda i: dictionary[i], list(x)))
tokens = torch.Tensor(tokens)
tokens = tokens.long()
return tokens
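# Worked example: encode("ACE") looks up A -> 1, C -> 3, E -> 5 in the mapping
# above and returns tensor([1, 3, 5]) with dtype torch.long.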
def tokenize(gene, pos, neg, model, device, pad=1024):
if len(gene) == len(pos) and len(gene) == len(neg):
# extract features, and take <CLS> token
g = list(map(lambda x: model.extract_features(encode(x))[:, 0, :], gene))
p = list(map(lambda x: model.extract_features(encode(x))[:, 0, :], pos))
n = list(map(lambda x: model.extract_features(encode(x))[:, 0, :], neg))
g_ = torch.cat(g, 0)
p_ = torch.cat(p, 0)
n_ = torch.cat(n, 0)
else:
g_ = model.extract_features(encode(gene))[:, 0, :]
p_ = model.extract_features(encode(pos))[:, 0, :]
n_ = model.extract_features(encode(neg))[:, 0, :]
return g_, p_, n_
| 26.153226 | 81 | 0.588344 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,483 | 0.457293 |
f9c72fdee75782efa656735bc0a7c52d729983de | 5,109 | py | Python | search-github-ip.py | brilliant116/learning-code | 5378b1f0c53b4ceac56942044bbb666278a138f2 | ["MIT"] | null | null | null | search-github-ip.py | brilliant116/learning-code | 5378b1f0c53b4ceac56942044bbb666278a138f2 | ["MIT"] | null | null | null | search-github-ip.py | brilliant116/learning-code | 5378b1f0c53b4ceac56942044bbb666278a138f2 | ["MIT"] | null | null | null |
import asyncio
import time
import socket
import argparse
import aiohttp
class MyConnector(aiohttp.TCPConnector):
def __init__(self, ip):
self.__ip = ip
super().__init__()
async def _resolve_host(
self, host: str, port: int,
traces: None = None,
):
return [{
'hostname': host, 'host': self.__ip, 'port': port,
'family': self._family, 'proto': 0, 'flags': 0,
}]
async def test_domain(domain, ip, proto):
if proto == 'http':
return await test_domain_http(domain, ip)
elif proto == 'ssh':
return await test_domain_ssh(domain, ip)
else:
raise ValueError('unknown proto', proto)
async def test_domain_ssh(domain, ip):
st = time.time()
r, _w = await asyncio.open_connection(ip, 22)
await r.read(1)
return time.time() - st
async def test_domain_http(domain, ip):
url = 'https://github.com/'
st = time.time()
async with aiohttp.ClientSession(
connector = MyConnector(ip),
timeout = aiohttp.ClientTimeout(total=10),
) as s:
r = await s.get(url)
_ = await r.text()
return time.time() - st
async def producer(q, proto):
items = await get_items(proto)
for item in items:
await q.put(item)
await q.put(None)
async def printer(q):
while True:
try:
item = await q.get()
except asyncio.CancelledError:
break
if isinstance(item[1], Exception):
(domain, ip, proto), e = item
print(f'{domain:21} {ip:15} {proto:4} {e!r}')
else:
(domain, ip, proto), t = item
print(f'{domain:21} {ip:15} {proto:4} {t:6.2f}')
async def fastest_finder(q):
fastest_ip, latency = None, 1000
while True:
try:
item = await q.get()
except asyncio.CancelledError:
return fastest_ip
if not isinstance(item[1], Exception):
(_, ip, _), t = item
if t < latency:
latency = t
fastest_ip = ip
async def worker(q, ret_q):
while True:
item = await q.get()
if item is None:
await q.put(None)
break
try:
t = await test_domain(*item)
except Exception as e:
await ret_q.put((item, e))
else:
await ret_q.put((item, t))
async def main(proto):
q = asyncio.Queue()
ret_q = asyncio.Queue()
futures = [worker(q, ret_q) for _ in range(40)]
producer_fu = asyncio.ensure_future(producer(q, proto))
printer_fu = asyncio.ensure_future(printer(ret_q))
await asyncio.wait(futures)
printer_fu.cancel()
await producer_fu
await printer_fu
async def update_hosts():
import os, sys, subprocess
if os.geteuid() != 0:
sys.exit('not root?')
q = asyncio.Queue()
ret_q = asyncio.Queue()
futures = [worker(q, ret_q) for _ in range(40)]
producer_fu = asyncio.ensure_future(
producer(q, ['http']))
finder_fu = asyncio.ensure_future(
fastest_finder(ret_q))
await asyncio.wait(futures)
finder_fu.cancel()
await producer_fu
ip = await finder_fu
if ip is not None:
cmd = ['sed', '-Ei', rf'/^[0-9.]+[[:space:]]+(gist\.)?github\.com\>/s/[^[:space:]]+/{ip}/', '/etc/hosts']
subprocess.check_call(cmd)
async def resolve(domain):
loop = asyncio.get_event_loop()
addrinfo = await loop.getaddrinfo(
domain, None,
family=socket.AF_INET,
proto=socket.IPPROTO_TCP,
)
ips = [x[-1][0] for x in addrinfo]
return domain, ips
async def get_items(proto):
items = [
('13.234.210.38', 'Bombay'),
('13.234.176.102', 'Bombay'),
('52.192.72.89', 'Tokyo'),
('13.114.40.48', 'Tokyo'),
('52.69.186.44', 'Tokyo'),
('15.164.81.167', 'Seoul'),
('52.78.231.108', 'Seoul'),
('13.229.188.59', 'Singapore'),
('13.250.177.223', 'Singapore'),
('52.74.223.119', 'Singapore'),
('192.30.255.112', 'Seattle'),
('192.30.255.113', 'Seattle'),
('140.82.112.3', 'Seattle'),
('140.82.112.4', 'Seattle'),
('192.30.253.112', 'Ashburn'),
('192.30.253.113', 'Ashburn'),
('140.82.113.3', 'Ashburn'),
('140.82.113.4', 'Ashburn'),
('140.82.114.3', 'Ashburn'),
('140.82.114.4', 'Ashburn'),
('140.82.118.3', 'Armsterdam'),
('140.82.118.4', 'Armsterdam'),
('140.82.121.3', 'Frankfurt'),
('140.82.121.4', 'Frankfurt'),
('13.237.44.5', 'Sydney'),
('52.64.108.95', 'Sydney'),
('13.236.229.21', 'Sydney'),
('18.231.5.6', 'Sao Paulo'),
('18.228.52.138', 'Sao Paulo'),
('18.228.67.229', 'Sao Paulo'),
]
return [(x[1], x[0], y) for x in items for y in proto]
if __name__ == '__main__':
import logging
logging.getLogger().addHandler(logging.NullHandler())
    parser = argparse.ArgumentParser(
        description='GitHub IP access speed test')
    parser.add_argument('proto', nargs='*',
                        default=['http', 'ssh'],
                        help='protocols to test')
    parser.add_argument('--hosts',
                        action='store_true',
                        help='update /etc/hosts')
args = parser.parse_args()
if args.hosts:
main_fu = update_hosts()
else:
main_fu = main(args.proto)
loop = asyncio.get_event_loop()
try:
loop.run_until_complete(main_fu)
except KeyboardInterrupt:
pass
| 24.921951 | 109 | 0.595811 | 330 | 0.06424 | 0 | 0 | 0 | 0 | 4,249 | 0.827136 | 1,113 | 0.216663 |
f9c7d6fee331123a55864020b776b1d34238b97d
| 467 |
py
|
Python
|
src/todo/522-LongestUncommonSubsequenceII.py
|
Jiezhi/myleetcode
|
b346e94c46da2a3033ebc8ff50e621aa179c4f62
|
[
"MIT"
] | 1 |
2022-03-03T15:11:48.000Z
|
2022-03-03T15:11:48.000Z
|
src/todo/522-LongestUncommonSubsequenceII.py
|
Jiezhi/myleetcode
|
b346e94c46da2a3033ebc8ff50e621aa179c4f62
|
[
"MIT"
] | null | null | null |
src/todo/522-LongestUncommonSubsequenceII.py
|
Jiezhi/myleetcode
|
b346e94c46da2a3033ebc8ff50e621aa179c4f62
|
[
"MIT"
] | 2 |
2022-01-20T22:49:58.000Z
|
2022-01-20T22:53:13.000Z
|
#!/usr/bin/env python
"""
CREATED AT: 2021/8/28
Des:
https://leetcode.com/problems/longest-uncommon-subsequence-ii/
GITHUB: https://github.com/Jiezhi/myleetcode
"""
from typing import List
class Solution:
    def findLUSlength(self, strs: List[str]) -> int:
        # A sketch filling in this TODO stub: the answer, if it exists, must be
        # one of the input strings, so test each string (longest first) and
        # return the first that is a subsequence of nothing but itself.
        def is_subsequence(a: str, b: str) -> bool:
            it = iter(b)
            return all(ch in it for ch in a)
        for s in sorted(strs, key=len, reverse=True):
            # s counts itself once; any second match means it is "common"
            if sum(is_subsequence(s, t) for t in strs) == 1:
                return len(s)
        return -1
def test():
assert Solution().findLUSlength(strs=["aba", "cdc", "eae"]) == 3
assert Solution().findLUSlength(strs=["aaa", "aaa", "aa"]) == -1
if __name__ == '__main__':
test()
| 18.68 | 68 | 0.648822 | 81 | 0.173448 | 0 | 0 | 0 | 0 | 0 | 0 | 204 | 0.436831 |
f9c8ce2fe1e056d6cf0dbb9895f3232eec31c3ce
| 23,732 |
py
|
Python
|
AdventOfCode/2020/day20.py
|
benhunter/coding-challenges
|
0cca059da7c8ae6cdc62dbeb3db8250ab42ac3b6
|
[
"MIT"
] | null | null | null |
AdventOfCode/2020/day20.py
|
benhunter/coding-challenges
|
0cca059da7c8ae6cdc62dbeb3db8250ab42ac3b6
|
[
"MIT"
] | null | null | null |
AdventOfCode/2020/day20.py
|
benhunter/coding-challenges
|
0cca059da7c8ae6cdc62dbeb3db8250ab42ac3b6
|
[
"MIT"
] | null | null | null |
# Advent of Code 2020 Day
# https://adventofcode.com/2020/
import cProfile
import itertools
import math
import numpy as np
from collections import namedtuple
from pprint import pformat, pprint
from typing import List, Optional
from numpy.typing import ArrayLike
USE_EXAMPLE1 = False # example input or full input
DEBUG = False # debug prints to console
PROFILE = False # profiling flag
Tile = namedtuple("Tile", "number, data")
Tile.__repr__ = ( # type: ignore
lambda self: f"Tile: {self.number}\n" + f"{pformat(self.data)}" # type: ignore
) # type: ignore
# class Tile(namedtuple("Tile", "number, data")):
# # Tile with a number ID and data fields.
# # Inheriting from namedtuple to override the repr dunder.
# # Equivalent code:
# # Tile = namedtuple("Tile", "number, data")
# # Tile.__repr__ = lambda self: f"Tile: {self.number}\n{pformat(self.data)}"
# def __new__(cls, number, data: ArrayLike):
# self = super(Tile, cls).__new__(cls, number, data)
# return self
# def __repr__(self) -> str:
# return f"Tile: {self.number}\n{pformat(self.data)}"
def test_Tile():
assert Tile("1", 0)
assert Tile("1", 0).number == "1"
assert Tile("1", 0).data == 0
def count_edge_matches(first_tile: Tile, second_tile: Tile):
assert type(first_tile) is Tile
assert type(second_tile) is Tile
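    # every physical edge match is seen twice here (once plain, once via the
    # flipped face set), hence the // 2 below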
first_tile_faces: List[np.ndarray] = list(generate_faces(first_tile))
second_tile_faces: List[np.ndarray] = list(generate_faces(second_tile))
matches: List[np.ndarray] = [
face_one
for face_one, face_two in itertools.product(first_tile_faces, second_tile_faces)
if np.array_equal(face_one, face_two)
]
return len(matches) // 2
def test_count_edge_match():
tile_one = Tile(0, np.array([[0, 1], [2, 3]], dtype=object))
tile_two = Tile(1, np.array([[0, 1], [4, 5]], dtype=object))
assert count_edge_matches(tile_one, tile_two) == 1
def generate_faces(tile: Tile):
# Generator for Tile faces
# use:
# for face in generate_faces(tile):
# print(face)
assert type(tile) is Tile
tile_faces = [
tile.data[0],
tile.data[-1],
tile.data[:, 0],
tile.data[:, -1],
]
nparray_tile_flipped = np.flip(tile.data) # flip on both axes
tile_faces += [
nparray_tile_flipped[0],
nparray_tile_flipped[-1],
nparray_tile_flipped[:, 0],
nparray_tile_flipped[:, -1],
]
yield from tile_faces
def is_face_matches_tile(face: np.ndarray, tile: Tile):
# determine whether face matches anywhere on tile,
# including after rotating and flipping tile
assert type(face) is np.ndarray
assert type(tile) is Tile
matches = [
face_one
for face_one, face_two in itertools.product([face], generate_faces(tile))
if np.array_equal(face_one, face_two)
]
return bool(len(matches))
def is_face_matches_face(face_one: np.ndarray, face_two: np.ndarray):
# Compare faces without flipping
assert type(face_one) is np.ndarray
assert type(face_two) is np.ndarray
result = np.array_equal(face_one, face_two)
return result
def count_all_edge_matches(tile: Tile, tiles: List[Tile]):
count = 0
for candidate_tile in tiles:
if tile.number == candidate_tile.number:
# don't check a tile against itself
continue
# count the matching edges
count += count_edge_matches(tile, candidate_tile)
if DEBUG and count > 0:
print(count)
return count
def find_corner_pieces(tiles: List[Tile]):
# count matching faces for every tile
# corner tiles have only 2 matching faces
# all other tiles have more
corner_pieces = []
for tile in tiles:
match_count = count_all_edge_matches(tile, tiles)
if match_count == 2:
corner_pieces.append(tile)
return corner_pieces
def next_match(known_tile: Tile, candidate_tiles: List[Tile]) -> Tile:
assert type(known_tile) is Tile
# from candidate_tiles, find a tile that has a matching edge with known_tile
return next(generate_next_match(known_tile, candidate_tiles))
def generate_next_match(known_tile: Tile, candidate_tiles: List[Tile]):
assert type(known_tile) is Tile
# from candidate_tiles, find a tile that has a matching edge with known_tile
for candidate_tile in candidate_tiles:
if count_edge_matches(known_tile, candidate_tile) > 0:
yield candidate_tile
raise RuntimeError("Did not find a next match.")
def product(values):
# see also: math.prod
ret = 1
ret = [ret := ret * v for v in values][-1]
return ret
def generate_nparray_orientation(npa: ArrayLike):
# generator to provide all orientations (rotations and flips) for 2-Dimensial np.array
# Usage:
# for orientation in generate_nparray_orientation(candidate_nparray):
# print(orientation)
# np array rotations
# https://numpy.org/doc/stable/reference/generated/numpy.rot90.html#numpy.rot90
# print(tiles[0].data)
# print(np.rot90(tiles[0].data)) # rotate counter clockwise
# print(np.rot90(tiles[0].data, axes=(1,0))) # rotate clockwise
# print(np.rot90(tiles[0].data, k=0)) # rotate counter clockwise 0 times
# note that rotations return views, not new arrays
# flip too, not just rotate
orientations = [
npa, # original
np.rot90(npa, k=1), # counter-clockwise once
np.rot90(npa, k=2), # counter-clockwise twice
np.rot90(npa, k=3), # counter-clockwise thrice
]
npa_flipped = np.flip(npa, axis=0) # flip on x axis
orientations += [
npa_flipped,
np.rot90(npa_flipped, k=1),
np.rot90(npa_flipped, k=2),
np.rot90(npa_flipped, k=3),
]
yield from orientations
def generate_tile_orientation(tile: Tile):
yield from generate_nparray_orientation(tile.data)
def is_tile_matches_neighbors(
y_index: int, x_index: int, solution: List[List[Optional[Tile]]]
):
"""Neighbors can be Tile or None"""
optional_tile: Optional[Tile] = solution[y_index][x_index]
if optional_tile is None:
return True
elif isinstance(optional_tile, Tile):
tile: Tile = optional_tile
else:
raise RuntimeError
assert isinstance(solution[y_index][x_index], Tile)
if DEBUG:
print(tile.data)
# Up
temp_tile: Optional[Tile]
if y_index > 0:
temp_tile = solution[y_index - 1][x_index]
if isinstance(temp_tile, Tile):
# if solution[y_index - 1][x_index]:
neighbor_up: Tile = temp_tile
neighbor_face_down: np.ndarray = neighbor_up.data[-1]
tile_face_up: np.ndarray = tile.data[0]
if not is_face_matches_face(tile_face_up, neighbor_face_down):
return False
# Down
if y_index < (len(solution) - 1):
temp_tile = solution[y_index + 1][x_index]
if isinstance(temp_tile, Tile):
neighbor_down: Tile = temp_tile
neighbor_face_up: np.ndarray = neighbor_down.data[0]
tile_face_down: np.ndarray = tile.data[-1]
if not is_face_matches_face(tile_face_down, neighbor_face_up):
return False
# Left
if x_index > 0:
temp_tile = solution[y_index][x_index - 1]
if isinstance(temp_tile, Tile):
neighbor_left: Tile = temp_tile
neighbor_face_right = neighbor_left.data[:, -1]
tile_face_left = tile.data[:, 0]
if not is_face_matches_face(tile_face_left, neighbor_face_right):
return False
# Right
if x_index < (len(solution[0]) - 1):
temp_tile = solution[y_index][x_index + 1]
if isinstance(temp_tile, Tile):
neighbor_right: Tile = temp_tile
neighbor_face_left = neighbor_right.data[:, 0]
tile_face_right = tile.data[:, -1]
if not is_face_matches_face(tile_face_right, neighbor_face_left):
return False
return True
def is_partial_solution_valid(solution: List[List[Optional[Tile]]]):
# Check a partial solution. None is allowed where a Tile has not been placed yet.
for y_index in range(len(solution)):
for x_index in range(len(solution[0])):
if solution[y_index][x_index] is None:
continue
if not is_tile_matches_neighbors(y_index, x_index, solution):
return False
return True
def repr_solution_tiles(solution: List[List[Tile]]) -> str:
s = ""
for y_index, solution_row in enumerate(solution):
for y_tile_index in range(len(solution[0][0].data)):
for x_index, tile in enumerate(solution_row):
if solution[y_index][x_index]:
s += "".join(solution[y_index][x_index].data[y_tile_index])
s += " "
else:
s += "-" * len(solution[0][0].data[0])
s += " "
s += "\n"
s += "\n"
return s
def list_str_solution(solution: List[List[Tile]]) -> List[str]:
lines = []
for y_index, solution_row in enumerate(solution):
for y_tile_index in range(1, len(solution[0][0].data) - 1):
line = ""
for x_index, tile in enumerate(solution_row):
if solution[y_index][x_index]:
line += "".join(solution[y_index][x_index].data[y_tile_index][1:-1])
else:
line += "-" * len(solution[0][0].data[0][1:-1])
lines.append(line)
return lines
def repr_solution(solution: List[List[Tile]]) -> str:
s = ""
for row in list_str_solution(solution):
s += row + "\n"
return s
def match_2d(pattern_2d: np.ndarray, string_2d: np.ndarray):
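    # naive 2-D scan: slide the pattern across every offset and require each
    # '#' cell of the pattern to line up with a '#' in the grid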
matches = []
for y_index in range(len(string_2d) - len(pattern_2d) + 1):
for x_index in range(len(string_2d[0]) - len(pattern_2d[0]) + 1):
next_candidate = False
candidate_str = string_2d[
y_index : y_index + len(pattern_2d),
x_index : x_index + len(pattern_2d[0]),
]
for y_candidate in range(len(pattern_2d)):
for x_candidate in range(len(pattern_2d[0])):
# only looking for "#" in pattern_2d
if pattern_2d[y_candidate][x_candidate] != "#":
continue
if (
pattern_2d[y_candidate][x_candidate]
!= candidate_str[y_candidate][x_candidate]
):
next_candidate = True
break
else:
continue
if next_candidate:
break
if not next_candidate:
matches.append((y_index, x_index))
return matches
def test_match_2d():
monster = [" # ", "# ## ## ###", " # # # # # # "]
monster_nparray = list_str_to_nparray(monster)
sea = ["# . # ", "# ## ## ###", " # # # # # # "]
sea_nparray = list_str_to_nparray(sea)
matches = match_2d(monster_nparray, monster_nparray)
assert matches == [(0, 0)]
matches = match_2d(monster_nparray, sea_nparray)
assert matches == [(0, 0)]
def list_str_to_nparray(list_str: List[str]) -> np.ndarray:
# seperate each character so the nparray can be rotated, flipped
return np.array([[c for c in s] for s in list_str])
def solve_part1(tiles: List[Tile]) -> int:
# find the corners by counting the matching edges of each tile.
# corners have only two matching edges
corners: List[Tile] = find_corner_pieces(tiles)
corner_ids = [corner.number for corner in corners]
return product(corner_ids)
def solve_part2(tiles: List[Tile]) -> int:
dimension = math.isqrt(len(tiles))
solution: List[List[Optional[Tile]]] = [
[None for _ in range(dimension)] for _ in range(dimension)
]
# solution: List[List[Tile]] = [
# [None for _ in range(dimension)] for _ in range(dimension)
# ]
# print(solution)
assert is_partial_solution_valid(solution)
# start the solution with one of the corners found previously
solution[0][0] = find_corner_pieces(tiles)[0] # can be flipped/rotated
# tiles will only hold tiles that are not in solution yet
tiles.remove(solution[0][0])
# print(solution)
assert is_partial_solution_valid(solution)
# place solution[0][1]
# find a matching tile
assert isinstance(solution[0][0], Tile)
candidate_tile = next_match(solution[0][0], tiles)
# print(f"candidate_tile: {candidate_tile}")
# orient the corner. Which face matches?
# Options
# 1. could make this a tuple that also carries the "index" for how to rotate
# 2. or carries the rotated tile with each face
# 3. or just send the rotations and check the desired face below
# tile_faces = [
# tile.data[0], # top
# tile.data[-1], # bottom
# tile.data[:, 0], # left
# tile.data[:, -1], # right
# ]
# tile_rotations = [
# tile.data,
# np.rot90(tile.data, k=1),
# np.rot90(tile.data, k=2),
# np.rot90(tile.data, k=3),
# ]
# for face in tile_faces:
# if is_edge_match(face, candidate_tile):
# print(f"Face {face} matched candidate {candidate_tile}")
# in tile_rotations we are looking for the right face to match
# for orientation in tile_rotations:
y_index: int = 0
x_index: int = 0
tile = solution[y_index][x_index]
assert isinstance(tile, Tile)
for orientation in generate_tile_orientation(tile):
if is_face_matches_tile(orientation[:, -1], candidate_tile):
solution[y_index][x_index] = Tile(tile.number, orientation)
# print("matched orientation")
break
assert is_partial_solution_valid(solution)
# orient the candidate match and place it
for orientation in generate_tile_orientation(candidate_tile):
# compare left face of solved tile to right face of candidate_tile in all possible orientations
tile = solution[y_index][x_index]
assert isinstance(tile, Tile)
if is_face_matches_face(tile.data[:, -1], orientation[:, 0]):
# print(f"Placing candidate tile {candidate_tile.number}")
solution[y_index][x_index + 1] = Tile(candidate_tile.number, orientation)
# remove the matching candidate from tiles
tiles.remove(candidate_tile)
break
assert is_partial_solution_valid(solution)
y_index = 1
x_index = 0
tile = solution[y_index - 1][x_index]
assert isinstance(tile, Tile)
candidate_tile = next_match(tile, tiles)
# does row 0 need to flip?
# does candidate match to top or bottom of solution[0][0]?
needs_flip: bool = False
# compare top face of solution[0][0] to candidate_tile
up_neighbor: Tile = solution[0][0]
if is_face_matches_tile(up_neighbor.data[0], candidate_tile):
needs_flip = True
    if needs_flip:
        # use fresh loop variables so x_index and tile, which the placement
        # code below still relies on, are not clobbered
        for flip_x, flip_tile in enumerate(solution[0]):
            if isinstance(flip_tile, Tile):
                flipped_data = np.flipud(flip_tile.data)  # flip up down
                solution[0][flip_x] = Tile(flip_tile.number, flipped_data)
        up_neighbor = solution[0][0]  # the tile above was just replaced by its flip
# orient candidate_tile to tile above
# for orientation in orientation_generator(candidate_tile):
# if is_face_matches_tile(orientation[0], solution[0][0]):
# print(orientation[0])
# if is_face_matches_face(orientation[0], solution[0][0].data[-1]):
# print(orientation[0])
for orientation in generate_tile_orientation(candidate_tile):
if is_face_matches_face(up_neighbor.data[-1], orientation[0]):
if DEBUG:
print(f"Placing candidate tile {candidate_tile.number}")
solution[y_index][x_index] = Tile(candidate_tile.number, orientation)
# remove candidate match from tiles
tiles.remove(candidate_tile)
break
assert is_partial_solution_valid(solution)
# after the first corner, and it's neighbors have been placed
# the solution cannot be flipped
# solve first row
y_index = 0
for x_index, tile in enumerate(solution[y_index]):
if tile:
continue
# print(f"{x_index} {tile}")
left_neighbor: Optional[Tile] = solution[y_index][x_index - 1]
assert isinstance(left_neighbor, Tile)
for candidate_tile in generate_next_match(left_neighbor, tiles):
# find the right orientation for candidate_tile to left_neighbor
for orientation in generate_tile_orientation(candidate_tile):
if is_face_matches_face(left_neighbor.data[:, -1], orientation[:, 0]):
# print(f"Placing candidate tile {candidate_tile.number}")
solution[y_index][x_index] = Tile(
candidate_tile.number, orientation
)
# remove candidate match from tiles
tiles.remove(candidate_tile)
break
if solution[y_index][x_index] is not None:
break
assert isinstance(solution[y_index][x_index], Tile)
assert is_partial_solution_valid(solution)
# print(f"Solution:\n{solution}")
# print(repr_solution(solution))
assert is_partial_solution_valid(solution)
# print()
# solve other rows. if the left neighbor is empty or we are on the left edge of solution,
# look up to place tile
for y_index, solution_row in enumerate(solution):
for x_index, tile in enumerate(solution[y_index]):
if tile:
continue
if x_index > 0:
# we are not on left edge of solution
assert isinstance(solution[y_index][x_index - 1], Tile)
left_neighbor = solution[y_index][x_index - 1]
assert isinstance(left_neighbor, Tile)
for candidate_tile in generate_next_match(left_neighbor, tiles):
# find the right orientation for candidate_tile to left_neighbor
# and to up_neighbor
for orientation in generate_tile_orientation(candidate_tile):
if is_face_matches_face(
left_neighbor.data[:, -1], orientation[:, 0]
):
# print(f"Placing candidate tile {candidate_tile.number}")
solution[y_index][x_index] = Tile(
candidate_tile.number, orientation
)
if not is_partial_solution_valid(solution):
# keep trying orientations
continue
# this is the right orientation with all neighbors
# remove candidate match from tiles
tiles.remove(candidate_tile)
break
if solution[y_index][x_index] is not None:
break
assert solution[y_index][x_index] is not None
assert is_partial_solution_valid(solution)
elif x_index == 0:
# on left edge of solution, look at up neighbor
temp_tile: Optional[Tile] = solution[y_index - 1][x_index]
assert isinstance(temp_tile, Tile)
up_neighbor = temp_tile
for candidate_tile in generate_next_match(up_neighbor, tiles):
for orientation in generate_tile_orientation(candidate_tile):
if is_face_matches_face(up_neighbor.data[-1], orientation[0]):
# print(f"Placing candidate tile {candidate_tile.number}")
solution[y_index][x_index] = Tile(
candidate_tile.number, orientation
)
if not is_partial_solution_valid(solution):
# keep trying orientations
continue
# remove candidate match from tiles
tiles.remove(candidate_tile)
break
if solution[y_index][x_index] is not None:
break
assert solution[y_index][x_index] is not None
assert is_partial_solution_valid(solution)
for row in solution:
for tile in row:
assert isinstance(tile, Tile)
solution_complete: List[List[Tile]] = solution.copy() # type: ignore # assert above verified correctness
if DEBUG:
print(repr_solution_tiles(solution_complete))
str_solution = repr_solution(solution_complete)
print(str_solution)
monster = [" # ", "# ## ## ###", " # # # # # # "]
nparray_monster = list_str_to_nparray(monster)
# need to rotate and flip str_solution to get matches
nparray_solution = list_str_to_nparray(list_str_solution(solution_complete))
if DEBUG:
print(nparray_solution)
# matches = match_2d(monster, list_str_solution(solution))
# print(matches)
for orientation in generate_nparray_orientation(nparray_solution):
matches = match_2d(nparray_monster, orientation)
if len(matches) > 0:
break
if DEBUG:
print(orientation)
print(matches)
# count "#" minus (count "#" in monster * len(matches))
pound_in_orientation = len(
[char for row in orientation for char in row if char == "#"]
)
pound_in_monster = len(
[char for row in nparray_monster for char in row if char == "#"]
)
part2 = pound_in_orientation - (len(matches) * pound_in_monster)
return part2
def load_tiles(filename: str) -> List[Tile]:
with open(filename) as f:
tiles_str: List[str] = f.read().split("\n\n")
tiles: List[Tile] = []
t_index: int
tile_str: str
for t_index, tile_str in enumerate(tiles_str):
tile_temp: List[str] = tile_str.split("\n")
number: int = int(tile_temp[0].split()[1][:-1])
data: np.ndarray = np.array([[char for char in row] for row in tile_temp[1:]])
tiles.append(Tile(number, data))
return tiles
def main():
if USE_EXAMPLE1:
filename = "./AdventOfCode/2020/day20-example1-input.txt"
else:
filename = "./AdventOfCode/2020/day20-input.txt"
tiles: List[Tile] = load_tiles(filename)
if DEBUG:
pprint(tiles)
print(f"Loaded {len(tiles)} tiles")
print(
f"Each tile is {len(tiles[0].data)} rows, {len(tiles[0].data[0])} columns"
)
# Part 1
part1 = solve_part1(tiles)
print(f"Part 1: {part1}") # 68781323018729
if USE_EXAMPLE1:
assert part1 == 20899048083289
else:
assert part1 == 68781323018729
if PROFILE:
with cProfile.Profile() as pr:
solve_part1(tiles)
pr.print_stats()
# Part 2
part2 = solve_part2(tiles.copy())
print(f"Part 2: {part2}")
if USE_EXAMPLE1:
assert part2 == 273
else:
assert part2 == 1629
if PROFILE:
with cProfile.Profile() as pr:
solve_part2(tiles.copy())
pr.print_stats()
if __name__ == "__main__":
main()
| 36.567026 | 110 | 0.605469 | 0 | 0 | 2,178 | 0.091775 | 0 | 0 | 0 | 0 | 6,026 | 0.253919 |
f9caa403c7cda77ed58ce080740499d1a738c3e3
| 1,870 |
py
|
Python
|
src/server_main_loop.py
|
the40san/unity_survival_shooter_multi_server_python
|
d20d9aa2204bca70d0787acbfe395277b776e92d
|
[
"MIT"
] | 3 |
2017-04-11T05:36:08.000Z
|
2021-03-16T16:22:07.000Z
|
src/server_main_loop.py
|
the40san/unity_survival_shooter_multi_server_python
|
d20d9aa2204bca70d0787acbfe395277b776e92d
|
[
"MIT"
] | null | null | null |
src/server_main_loop.py
|
the40san/unity_survival_shooter_multi_server_python
|
d20d9aa2204bca70d0787acbfe395277b776e92d
|
[
"MIT"
] | 1 |
2017-04-11T05:35:26.000Z
|
2017-04-11T05:35:26.000Z
|
import socket
import select
from server_info import ServerInfo
from client_handler.client_thread import ClientThread
from server_handler.server_thread import ServerThread
from server_handler.server_thread_proxy import ServerThreadProxy
from logger import Logger
class ServerMainLoop:
def __init__(self):
self.listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.read_fds = set([self.listener])
self.server_thread = ServerThread()
self.server_thread.start()
def exec(self):
try:
self.listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.listener.bind((ServerInfo.hostname, ServerInfo.port))
self.listener.listen(ServerInfo.backlog)
except BaseException as error:
Logger.log("Server setup error: " + error.strerror)
Logger.log("exiting...")
else:
while True:
self.main_loop()
finally:
self.shutdown()
def main_loop(self):
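        # select() blocks until the listener or one of the client sockets is readable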
read_ready, write_ready, err_ready = select.select(self.read_fds, [], [])
for sock in read_ready:
if sock is self.listener:
self.accept_new_client(sock)
else:
self.accept_new_message(sock)
def accept_new_client(self, sock):
conn, address = self.listener.accept()
self.read_fds.add(conn)
Logger.log("new client connected")
def accept_new_message(self, sock):
thread = ClientThread(sock, ServerThreadProxy(self.server_thread))
self.read_fds.remove(sock)
self.server_thread.add_client(thread)
thread.start()
def shutdown(self):
Logger.log("shutting down")
for sock in self.read_fds:
sock.close()
self.server_thread.shutdown()
self.server_thread.join()
| 30.16129 | 81 | 0.647059 | 1,606 | 0.858824 | 0 | 0 | 0 | 0 | 0 | 0 | 71 | 0.037968 |
f9caf1a992a5eacb6d048931ae39cf07dfd472c4
| 8,838 |
py
|
Python
|
Aula_01/Aula01_Resolucao_Exercicios_Lista_Estruturas_Condicionais.py
|
elcbasilio/letscode
|
ea2ed5ee80485d98fad2c77a7a50927a7d524793
|
[
"MIT"
] | null | null | null |
Aula_01/Aula01_Resolucao_Exercicios_Lista_Estruturas_Condicionais.py
|
elcbasilio/letscode
|
ea2ed5ee80485d98fad2c77a7a50927a7d524793
|
[
"MIT"
] | null | null | null |
Aula_01/Aula01_Resolucao_Exercicios_Lista_Estruturas_Condicionais.py
|
elcbasilio/letscode
|
ea2ed5ee80485d98fad2c77a7a50927a7d524793
|
[
"MIT"
] | null | null | null |
# 1. Ask for the user's age and print whether they are under or over 18;
idade = int (input ('Digite sua idade:'))
if idade < 18:
print ('Você tem menos de 18 anos')
else:
print ('Você tem 18 anos ou mais')
# 2. Ask for a number and show whether it is positive or negative;
numero = float (input ('Digite um número qualquer:'))
if numero < 0:
print ('Este número é Negativo')
else:
print ('Este número é Positivo')
# 3. Given a typed number, show whether it is Even or Odd
numero = int (input ('Digite um número inteiro:'))
if numero % 2 == 0:
print ('Este número é Par')
else:
print ('Este número é Ímpar')
# 4. Ask for two numbers and show the larger of them;
n1 = float (input ('Digite um número qualquer:'))
n2 = float (input ('Digite mais um número qualquer:'))
if n1 < n2:
print ('O número',n1,'é menor que o número',n2)
elif n1 > n2:
print ('O número',n1,'é maior que o número',n2)
else:
print ('Os números digitados são idênticos')
# 5. Write a program that checks the validity of this information:
# a. Age: between 0 and 150;
# b. Salary: greater than 0;
# c. Sex: M, F or Other;
# The program must print an error message for each invalid field.
idade = int (input ('Digite sua idade:'))
salario = float (input ('Digite seu salário R$:'))
sexo = input ('Digite seu Sexo (M / F / O):')
if idade < 0 or idade > 150:
print ('A idade informada é inválida, digite uma idade entre 0 e 150 anos')
if salario <= 0:
print ('O salário informado é inválido, digite um valor maior que 0')
if sexo != 'M' and sexo != 'F' and sexo != 'O' and sexo != 'm' and sexo != 'f' and sexo != 'o':
print ('O sexo informado é inválido, digite M, F ou O')
# 6. Write a program that asks for a student's grades on 3 tests and checks whether they passed:
# Note: the student passes the year if their average is greater than 6.
n1 = float (input ('Digite sua 1ª Nota:'))
n2 = float (input ('Digite sua 2ª Nota:'))
n3 = float (input ('Digite sua 3ª Nota:'))
nf = (n1+n2+n3)/3
if nf <= 6:
print ('Sua nota média foi',nf,'e você foi REPROVADO!')
else:
print ('Sua nota média foi',nf,'e você foi APROVADO!')
# 7. Write a program that shows a multiple-choice question with 5 options (letters a, b, c, d and e).
# Knowing the right answer, read the user's choice and report the letter the user
# marked and whether the answer is right or wrong.
print ('Escolha a alternativa correta')
print ('')
print ('Pergunta: Quem descobriu o Brasil?')
print ('')
print ('a) Vasco da Gama')
print ('b) Jair Bolsonaro')
print ('c) Silvio Santos')
print ('d) Pedro Álvares Cabral')
print ('e) Craque Neto 10')
print ('')
pergunta = (input ('Qual é a resposta correta: '))
if pergunta == 'd' or pergunta == 'D':
print ('Você selecionou a opção d) Pedro Álvares Cabral. A resposta está correta')
elif pergunta == 'a' or pergunta == 'A':
print ('Você selecionou a opção a) Vasco da Gama. A resposta está errada')
elif pergunta == 'b' or pergunta == 'B':
print ('Você selecionou a opção b) Jair Bolsonaro. A resposta está errada')
elif pergunta == 'c' or pergunta == 'C':
print ('Você selecionou a opção c) Silvio Santos. A resposta está errada')
elif pergunta == 'e' or pergunta == 'E':
print ('Você selecionou a opção e) Craque Neto 10. A resposta está errada')
else:
print ('Você selecionou uma opção inválida')
# 8. Let's write a program to figure out who committed a crime.
# To find out, the police gathered a suspect and ran a short questionnaire
# with 5 yes/no questions:
# a. Did you phone the victim?
# b. Were you at the crime scene?
# c. Do you live near the victim?
# d. Did you owe the victim money?
# e. Have you ever worked with the victim?
# Each "yes" answer gives the suspect one point; the police consider suspects
# with 5 points to be the murderer, 4 to 3 points accomplices, and 2 points
# merely suspects needing further investigation; below that they are released.
print ('Responda S para Sim e N para Não em cada uma das perguntas abaixo')
print ('')
p1 = input ('Telefonou para a vítima?')
p2 = input ('Esteve no local do crime?')
p3 = input ('Mora perto da vítima?')
p4 = input ('Devia para a vítima?')
p5 = input ('Já trabalhou com a vítima?')
if p1 == 's' or p1 == 'S':
p11 = 1
else:
p11 = 0
if p2 == 's' or p2 == 'S':
    p22 = 1
else:
    p22 = 0
if p3 == 's' or p3 == 'S':
    p33 = 1
else:
    p33 = 0
if p4 == 's' or p4 == 'S':
    p44 = 1
else:
    p44 = 0
if p5 == 's' or p5 == 'S':
    p55 = 1
else:
    p55 = 0
soma = p11+p22+p33+p44+p55
print ('')
if soma == 5:
print ('Você é o Assassino')
elif soma >= 3:
print ('Você é Cúmplice')
elif soma >= 1:
print ('Você é Suspeito')
else:
print ('Você está Liberado')
# 9. A product's price will rise according to table 1 below. Ask the user
# to type the product's current (old) price and
# print one of the messages from table 2, according to the adjusted price:
# Old price               Increase
# Up to R$50              5%
# R$50 to R$100           10%
# R$100 to R$150          13%
# Above R$150             15%
# New price               Message
# Up to R$80              Cheap
# R$80 to R$115           Reasonable
# R$115 to R$150          Normal
# R$150 to R$170          Expensive
# Above R$170             Very expensive
print ('Reajuste de Preços')
pa = float (input ('Digite o preço do produto que será reajustado: R$ '))
if pa <= 0:
print ('Digite um valor maior que ZERO')
pn=0
elif pa <= 50:
pn = pa * 1.05
elif pa <= 100:
pn = pa * 1.1
elif pa <= 150:
pn = pa * 1.13
else:
pn = pa * 1.15
if pn <= 0:
print ('')
elif pn < 80:
print ('O novo valor do produto é R$',pn,'- Barato')
elif pn < 115:
print ('O novo valor do produto é R$',pn,'- Razoável')
elif pn < 150:
print ('O novo valor do produto é R$',pn,'- Normal')
elif pn < 170:
print ('O novo valor do produto é R$',pn,'- Caro')
else:
print ('O novo valor do produto é R$',pn,'- Muito Caro')
# Challenge
# 1. Write a program that reads 3 numbers and reports the largest;
n1 = float (input ('Digite o 1º Número: '))
n2 = float (input ('Digite o 2º Número: '))
n3 = float (input ('Digite o 3º Número: '))
if n1 >= n2 and n1 >= n3:
print ('O maior número é',n1)
elif n2 >= n1 and n2 >= n3:
print ('O maior número é',n2)
elif n3 >= n1 and n3 >= n2:
print ('O maior número é',n3)
# 2. Now do the same with 4 numbers;
n1 = float (input ('Digite o 1º Número: '))
n2 = float (input ('Digite o 2º Número: '))
n3 = float (input ('Digite o 3º Número: '))
n4 = float (input ('Digite o 4º Número: '))
if n1 >= n2 and n1 >= n3 and n1 >= n4:
print ('O maior número é',n1)
elif n2 >= n1 and n2 >= n3 and n2 >= n4:
print ('O maior número é',n2)
elif n3 >= n1 and n3 >= n2 and n3 >= n4:
print ('O maior número é',n3)
elif n4 >= n1 and n4 >= n2 and n4 >= n3:
print ('O maior número é',n4)
'''
3. A hospital wants to diagnose flu or dengue from a symptom
questionnaire. Given the questions below, write a program
that performs this hospital's diagnosis:
a. Do you feel body aches?
b. Do you have a fever?
c. Do you have a cough?
d. Do you have nasal congestion?
e. Do you have spots on your body?
For the diagnosis it uses the following table:
A    B    C    D    E    Result
Yes  Yes  No   No   Yes  Dengue
Yes  Yes  Yes  Yes  No   Flu
No   Yes  Yes  Yes  No   Flu
Yes  No   Yes  Yes  No   Flu
Yes  No   No   No   No   No illness
No   No   No   No   No   No illness
'''
print ('Diagnóstico de gripe ou dengue')
print ('')
print ('Digite S para "Sim" ou N para "Não"')
print ('')
p1 = (input ('Sente dor no corpo? '))
p2 = (input ('Você tem febre? '))
p3 = (input ('Você tem tosse? '))
p4 = (input ('Está com congestão nasal? '))
p5 = (input ('Tem manchas pelo corpo? '))
if (p1.upper()!='S' and p1.upper()!='N') or (p2.upper()!='S' and p2.upper()!='N') or (p3.upper()!='S' and p3.upper()!='N') or (p4.upper()!='S' and p4.upper()!='N') or (p5.upper()!='S' and p5.upper()!='N'):
print ('Você digitou uma ou mais opções incorretas')
elif p1.upper()=='S' and p2.upper()=='S' and p3.upper()=='N' and p4.upper()=='N' and p5.upper()=='S':
print ('Diagnóstico - Dengue')
elif p1.upper()=='S' and p2.upper()=='S' and p3.upper()=='S' and p4.upper()=='S' and p5.upper()=='N':
print ('Diagnóstico - Gripe')
elif p1.upper()=='N' and p2.upper()=='S' and p3.upper()=='S' and p4.upper()=='S' and p5.upper()=='N':
print ('Diagnóstico - Gripe')
elif p1.upper()=='S' and p2.upper()=='N' and p3.upper()=='S' and p4.upper()=='S' and p5.upper()=='N':
print ('Diagnóstico - Gripe')
elif p1.upper()=='S' and p2.upper()=='N' and p3.upper()=='N' and p4.upper()=='N' and p5.upper()=='N':
print ('Diagnóstico - Sem Doenças')
elif p1.upper()=='N' and p2.upper()=='N' and p3.upper()=='N' and p4.upper()=='N' and p5.upper()=='N':
print ('Diagnóstico - Sem Doenças')
else:
print ('Diagnóstico - Diagnóstico não está especificado')
| 34.795276 | 209 | 0.642114 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5,626 | 0.62056 |
f9cd5d6aedbff1e1481a77881774667ef1fefe16
| 2,023 |
py
|
Python
|
ros_radar_mine/neuro_learning/controller/testing/load_and_test.py
|
tudelft/blimp_snn
|
23acbef8822337387aee196a3a10854e82bb4f80
|
[
"Apache-2.0"
] | 3 |
2021-11-08T20:20:21.000Z
|
2021-12-29T09:05:37.000Z
|
ros_radar_mine/neuro_learning/controller/testing/load_and_test.py
|
tudelft/blimp_snn
|
23acbef8822337387aee196a3a10854e82bb4f80
|
[
"Apache-2.0"
] | null | null | null |
ros_radar_mine/neuro_learning/controller/testing/load_and_test.py
|
tudelft/blimp_snn
|
23acbef8822337387aee196a3a10854e82bb4f80
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 2 20:41:01 2021
@author: marina
Interesting:
- T2000_NPOP100_NGEN500_NEU2-10-5-2-1_05-04-2021_19-08-51: Really smooth and contained between (-3,3)
- T2000_NPOP100_NGEN500_NEU2-10-5-2-1_05-04-2021_19-25-20
"""
# Set absolute package path
import sys, os
sys.path.append(os.path.abspath(".."))
import os
import extra.aux_funcs as af # :)
import numpy as np
from evol_funcs.evol_mut_eval import evaluate, evaluate_SNNyPID
#from evol_funcs.evol_funcs_ANN import evaluate, evaluate_ANNyPID
import pid.myPID as PID
# Some constants
custom_config = True
#config_file = "config_night/config1.yaml"
config_file = "config.yaml"
# T2500_NPOP50_NGEN200_24-03-2021_23-47-25/
# T1500_NPOP60_NGEN400_24-03-2021_22-21-03/
# T3000_NPOP50_NGEN200_24-03-2021_22-06-17/
# T1800_NPOP40_NGEN500_NEU10-5-1-1_17-06-2021_11-40-05
# "T1800_NPOP40_NGEN500_NEU1-3-2-1_17-06-2021_15-10-09"
net_name = "T1200_NPOP40_NGEN500_NEU10-5-5-1_26-05-2021_13-34-44"
gen = 30
hof = True
num = 0
# Load configuration "cf" dir
cf = af.set_config("../config/" + config_file)
# Load population (IND or HOF)
pop = af.readPopulation(cf, net_name, gen, hof)
#pop = af.readPopulationSD(cf, net_name, gen, hof)
network = pop[num]
# Load network configuration
if not custom_config:
cf = network.cf
# Activate plotting
cf["evol"]["plot"] = True
# Evaluate network + plot
'''
network.pid[0] = -1
network.pid[1] = 0
network.pid[2] = 0
'''
#network.pid = [2,0,0]
individual = [network]
mse = evaluate(individual, cf, h_refList = cf["evol"]["h_ref"], h_init = cf["evol"]["h_init"])
#pid = PID.PID(*cf["pid"]["PID"], cf["pid"]["dt"], cf["pid"]["simple"])
#inputList = np.linspace(*cf["pid"]["input_lim"], cf["pid"]["n_points"])
#inputList = np.random.uniform(low=cf["pid"]["input_lim"][0], high=cf["pid"]["input_lim"][1], size = (cf["pid"]["n_points"],))
#mse = evaluate_PID(individual, cf, pid=pid, inputList=inputList)
print("MSE = ", mse)
#torch.save(model.state_dict(), PATH)
| 26.973333 | 126 | 0.707365 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,449 | 0.716263 |
f9ce968f21d6e6f401b8601663fad589082a13f8
| 293 |
py
|
Python
|
HackerRank/Python/Sets/py-the-captains-room.py
|
neiesc/Problem-solving
|
d3bce7a3b9801e6049e2c135418b31cba47b0964
|
[
"MIT"
] | 1 |
2019-07-20T16:59:21.000Z
|
2019-07-20T16:59:21.000Z
|
HackerRank/Python/Sets/py-the-captains-room.py
|
neiesc/Problem-solving
|
d3bce7a3b9801e6049e2c135418b31cba47b0964
|
[
"MIT"
] | 5 |
2019-03-10T19:46:42.000Z
|
2020-04-24T22:42:30.000Z
|
HackerRank/Python/Sets/py-the-captains-room.py
|
neiesc/Problem-solving
|
d3bce7a3b9801e6049e2c135418b31cba47b0964
|
[
"MIT"
] | null | null | null |
#!/bin/python3
# The Captain's Room
# https://www.hackerrank.com/challenges/py-the-captains-room/problem
from collections import Counter
if __name__ == '__main__':
k = int(input())
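    # each room number appears K times except the captain's, which appears once;
    # most_common() sorts by descending count, so [:-2:-1] keeps just that entry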
room_captain = Counter(map(int, input().split())).most_common()[:-2:-1]
print(room_captain[0][0])
| 29.3 | 76 | 0.68942 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 112 | 0.382253 |
f9cf4cff9f1ee36f3f46ec1c7c81c7f6a57782b0
| 1,536 |
py
|
Python
|
transformer_2/data/processing/__init__.py
|
mingruimingrui/Transformer2
|
2b44289ee7c7312d699f2261c1e4ebccce0f21e2
|
[
"MIT"
] | null | null | null |
transformer_2/data/processing/__init__.py
|
mingruimingrui/Transformer2
|
2b44289ee7c7312d699f2261c1e4ebccce0f21e2
|
[
"MIT"
] | 1 |
2020-06-01T02:13:10.000Z
|
2020-06-01T02:13:10.000Z
|
transformer_2/data/processing/__init__.py
|
mingruimingrui/Transformer2
|
2b44289ee7c7312d699f2261c1e4ebccce0f21e2
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
from __future__ import unicode_literals
# Registry
from transformer_2.data.processing._registry import PROCESSOR_REGISTRY, \
BaseProcessor, register_processor, Compose, make_processor_from_list
# Generic processors
from transformer_2.data.processing.general import HtmlUnescape, HtmlEscape, \
Lowercase, Strip, WhitespaceNormalize, UnicodeNormalize
from transformer_2.data.processing.replace import ReplaceSubstrings, \
ReplaceTokens, WhitelistCharacters
# Specialized processors
from transformer_2.data.processing.onmt import OnmtTokenize, OnmtDetokenize
from transformer_2.data.processing.sentencepiece import SpmEncode, SpmDecode
from transformer_2.data.processing.sacremoses import SacremosesTokenize, \
SacremosesDetokenize
# Language specific processors
from transformer_2.data.processing.chinese import ToSimplifiedChinese, \
ToTraditionalChinese, Jieba
# Default processing steps
from transformer_2.data.processing._default import DEFAULT_PROCESSING_STEPS
__all__ = [
'PROCESSOR_REGISTRY', 'BaseProcessor', 'register_processor',
'Compose', 'make_processor_from_list',
'HtmlUnescape', 'HtmlEscape',
'Lowercase', 'Strip',
'WhitespaceNormalize', 'UnicodeNormalize',
'ReplaceSubstrings', 'ReplaceTokens', 'WhitelistCharacters',
'OnmtTokenize', 'OnmtDetokenize',
'SacremosesTokenize', 'SacremosesDetokenize',
'SpmEncode', 'SpmDecode',
'ToSimplifiedChinese', 'ToTraditionalChinese', 'Jieba',
'DEFAULT_PROCESSING_STEPS'
]
| 34.909091 | 77 | 0.802734 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 508 | 0.330729 |
f9d231776637662c74f12d56ca75ccd899e7d32a
| 7,223 |
py
|
Python
|
pydmc/pydmc.py
|
TeradataInteractiveAmericas/pydmc
|
817899311f7908436fba7e1fc553a019bafadcb6
|
[
"MIT"
] | 1 |
2017-02-16T13:55:32.000Z
|
2017-02-16T13:55:32.000Z
|
pydmc/pydmc.py
|
TeradataInteractiveAmericas/pydmc
|
817899311f7908436fba7e1fc553a019bafadcb6
|
[
"MIT"
] | null | null | null |
pydmc/pydmc.py
|
TeradataInteractiveAmericas/pydmc
|
817899311f7908436fba7e1fc553a019bafadcb6
|
[
"MIT"
] | null | null | null |
"""
PyDMC API Client
"""
import requests
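# Thin client for the DMC REST API: every call is a GET or POST against
# <host>/api/rest/v<N>/<domain>/<function> using HTTP basic auth.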
class DMC:
def __init__(self,user,pwrd,host,v=5,content='json',accept='json'):
self.user = user
self.pwrd = pwrd
self.host = "%s/api/rest/v%d" % (host,v)
self.content = content
self.accept = accept
self.headers = {
'content-type': 'application/%s' % content,
'accept': 'application/%s' % accept
}
def get(self,domain,function,query=None):
return requests.get(
self._build_url(domain,function),
auth=(self.user, self.pwrd),
headers=self.headers,
params=query
)
# if str(call.status_code)[0] == '2':
# return call
# else:
# raise HttpError(call.status_code,call.reason)
def post(self,domain,function,query=None,entity=None):
return requests.post(
self._build_url(domain,function),
auth=(self.user, self.pwrd),
headers=self.headers,
params=query,
data=entity
)
# if str(call.status_code)[0] == '2':
# return call
# else:
# raise HttpError(call.status_code,call.reason)
def _build_url(self,domain,function):
return '%s/%s/%s' % (self.host,domain,function)
def getAttributes(self):
return self.get('meta','getAttributeDefinitions')
def createAttributes(self,defs):
return self.post('meta','createAttributeDefinitions',None,defs)
def getUsedPersonalizations(self,messageId):
query = {"messageId": messageId}
call = self.get('message','getUsedPersonalizations', query)
try:
return call.json()
except:
print('Something has gone horribly wrong and you need to write an exception for it.')
print(call)
print(call.url)
print(call.status_code)
quit()
finally:
pass
def getUserByEmail(self,email):
query = {"email": email}
# user = self.get('user','getByEmail',query).json()
call = self.get('user','getByEmail',query)
user = call.json()
try:
return User(user['id'],user['email'],user['mobileNumber'],user['identifier'])
except:
# TODO: add exception objects
if user['errorCode'] == 'INVALID_PARAMETER':
print("'%s' is an invalid value for '%s'. Please try again.") % (
user['value'],user['parameterName']
)
else:
print('There was an error that you need an exception for.')
quit()
finally:
pass
def getUser(self,user_id):
query = {"userId": user_id}
# user = self.get('user','getByEmail',query).json()
call = self.get('user','get',query)
user = call.json()
try:
return User(user['id'],user['email'],user['mobileNumber'],user['identifier'])
except:
# TODO: add exception objects
if user['errorCode'] == 'INVALID_PARAMETER':
print("'%s' is an invalid value for '%s'. Please try again.") % (
user['value'],user['parameterName']
)
quit()
else:
print('There was an error that you need an exception for.')
quit()
finally:
pass
def updateProfileByEmail(self,email,attributes=None):
query = {"email": email}
if attributes:
entity = "[%s]" % ",".join([ "%s" % a.json() for a in attributes ])
else:
entity = "null"
return self.post('user','updateProfileByEmail',query,entity)
def updateProfile(self,userId,attributes=None):
query = {"userId": userId}
if attributes:
entity = "[%s]" % ",".join([ "%s" % a.json() for a in attributes ])
else:
entity = "null"
call = self.post('user','updateProfile',query,entity)
if call.status_code == 204:
return True
else:
for a in attributes:
print(a.json())
print(call.headers)
print(userId)
print(attributes)
print(call.url)
print('There was an error that you need an exception for.')
quit()
def getPreparedMessages(self,groupId):
query = {"groupId": groupId}
return self.get('group','getPreparedMessages', query)
def sendSingleMessage(self,recipientId,messageId,additionalContent='null'):
query = {"recipientId": recipientId,"messageId": messageId}
entity = additionalContent
return self.post('message','sendSingle',query,entity)
class Attribute:
def __init__(self,name,value):
from dateutil import parser
self.name = name
if "date" in self.name.lower():
d = parser.parse(value, fuzzy=True)
value = d.isoformat()
self.value = value
def json(self):
return '{"name":"%s","value":"%s"}' % (self.name,self.value)
def __unicode__(self):
return '%s: %s' % (self.name, self.value)
def __str__(self):
return '%s: %s' % (self.name, self.value)
class Attachment:
def __init__(self,name,contentType,content):
import base64
self.name = name
self.contentType = contentType
        with open(content,'rb') as f:
            # decode so json() interpolates a plain str rather than a bytes repr
            self.content = base64.b64encode(f.read()).decode('ascii')
def json(self):
return '{"name":"%s","contentType":"%s","content":"%s"}' % (self.name,self.contentType,self.content)
class MessageContent:
def __init__(self,parameters=None,attachments=None):
self.parameters = parameters
self.attachments = attachments
class User:
def __init__(self,id,email,mobileNumber,identifier):
self.id = id
self.email = email
self.mobileNumber = mobileNumber
self.identifier = identifier
def json(self):
return '{"name":"%s","value":"%s"}' % (self.name,self.value)
def __unicode__(self):
return 'DMC User: %d (%s)' % (self.id, self.email)
    def __str__(self):
        return self.__unicode__()
class DMCError(Exception):
def __init__(self,errorActor,errorCode,message):
self.errorActor = errorActor
self.errorCode = errorCode
self.message = message
def __unicode__(self):
return "[DMC Error] %s\n" % str(self.message)
    def __str__(self):
        return self.__unicode__()
class InvalidParameterError(DMCError):
    def __init__(self,errorActor,errorCode,message,parameterName,propertyName,value):
        # initialize the base fields as well; __unicode__ below reads them
        super().__init__(errorActor, errorCode, message)
        self.parameterName = parameterName
        self.propertyName = propertyName
        self.value = value
    def __unicode__(self):
        return "%s %s %s %s %s %s\n" % (self.errorCode,self.errorActor,self.message,self.parameterName,self.propertyName,self.value)
    def __str__(self):
        return self.__unicode__()
| 29.72428 | 131 | 0.5668 | 7,161 | 0.991416 | 0 | 0 | 0 | 0 | 0 | 0 | 1,589 | 0.219992 |
f9d3186f39a6e3865241f7bef73bfedbb15ea7d5
| 832 |
py
|
Python
|
COE/contents/building/__init__.py
|
Python-Project-Cheap-Empire/cheap-of-empire
|
44aaae29e4fadc9df46734f529031ce8c4bb3475
|
[
"MIT"
] | null | null | null |
COE/contents/building/__init__.py
|
Python-Project-Cheap-Empire/cheap-of-empire
|
44aaae29e4fadc9df46734f529031ce8c4bb3475
|
[
"MIT"
] | 2 |
2022-01-31T21:05:15.000Z
|
2022-01-31T21:08:11.000Z
|
COE/contents/building/__init__.py
|
Python-Project-Cheap-Empire/cheap-of-empire
|
44aaae29e4fadc9df46734f529031ce8c4bb3475
|
[
"MIT"
] | 1 |
2022-02-04T12:05:14.000Z
|
2022-02-04T12:05:14.000Z
|
from .archery_range import ArcheryRange
from .barrack import Barrack
from .building import Building
from .dock import Dock
from .farm import Farm
from .granary import Granary
from .market import Market
from .military_building import MilitaryBuilding
from .stable import Stable
from .storage_building import StorageBuilding
from .storage_pit import StoragePit
from .technology_building import TechnologyBuilding
from .town_center import TownCenter
from .house import House
from .watch_tower import WatchTower
from .small_wall import SmallWall
__all__ = [
"ArcheryRange",
"Barrack",
"Building",
"Dock",
"Farm",
"Granary",
"Market",
"MilitaryBuilding",
"Stable",
"StorageBuilding",
"StoragePit",
"TechnologyBuilding",
"TownCenter",
"House",
"WatchTower",
"SmallWall",
]
| 23.111111 | 51 | 0.740385 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 179 | 0.215144 |
f9d3e8e17cb4d7f9fe9db4b44a63b0c8a9f8f65f
| 323 |
py
|
Python
|
test/suite/E27.py
|
shardros/autopep8
|
2ab2ea74668b10f3910f3d5b9526494fa5671ca1
|
[
"MIT"
] | 3,459 |
2015-01-03T15:53:43.000Z
|
2022-03-31T16:33:01.000Z
|
test/suite/E27.py
|
hayata-yamamoto/autopep8
|
107e29dce22c7b367a36633a78735278e4ad4288
|
[
"MIT"
] | 435 |
2015-01-03T12:58:44.000Z
|
2022-03-29T12:37:13.000Z
|
test/suite/E27.py
|
hayata-yamamoto/autopep8
|
107e29dce22c7b367a36633a78735278e4ad4288
|
[
"MIT"
] | 279 |
2015-03-16T16:34:51.000Z
|
2022-03-26T23:58:48.000Z
|
#: Okay
True and False
#: E271
True and False
#: E272
True and False
#: E271
if 1:
#: E273
True and False
#: E273 E274
True and False
#: E271
a and b
#: E271
1 and b
#: E271
a and 2
#: E271 E272
1 and b
#: E271 E272
a and 2
#: E272
this and False
#: E273
a and b
#: E274
a and b
#: E273 E274
this and False
| 10.419355 | 15 | 0.613003 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 125 | 0.386997 |
f9d666c57cbfbdaa2610dd40857b2cf6d3dbfa06
| 8,973 |
py
|
Python
|
api/GIS/functionalFun/thread_GIS.py
|
AutoCoinDCF/NEW_API
|
f4abc48fff907a0785372b941afcd67e62eec825
|
[
"Apache-2.0"
] | null | null | null |
api/GIS/functionalFun/thread_GIS.py
|
AutoCoinDCF/NEW_API
|
f4abc48fff907a0785372b941afcd67e62eec825
|
[
"Apache-2.0"
] | null | null | null |
api/GIS/functionalFun/thread_GIS.py
|
AutoCoinDCF/NEW_API
|
f4abc48fff907a0785372b941afcd67e62eec825
|
[
"Apache-2.0"
] | null | null | null |
from threading import Thread,Lock
from api.GIS.config import GIS_mgdb_config
from api.GIS.database.mongoDB import MGO
import json
from api.GIS.GISStaticsFun import GisStaticsFun
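# Fans each query out across the configured data sources, one worker thread per
# source; partial results are merged into a shared LocationInfo dict keyed by coordinate.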
class TheadFun():
def __init__(self):
pass
# self.param = param
def queryQBByIds(self,ids):
DBConfig = []
for cf in GIS_mgdb_config.dataBaseConfig:
ctype = cf['type']
if ctype == 'event' or ctype == 'org':
DBConfig.append(cf)
LocationInfo = {}
features = []
event_ids = []
org_ids = []
for id in ids:
if len(id) < 20:
org_ids.append(id)
else:
event_ids.append(id)
threads = [Thread(target=TheadFun.queryDBById,args=(cf,ids,LocationInfo)) for cf in DBConfig]
# threads_org = [Thread(target=TheadFun.queryOrgById,args=(cf,org_ids,LocationInfo)) for cf in orgDBConfig]
for t in threads:
t.start()
for t in threads:
t.join()
features = TheadFun.getFeaturesByLocationInfo(LocationInfo)
return features
@staticmethod
def queryDBById(cf, ids, LocationInfo):
cType = cf['type']
if cType == "event":
TheadFun.queryEventById(cf, ids, LocationInfo)
else:
TheadFun.queryOrgAndTarById(cf, ids, LocationInfo)
@staticmethod
def queryOrgAndTarById(cf, ids, LocationInfo):
mg = MGO(host=cf['host'], port=cf['port'], user=cf['user'], pwd=cf['pwd'],dbname=cf['dbname'])
dbName = cf['dbname']
collName = cf['collName']
fieldConfig = GIS_mgdb_config.fieldConfig
fieldconfig = fieldConfig[dbName][collName]
locationListKey = fieldconfig["locationList"]
QBIdKey = fieldconfig["QBId"]
TypeKey = fieldconfig["type"]
locationNameKey = fieldconfig["locationName"]
findObj = {QBIdKey:{'$in':ids}}
rows = mg.find(collName,findObj)
for row in rows:
try:
EventId = str(row[QBIdKey])
localName = row[locationNameKey]
locationlist = row[locationListKey]
Type = row[TypeKey]
for index,locationItem in enumerate(locationlist):
geometry = locationItem
X = str(geometry['coordinates'][0])
Y = str(geometry['coordinates'][1])
ident = "event&" + X + Y
                    heatAttr = GisStaticsFun.getHeatAttr(row,showHeatAttr,EventAttrKey) ## get the heat-map attribute
                    Param = TheadFun.getParam(EventId,index,Type,heatAttr) ## build the Param dict
                    location = geometry
                    TheadFun.getEventLocationInfo(Param,ident,location,localName,LocationInfo) ## accumulate into LocationInfo
            except:
                print(str(row["_id"]) + " failed!")
@staticmethod
def queryEventById(cf, ids, LocationInfo):
mg = MGO(host=cf['host'], port=cf['port'], user=cf['user'], pwd=cf['pwd'],dbname=cf['dbname'])
dbName = cf['dbname']
collName = cf['collName']
fieldConfig = GIS_mgdb_config.fieldConfig
fieldconfig = fieldConfig[dbName][collName]
locationListKey = fieldconfig["locationList"]
QBIdKey = fieldconfig["QBId"]
SubtypeKey = fieldconfig["Subtype"]
EventAttrKey = fieldconfig["EventAttr"]
showHeatAttr = fieldconfig["showHeatAttr"]
findObj = {QBIdKey:{'$in':ids}}
rows = mg.find(collName,findObj)
for row in rows:
try:
EventId = str(row[QBIdKey])
localName = row[locationListKey][0]['name']
locationlist = row[locationListKey]
eventType = row[SubtypeKey]
for index,locationItem in enumerate(locationlist):
geometry = locationItem['geometry']
X = str(geometry['coordinates'][0])
Y = str(geometry['coordinates'][1])
ident = "event&" + X + Y
                    heatAttr = GisStaticsFun.getHeatAttr(row,showHeatAttr,EventAttrKey) ## get the heat-map attribute
                    Param = TheadFun.getParam(EventId,index,eventType,heatAttr) ## build the Param dict
                    location = geometry
                    TheadFun.getEventLocationInfo(Param,ident,location,localName,LocationInfo) ## accumulate into LocationInfo
            except:
                print(str(row["_id"]) + " failed!")
def exploreEvents(self,geometryStrArr):
        # dataBaseConfig is a list of per-source dicts (see queryQBByIds above)
        eventsDBConfig = [cf for cf in GIS_mgdb_config.dataBaseConfig if cf['type'] == 'event']
LocationInfo = {}
features = []
threads = [Thread(target=TheadFun.spatialQueryDB,args=(cf,geometryStrArr,LocationInfo)) for cf in eventsDBConfig]
for t in threads:
t.start()
for t in threads:
t.join()
features = TheadFun.getFeaturesByLocationInfo(LocationInfo)
return features
@staticmethod
def spatialQueryDB(cf,geometryStrArr,LocationInfo):
mg = MGO(host=cf['host'], port=cf['port'], user=cf['user'], pwd=cf['pwd'],dbname=cf['dbname'])
dbName = cf['dbname']
collName = cf['collName']
fieldConfig = GIS_mgdb_config.fieldConfig
fieldconfig = fieldConfig[dbName][collName]
locationListKey = fieldconfig["locationList"]
geometryKey = fieldconfig["geometry"]
QBIdKey = fieldconfig["QBId"]
SubtypeKey = fieldconfig["Subtype"]
EventAttrKey = fieldconfig["EventAttr"]
showHeatAttr = fieldconfig["showHeatAttr"]
findOrArr = []
for geometryStr in geometryStrArr:
geometryObj = json.loads(geometryStr)
findO_point = {locationListKey:{'$elemMatch':{geometryKey:{"$within":{"$geometry":geometryObj}}}}}
findOrArr.append(findO_point)
findObj = {'$or':findOrArr}
rows = mg.find(collName,findObj)
for row in rows:
try:
EventId = str(row[QBIdKey])
localName = row[locationListKey][0]['name']
locationlist = row[locationListKey]
eventType = row[SubtypeKey]
for index,locationItem in enumerate(locationlist):
geometry = locationItem['geometry']
isIntersect = True
                    if len(geometry['coordinates']) == 0 or geometry['coordinates'][0] == '' or geometry['coordinates'][1] == '': # skip entries with bad coordinates
continue
                    # drop locations in locationList that fall outside the supplied geometry
if len(locationlist) > 1:
isIntersect = False
for geometryStr in geometryStrArr:
geometryObj = json.loads(geometryStr)
isIntersect = GisStaticsFun.isIntersert(geometry,geometryObj)
if isIntersect:
break
                    if not isIntersect: # check whether this location falls inside the queried area
continue
X = str(geometry['coordinates'][0])
Y = str(geometry['coordinates'][1])
ident = "event&" + X + Y
heatAttr = GisStaticsFun.getHeatAttr(row,showHeatAttr,EventAttrKey)
Param = TheadFun.getParam(EventId,index,eventType,heatAttr)
location = geometry
TheadFun.getEventLocationInfo(Param,ident,location,localName,LocationInfo)
            except:
                print(str(row["_id"]) + " failed!")
@staticmethod
def getParam(EventId,index,eventType,heatAttr):
Param = {
"ParamId":EventId+"#"+str(index),
"QBId":EventId,
'QBType':eventType,
"heatAttr":heatAttr
}
return Param
@staticmethod
def getEventLocationInfo(Param,ident,location,localName,LocationInfo):
if(ident in LocationInfo):
EventArr = LocationInfo[ident]['Params']
EventArr.append(Param)
else:
LocationInfo[ident] = {
"Params":[Param],
"location":location,
"localName":localName
}
@staticmethod
def getFeaturesByLocationInfo(LocationInfo):
features = []
for k,v in LocationInfo.items():
location = v['location']
featureId = k
params = v['Params']
localname = v['localName']
feature = {
"type": "Feature",
"id": featureId,
"geometry": location,
"properties": {
'Params':params,
'locationName':localname,
'selectedNum':len(params)
}
}
features.append(feature)
return features
####======================================####
| 41.35023 | 140 | 0.5482 | 8,929 | 0.979809 | 0 | 0 | 7,451 | 0.817623 | 0 | 0 | 1,234 | 0.135411 |
f9d69be77fcbae4d67f52431660566569a03abe1
| 268 |
py
|
Python
|
src/cell.py
|
ViktorBusk/Machine-Learning-Number-Prediction
|
f9863ccdb623ed3bf83ca26faae310d56f087c4f
|
[
"MIT"
] | null | null | null |
src/cell.py
|
ViktorBusk/Machine-Learning-Number-Prediction
|
f9863ccdb623ed3bf83ca26faae310d56f087c4f
|
[
"MIT"
] | null | null | null |
src/cell.py
|
ViktorBusk/Machine-Learning-Number-Prediction
|
f9863ccdb623ed3bf83ca26faae310d56f087c4f
|
[
"MIT"
] | null | null | null |
import pygame
class Cell:
def __init__(self, pos, dimensions, color):
self.pos = pos
self.dimensions = dimensions
self.color = color
def draw(self, screen):
pygame.draw.rect(screen, self.color, ((self.pos), (self.dimensions)))
| 26.8 | 77 | 0.630597 | 253 | 0.94403 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
f9d780addff74609d2c11318421ccdacd8b15d8d
| 1,845 |
py
|
Python
|
Creating a variable at template using filter.py/filter.py
|
YooInKeun/Facebook-Page-Insights-Web-Crawler
|
dbe8477b1e0671aca137cd94eff090d691e99ee2
|
[
"MIT"
] | 1 |
2021-07-12T00:04:04.000Z
|
2021-07-12T00:04:04.000Z
|
Creating a variable at template using filter.py/filter.py
|
YooInKeun/Facebook-Page-Insights-Web-Crawler
|
dbe8477b1e0671aca137cd94eff090d691e99ee2
|
[
"MIT"
] | null | null | null |
Creating a variable at template using filter.py/filter.py
|
YooInKeun/Facebook-Page-Insights-Web-Crawler
|
dbe8477b1e0671aca137cd94eff090d691e99ee2
|
[
"MIT"
] | null | null | null |
from django import template
register = template.Library()
# creates a string-valued variable in the template
# (given its own class name: a more general SetVarNode is defined further down
# and would otherwise shadow this one at tag-compile time)
class SetStringVarNode(template.Node):
def __init__(self, new_val, var_name):
self.new_val = new_val
self.var_name = var_name
def render(self, context):
context[self.var_name] = self.new_val
return ''
import re
@register.tag
def setvar(parser,token):
# This version uses a regular expression to parse tag contents.
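    # usage: {% setvar "some value" as var_name %}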
try:
# Splitting by None == splitting by spaces.
tag_name, arg = token.contents.split(None, 1)
except ValueError:
raise template.TemplateSyntaxError("%r tag requires arguments" % token.contents.split()[0])
m = re.search(r'(.*?) as (\w+)', arg)
if not m:
raise template.TemplateSyntaxError("%r tag had invalid arguments" % tag_name)
new_val, var_name = m.groups()
if not (new_val[0] == new_val[-1] and new_val[0] in ('"', "'")):
raise template.TemplateSyntaxError("%r tag's argument should be in quotes" % tag_name)
return SetVarNode(new_val[1:-1], var_name)
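# Illustrative template usage: {% setvar "hello" as greeting %} then {{ greeting }}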
# Allows creating a variable of any type
# (a distinct class name keeps the string-only SetVarNode above usable)
class SetAnyVarNode(template.Node):
def __init__(self, var_name, var_value):
self.var_name = var_name
self.var_value = var_value
def render(self, context):
try:
value = template.Variable(self.var_value).resolve(context)
except template.VariableDoesNotExist:
value = ""
context[self.var_name] = value
return u""
@register.tag(name='set')
def set_var(parser, token):
"""
{% set some_var = '123' %}
"""
parts = token.split_contents()
if len(parts) < 4:
raise template.TemplateSyntaxError("'set' tag must be of the form: {% set <var_name> = <var_value> %}")
    return SetAnyVarNode(parts[1], parts[3])
| 29.758065 | 112 | 0.616802 | 643 | 0.341476 | 0 | 0 | 1,073 | 0.569835 | 0 | 0 | 420 | 0.223048 |
f9d7b799633ebd72932c23f0a6056d0edfbe48dc
| 2,194 |
py
|
Python
|
kblight.py
|
marklr/system76-kbdlight-cli
|
03ba5ce04c7abd7b125e6777aac7cf19c6651f51
|
[
"MIT"
] | null | null | null |
kblight.py
|
marklr/system76-kbdlight-cli
|
03ba5ce04c7abd7b125e6777aac7cf19c6651f51
|
[
"MIT"
] | null | null | null |
kblight.py
|
marklr/system76-kbdlight-cli
|
03ba5ce04c7abd7b125e6777aac7cf19c6651f51
|
[
"MIT"
] | null | null | null |
#!/bin/env python3
import sys
import plac
import webcolors
import os
_COLOR_FILTERS = {
'#': '',
'(': '',
')': '',
}
LIGHT_CONTROLS = {
'left': '/sys/class/leds/system76::kbd_backlight/color_left',
'center': '/sys/class/leds/system76::kbd_backlight/color_center',
'right': '/sys/class/leds/system76::kbd_backlight/color_right',
}
def check_light_controls():
for light, fpath in LIGHT_CONTROLS.items():
if not os.path.exists(fpath):
raise FileNotFoundError(f"Light path {fpath} for light {light} does not exist")
def parse_color(color):
c = str(color).lower().strip()
for k, v in _COLOR_FILTERS.items():
c = c.replace(k, v)
hex = None
try:
hc = '#' + c if not c.startswith('#') else c
rgb = webcolors.hex_to_rgb(hc)
if rgb:
return hc.upper()
except Exception:
# probably not a hex already
pass
try:
hex = webcolors.name_to_hex(c)
except Exception:
pass
if '%' in c:
try:
            hex = webcolors.rgb_percent_to_hex(tuple(x.strip() for x in c.split(',')))  # expects percent strings like '50%'
except Exception:
pass
else:
try:
            hex = webcolors.rgb_to_hex(tuple(int(x.strip()) for x in c.split(',')))
except Exception:
pass
return hex
def set_light(name, hex):
if name not in LIGHT_CONTROLS:
raise NotImplementedError(f"Cannot control light {name}")
print(f"[+] Setting light {name} to {hex}", file=sys.stdout)
with open(LIGHT_CONTROLS[name], 'w') as f:
f.write(hex.upper().replace('#', '') + "\n")
def main(color: ("color choice (rgb triplet/color name/hex)", 'positional'),
light: ("light to set (right/left/center/all)", 'option', 'l')='all'):
check_light_controls()
hex = parse_color(color)
if not hex:
print(f"[-] Unrecognized color {color}", file=sys.stderr)
return os.EX_NOINPUT
print(f"[*] Parsed color {color} to hex {hex}", file=sys.stdout)
for light_ in filter(lambda x: light == x or light == 'all', LIGHT_CONTROLS.keys()):
set_light(light_, hex)
if __name__ == '__main__':
plac.call(main)
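# Illustrative invocations (require the System76 sysfs paths above to exist):
#   python kblight.py red
#   python kblight.py "0,255,0" -l left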
| 26.119048 | 91 | 0.592981 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 585 | 0.266636 |
f9d8c56ae37748311a4ccb37002818c53ff1fd16
| 420 |
py
|
Python
|
fimath/constants.py
|
kalinkinisaac/modular
|
301d26ad222a5ef3278aaf251908e0a8537bb58f
|
[
"MIT"
] | null | null | null |
fimath/constants.py
|
kalinkinisaac/modular
|
301d26ad222a5ef3278aaf251908e0a8537bb58f
|
[
"MIT"
] | null | null | null |
fimath/constants.py
|
kalinkinisaac/modular
|
301d26ad222a5ef3278aaf251908e0a8537bb58f
|
[
"MIT"
] | null | null | null |
from .field import Field
from .re_field import ReField
from .matrix import Matrix
inf = Field(is_inf=True)
IDM = Matrix(1, 0, 0, 1)
G0 = Matrix(0, -1, 1, 0)
G1 = Matrix(0, 1, -1, 1)
G1_2 = G1 ** 2
G0_ = Matrix(0, 1, -1, 0)
G1_ = Matrix(1, 1, -1, 0)
G1_2_ = G1_ ** 2
G_ = Matrix(1, -1, 0, 1)
G__ = Matrix(1, 0, -1, 1)
ZERO = Field(0)
ONE = Field(1)
INF = inf
V0 = Field(1j)
V1 = Field(ReField(1/2), ReField(b=1/2))
| 16.8 | 40 | 0.592857 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
f9dab830b35494d6660350f19bc8089f5510126e
| 116 |
py
|
Python
|
test/__init__.py
|
elagheb/at_commands
|
51f1fa553b651b639aa3d1e1b3ac4ff07322f7a0
|
[
"BSD-2-Clause"
] | null | null | null |
test/__init__.py
|
elagheb/at_commands
|
51f1fa553b651b639aa3d1e1b3ac4ff07322f7a0
|
[
"BSD-2-Clause"
] | null | null | null |
test/__init__.py
|
elagheb/at_commands
|
51f1fa553b651b639aa3d1e1b3ac4ff07322f7a0
|
[
"BSD-2-Clause"
] | null | null | null |
import unittest
import unittest.mock as mock
from unittest.mock import patch
__all__ = ['unittest', 'mock', 'patch']
| 19.333333 | 37 | 0.767241 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 23 | 0.198276 |
f9ddbc50ba159c971816ef0bfcfea8bf2513c89c
| 5,972 |
py
|
Python
|
reporte.py
|
InoveProyectos/Buscador-Alquileres-Python
|
78379da88db7957cdc8c804e0491f2a796e2c706
|
[
"Unlicense"
] | 1 |
2020-05-13T04:29:44.000Z
|
2020-05-13T04:29:44.000Z
|
reporte.py
|
InoveProyectos/Buscador-Alquileres-Python
|
78379da88db7957cdc8c804e0491f2a796e2c706
|
[
"Unlicense"
] | null | null | null |
reporte.py
|
InoveProyectos/Buscador-Alquileres-Python
|
78379da88db7957cdc8c804e0491f2a796e2c706
|
[
"Unlicense"
] | 3 |
2020-05-13T18:25:34.000Z
|
2020-11-24T16:19:37.000Z
|
#!/usr/bin/env python
'''
Apartment rental search report
---------------------------
Author: Inove Coding School
Version: 1.0
Description:
This script builds reports from the collected apartment rental data
Report No.
0: Show all the room-count and m2 reports together
1: Number of rentals per room count
2: Price per room count
3: Number of rentals per m2
4: Price per m2
5: Compute and plot the predicted cost per m2
Installation requirements:
- Python 3.x
- Libraries (installation commands included)
pip install numpy
pip install pandas
pip install matplotlib
pip install seaborn
pip install sklearn
'''
__author__ = "Inove Coding School"
__email__ = "[email protected]"
__version__ = "1.0"
import sys
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.axes
import seaborn as sns
from sklearn import linear_model
def generar_reporte(reporte, silent_mode=False):
df = pd.read_csv("propiedades.csv")
    # Drop every row whose "m2" or "ambientes" field is empty
propiedades = df[df['m2'].notna()]
propiedades = propiedades[propiedades['ambientes'].notna()]
    # Keep only the rows whose price was reported in Argentine pesos (ARS)
propiedades = propiedades.loc[propiedades['moneda'] == 'ARS']
    # Alternatively, dollar prices could be converted to pesos, but those data points usually fall far from the mean
#dolar_pesos = 67
#propiedades['precio'] = propiedades.apply(lambda x: x['precio']*dolar_pesos if x['moneda'] == 'USD' else x['precio'], axis = 1)
    # Count how many rentals there are per room count
ambientes = propiedades.groupby('ambientes')['ambientes'].count()
ambientes_df = ambientes.reset_index(name='cantidad')
    # Compute the average price per room count
precio_por_ambiente = propiedades.groupby('ambientes')['precio'].mean()/1000
precio_por_ambiente_df = precio_por_ambiente.reset_index(name='precio')
fig = plt.figure(figsize=(16,9))
axs = np.empty(4, dtype=type(matplotlib.axes))
    # Pick the plot style according to the report to display
if reporte < 5:
sns.set_style("white")
else:
sns.set_style("whitegrid", {'grid.linestyle': '--'})
if reporte == 0:
        # When all plots are requested, create subplots to show them together
axs[0] = fig.add_subplot(221)
axs[1] = fig.add_subplot(222)
axs[2] = fig.add_subplot(223)
axs[3] = fig.add_subplot(224)
elif reporte <= 4:
        # Create only the plot that will be shown
axs[reporte-1] = fig.add_subplot(111)
elif reporte == 5:
ax1 = fig.add_subplot(111)
elif reporte == 6:
axs[0] = fig.add_subplot(121)
axs[1] = fig.add_subplot(122)
elif reporte == 7:
axs[2] = fig.add_subplot(121)
axs[3] = fig.add_subplot(122)
if reporte == 0 or reporte == 1 or reporte == 6:
# Graficar "Cantidad de alquileres por ambiente"
ax = sns.barplot(x=ambientes_df['ambientes'], y=ambientes_df['cantidad'], ax=axs[0])
ax.set_alpha(0.8)
ax.set_title("Cantidad de alquileres por ambiente", fontsize=15)
ax.set_ylabel("Cantidad", fontsize=12)
ax.set_xlabel("Ambientes", fontsize=12)
if reporte == 0 or reporte == 2 or reporte == 6:
# Graficar "Precio por ambiente"
ax = sns.barplot(x=precio_por_ambiente_df['ambientes'], y=precio_por_ambiente_df['precio'], palette="pastel", ax=axs[1])
ax.set_alpha(0.8)
ax.set_title("Precio por ambiente", fontsize=15)
ax.set_ylabel("Precio[miles de pesos]", fontsize=12)
if reporte == 0 or reporte == 3 or reporte == 7:
# Graficar "Cantidad de alquileres por m2"
ax = sns.distplot(propiedades['m2'], bins=40, kde=True, kde_kws={"color": "blue", "alpha":0.3, "linewidth": 1, "shade":True }, ax=axs[2])
ax.set_title("Cantidad de alquileres por m2", fontsize=15, y=0.7, x = 0.5)
ax.set_ylabel("Cantidad", fontsize=12)
ax.set_xlabel('m2')
if reporte == 0 or reporte == 4 or reporte == 7:
# Graficar "Precio por m2"
ax = sns.scatterplot(propiedades['m2'],propiedades['precio']/1000, color='blue', ax=axs[3])
ax.set_title("Precio por m2", fontsize=15, y=-0.01)
ax.set_ylabel("Precio[miles de pesos]", fontsize=12)
ax.set_xlabel('m2')
if reporte == 5:
        # Compute and plot the predicted cost per m2
regr = linear_model.LinearRegression()
x = np.asanyarray(propiedades[['ambientes','m2']])
y = np.asanyarray(propiedades['precio'])
regr.fit(x, y)
y_hat= regr.predict(propiedades[['ambientes','m2']])
# Graficar "Precio por m2"
sns.set_style("whitegrid", {'grid.linestyle': '--'})
ax = sns.scatterplot(propiedades['m2'],propiedades['precio']/1000, color='blue', ax=ax1)
ax = sns.lineplot(propiedades['m2'],y_hat/1000, color='red', ax=ax1)
ax.set_title("Precio por m2", fontsize=15)
ax.set_ylabel("Precio[miles de pesos]", fontsize=12)
ax.set_xlabel('m2')
plt.legend(('predicción', 'precio publicado '), prop={'size': 15})
if silent_mode == False:
plt.show(block=True)
return fig
if __name__ == '__main__':
try:
        reporte = int(sys.argv[1])  # Read the report number from the command line
except:
        reporte = 0  # If no report is specified, all of them are shown
    if reporte < 0 or reporte > 7:
        print("Error: The specified report must be between 0 and 7")
generar_reporte(reporte=reporte)
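# Illustrative invocation (assumes propiedades.csv sits next to the script):
#   python reporte.py 5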
| 39.289474 | 146 | 0.641996 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,585 | 0.432202 |
f9de9cbd50628088d488c6c5b4c6b194981f73ef
| 3,043 |
py
|
Python
|
imap.py
|
cynsky/voyage_data_emails
|
3cc3b1364248dd3fed56e4c9f4c3ad619c6650ae
|
[
"MIT"
] | 1 |
2020-09-22T16:24:31.000Z
|
2020-09-22T16:24:31.000Z
|
imap.py
|
cynsky/voyage_data_emails
|
3cc3b1364248dd3fed56e4c9f4c3ad619c6650ae
|
[
"MIT"
] | null | null | null |
imap.py
|
cynsky/voyage_data_emails
|
3cc3b1364248dd3fed56e4c9f4c3ad619c6650ae
|
[
"MIT"
] | null | null | null |
"""
Read voyage data emails.
"""
import email
from imaplib import IMAP4_SSL
import logging
OK = 'OK'
logger = logging.getLogger(__name__)
class EmailCheckError(Exception):
pass
class EmailServer:
def __init__(self, server, username, password):
self.password = password
self.server = server
self.username = username
def __enter__(self):
self.mail = IMAP4_SSL(self.server)
logger.debug('Attempting to login as "%s".', self.username)
self.mail.login(self.username, self.password)
logger.debug('Login as "%s" worked.', self.username)
return self
def __exit__(self, type, value, traceback):
#self.mail.close()
pass
def select_inbox(self):
"""
Access the inbox.
Raises
------
EmailCheckError
If the inbox cannot be accessed or the message count fails.
"""
logger.debug('Attempting to access the inbox.')
ok, mail_count_list = self.mail.select('INBOX')
if ok != OK:
raise EmailCheckError('Failed selecting the inbox.')
try:
mail_count = int(mail_count_list[0])
except ValueError as e:
raise EmailCheckError('Failed to get the message count.') from e
logger.info('Found %s items in the inbox.', mail_count)
def get_uid_list(self):
"""
Return the message UID list.
Each UID can be used to access the correct message, even if the mailbox
changes after this call.
Raises
------
EmailCheckError
If the UID list request fails or the list cannot be split into
values.
Returns
-------
list
List of UID integers.
"""
logger.debug('Attempting to get the message UID list.')
self.select_inbox()
#ok, data = mail.search(None, 'ALL')
ok, raw_uid_list = self.mail.uid('search', None, 'ALL')
if ok != OK:
raise EmailCheckError('Failed searching mail.')
try:
uid_list = raw_uid_list[0].split()
except ValueError as e:
raise EmailCheckError('Mail count conversion failed.') from e
return uid_list
def get_email_message(self, uid):
logger.debug('Get email message with UID %s.', uid)
ok, data = self.mail.uid('fetch', uid, '(RFC822)')
if ok != OK:
raise EmailCheckError('Failed fetching message.')
# data[0][0] == '1 (RFC822 {25644}'
# data[0][1] is a string containing the email headers and body.
logger.debug('Convert email from bytes.')
raw_email_bytes = data[0][1]
#raw_email_str = raw_email_bytes.decode('utf-8')
#return email.message_from_string(raw_email_str)
return email.message_from_bytes(raw_email_bytes)
def loop_email_messages(self):
"""
Generate email messages from the current mailbox.
Yields the message from `get_email_message()` for each UID.
        >>> for message in email_server.loop_email_messages():
        ...     print(message)
Raises
------
EmailCheckError
If the UID list request fails.
"""
try:
uid_list = self.get_uid_list()
except EmailCheckError as e:
logger.error(e.args[0])
raise e
logger.debug('Start looping UID list.')
for uid in uid_list:
yield self.get_email_message(uid)
logger.debug('Finished looping UID list.')
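# Illustrative usage (host and credentials are placeholders):
#   with EmailServer("imap.example.com", "user", "secret") as server:
#       for message in server.loop_email_messages():
#           print(message["Subject"])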
| 20.019737 | 73 | 0.692737 | 2,895 | 0.951364 | 565 | 0.185672 | 0 | 0 | 0 | 0 | 1,449 | 0.476175 |
f9de9f006fc9afa79a63265eef2873fd5e7b5f5b
| 1,991 |
py
|
Python
|
2021/python/day3.py
|
majormunky/advent_of_code
|
4cccd7f3879e28e465bbc39176659bdd52bd70d6
|
[
"MIT"
] | null | null | null |
2021/python/day3.py
|
majormunky/advent_of_code
|
4cccd7f3879e28e465bbc39176659bdd52bd70d6
|
[
"MIT"
] | null | null | null |
2021/python/day3.py
|
majormunky/advent_of_code
|
4cccd7f3879e28e465bbc39176659bdd52bd70d6
|
[
"MIT"
] | 1 |
2020-12-04T06:12:01.000Z
|
2020-12-04T06:12:01.000Z
|
from common import get_file_contents
def most_common(index, items, total):
if int(items[index]) >= total / 2:
return "1"
else:
return "0"
def least_common(index, items, total):
# if our number is bigger than half our total lines
# then we know that 1 is the more common value
# so we return 0
if int(items[index]) > total / 2:
return "0"
elif int(items[index]) == total / 2:
return "0"
else:
return "1"
def build_frequency(lines):
freq = [0 for i in range(len(lines[0]))]
for line in lines:
parts = list(line)
for index, item in enumerate(parts):
if item == "1":
freq[index] += 1
return freq
def p1():
lines = get_file_contents("data/day3_input.txt")
freq = build_frequency(lines)
gamma = [most_common(i, freq, len(lines)) for i in range(len(freq))]
epsilon = [least_common(i, freq, len(lines)) for i in range(len(freq))]
gamma = int("0b" + "".join(gamma), 2)
epsilon = int("0b" + "".join(epsilon), 2)
return gamma * epsilon
def digit_check(index, number, target_value):
return number[index] == target_value
def get_item(lines, func):
data = lines.copy()
freq = build_frequency(data)
items_to_remove = []
for index in range(len(data[0])):
# num is either the most or least common number
num = func(index, freq, len(data))
for line in data:
if not digit_check(index, line, num):
if line in data:
items_to_remove.append(line)
for remove_item in items_to_remove:
if remove_item in data:
data.remove(remove_item)
if len(data) == 1:
break
freq = build_frequency(data)
return data[0]
test_data = [
"00100",
"11110",
"10110",
"10111",
"10101",
"01111",
"00111",
"11100",
"10000",
"11001",
"00010",
"01010",
]
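# test_data mirrors the puzzle's published example (gamma=10110, epsilon=01001);
# a quick sanity check (illustrative, not part of the original script):
#   freq = build_frequency(test_data)
#   assert "".join(most_common(i, freq, len(test_data)) for i in range(5)) == "10110"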
def p2():
lines = get_file_contents("data/day3_input.txt")
oxygen = get_item(lines, most_common)
scrubber = get_item(lines, least_common)
return int("0b" + oxygen, 2) * int("0b" + scrubber, 2)
if __name__ == '__main__':
print("Part 1: ", p1())
print("Part 2: ", p2())
| 20.316327 | 72 | 0.658463 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 354 | 0.1778 |
f9e18afe3ddb7b565b697f03187cc311b80b604e
| 670 |
py
|
Python
|
dallinger/redis_utils.py
|
Dallinger/Dallinger
|
c3acf1375391ef8cb702641638bf5a5008aa9be3
|
[
"MIT"
] | 100 |
2016-09-07T03:55:36.000Z
|
2022-02-28T02:20:10.000Z
|
dallinger/redis_utils.py
|
Dallinger/Dallinger
|
c3acf1375391ef8cb702641638bf5a5008aa9be3
|
[
"MIT"
] | 3,457 |
2016-09-05T23:21:31.000Z
|
2022-03-31T19:11:31.000Z
|
dallinger/redis_utils.py
|
Dallinger/Dallinger
|
c3acf1375391ef8cb702641638bf5a5008aa9be3
|
[
"MIT"
] | 53 |
2016-10-03T07:24:34.000Z
|
2021-10-20T20:42:38.000Z
|
import os
import redis
from urllib.parse import urlparse
def connect_to_redis(url=None):
"""Return a connection to Redis.
If a URL is supplied, it will be used, otherwise an environment variable
is checked before falling back to a default.
Since we are generally running on Heroku, and configuring SSL certificates
is challenging, we disable cert requirements on secure connections.
"""
redis_url = url or os.getenv("REDIS_URL", "redis://localhost:6379")
connection_args = {"url": redis_url}
if urlparse(redis_url).scheme == "rediss":
connection_args["ssl_cert_reqs"] = None
return redis.from_url(**connection_args)
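# Illustrative usage (URL is a placeholder; assumes a reachable Redis server):
#   conn = connect_to_redis("rediss://:secret@redis.example.com:6380")
#   conn.ping()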
| 30.454545 | 78 | 0.723881 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 382 | 0.570149 |
f9e1ea0fa4cb837c6d62e27ef66230461e2beb01
| 139 |
py
|
Python
|
libkludge/generate/this_access.py
|
zhangxiao6776/kludge
|
17a561f4b15399edd8175c883f8410a1b23c4d90
|
[
"BSD-3-Clause"
] | null | null | null |
libkludge/generate/this_access.py
|
zhangxiao6776/kludge
|
17a561f4b15399edd8175c883f8410a1b23c4d90
|
[
"BSD-3-Clause"
] | null | null | null |
libkludge/generate/this_access.py
|
zhangxiao6776/kludge
|
17a561f4b15399edd8175c883f8410a1b23c4d90
|
[
"BSD-3-Clause"
] | 2 |
2017-12-01T20:44:14.000Z
|
2021-08-21T21:47:04.000Z
|
#
# Copyright (c) 2010-2016, Fabric Software Inc. All rights reserved.
#
class ThisAccess(object):
const = 0
mutable = 1
static = 2
| 15.444444 | 68 | 0.676259 | 64 | 0.460432 | 0 | 0 | 0 | 0 | 0 | 0 | 70 | 0.503597 |
f9e2a6cf566ceeaab2c7f9874c63accbd13dbe53
| 88 |
py
|
Python
|
project/server/main/__init__.py
|
ardikabs/dnsmanager
|
4d2f302ea9f54fd4d5416328dc46a1c47b573e5b
|
[
"MIT"
] | 1 |
2019-01-15T10:33:04.000Z
|
2019-01-15T10:33:04.000Z
|
project/server/main/__init__.py
|
ardikabs/dnsmanager
|
4d2f302ea9f54fd4d5416328dc46a1c47b573e5b
|
[
"MIT"
] | null | null | null |
project/server/main/__init__.py
|
ardikabs/dnsmanager
|
4d2f302ea9f54fd4d5416328dc46a1c47b573e5b
|
[
"MIT"
] | null | null | null |
from . import modules
def init_app(app, **kwargs):
modules.init_app(app, **kwargs)
| 17.6 | 35 | 0.693182 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
f9e3c2c814e617eb1250bbe52b5026a15b1d2778
| 366 |
py
|
Python
|
Python by Harish/Class 1/Loops.py
|
kai92a/Learning_Python
|
5195aeb950e21150838c44d7c6af87cd86d31301
|
[
"MIT"
] | null | null | null |
Python by Harish/Class 1/Loops.py
|
kai92a/Learning_Python
|
5195aeb950e21150838c44d7c6af87cd86d31301
|
[
"MIT"
] | null | null | null |
Python by Harish/Class 1/Loops.py
|
kai92a/Learning_Python
|
5195aeb950e21150838c44d7c6af87cd86d31301
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Thu Sep 2 03:02:57 2021
@author: sgaa_
"""
print ("for loop")
for i in range (1,20):
print (i)
print ("while loop")
i=1
while i<12:
print (i)
i+=1
if 1 in range(2,5):
print ("Yes 1 is in the range")
elif 3 in range (3,5):
print ("Yes 1 is in range 2")
else:
print ("1 is not in both the ranges")
| 14.64 | 41 | 0.565574 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 177 | 0.483607 |
f9e4232dbd5470195e751b3cfb7348b26305a4d1
| 12,684 |
py
|
Python
|
src/utils/datasets.py
|
gorjanradevski/siamese_multi_head_attention
|
fcbfe21f284bf98a1d0e725a9e6f2df19363b4a5
|
[
"MIT"
] | 2 |
2020-06-11T03:03:35.000Z
|
2022-01-08T07:15:46.000Z
|
src/utils/datasets.py
|
gorjanradevski/multimodal_representations_deep_learning
|
fcbfe21f284bf98a1d0e725a9e6f2df19363b4a5
|
[
"MIT"
] | null | null | null |
src/utils/datasets.py
|
gorjanradevski/multimodal_representations_deep_learning
|
fcbfe21f284bf98a1d0e725a9e6f2df19363b4a5
|
[
"MIT"
] | null | null | null |
import json
import re
import os
import logging
from abc import ABC
from typing import Dict, Any, List, Tuple
from utils.constants import pascal_train_size, pascal_val_size
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
def preprocess_caption(caption: str) -> str:
"""Basic method used around all classes
Performs pre-processing of the caption in the following way:
1. Converts the whole caption to lower case.
2. Removes all characters which are not letters.
Args:
caption: A list of words contained in the caption.
Returns:
"""
caption = caption.lower()
caption = re.sub("[^a-z' ]+", "", caption)
caption = re.sub("\s+", " ", caption).strip() # NOQA
caption = caption.strip()
return caption
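# e.g. preprocess_caption("A man, RIDING a horse!") returns "a man riding a horse"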
class BaseCocoDataset(ABC):
# Adapted for working with the Microsoft COCO dataset.
def __init__(self, images_path: str, json_path: str):
"""Creates a dataset object.
Args:
images_path: Path where the images are located.
json_path: Path to the json file where the mappings are indicated as well
as the captions.
"""
json_file = self.read_json(json_path)
self.id_to_filename = self.parse_image_paths(json_file, images_path)
self.id_to_captions = self.parse_captions(json_file)
logger.info("Object variables set...")
@staticmethod
def parse_image_paths(
json_file: Dict[str, Any], images_path: str
) -> Dict[int, str]:
"""Parses the images metadata from the json file.
Args:
json_file: A dict representing the loaded json file.
images_path: A path where the images are.
Returns:
A dict that contains the image id and the image filename.
"""
id_to_filename = {}
for image_data in json_file["images"]:
id_to_filename[image_data["id"]] = os.path.join(
images_path, image_data["file_name"]
)
return id_to_filename
@staticmethod
def parse_captions(json_file: Dict[str, Any]) -> Dict[int, List[str]]:
"""Parses the captions metadata from the json file.
Args:
json_file: A dict representing the loaded json file.
Returns:
A dict that contains the image id and a list with the image captions.
"""
id_to_captions: Dict[int, List[str]] = {}
for captions_data in json_file["annotations"]:
if captions_data["image_id"] not in id_to_captions.keys():
id_to_captions[captions_data["image_id"]] = []
id_to_captions[captions_data["image_id"]].append(
preprocess_caption(captions_data["caption"])
)
return id_to_captions
@staticmethod
def read_json(json_path: str) -> Dict[str, Any]:
"""Reads json file given a path.
Args:
json_path: Path where the json file is.
Returns:
A dictionary representing the json file.
"""
with open(json_path) as file:
json_file = json.load(file)
return json_file
@staticmethod
def get_data_wrapper(
id_to_filename: Dict[int, str], id_to_captions: Dict[int, List[str]]
) -> Tuple[List[str], List[str]]:
"""Returns the image paths and captions.
Because in the dataset there are 5 captions for each image, what the method does
is create:
- A list of image paths where each image path is repeated 5 times.
        - A list of caption strings whose length is equal to the number of image
        paths.
Args:
id_to_filename: Pair id to image filename dict.
id_to_captions: Pair id to captions dict.
Returns:
            The image paths and the captions.
"""
assert len(id_to_filename.keys()) == len(id_to_captions.keys())
image_paths = []
captions = []
for pair_id in id_to_filename.keys():
for i in range(5):
image_paths.append(id_to_filename[pair_id])
captions.append(id_to_captions[pair_id][i])
assert len(image_paths) == len(captions)
return image_paths, captions
def get_data(self):
image_paths, captions = self.get_data_wrapper(
self.id_to_filename, self.id_to_captions
)
return image_paths, captions
class TrainCocoDataset(BaseCocoDataset):
# Adapted for working with the Microsoft COCO dataset.
def __init__(self, images_path: str, json_path: str):
"""Creates a dataset object.
Args:
images_path: Path where the images are located.
json_path: Path to the json file where the mappings are indicated as well
as the captions.
"""
super().__init__(images_path, json_path)
logger.info("Class variables set...")
class ValCocoDataset(BaseCocoDataset):
# Adapted for working with the Microsoft COCO dataset.
def __init__(self, images_path: str, json_path: str, val_size: int = None):
"""Creates a dataset object.
Args:
images_path: Path where the images are located.
json_path: Path to the json file where the mappings are indicated as well
as the captions.
val_size: The size of the validation set.
"""
super().__init__(images_path, json_path)
self.val_size = val_size
class FlickrDataset:
# Adapted for working with the Flickr8k and Flickr30k dataset.
def __init__(self, images_path: str, texts_path: str):
self.img_path_caption = self.parse_captions_filenames(texts_path)
self.images_path = images_path
logger.info("Object variables set...")
@staticmethod
def parse_captions_filenames(texts_path: str) -> Dict[str, List[str]]:
"""Creates a dictionary that holds:
        Key: The image file name.
        Value: A list of the 5 pre-processed caption strings for that image.
Args:
texts_path: Path where the text doc with the descriptions is.
Returns:
A dictionary that represents what is explained above.
"""
img_path_caption: Dict[str, List[str]] = {}
with open(texts_path, "r") as file:
for line in file:
line_parts = line.split("\t")
image_tag = line_parts[0].partition("#")[0]
caption = line_parts[1]
if image_tag not in img_path_caption:
img_path_caption[image_tag] = []
img_path_caption[image_tag].append(preprocess_caption(caption))
return img_path_caption
@staticmethod
def get_data_wrapper(
imgs_file_path: str,
img_path_caption: Dict[str, List[str]],
images_dir_path: str,
):
"""Returns the image paths, the captions and the lengths of the captions.
Args:
imgs_file_path: A path to a file where all the images belonging to the
validation part of the dataset are listed.
img_path_caption: Image name to list of captions dict.
images_dir_path: A path where all the images are located.
Returns:
            Image paths and captions.
"""
image_paths = []
captions = []
with open(imgs_file_path, "r") as file:
for image_name in file:
# Remove the newline character at the end
image_name = image_name[:-1]
# If there is no specified codec in the name of the image append jpg
if not image_name.endswith(".jpg"):
image_name += ".jpg"
for i in range(5):
image_paths.append(os.path.join(images_dir_path, image_name))
captions.append(img_path_caption[image_name][i])
assert len(image_paths) == len(captions)
return image_paths, captions
def get_data(self, images_file_path: str):
image_paths, captions = self.get_data_wrapper(
images_file_path, self.img_path_caption, self.images_path
)
return image_paths, captions
class PascalSentencesDataset:
# Adapted for working with the Pascal sentences dataset.
def __init__(self, images_path, texts_path):
self.category_image_path_captions = self.parse_captions_filenames(
texts_path, images_path
)
@staticmethod
def parse_captions_filenames(
texts_path: str, images_path: str
) -> Dict[str, Dict[str, List[str]]]:
"""Creates a dictionary of dictionaries where:
1. The keys of the first dict are the different categories of data.
2. The keys of the second dict are the image paths for the corresponding
category.
        3. The values of the second dict are lists holding the 5 pre-processed
        captions for the corresponding image path.
Args:
texts_path: Path where the image captions are.
images_path: Path where the images are.
Returns:
A dictionary as explained above.
"""
category_image_path_captions: Dict[str, Dict[str, List[str]]] = dict(dict())
for category in os.listdir(texts_path):
file_path = os.path.join(texts_path, category)
if os.path.isdir(file_path):
if category not in category_image_path_captions:
category_image_path_captions[category] = {}
for txt_file in os.listdir(file_path):
if txt_file.endswith(".txt"):
image_path = os.path.join(
images_path, category, txt_file[:-3] + "jpg"
)
if image_path not in category_image_path_captions[category]:
category_image_path_captions[category][image_path] = []
txt_file_path = os.path.join(file_path, txt_file)
with open(txt_file_path, "r") as f:
for caption in f:
category_image_path_captions[category][
image_path
].append(preprocess_caption(caption))
return category_image_path_captions
@staticmethod
def get_data_wrapper(category_image_path_captions, data_type: str):
"""Returns the image paths, the captions and the captions lengths.
Args:
            category_image_path_captions: A really complex dict :( (category -> image path -> captions)
data_type: The type of the data that is returned (Train, val or test).
Returns:
            The image paths and the captions.
"""
image_paths = []
captions = []
train_size = pascal_train_size * 50
val_size = pascal_val_size * 50
for category in category_image_path_captions.keys():
for v, image_path in enumerate(
category_image_path_captions[category].keys()
):
for caption in category_image_path_captions[category][image_path]:
if data_type == "train":
if v < train_size:
image_paths.append(image_path)
captions.append(caption)
elif data_type == "val":
if train_size + val_size > v >= train_size:
image_paths.append(image_path)
captions.append(caption)
elif data_type == "test":
if v >= train_size + val_size:
image_paths.append(image_path)
captions.append(caption)
else:
raise ValueError("Wrong data type!")
return image_paths, captions
def get_train_data(self):
img_paths, cap = self.get_data_wrapper(
self.category_image_path_captions, "train"
)
return img_paths, cap
def get_val_data(self):
img_paths, cap = self.get_data_wrapper(self.category_image_path_captions, "val")
return img_paths, cap
def get_test_data(self):
img_paths, cap = self.get_data_wrapper(
self.category_image_path_captions, "test"
)
return img_paths, cap
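# Illustrative use (paths are placeholders):
#   dataset = PascalSentencesDataset("data/images", "data/sentences")
#   train_paths, train_captions = dataset.get_train_data()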
| 34.467391 | 88 | 0.601861 | 11,880 | 0.936613 | 0 | 0 | 8,705 | 0.686298 | 0 | 0 | 4,768 | 0.375907 |
dda43962342edb0739a7efdbd01b8d80c87c5e19
| 26,474 |
py
|
Python
|
libica/openapi/libgds/api/volumes_api.py
|
umccr-illumina/libica
|
916d27eea499f29bee590268b84208effb0cc576
|
[
"MIT"
] | null | null | null |
libica/openapi/libgds/api/volumes_api.py
|
umccr-illumina/libica
|
916d27eea499f29bee590268b84208effb0cc576
|
[
"MIT"
] | 4 |
2021-11-15T10:47:51.000Z
|
2022-02-22T04:43:20.000Z
|
libica/openapi/libgds/api/volumes_api.py
|
umccr-illumina/libica
|
916d27eea499f29bee590268b84208effb0cc576
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
Genomic Data Store Service
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v1
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from libica.openapi.libgds.api_client import ApiClient
from libica.openapi.libgds.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class VolumesApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_volume(self, body, **kwargs): # noqa: E501
"""Create a volume in GDS and receive temporary credentials for upload # noqa: E501
Create a volume in GDS to hold folders and files. Returns upload credentials to the root folder of the volume when the include=objectStoreAccess parameter is used. You must create a volume prior to uploading files or folders. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_volume(body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param CreateVolumeRequest body: (required)
:param str include: Optionally include additional fields in the response. Possible values: ObjectStoreAccess
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: CreateVolumeResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_volume_with_http_info(body, **kwargs) # noqa: E501
def create_volume_with_http_info(self, body, **kwargs): # noqa: E501
"""Create a volume in GDS and receive temporary credentials for upload # noqa: E501
Create a volume in GDS to hold folders and files. Returns upload credentials to the root folder of the volume when the include=objectStoreAccess parameter is used. You must create a volume prior to uploading files or folders. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_volume_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param CreateVolumeRequest body: (required)
:param str include: Optionally include additional fields in the response. Possible values: ObjectStoreAccess
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(CreateVolumeResponse, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'body',
'include'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method create_volume" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'body' is set
if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501
local_var_params['body'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `body` when calling `create_volume`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'include' in local_var_params and local_var_params['include'] is not None: # noqa: E501
query_params.append(('include', local_var_params['include'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v1/volumes', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CreateVolumeResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_volume(self, volume_id, **kwargs): # noqa: E501
"""Deletes a volume by Id # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_volume(volume_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str volume_id: Unique identifier for the Volume to be deleted. (required)
:param bool purge_object_store_data: Optional and for BYOB only. If true, the volume's data in object storage will be erased. This field is ignored for non-BYOB volumes where the object store data is always removed upon deleting the volume.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: VolumeResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_volume_with_http_info(volume_id, **kwargs) # noqa: E501
def delete_volume_with_http_info(self, volume_id, **kwargs): # noqa: E501
"""Deletes a volume by Id # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_volume_with_http_info(volume_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str volume_id: Unique identifier for the Volume to be deleted. (required)
:param bool purge_object_store_data: Optional and for BYOB only. If true, the volume's data in object storage will be erased. This field is ignored for non-BYOB volumes where the object store data is always removed upon deleting the volume.
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(VolumeResponse, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'volume_id',
'purge_object_store_data'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_volume" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'volume_id' is set
if self.api_client.client_side_validation and ('volume_id' not in local_var_params or # noqa: E501
local_var_params['volume_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `volume_id` when calling `delete_volume`") # noqa: E501
collection_formats = {}
path_params = {}
if 'volume_id' in local_var_params:
path_params['volumeId'] = local_var_params['volume_id'] # noqa: E501
query_params = []
if 'purge_object_store_data' in local_var_params and local_var_params['purge_object_store_data'] is not None: # noqa: E501
query_params.append(('purgeObjectStoreData', local_var_params['purge_object_store_data'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v1/volumes/{volumeId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VolumeResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_volume(self, volume_id, **kwargs): # noqa: E501
"""Get information for the specified volume ID or volume name # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_volume(volume_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str volume_id: Unique identifier for the volume to retrieve information for. (required)
:param str tenant_id: Optional parameter to see shared data in another tenant
:param str metadata_include: Optional parameter to specify comma separated patterns to include metadata by their field names.
:param str metadata_exclude: Optional parameter to specify comma separated patterns to exclude metadata by their field names.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: VolumeResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_volume_with_http_info(volume_id, **kwargs) # noqa: E501
def get_volume_with_http_info(self, volume_id, **kwargs): # noqa: E501
"""Get information for the specified volume ID or volume name # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_volume_with_http_info(volume_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str volume_id: Unique identifier for the volume to retrieve information for. (required)
:param str tenant_id: Optional parameter to see shared data in another tenant
:param str metadata_include: Optional parameter to specify comma separated patterns to include metadata by their field names.
:param str metadata_exclude: Optional parameter to specify comma separated patterns to exclude metadata by their field names.
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(VolumeResponse, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'volume_id',
'tenant_id',
'metadata_include',
'metadata_exclude'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_volume" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'volume_id' is set
if self.api_client.client_side_validation and ('volume_id' not in local_var_params or # noqa: E501
local_var_params['volume_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `volume_id` when calling `get_volume`") # noqa: E501
collection_formats = {}
path_params = {}
if 'volume_id' in local_var_params:
path_params['volumeId'] = local_var_params['volume_id'] # noqa: E501
query_params = []
if 'tenant_id' in local_var_params and local_var_params['tenant_id'] is not None: # noqa: E501
query_params.append(('tenantId', local_var_params['tenant_id'])) # noqa: E501
if 'metadata_include' in local_var_params and local_var_params['metadata_include'] is not None: # noqa: E501
query_params.append(('metadata.include', local_var_params['metadata_include'])) # noqa: E501
if 'metadata_exclude' in local_var_params and local_var_params['metadata_exclude'] is not None: # noqa: E501
query_params.append(('metadata.exclude', local_var_params['metadata_exclude'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v1/volumes/{volumeId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VolumeResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def list_volumes(self, **kwargs): # noqa: E501
"""Get a list of volumes # noqa: E501
Get a list of volumes accessible by the current JWT token’s tenant ID in GDS. The default sort returned is alphabetical, ascending. The default page size is 10 items. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_volumes(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int page_size: START_DESC END_DESC
:param str page_token: START_DESC END_DESC
:param str include: Optionally include additional fields in the response. Multiple fields can be included by comma-separation. Possible values: TotalItemCount, InheritedAcl
:param str tenant_id: Optional parameter to see shared data in another tenant
:param str volume_configuration_name: Unique name of the volume configuration
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: VolumeListResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.list_volumes_with_http_info(**kwargs) # noqa: E501
def list_volumes_with_http_info(self, **kwargs): # noqa: E501
"""Get a list of volumes # noqa: E501
Get a list of volumes accessible by the current JWT token’s tenant ID in GDS. The default sort returned is alphabetical, ascending. The default page size is 10 items. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_volumes_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int page_size: START_DESC END_DESC
:param str page_token: START_DESC END_DESC
:param str include: Optionally include additional fields in the response. Multiple fields can be included by comma-separation. Possible values: TotalItemCount, InheritedAcl
:param str tenant_id: Optional parameter to see shared data in another tenant
:param str volume_configuration_name: Unique name of the volume configuration
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(VolumeListResponse, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'page_size',
'page_token',
'include',
'tenant_id',
'volume_configuration_name'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method list_volumes" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
if self.api_client.client_side_validation and 'page_size' in local_var_params and local_var_params['page_size'] > 1000: # noqa: E501
raise ApiValueError("Invalid value for parameter `page_size` when calling `list_volumes`, must be a value less than or equal to `1000`") # noqa: E501
if self.api_client.client_side_validation and 'page_size' in local_var_params and local_var_params['page_size'] < 0: # noqa: E501
raise ApiValueError("Invalid value for parameter `page_size` when calling `list_volumes`, must be a value greater than or equal to `0`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'page_size' in local_var_params and local_var_params['page_size'] is not None: # noqa: E501
query_params.append(('pageSize', local_var_params['page_size'])) # noqa: E501
if 'page_token' in local_var_params and local_var_params['page_token'] is not None: # noqa: E501
query_params.append(('pageToken', local_var_params['page_token'])) # noqa: E501
if 'include' in local_var_params and local_var_params['include'] is not None: # noqa: E501
query_params.append(('include', local_var_params['include'])) # noqa: E501
if 'tenant_id' in local_var_params and local_var_params['tenant_id'] is not None: # noqa: E501
query_params.append(('tenantId', local_var_params['tenant_id'])) # noqa: E501
if 'volume_configuration_name' in local_var_params and local_var_params['volume_configuration_name'] is not None: # noqa: E501
query_params.append(('volumeConfigurationName', local_var_params['volume_configuration_name'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v1/volumes', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VolumeListResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
| 49.116883 | 261 | 0.619136 | 25,920 | 0.978926 | 0 | 0 | 0 | 0 | 0 | 0 | 16,933 | 0.639512 |
dda4affe6b2847c17389112e2763a725bc4f7b5b
| 5,545 |
py
|
Python
|
jaxtorch/image.py
|
GallagherCommaJack/jaxtorch
|
3bc6785d781f12fabf3a436d9cfc0b839ebf5aec
|
[
"MIT"
] | null | null | null |
jaxtorch/image.py
|
GallagherCommaJack/jaxtorch
|
3bc6785d781f12fabf3a436d9cfc0b839ebf5aec
|
[
"MIT"
] | null | null | null |
jaxtorch/image.py
|
GallagherCommaJack/jaxtorch
|
3bc6785d781f12fabf3a436d9cfc0b839ebf5aec
|
[
"MIT"
] | null | null | null |
import math
from typing import Tuple
import jax
import jax.numpy as jnp
import numpy as np
from einops import repeat
def factor_int(n: int) -> Tuple[int, int]:
f1 = int(math.ceil(math.sqrt(n)))
while n % f1:
f1 -= 1
f2 = n // f1
return min(f1, f2), max(f1, f2)
def compute_channel_change_mat(c_in: int, c_out: int) -> np.ndarray:
assert max(c_in, c_out) % min(c_in, c_out) == 0
io_ratio = max(c_in, c_out) // min(c_in, c_out)
base = np.eye(min(c_in, c_out))
if c_in < c_out:
return repeat(base, "d1 d2 -> (d1 r) d2", r=io_ratio)
elif c_out < c_in:
# decreasing channel count, average nearby channels
return repeat(base, "d1 d2 -> d1 (d2 r)", r=io_ratio) / io_ratio
else:
return base
upsample_arrays = dict(
lanczos3=np.array(
[
0.0073782638646662235,
0.030112292617559433,
-0.06799723953008652,
-0.13327467441558838,
0.2710106074810028,
0.8927707076072693,
0.8927707672119141,
0.2710106074810028,
-0.13327467441558838,
-0.06799724698066711,
0.03011229634284973,
0.007378263399004936,
],
),
cubic=np.array(
[
-0.0234375,
-0.0703125,
0.2265625,
0.8671875,
0.8671875,
0.2265625,
-0.0703125,
-0.0234375,
],
),
linear=np.array([0.25, 0.75, 0.75, 0.25]),
)
downsample_arrays = dict(
lanczos3=np.array(
[
0.003689131001010537,
0.015056144446134567,
-0.03399861603975296,
-0.066637322306633,
0.13550527393817902,
0.44638532400131226,
0.44638532400131226,
0.13550527393817902,
-0.066637322306633,
-0.03399861603975296,
0.015056144446134567,
0.003689131001010537,
]
),
cubic=np.array(
[
-0.01171875,
-0.03515625,
0.11328125,
0.43359375,
0.43359375,
0.11328125,
-0.03515625,
-0.01171875,
]
),
linear=np.array([0.125, 0.375, 0.375, 0.125]),
)
def upsample_kernel(
c_in: int,
c_out: int,
method: str = "linear",
) -> np.ndarray:
cmat = compute_channel_change_mat(c_in, c_out)
kernel = upsample_arrays[method]
weight = np.einsum("oi,h,w->oihw", cmat, kernel, kernel)
return weight
def downsample_kernel(
c_in: int,
c_out: int,
method="linear",
) -> np.ndarray:
cmat = compute_channel_change_mat(c_in, c_out)
kernel = downsample_arrays[method]
weight = np.einsum("oi,h,w->oihw", cmat, kernel, kernel)
return weight
def upsample2x_base(
img: jnp.ndarray,
kern: jnp.ndarray,
format: str = "NCHW",
    norm: bool = True,
):
ksize = kern.shape[-1]
kern = jax.lax.convert_element_type(kern, img.dtype)
out = jax.lax.conv_general_dilated(
img,
kern,
window_strides=[1, 1],
padding=[(ksize // 2, ksize // 2), (ksize // 2, ksize // 2)],
lhs_dilation=[2, 2],
rhs_dilation=None,
dimension_numbers=(format, "OIHW", format),
)
if norm:
# normalization for parts that touch the zero-padding
norm = jax.lax.conv_general_dilated(
jnp.ones([1, *img.shape[-3:]], dtype=img.dtype),
kern,
window_strides=[1, 1],
padding=[(ksize // 2, ksize // 2), (ksize // 2, ksize // 2)],
lhs_dilation=[2, 2],
rhs_dilation=None,
dimension_numbers=(format, "OIHW", format),
)
out = out / norm
return out
def downsample2x_base(
x: jnp.ndarray,
kern: jnp.ndarray,
format: str = "NCHW",
    norm: bool = True,
):
ksize = kern.shape[-1]
kern = jax.lax.convert_element_type(kern, x.dtype)
out = jax.lax.conv_general_dilated(
x,
kern,
window_strides=[2, 2],
padding=[(ksize // 2 - 1, ksize // 2 - 1), (ksize // 2 - 1, ksize // 2 - 1)],
lhs_dilation=[1, 1],
rhs_dilation=None,
dimension_numbers=(format, "OIHW", format),
)
if norm:
# normalization for parts that touch the zero-padding
norm = jax.lax.conv_general_dilated(
jnp.ones([1, *x.shape[-3:]], dtype=x.dtype),
kern,
window_strides=[2, 2],
padding=[
(ksize // 2 - 1, ksize // 2 - 1),
(ksize // 2 - 1, ksize // 2 - 1),
],
lhs_dilation=[1, 1],
rhs_dilation=None,
dimension_numbers=(format, "OIHW", format),
)
out = out / norm
return out
def upsample2x(
img: jnp.ndarray,
c_out: int = None,
method: str = "linear",
format: str = "NCHW",
) -> jnp.ndarray:
c_in = img.shape[-3]
if c_out is None:
c_out = c_in
kern = upsample_kernel(c_in, c_out, method=method)
kern = jnp.array(kern, dtype=img.dtype)
return upsample2x_base(img, kern, format)
def downsample2x(
img: jnp.ndarray,
c_out: int = None,
method: str = "linear",
format: str = "NCHW",
) -> jnp.ndarray:
c_in = img.shape[-3]
if c_out is None:
c_out = c_in
kern = downsample_kernel(c_in, c_out, method=method)
kern = jax.lax.convert_element_type(kern, img.dtype)
return downsample2x_base(img, kern, format)
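# Illustrative round-trip on a dummy NCHW batch:
#   x = jnp.zeros((1, 3, 32, 32))
#   up = upsample2x(x, method="lanczos3")   # -> shape (1, 3, 64, 64)
#   dn = downsample2x(up)                   # -> shape (1, 3, 32, 32)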
| 25.671296 | 85 | 0.544274 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 305 | 0.055005 |
dda604bdbe931306a411dfabae424401c18dc54e
| 1,210 |
py
|
Python
|
1-image2ascii/image2ascii.py
|
dourgey/Python_Exercise
|
f41d69033b76d2fea3671f751e936cb804742b57
|
[
"MIT"
] | null | null | null |
1-image2ascii/image2ascii.py
|
dourgey/Python_Exercise
|
f41d69033b76d2fea3671f751e936cb804742b57
|
[
"MIT"
] | null | null | null |
1-image2ascii/image2ascii.py
|
dourgey/Python_Exercise
|
f41d69033b76d2fea3671f751e936cb804742b57
|
[
"MIT"
] | null | null | null |
# Author: @dourgey
# Create Time: 2019/12/27: 18:06
# Key topics covered:
#   using argparse
#   checking whether a file path exists
#   reading and processing an image with PILLOW
#   writing to a file
import argparse
import os
import sys
from PIL import Image
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--image", help="主人要转换的图片路径喵,默认在当前路径下读取喵~")
parser.add_argument("-f", "--file", help="主人要保存的字符画文件路径喵,默认保存在当前路径下喵~")
args = parser.parse_args()
if not os.path.exists(args.image): # 如果图片路径不存在
print("图片路径不存在呢,粗心的主人请再次检查喵~")
sys.exit(0)
img_path = args.image
im = Image.open(img_path)
im = im.convert("RGB")  # normalize mode so getpixel always yields an (r, g, b) triple
width, height = im.size
t_height = int(height / width * 100 / 2.5)
im = im.resize((100, t_height), Image.ANTIALIAS)
def get_char(r, g, b, alpha=256):
    ascii_char = "$@B%8&WM#*oahkbdpqwmZO0QLCJUYXzcvunxrjft/\|()1{}[]?-_+~<>i!lI;:,\"^`'. "  # character ramp, dark to light
    if alpha == 0:
        return " "
    gray = (r * 38 + g * 75 + b * 15) >> 7  # RGB to grayscale, see https://www.cnblogs.com/carekee/articles/3629964.html
    # Scale gray (0-255) onto the ramp monotonically; the original `gray % 70`
    # wrapped bright pixels back onto dense glyphs.
    return ascii_char[gray * len(ascii_char) // 256]
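# Worked examples of the mapping above:
#   get_char(0, 0, 0)       -> '$' (black maps to the densest glyph)
#   get_char(255, 255, 255) -> ' ' (white maps to blank)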
f = open(args.file, "w") # 新建文件写入
# 逐行逐像素转换,写入文件
for i in range(t_height):
for j in range(100):
r, g, b = im.getpixel((j, i))
f.write(
get_char(r, g, b)
)
f.write("\n")
f.close()
print("已经为主人处理好了喵~")
| 22 | 107 | 0.629752 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 701 | 0.46609 |
dda62a60e83b2ac0fa35757329d616e26ea6b265
| 6,536 |
py
|
Python
|
python/ray/serialization.py
|
delding/ray
|
8532ba4272556aa24b5e0c7d275c7b383815c022
|
[
"Apache-2.0"
] | null | null | null |
python/ray/serialization.py
|
delding/ray
|
8532ba4272556aa24b5e0c7d275c7b383815c022
|
[
"Apache-2.0"
] | null | null | null |
python/ray/serialization.py
|
delding/ray
|
8532ba4272556aa24b5e0c7d275c7b383815c022
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import ray.numbuf
import ray.pickling as pickling
def check_serializable(cls):
"""Throws an exception if Ray cannot serialize this class efficiently.
Args:
cls (type): The class to be serialized.
Raises:
Exception: An exception is raised if Ray cannot serialize this class
efficiently.
"""
if is_named_tuple(cls):
# This case works.
return
if not hasattr(cls, "__new__"):
raise Exception("The class {} does not have a '__new__' attribute, and is "
"probably an old-style class. We do not support this. "
"Please either make it a new-style class by inheriting "
"from 'object', or use "
"'ray.register_class(cls, pickle=True)'. However, note "
"that pickle is inefficient.".format(cls))
try:
obj = cls.__new__(cls)
  except Exception:
raise Exception("The class {} has overridden '__new__', so Ray may not be "
"able to serialize it efficiently. Try using "
"'ray.register_class(cls, pickle=True)'. However, note "
"that pickle is inefficient.".format(cls))
if not hasattr(obj, "__dict__"):
raise Exception("Objects of the class {} do not have a `__dict__` "
"attribute, so Ray cannot serialize it efficiently. Try "
"using 'ray.register_class(cls, pickle=True)'. However, "
"note that pickle is inefficient.".format(cls))
if hasattr(obj, "__slots__"):
raise Exception("The class {} uses '__slots__', so Ray may not be able to "
"serialize it efficiently. Try using "
"'ray.register_class(cls, pickle=True)'. However, note "
"that pickle is inefficient.".format(cls))
# This field keeps track of a whitelisted set of classes that Ray will
# serialize.
whitelisted_classes = {}
classes_to_pickle = set()
custom_serializers = {}
custom_deserializers = {}
def class_identifier(typ):
"""Return a string that identifies this type."""
return "{}.{}".format(typ.__module__, typ.__name__)
def is_named_tuple(cls):
"""Return True if cls is a namedtuple and False otherwise."""
b = cls.__bases__
if len(b) != 1 or b[0] != tuple:
return False
f = getattr(cls, "_fields", None)
if not isinstance(f, tuple):
return False
return all(type(n) == str for n in f)
def add_class_to_whitelist(cls, pickle=False, custom_serializer=None,
custom_deserializer=None):
"""Add cls to the list of classes that we can serialize.
Args:
cls (type): The class that we can serialize.
pickle (bool): True if the serialization should be done with pickle. False
if it should be done efficiently with Ray.
custom_serializer: This argument is optional, but can be provided to
serialize objects of the class in a particular way.
custom_deserializer: This argument is optional, but can be provided to
deserialize objects of the class in a particular way.
"""
class_id = class_identifier(cls)
whitelisted_classes[class_id] = cls
if pickle:
classes_to_pickle.add(class_id)
if custom_serializer is not None:
custom_serializers[class_id] = custom_serializer
custom_deserializers[class_id] = custom_deserializer
# Here we define a custom serializer and deserializer for handling numpy
# arrays that contain objects.
def array_custom_serializer(obj):
return obj.tolist(), obj.dtype.str
def array_custom_deserializer(serialized_obj):
return np.array(serialized_obj[0], dtype=np.dtype(serialized_obj[1]))
add_class_to_whitelist(np.ndarray, pickle=False,
custom_serializer=array_custom_serializer,
custom_deserializer=array_custom_deserializer)
def serialize(obj):
"""This is the callback that will be used by numbuf.
If numbuf does not know how to serialize an object, it will call this method.
Args:
obj (object): A Python object.
Returns:
A dictionary that has the key "_pyttype_" to identify the class, and
contains all information needed to reconstruct the object.
"""
class_id = class_identifier(type(obj))
if class_id not in whitelisted_classes:
raise Exception("Ray does not know how to serialize objects of type {}. "
"To fix this, call 'ray.register_class' with this class."
.format(type(obj)))
if class_id in classes_to_pickle:
serialized_obj = {"data": pickling.dumps(obj)}
elif class_id in custom_serializers.keys():
serialized_obj = {"data": custom_serializers[class_id](obj)}
else:
# Handle the namedtuple case.
if is_named_tuple(type(obj)):
serialized_obj = {}
serialized_obj["_ray_getnewargs_"] = obj.__getnewargs__()
elif hasattr(obj, "__dict__"):
serialized_obj = obj.__dict__
else:
raise Exception("We do not know how to serialize the object '{}'"
.format(obj))
result = dict(serialized_obj, **{"_pytype_": class_id})
return result
def deserialize(serialized_obj):
"""This is the callback that will be used by numbuf.
If numbuf encounters a dictionary that contains the key "_pytype_" during
deserialization, it will ask this callback to deserialize the object.
Args:
serialized_obj (object): A dictionary that contains the key "_pytype_".
Returns:
A Python object.
"""
class_id = serialized_obj["_pytype_"]
cls = whitelisted_classes[class_id]
if class_id in classes_to_pickle:
obj = pickling.loads(serialized_obj["data"])
elif class_id in custom_deserializers.keys():
obj = custom_deserializers[class_id](serialized_obj["data"])
else:
# In this case, serialized_obj should just be the __dict__ field.
if "_ray_getnewargs_" in serialized_obj:
obj = cls.__new__(cls, *serialized_obj["_ray_getnewargs_"])
else:
obj = cls.__new__(cls)
serialized_obj.pop("_pytype_")
obj.__dict__.update(serialized_obj)
return obj
def set_callbacks():
"""Register the custom callbacks with numbuf.
The serialize callback is used to serialize objects that numbuf does not know
how to serialize (for example custom Python classes). The deserialize
callback is used to serialize objects that were serialized by the serialize
callback.
"""
ray.numbuf.register_callbacks(serialize, deserialize)
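
# A minimal round-trip sketch (demo-only class, not part of the Ray API;
# assumes the ray package imported above is available):
if __name__ == "__main__":
  class Point(object):
    def __init__(self, x, y):
      self.x = x
      self.y = y

  add_class_to_whitelist(Point)
  blob = serialize(Point(1, 2))  # a dict tagged with "_pytype_"
  clone = deserialize(dict(blob))  # pass a copy, since deserialize pops the tag
  assert (clone.x, clone.y) == (1, 2)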
| 35.521739 | 79 | 0.684823 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,282 | 0.502142 |
dda91173fa6aa6ba29a55f8ecc21898b460a57e2
| 3,729 |
py
|
Python
|
wlcsim/FrankElastic/stonefence.py
|
SpakowitzLab/BasicWLC
|
13edbbc8e8cd36a3586571ff4d80880fc89d30e6
|
[
"MIT"
] | 1 |
2021-03-16T01:39:18.000Z
|
2021-03-16T01:39:18.000Z
|
wlcsim/FrankElastic/stonefence.py
|
riscalab/wlcsim
|
e34877ef6c5dc83c6444380dbe624b371d70faf2
|
[
"MIT"
] | 17 |
2016-07-08T21:17:40.000Z
|
2017-01-24T09:05:25.000Z
|
wlcsim/FrankElastic/stonefence.py
|
riscalab/wlcsim
|
e34877ef6c5dc83c6444380dbe624b371d70faf2
|
[
"MIT"
] | 9 |
2016-06-21T22:03:53.000Z
|
2016-11-10T00:55:01.000Z
|
from numpy import sqrt
import numpy as np
#from util import sphinx_compat_jit as jit
from numba import jit
ORDER_L=50
@jit
def alpha(l,m):
return sqrt((3*(l-m)*(l+m))/(4*np.pi*(2*l-1)*(2*l+1)))
@jit
def alpha_plus(l,m):
return sqrt((3*(l+m)*(l+m+1))/(8*np.pi*(2*l-1)*(2*l+1)))
@jit
def Alm(l,m):
return alpha(l,m)*alpha(l-1,m)/alpha(2,0)
@jit
def Blm(l,m):
return (alpha(l+1,m)*alpha(l+1,m)
- alpha(1,0)*sqrt(0.25/np.pi) +
alpha(l,m)*alpha(l,m))/alpha(2,0)
@jit
def PgammaB_vec(m, p, gamma):
""" P - \gamma \\beta
Returns:
vector with index ell
"""
PgammaB = np.zeros(ORDER_L, np.complex128)
for ell in range(abs(m),ORDER_L):
PgammaB[ell] = p+ell*(ell+1) - gamma*Blm(ell,m)
return PgammaB
@jit
def Alm_vec(m):
Am_vec = np.zeros(ORDER_L, np.complex128)
for ell in range(abs(m)+2,ORDER_L):
Am_vec[ell] = Alm(ell,m)
return Am_vec
@jit
def Wplus_vec(m, gamma, p, Am, PgammaB):
Wplus = np.zeros(ORDER_L, np.complex128)
for ell in (ORDER_L-1,ORDER_L-2):
Wplus[ell] = 1.0/PgammaB[ell]
for ell in range(ORDER_L-3,abs(m)-1,-1):
Wplus[ell] = 1.0/(PgammaB[ell] - (gamma*Am[ell+2])**2*Wplus[ell+2])
return Wplus
@jit
def Wminus_vec(m, gamma, p, Am, PgammaB):
Wminus = np.zeros(ORDER_L, np.complex128)
for ell in (abs(m),abs(m)+1):
Wminus[ell] = 1.0/PgammaB[ell]
for ell in range(abs(m)+2,ORDER_L):
Wminus[ell] = 1.0/(PgammaB[ell] - (gamma*Am[ell])**2*Wminus[ell-2])
return Wminus
@jit
def Gmll_matrix(Wplus, Wminus, Am, PgammaB, gamma, m):
""""Matrox of propagators between starting and ending l value.
Args:
Wplus (numpy array): Result of Wplus_vec for same m, p
Wminus (numpy array): Reult of Wminus_vec for same m, p
Am (numpy array): Result of Am_vec for same m
PgammaB (numpy array): Result of PgammaB_vec for same m, p, gamma
gamma (float): alignment strength, in kT's per Kuhn length
m (int): z component of agular momentum quantum number
Returns:
An ORDER_L x ORDER_L numpy matrix with propagators that use Maier-Saupe
steps to get from l0 to lf.
"""
Wpm = np.zeros(ORDER_L,np.complex128)
absm = abs(m)
Wpm[absm:] = (Wplus[absm:]*Wminus[absm:])\
/(Wminus[absm:]
- PgammaB[absm:]*Wplus[absm:]*Wminus[absm:] + Wplus[absm:])
Gmll = np.zeros((ORDER_L,ORDER_L), np.complex128)
for ell in range(abs(m),ORDER_L):
Gmll[ell,ell] = Wpm[ell]
for lf in range(ell+2,ORDER_L,2):
Gmll[ell, lf] = Gmll[ell, lf-2]*Wplus[lf]*Am[lf]*gamma
Gmll[lf, ell] = Gmll[ell, lf] # Must be symmetric
        # the following loop is an alternative to using the symmetry property
#for lf in range(ell-2,-1,-2):
# Gmll[ell, lf] = Gmll[ell, lf+2]*Wminus[lf]*Am[lf+2]*gamma
return Gmll
def precalculate_data(p, gamma, m_values=[0]):
"""Precalculate W_plus, W_minus, W_pm, and G_m_ll
Args:
p (complex): laplace conjugate of path length
gamma (real): aligning l=2 (Maier-Saupe) field strength
m_values (list): list of integer m values to precalculate for
"""
Wps = {}
Wms = {}
Gmlls = {}
for m in m_values:
Am = Alm_vec(m)
PgammaB = PgammaB_vec(m, p, gamma)
Wplus = Wplus_vec(m, gamma, p, Am, PgammaB)
Wminus = Wminus_vec(m, gamma, p, Am, PgammaB)
Gmll = Gmll_matrix(Wplus, Wminus, Am, PgammaB, gamma, m)
Wps[m]=Wplus
Wms[m]=Wminus
Gmlls[m] = Gmll
return {"Wplus":Wps, "Wminus":Wms, "Gmll":Gmlls, "ms":m_values, "p":p,
"gamma":gamma}
| 30.565574 | 79 | 0.592116 | 0 | 0 | 0 | 0 | 2,799 | 0.750603 | 0 | 0 | 1,191 | 0.319389 |
dda9ad1f88ac589e37e334d17add8aea6f4a5cd4
| 2,694 |
py
|
Python
|
main.py
|
vojtsek/twitter-sentiment
|
8f3fbb7be8ac9c0e786b5a58253a24ee8b3f5dae
|
[
"MIT"
] | null | null | null |
main.py
|
vojtsek/twitter-sentiment
|
8f3fbb7be8ac9c0e786b5a58253a24ee8b3f5dae
|
[
"MIT"
] | null | null | null |
main.py
|
vojtsek/twitter-sentiment
|
8f3fbb7be8ac9c0e786b5a58253a24ee8b3f5dae
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import logging
import json
import os
import os.path as path
from collections import OrderedDict
import argparse
import tweepy
from tweepy import Stream
import twitter_config
from tweet_writer_listener import TweetWriterListener
CITIES = ['San Francisco', 'New York', 'Boston', 'Los Angeles', 'Dallas', 'Miami']
OUT_DIR = 'out'
BBOX_FILE = path.join(OUT_DIR, 'bboxes.json')
def poly2bb(coords):
longitudes = list(map(lambda x: x[0], coords))
    latitudes = list(map(lambda x: x[1], coords))
sw_lat = min(latitudes)
sw_long = min(longitudes)
ne_lat = max(latitudes)
ne_long = max(longitudes)
return [sw_long, sw_lat, ne_long, ne_lat]
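# e.g. poly2bb([[-122.51, 37.70], [-122.51, 37.83],
#               [-122.35, 37.83], [-122.35, 37.70]])
# -> [-122.51, 37.7, -122.35, 37.83]  (SW long, SW lat, NE long, NE lat)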
def place2coords(place, gran="city"):
places = api.geo_search(query=place, granularity=gran)
coords = places[0].bounding_box.coordinates
return poly2bb(coords[0])
def mkdir_if_not_exists(dir_name):
try:
os.makedirs(dir_name)
except OSError:
pass
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
logging.getLogger("tweetpy").setLevel(logging.WARNING)
logging.getLogger("requests").setLevel(logging.WARNING)
logging.getLogger("urllib3").setLevel(logging.WARNING)
parser = argparse.ArgumentParser('Twitter sentiment analysis')
parser.add_argument('--limit', type=int, required=True, help='Tweet limit per city')
args = parser.parse_args()
mkdir_if_not_exists(OUT_DIR)
logging.info('API authentization')
auth = tweepy.OAuthHandler(twitter_config.CONSUMER_KEY, twitter_config.CONSUMER_SECRET)
auth.set_access_token(twitter_config.ACCESS_TOKEN, twitter_config.ACCESS_TOKEN_SECRET)
api = tweepy.API(auth)
# for country in countries:
if path.isfile(BBOX_FILE):
logging.info('Using the cached bounding boxes from file %s', BBOX_FILE)
bboxes = json.load(open(BBOX_FILE, 'r'))
else:
logging.info('Caching the bounding boxes into file %s', BBOX_FILE)
bboxes = OrderedDict()
for city in CITIES:
            try:
                place_bb = place2coords(city)
                bboxes[city] = place_bb
            except Exception:
                logging.warning('Could not resolve coordinates for %s', city)
json.dump(bboxes, open(BBOX_FILE, 'w'))
logging.info('Creating stream')
for city, locations in bboxes.items():
logging.info('Getting tweets from %s (%s)', city, locations)
with open(path.join(OUT_DIR, "{}_tweets.txt".format(city.replace(' ', ''))), 'a') as f_out:
stream = Stream(auth, TweetWriterListener(f_out, args.limit))
            # "async" became a reserved word in Python 3.7; tweepy 3.7+ names this kwarg is_async
            stream.filter(locations=locations, languages=["en"], is_async=False)
| 32.071429 | 99 | 0.682257 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 473 | 0.175575 |
ddaa279788a0d07cc55b56c6e5a215a8e2e118cc
| 9,283 |
py
|
Python
|
controllers/default.py
|
npfe/pursuit
|
edd2d66ec0770251041b748c4b9f967a15c138b5
|
[
"Unlicense"
] | null | null | null |
controllers/default.py
|
npfe/pursuit
|
edd2d66ec0770251041b748c4b9f967a15c138b5
|
[
"Unlicense"
] | 16 |
2020-03-30T13:00:10.000Z
|
2020-05-16T16:42:52.000Z
|
controllers/default.py
|
npfe/pursuit
|
edd2d66ec0770251041b748c4b9f967a15c138b5
|
[
"Unlicense"
] | null | null | null |
# -*- coding: utf-8 -*-
# -------------------------------------------------------------------------
# This is a sample controller
# this file is released under public domain and you can use without limitations
# -------------------------------------------------------------------------
import json
from datetime import datetime
from pprint import pprint
status = {1:'not_started', 2:'hold', 3:'track', 4:'done'}
# ---- index page ----
def index():
level = 1
archive = list()
# top entries selection
entries_list = db(db.entry.parent == None).select().as_list()
for entry in entries_list:
# sets the level of the top entry
entry['level'] = 0
# adds the qty of children
entry['children'] = db(db.entry.parent == entry['id']).count()
# adds the quantity of each subitems category for progress bar
entry['not_started'], entry['hold'], entry['track'], entry['done'], entry['sum_w'] = get_w_progress(entry['name'], entry)
# creates a list containing ids of each entry
entries_id = [entry['id'] for entry in entries_list]
next_id = list() # placeholder for ids to be pushed into the loop
# recursively populates the list
for i, id in enumerate(entries_id):
children = db(db.entry.parent == id).select().as_list()
for child in children:
temp_level = level
# finds the position in entries_list where to insert child
index = next((index for (index, d) in enumerate(entries_list) if d['id']==child['parent']), None)
# if the parent is not be displayed (e.g. parent status is done)
if index == None:
level = 1 # item level is set to 1
index = 1 # item to be shown on top
else:
level = entries_list[index]['level']+1
# position index to 1 after position of the parent
index+=1
# append id of current child in the loop to the next list
next_id.append(child['id'])
            # sets the entry's nesting level
            child['level'] = level
            # restores the loop's level counter
            level = temp_level
# counts the children of the current entry
if child['status'] != 4:
child['children'] = db((db.entry.parent == child['id']) & (db.entry.status != 4)).count()
else:
child['children'] = 0
# fetches the last log of the current entry
log = db(db.journal.parent == child['id']).select().last()
# adds last log and last edit date
if log != None :
child['log'] = log.body[:75]+'...' if len(log.body) > 75 else log.body
child['last'] = get_status(log.created_on)
else:
child['log'] = ''
# skips items that are done or insert them in the final structure
if child['status'] != 4:
entries_list.insert(index,child)
else:
archive.append(child)
if i == len(entries_id)-1:
for item in next_id:
entries_id.append(item)
next_id = []
level+=1
return dict(data=entries_list, status=status, archive=archive)
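# Worked example of the flattening above: a root entry gets level 0, its
# children level 1, grandchildren level 2, and so on; entries whose status
# is 4 (done) are diverted into `archive` instead of the display list.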
def get_w_progress(name, id):
# entry status count dictionnary
entry_status = {1:0, 2:0, 3:0, 4:0}
entries_list = [id]
entries_id = [id['id']]
next_id = list()
level=1
# recursively populates the list
for i, id in enumerate(entries_id):
children = db(db.entry.parent == id).select().as_list()
for child in children:
# finds index in entries_list where to insert the children
index = next((index for (index, d) in enumerate(entries_list) if d['id']==child['parent']), None)
# position index to 1 after position of the parent
index = index+1 if index!=None else index
next_id.append(child['id'])
# counts tasks status
entry_status[child['status']]+=1
if i == len(entries_id)-1:
for item in next_id:
entries_id.append(item)
next_id = []
level+=1
total = sum(entry_status.values())
return entry_status[1], entry_status[2], entry_status[3], entry_status[4], total
def get_status(last_edit):
    wrapper_class = 'badge badge-pill '
    class_type = {2:'badge-success', 3:'badge-warning', 5:'badge-danger'}
    # creates a list of the class_type dict keys
    list_type = list(class_type.keys())
    # finds out the duration since the last log was entered
    now = datetime.now()
    delta = (now-last_edit).days
    if delta < 1:
        delta = int((now-last_edit).seconds/3600)
        value = '%s hour%s' % (str(delta), 's' if delta>1 else '')
wrapper_class = wrapper_class+class_type[list_type[0]]
else:
value = '%s day%s' % (str(delta), 's' if delta>1 else '')
if delta > list_type[2]:
wrapper_class = wrapper_class+class_type[list_type[2]]
elif delta >= list_type[1]:
wrapper_class = wrapper_class+class_type[list_type[1]]
else:
wrapper_class = wrapper_class+class_type[list_type[0]]
# creates the span html to be displayed
delta = SPAN(value, _class=wrapper_class)
return delta
def children(parent):
children_list = list()
db_children = db(db.entry.parent == parent['id']).select()
if len(db_children) > 0:
for db_child in db_children:
child_data = {'name': db_child.name, 'id': db_child.id, 'children':{}}
child_data['children'] = children(db_child)
children_list.append(child_data)
return children_list
def children_list(parent):
c_list = list()
children = db(db.entry.parent == parent).select()
for child in children:
c_count = db(db.entry.parent == child.id).count()
c_list.append({'name': child.name, 'id': child.id, 'children': c_count})
return c_list
def item():
item = request.args(0)
status = {1:'not_started', 2:'hold', 3:'track', 4:'done'}
record = db.entry[item]
parent = db.entry[record.parent]
children = db(db.entry.parent == record.id).select()
notes = db(db.notes.parent == item).select(db.notes.id, db.notes.title, db.notes.modified_on)
return locals()
def new_item():
parent = request.args(0)
if parent == 0:
parent = None
db.entry.parent.default = parent
form = SQLFORM(db.entry)
form.vars.parent = parent
if form.process(session=None, formname='_newitem').accepted:
response.js = "location.reload();"
response.flash=('Log inserted')
else:
print(form.vars)
return locals()
def edit_item():
entry = request.args(0)
redirection = request.args(1).replace('_', '/')
db.entry.id.readable = db.entry.id.writable = False
db.entry.parent.readable = db.entry.parent.writable = False
db.entry.status.readable = db.entry.status.writable = False
form = SQLFORM(db.entry, entry)
if form.process().accepted:
redirect(URL( redirection))
return locals()
def delete_item():
entry = request.args(0)
form = FORM.confirm('Yes', {'Back':URL('index')})
if form.accepted:
db(db.entry.id==entry).delete()
session.flash = "entry deleted"
redirect(URL('default', 'index'))
return locals()
def set_status():
record = request.args(0)
status = request.args(1)
db(db.entry.id == record).update(status=status)
session.flash = '%s status updated' % (db.entry[record].name)
redirect(URL('default', 'item', args=record))
def log_form():
record = request.args(0)
db.journal.parent.readable = db.journal.parent.writable = False
db.journal.parent.default = record
db.journal.created_on.default = request.now
form = SQLFORM(db.journal)
form.vars.created_on = request.now
if form.process(session=None, formname='_newlog').accepted:
response.js = "jQuery('#%s').get(0).reload()" % request.vars.reload_div
response.flash=('Log inserted')
return locals()
def log_journal():
record = request.args(0)
logs = db(db.journal.parent == record).select(orderby=~db.journal.id)
return dict(logs=logs)
def log_delete():
record = request.args(0)
db(db.journal.id == record).delete()
response.js = "jQuery('#%s').get(0).reload()" % request.args(1)
response.flash=('Log deleted')
def log_edit():
record = db.journal(request.args(0))
db.journal.id.readable = db.journal.id.writable = False
db.journal.parent.readable = db.journal.parent.writable = False
db.journal.created_on.readable = db.journal.created_on.writable = False
form = SQLFORM(db.journal, record)
if form.process().accepted:
response.js = "jQuery('#log_journal').get(0).reload(); "
response.js += "$('body').removeClass('modal-open'); "
response.js += "$('.modal-backdrop').remove(); "
return dict(form=form)
# ---- action to server uploaded static content (required) ---
@cache.action()
def download():
"""
allows downloading of uploaded files
http://..../[app]/default/download/[filename]
"""
return response.download(request, db)
| 39.004202 | 129 | 0.596251 | 0 | 0 | 0 | 0 | 180 | 0.01939 | 0 | 0 | 2,324 | 0.25035 |
ddaaf774a1abba687897e41a33028b028a3ed7fd
| 2,200 |
py
|
Python
|
convertgf.py
|
Wivik/gundam-france
|
65d84098eec431e7e27b6a6c0f1e6eadea1c2bc8
|
[
"MIT"
] | null | null | null |
convertgf.py
|
Wivik/gundam-france
|
65d84098eec431e7e27b6a6c0f1e6eadea1c2bc8
|
[
"MIT"
] | null | null | null |
convertgf.py
|
Wivik/gundam-france
|
65d84098eec431e7e27b6a6c0f1e6eadea1c2bc8
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
from markdownify import markdownify as md
import argparse
import re
import os
import sys
parser = argparse.ArgumentParser()
parser.add_argument('input_file', help='file to convert')
args = parser.parse_args()
input_file = args.input_file
print(input_file)
if not re.search(r'\.dist\.php', input_file):
    test_file = re.sub(r'\.php', '.dist.php', input_file)
    print(test_file)
    try:
        os.stat(test_file)
    except FileNotFoundError:
        os.rename(input_file, test_file)
        print('file renamed, rerun job')
        sys.exit(0)
    print('file dist exists, ignoring')
    sys.exit(0)
output_file = os.path.splitext(input_file)[0]
output_file = os.path.splitext(output_file)[0]
output_file = output_file + '.md'
print(output_file)
# sys.exit(0)
with open(input_file, 'r') as file:
content = file.read()
html = md(content)
firstline = html.split('\n', 1)[0]
if re.search('php include', firstline):
print('ignore file')
# os.remove(input_file)
sys.exit(0)
# print(firstline)
## fix images path
html = html.replace('](images/', '](/images/')
# html = re.sub("{lien:db:(\d+):", "", html)
# html = re.sub("(html:lien})", "", html)
html = re.sub("(\s)({lien:db:)(\d+):(.*):(.*)(\.html:lien})(\s)", " \\4 ", html)
html = re.sub("(’)({lien:db:)(\d+):(.*):(.*)(\.html:lien})(,)", "'\\4 ", html)
html = re.sub("(\s)({lien:db:)(\d+):(.*):(.*)(\.html:lien})(,)", " \\4,", html)
# html = re.sub("(\ )({lien:db:)(\d+):(.*):(.*)(\.html:lien})(,)", " \\4,", html)
# html = re.sub("(\ )({lien:db:)(\d+):(.*):(.*)(\.html:lien})(\s)", " \\4,", html)
# html = re.sub("(\ )({lien:db:)(\d+):(.*):(.*)(\.html:lien})(\.)", " \\4.", html)
html = re.sub("(\s)({lien:db:)(\d+):(.*):(.*)(\.html:lien})(\.)", " \\4.", html)
html = re.sub("(<\?php echo \$_SERVER\[\'REQUEST_URI\'\]; \?>)", "", html)
html = re.sub("(php include\(\"modules/flag\\_spoiler\.php\"\); \?)", "", html)
# print(html)
result = '---\ntitle: "'+ firstline + '"\n---\n\n' + html
# print(output)
    with open(output_file, 'w') as output:
        output.write(result)
| 32.352941 | 86 | 0.537727 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 991 | 0.449433 |
ddab10387c063d1c5dd03502020dc60340b9c9c1
| 1,957 |
py
|
Python
|
scripts/disktest.py
|
CloudAdvocacy/ObjectDetection
|
ba823455a43684dea8f0bc1eab6f669a818895bb
|
[
"MIT"
] | null | null | null |
scripts/disktest.py
|
CloudAdvocacy/ObjectDetection
|
ba823455a43684dea8f0bc1eab6f669a818895bb
|
[
"MIT"
] | null | null | null |
scripts/disktest.py
|
CloudAdvocacy/ObjectDetection
|
ba823455a43684dea8f0bc1eab6f669a818895bb
|
[
"MIT"
] | null | null | null |
import os
import argparse
from timeit import default_timer as timer
parser = argparse.ArgumentParser(description="File Performance Testing Util")
parser.add_argument("command",help="Test to perform",choices=['read','write','readany'])
parser.add_argument("dir",help="Directory to use")
args = parser.parse_args()
def time(msg,size,f):
print(msg,end='...')
st = timer()
f()
el = timer()-st
print("{} sec, {} Mb/sec".format(el,size/el/1024/1024))
def write_test(n,size):
fn = "test_{}".format(size)+"_{}"
buf = os.urandom(size)
for i in range(n):
with open(os.path.join(args.dir,fn.format(i)),'wb') as f:
f.write(buf)
def read_test(n,size):
fn = "test_{}".format(size)+"_{}"
for i in range(n):
with open(os.path.join(args.dir,fn.format(i)),'rb') as f:
buf = bytearray(f.read())
def read_any_test(n=1000):
    # reads up to n pre-existing files of any size from the target directory
sz = 0
i = 0
st = timer()
for x in os.listdir(args.dir):
with open(os.path.join(args.dir,x),'rb') as f:
buf = bytearray(f.read())
sz += len(buf)
i += 1
n-=1
if n==0:
break
en = timer()-st
print("{} secs, {} Mb/Sec, av file size: {} Mb".format(en,sz/1024/1024/en,sz/i/1024/1024))
if args.command == "read":
time("1000 1k files",1024*1000,lambda: read_test(1000,1024))
time("100 1M files",1024*1024*100,lambda: read_test(100,1024*1024))
time("10 10M files",10*1024*1024*10,lambda: read_test(10,1024*1024*10))
time("1 100M files",1*1024*1024*100,lambda: read_test(1,1024*1024*100))
elif args.command == "write":
time("1000 1k files",1024*1000,lambda: write_test(1000,1024))
time("100 1M files",1024*1024*100,lambda: write_test(100,1024*1024))
time("10 10M files",10*1024*1024*10,lambda: write_test(10,1024*1024*10))
time("1 100M files",1*1024*1024*100,lambda: write_test(1,1024*1024*100))
elif args.command == "readany":
    read_any_test()
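
# Example runs (hypothetical mount point):
#   ./disktest.py write /mnt/scratch    # create the test files first
#   ./disktest.py read /mnt/scratch     # then time reading them back
#   ./disktest.py readany /mnt/scratch  # time whatever files are present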
| 33.169492 | 94 | 0.616249 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 343 | 0.175268 |
ddab703f80463a12929dce437920fcb4f1403fd4
| 151 |
py
|
Python
|
assignment2/scripts/exploit15/exploit15.py
|
vhazali/cs5331
|
3b3618aaa17199ebcd3c01bc6c25ddbdbe4f3d0f
|
[
"MIT"
] | 8 |
2020-02-22T12:47:12.000Z
|
2021-12-03T11:39:19.000Z
|
assignment2/scripts/exploit15/exploit15.py
|
vhazali/cs5331
|
3b3618aaa17199ebcd3c01bc6c25ddbdbe4f3d0f
|
[
"MIT"
] | null | null | null |
assignment2/scripts/exploit15/exploit15.py
|
vhazali/cs5331
|
3b3618aaa17199ebcd3c01bc6c25ddbdbe4f3d0f
|
[
"MIT"
] | 4 |
2018-08-15T12:58:36.000Z
|
2021-12-29T07:06:29.000Z
|
import webbrowser
url = 'http://www.wsb.com/Assignment2/case15.php?videourl=" onerror="alert(document.cookie)'
new = 2
webbrowser.open(url, new=new)
| 21.571429 | 92 | 0.748344 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 86 | 0.569536 |
ddabee57641e5f2212bdb1af4233c76d2dc9db3e
| 2,238 |
py
|
Python
|
introspection/call_stack.py
|
Aran-Fey/introspection
|
0ce3a16688b51bdcb72c7b070d571a1004f5151b
|
[
"MIT"
] | 1 |
2022-03-02T23:13:06.000Z
|
2022-03-02T23:13:06.000Z
|
introspection/call_stack.py
|
Aran-Fey/introspection
|
0ce3a16688b51bdcb72c7b070d571a1004f5151b
|
[
"MIT"
] | null | null | null |
introspection/call_stack.py
|
Aran-Fey/introspection
|
0ce3a16688b51bdcb72c7b070d571a1004f5151b
|
[
"MIT"
] | null | null | null |
import types
from typing import Iterable, Union
from .call_frame import CallFrame
__all__ = ['CallStack']
class CallStack:
"""
Represents the call stack - a series of :class:`CallFrame` instances.
This class can be used like a read-only list. It supports iteration, indexing, membership testing, etc. The root frame is first in the list, at index 0.
Because holding references to call frames can result in reference cycles,
it's recommended to use CallStack objects as context managers. Upon exit,
the frame objects are released and the CallStack becomes empty::
with CallStack.current() as stack:
... # do something with the stack
# at this point, len(stack) is 0
"""
__slots__ = ('__frames',)
def __init__(self, frames: Iterable[Union[CallFrame, types.FrameType]]):
"""
Creates a new ``CallStack`` from the given frame objects.
:param frames: An iterable of frame objects, starting with the root frame
"""
self.__frames = [CallFrame.from_frame(frame) for frame in frames]
@classmethod
def current(cls) -> 'CallStack':
"""
Get the current call stack.
"""
with CallFrame.current() as frame:
return cls.from_frame(frame.parent)
@classmethod
def from_frame(cls, frame) -> 'CallStack':
"""
Creates a ``CallStack`` containing ``frame`` and all its parents.
:param frame: The last frame in the call stack
:return: A new ``CallStack`` instance
"""
frames = [frame]
while True:
frame = frame.f_back
if frame is None:
break
frames.append(frame)
frames.reverse()
return cls(frames)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.__frames.clear()
def __iter__(self):
return iter(self.__frames)
def __reversed__(self):
return reversed(self.__frames)
def __getitem__(self, index):
return self.__frames[index]
def __len__(self):
return len(self.__frames)
def __contains__(self, frame):
return frame in self.__frames
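
# A minimal sketch of the context-manager usage from the class docstring
# (demo-only function; assumes the sibling call_frame module is importable):
if __name__ == '__main__':
    def demo():
        with CallStack.current() as stack:
            depth = len(stack)
        assert len(stack) == 0  # frames are released on __exit__
        return depth

    print(demo())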
| 26.963855 | 156 | 0.626005 | 2,125 | 0.949508 | 0 | 0 | 677 | 0.302502 | 0 | 0 | 1,050 | 0.469169 |
ddaf61fd3b67b0ad82d3ff5a5a750292ac61bd57
| 2,728 |
py
|
Python
|
products/views.py
|
deepindo/DoPython
|
d80b85b3b24566de6ece9f452564e4827e705959
|
[
"MIT"
] | 4 |
2022-01-04T09:56:19.000Z
|
2022-01-20T12:14:16.000Z
|
products/views.py
|
deepindo/DoPython
|
d80b85b3b24566de6ece9f452564e4827e705959
|
[
"MIT"
] | null | null | null |
products/views.py
|
deepindo/DoPython
|
d80b85b3b24566de6ece9f452564e4827e705959
|
[
"MIT"
] | 1 |
2022-01-20T09:40:16.000Z
|
2022-01-20T09:40:16.000Z
|
from django.shortcuts import render, get_object_or_404
from django.core.paginator import Paginator, PageNotAnInteger, EmptyPage
from products.models import Product
def productList(request, productName):
"""产品的列表页"""
submenu = productName
if productName == 'robot':
productName = '家用机器人'
elif productName == 'monitor':
productName = '智能门锁'
else:
productName = '人脸识别解决方案'
product_list = Product.objects.filter(product_type=productName).order_by('-publish_date')
    # pagination handling
    # show 2 items per page
    p = Paginator(product_list, 2)
if p.num_pages <= 1:
page_data = ''
else:
        # get the current page, defaulting to 1
        page = int(request.GET.get('page', 1))
        # fetch that page of results
        product_list = p.page(page)
left = []
right = []
left_has_more = False
right_has_more = False
first = False
last = False
        total_pages = p.num_pages  # total number of pages
        page_range = p.page_range  # iterable of page numbers
        if page == 1:
            right = page_range[page:page + 2]
if right[-1] < total_pages - 1:
right_has_more = True
if right[-1] < total_pages:
last = True
elif page == total_pages:
left = page_range[(page - 3) if (page - 3) > 0 else 0:page - 1]
if left[0] > 2:
left_has_more = True
if left[0] > 1:
first = True
else:
left = page_range[(page - 3) if (page - 3) > 0 else 0:page - 1]
right = page_range[page:page + 2]
if left[0] > 2:
left_has_more = True
if left[0] > 1:
first = True
if right[-1] < total_pages - 1:
right_has_more = True
if right[-1] < total_pages:
last = True
page_data = {
'left': left,
'right': right,
'left_has_more': left_has_more,
'right_has_more': right_has_more,
'first': first,
'last': last,
'total_pages': total_pages,
'page': page,
}
context = {
'active_menu': 'products',
'sub_menu': submenu,
'productName': productName,
'productList': product_list,
'pageData': page_data
}
return render(request, 'products/productList.html', context)
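# Worked example of the window above: with 5 pages and page == 1, `right`
# covers pages 2-3, right_has_more and last are True, and the left-side
# flags (first, left_has_more) stay False.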
def productDetail(request, id):
"""产品的详情页"""
product = get_object_or_404(Product, id=id) # 按id进行查找,没有时返回404
product.product_views += 1 # 浏览数加1
product.save()
return render(request, 'products/productDetail.html', {
'active_menu': 'products',
'product': product,
})
| 29.978022 | 93 | 0.535924 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 548 | 0.190014 |
ddb0240924a8101cddcbf80261a52d4f5843c4bf
| 1,545 |
py
|
Python
|
misc/CharacterMotion.py
|
qwewqa/dl-datamine
|
a8e050731f67e4cf49123947eadf66ac0fd948ca
|
[
"MIT"
] | 2 |
2020-03-31T00:07:54.000Z
|
2020-04-01T23:39:23.000Z
|
misc/CharacterMotion.py
|
qwewqa/dl-datamine
|
a8e050731f67e4cf49123947eadf66ac0fd948ca
|
[
"MIT"
] | null | null | null |
misc/CharacterMotion.py
|
qwewqa/dl-datamine
|
a8e050731f67e4cf49123947eadf66ac0fd948ca
|
[
"MIT"
] | 1 |
2021-08-02T04:21:31.000Z
|
2021-08-02T04:21:31.000Z
|
import json
import os
from dataclasses import dataclass, field
from typing import Dict, Optional
@dataclass
class AnimationClipData:
name: str
startTime: float
stopTime: float
id: Optional[int] = field(init=False)
duration: float = field(init=False)
def __post_init__(self):
self.duration = self.stopTime - self.startTime
try:
self.id = int(self.name.split('_')[-1])
except (IndexError, ValueError):
self.id = None
def load_animation_clip_data(in_path: str) -> Optional[AnimationClipData]:
with open(in_path) as f:
data = json.load(f)
return AnimationClipData(
name=data['name'],
startTime=data['m_MuscleClip']['m_StartTime'],
stopTime=data['m_MuscleClip']['m_StopTime']
)
def get_animation_clip_data(in_dir: str) -> Dict[str, AnimationClipData]:
clips = {}
for root, _, files in os.walk(in_dir):
for file_name in files:
file_path = os.path.join(root, file_name)
try:
clip = load_animation_clip_data(file_path)
clips[clip.name] = clip
except (KeyError, TypeError):
pass
return clips
def get_animation_clip_data_by_id(in_dir: str) -> Dict[Optional[int], Dict[str, AnimationClipData]]:
clips = {}
data = get_animation_clip_data(in_dir)
for clip in data.values():
if clip.id not in clips:
clips[clip.id] = {}
clips[clip.id][clip.name] = clip
return clips
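
# A minimal usage sketch (hypothetical asset directory):
if __name__ == "__main__":
    clips = get_animation_clip_data("extracted/motions")
    for name, clip in sorted(clips.items()):
        print(f"{name}: {clip.duration:.3f}s (id={clip.id})")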
| 28.611111 | 100 | 0.618123 | 379 | 0.245307 | 0 | 0 | 390 | 0.252427 | 0 | 0 | 62 | 0.040129 |
ddb050b82209d0997ed09ca448c8c2752e16f7c5
| 14,431 |
py
|
Python
|
kube/config.py
|
nearmap/kubefs
|
e2f6c019f04e436d031874e40c59ba0ee61d8c58
|
[
"MIT"
] | 3 |
2021-05-31T06:45:37.000Z
|
2021-10-05T22:36:37.000Z
|
kube/config.py
|
nearmap/kubefs
|
e2f6c019f04e436d031874e40c59ba0ee61d8c58
|
[
"MIT"
] | 8 |
2021-09-06T00:43:13.000Z
|
2021-10-01T00:22:53.000Z
|
kube/config.py
|
nearmap/kubefs
|
e2f6c019f04e436d031874e40c59ba0ee61d8c58
|
[
"MIT"
] | null | null | null |
import base64
import fnmatch
import logging
import os
import tempfile
from ssl import SSLContext, create_default_context
from typing import Dict, List, Optional, Sequence
import yaml
from kube.tools.repr import disp_secret_blob, disp_secret_string
class ExecCmd:
def __init__(self, *, command: str, args: List[str], env: Dict[str, str]) -> None:
self.command = command
self.args = args
self.env = env
def __repr__(self) -> str:
return "<%s command=%r, args=%r, env=%r>" % (
self.__class__.__name__,
self.command,
self.args,
self.env,
)
class User:
def __init__(
self,
*,
name: str,
username: Optional[str],
password: Optional[str],
client_cert_path: Optional[str],
client_key_path: Optional[str],
client_cert_data: Optional[str],
client_key_data: Optional[str],
exec: Optional[ExecCmd],
) -> None:
self.name = name
self.username = username
self.password = password
self.client_cert_path = client_cert_path
self.client_key_path = client_key_path
self.client_cert_data = client_cert_data
self.client_key_data = client_key_data
self.exec = exec
# host.company.com -> host
self.short_name = name.split(".")[0]
def __repr__(self) -> str:
return (
"<%s name=%r, username=%r, password=%s, "
"client_cert_path=%r, client_key_path=%r, "
"client_cert_data=%s, client_key_data=%s, "
"exec=%r>"
) % (
self.__class__.__name__,
self.name,
self.username,
disp_secret_string(self.password),
self.client_cert_path,
self.client_key_path,
disp_secret_blob(self.client_cert_data),
disp_secret_blob(self.client_key_data),
self.exec,
)
def get_attribute_names(self) -> List[str]:
"Returns names of all attributes that are set"
names = []
attnames = dir(self)
for attname in attnames:
value = getattr(self, attname)
if attname.startswith("_") or callable(value):
continue
if not value:
continue
names.append(attname)
return names
class Cluster:
def __init__(
self,
*,
name: str,
server: str,
ca_cert_path: Optional[str],
ca_cert_data: Optional[str],
) -> None:
self.name = name
self.server = server
self.ca_cert_path = ca_cert_path
self.ca_cert_data = ca_cert_data
# host.company.com -> host
self.short_name = name.split(".")[0]
def __repr__(self) -> str:
return "<%s name=%r, server=%r, ca_cert_path=%r, ca_cert_data=%r>" % (
self.__class__.__name__,
self.name,
self.server,
self.ca_cert_path,
disp_secret_blob(self.ca_cert_data),
)
class Context:
def __init__(
self,
*,
name: str,
user: User,
cluster: Cluster,
namespace: Optional[str],
) -> None:
self.name = name
self.user = user
self.cluster = cluster
self.namespace = namespace
self.file: "KubeConfigFile" = None # type: ignore
# host.company.com -> host
self.short_name = name.split(".")[0]
def __repr__(self) -> str:
return "<%s name=%r, short_name=%r, user=%r, cluster=%r, namespace=%r>" % (
self.__class__.__name__,
self.name,
self.short_name,
self.user,
self.cluster,
self.namespace,
)
def set_file(self, file: "KubeConfigFile") -> None:
self.file = file
def create_ssl_context(self) -> SSLContext:
kwargs = {}
if self.cluster.ca_cert_path:
kwargs["cafile"] = self.cluster.ca_cert_path
elif self.cluster.ca_cert_data:
value = base64.b64decode(self.cluster.ca_cert_data)
cert_data = value.decode()
kwargs["cadata"] = cert_data
ssl_context = create_default_context(**kwargs)
# If the cert and key are in the form of blobs then we need to create
# temporary files for them because the ssl lib only accepts file paths.
# We first create a tempdir which is rwx only for the current user, so
# no other users can even list its contents. We then create the two temp
# files inside it. The tempdir and its contents get removed when the
# context manager exits.
if self.user.client_cert_data and self.user.client_key_data:
with tempfile.TemporaryDirectory(prefix="kube-client.") as tempdir_name:
cert_content = base64.b64decode(self.user.client_cert_data)
cert_file_fd, cert_file_name = tempfile.mkstemp(dir=tempdir_name)
os.write(cert_file_fd, cert_content)
cert_content = base64.b64decode(self.user.client_key_data)
key_file_fd, key_file_name = tempfile.mkstemp(dir=tempdir_name)
os.write(key_file_fd, cert_content)
ssl_context.load_cert_chain(
certfile=cert_file_name,
keyfile=key_file_name,
)
elif self.user.client_cert_path and self.user.client_key_path:
ssl_context.load_cert_chain(
certfile=self.user.client_cert_path,
keyfile=self.user.client_key_path,
)
return ssl_context
class KubeConfigFile:
def __init__(
self,
*,
filepath: str,
contexts: Sequence[Context],
users: Sequence[User],
clusters: Sequence[Cluster],
ctime: float,
mtime: float,
atime: float,
) -> None:
self.filepath = filepath
self.contexts = contexts or []
self.users = users or []
self.clusters = clusters or []
self.ctime = ctime
self.mtime = mtime
self.atime = atime
def __repr__(self) -> str:
return "<%s filepath=%r, contexts=%r, users=%r, clusters=%r>" % (
self.__class__.__name__,
self.filepath,
self.contexts,
self.users,
self.clusters,
)
class KubeConfigCollection:
def __init__(self) -> None:
self.clusters: Dict[str, Cluster] = {}
self.contexts: Dict[str, Context] = {}
self.users: Dict[str, User] = {}
def add_file(self, config_file: KubeConfigFile) -> None:
# NOTE: does not enforce uniqueness of context/user/cluster names
for cluster in config_file.clusters:
self.clusters[cluster.name] = cluster
for context in config_file.contexts:
self.contexts[context.name] = context
for user in config_file.users:
self.users[user.name] = user
def get_context_names(self) -> Sequence[str]:
names = list(self.contexts.keys())
names.sort()
return names
def get_context(self, name) -> Optional[Context]:
return self.contexts.get(name)
class KubeConfigSelector:
def __init__(self, *, collection: KubeConfigCollection) -> None:
self.collection = collection
def fnmatch_context(self, pattern: str) -> List[Context]:
names = self.collection.get_context_names()
names = fnmatch.filter(names, pattern)
objs = [self.collection.get_context(name) for name in names]
contexts = [ctx for ctx in objs if ctx]
return contexts
class KubeConfigLoader:
def __init__(
self, *, config_dir="$HOME/.kube", config_var="KUBECONFIG", logger=None
) -> None:
self.config_dir = config_dir
self.config_var = config_var
self.logger = logger or logging.getLogger("config-loader")
def get_candidate_files(self) -> Sequence[str]:
# use config_var if set
env_var = os.getenv(self.config_var)
if env_var:
filepaths = env_var.split(":")
filepaths = [fp.strip() for fp in filepaths if fp.strip()]
return filepaths
# fall back on config_dir
path = os.path.expandvars(self.config_dir)
filenames = os.listdir(path)
filepaths = []
for fn in filenames:
fp = os.path.join(path, fn)
if not os.path.isfile(fp):
continue
filepaths.append(fp)
return filepaths
def take_after_last_slash(self, name: str) -> str:
# arn:aws:iam::123:role/myrole -> myrole
if "/" in name:
name = name.rsplit("/")[1]
return name
def parse_context(
self, clusters: Sequence[Cluster], users: Sequence[User], dct
) -> Optional[Context]:
name = dct.get("name")
name = self.take_after_last_slash(name)
obj = dct.get("context")
cluster_id = obj.get("cluster")
namespace = obj.get("namespace")
user_id = obj.get("user")
cluster_id = self.take_after_last_slash(cluster_id)
user_id = self.take_after_last_slash(user_id)
# 'name', 'cluster' and 'user' are required attributes
if all((name, cluster_id, user_id)):
users = [user for user in users if user.name == user_id]
if not users:
                self.logger.warning(
"When parsing context %r could not find matching user %r",
name,
user_id,
)
clusters = [cluster for cluster in clusters if cluster.name == cluster_id]
if not clusters:
                self.logger.warning(
"When parsing context %r could not find matching cluster %r",
name,
cluster_id,
)
if users and clusters:
return Context(
name=name,
user=users[0],
cluster=clusters[0],
namespace=namespace,
)
return None
def parse_cluster(self, dct) -> Optional[Cluster]:
name = dct.get("name")
name = self.take_after_last_slash(name)
obj = dct.get("cluster")
server = obj.get("server")
ca_cert_path = obj.get("certificate-authority")
ca_cert_data = obj.get("certificate-authority-data")
# 'name' and 'server' are required attributes
if name and server:
return Cluster(
name=name,
server=server,
ca_cert_path=ca_cert_path,
ca_cert_data=ca_cert_data,
)
return None
def parse_user(self, dct) -> Optional[User]:
name = dct.get("name")
name = self.take_after_last_slash(name)
obj = dct.get("user")
password = obj.get("password")
username = obj.get("username")
client_cert_path = obj.get("client-certificate")
client_key_path = obj.get("client-key")
client_cert_data = obj.get("client-certificate-data")
client_key_data = obj.get("client-key-data")
exec_obj = obj.get("exec")
# 'name' is the only required attribute
if name:
exec = None
if exec_obj:
command = exec_obj.get("command")
args = exec_obj.get("args") or []
env_map = exec_obj.get("env") or {}
env = {}
for item_dct in env_map:
key = item_dct.get("name")
value = item_dct.get("value")
if key and value:
env[key] = value
exec = ExecCmd(command=command, args=args, env=env)
return User(
name=name,
username=username,
password=password,
client_cert_path=client_cert_path,
client_key_path=client_key_path,
client_cert_data=client_cert_data,
client_key_data=client_key_data,
exec=exec,
)
return None
def load_file(self, filepath: str) -> Optional[KubeConfigFile]:
with open(filepath, "rb") as fl:
try:
dct = yaml.load(fl, Loader=yaml.SafeLoader)
except Exception:
self.logger.warn("Failed to parse kube config as yaml: %s", filepath)
return None
kind = dct.get("kind")
if not kind == "Config":
self.logger.warn("Kube config does not have kind: Config: %s", filepath)
return None
clust_list = [self.parse_cluster(clus) for clus in dct.get("clusters") or []]
clusters = [cluster for cluster in clust_list if cluster]
user_list = [self.parse_user(user) for user in dct.get("users") or []]
users = [user for user in user_list if user]
ctx_list = [
self.parse_context(clusters, users, ctx)
for ctx in dct.get("contexts") or []
]
contexts = [ctx for ctx in ctx_list if ctx]
st = os.stat(filepath)
# The context is the organizing principle of a kube config so if we
# didn't find any we failed to parse the file
if contexts:
config_file = KubeConfigFile(
filepath=filepath,
contexts=contexts,
users=users,
clusters=clusters,
ctime=st.st_ctime,
mtime=st.st_mtime,
atime=st.st_atime,
)
for context in contexts:
context.set_file(config_file)
return config_file
return None
def create_collection(self) -> KubeConfigCollection:
collection = KubeConfigCollection()
for filepath in self.get_candidate_files():
config_file = self.load_file(filepath)
if config_file:
collection.add_file(config_file)
return collection
def get_selector() -> KubeConfigSelector:
loader = KubeConfigLoader()
collection = loader.create_collection()
selector = KubeConfigSelector(collection=collection)
return selector
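
# A minimal usage sketch (the glob pattern is illustrative):
if __name__ == "__main__":
    selector = get_selector()
    for context in selector.fnmatch_context("*"):
        print(context.short_name, "->", context.cluster.server)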
| 30.901499 | 86 | 0.56538 | 13,960 | 0.967362 | 0 | 0 | 0 | 0 | 0 | 0 | 1,893 | 0.131176 |
ddb11949c25d2f8ec4e231606475f6d7c71dff61
| 1,256 |
py
|
Python
|
other/application/windowApp/test6.py
|
Ethan7102/FYP
|
c6560a0b95ad78d5e1a341ab2d93c063e10c6631
|
[
"MIT"
] | null | null | null |
other/application/windowApp/test6.py
|
Ethan7102/FYP
|
c6560a0b95ad78d5e1a341ab2d93c063e10c6631
|
[
"MIT"
] | null | null | null |
other/application/windowApp/test6.py
|
Ethan7102/FYP
|
c6560a0b95ad78d5e1a341ab2d93c063e10c6631
|
[
"MIT"
] | 1 |
2021-01-23T07:59:57.000Z
|
2021-01-23T07:59:57.000Z
|
from PyQt5.QtCore import QThread, pyqtSignal, QDateTime, QObject
from PyQt5.QtWidgets import QApplication, QDialog, QLineEdit
import time
import sys
class BackendThread(QObject):
    # define the signal as a class attribute
    update_date = pyqtSignal(str)
    # business logic: emit the current time once per second
def run(self):
while True:
data = QDateTime.currentDateTime()
currTime = data.toString("yyyy-MM-dd hh:mm:ss")
self.update_date.emit(str(currTime))
time.sleep(1)
class Window(QDialog):
def __init__(self):
QDialog.__init__(self)
        self.setWindowTitle('PyQt5 live UI update example')
self.resize(400, 100)
self.input = QLineEdit(self)
self.input.resize(400, 100)
self.initUI()
def initUI(self):
        # create the backend worker and a thread to host it
        self.backend = BackendThread()
        # connect the signal to the display handler
        self.backend.update_date.connect(self.handleDisplay)
        self.thread = QThread()
        self.backend.moveToThread(self.thread)
        # start the thread
        self.thread.started.connect(self.backend.run)
        self.thread.start()
    # write the current time into the text box
def handleDisplay(self, data):
self.input.setText(data)
if __name__ == '__main__':
app = QApplication(sys.argv)
win = Window()
win.show()
sys.exit(app.exec_())
| 25.632653 | 64 | 0.627389 | 1,076 | 0.795858 | 0 | 0 | 0 | 0 | 0 | 0 | 195 | 0.144231 |
ddb1e5dab629942b29ba8fb6aab9cb866f52c858
| 13,098 |
py
|
Python
|
model/meter.py
|
meiwei92/meter-alignment
|
dc92e4aca0ca80ed1c9418027b050e9631b5fb7a
|
[
"MIT"
] | null | null | null |
model/meter.py
|
meiwei92/meter-alignment
|
dc92e4aca0ca80ed1c9418027b050e9631b5fb7a
|
[
"MIT"
] | null | null | null |
model/meter.py
|
meiwei92/meter-alignment
|
dc92e4aca0ca80ed1c9418027b050e9631b5fb7a
|
[
"MIT"
] | null | null | null |
from __future__ import annotations
from typing import List, Tuple, OrderedDict as OrderedDictType, DefaultDict, Optional
from collections import OrderedDict, defaultdict
from metric import MusicNote, TimePointSequence
from model.base import MidiModel, MidiModelState
from model.beat import TatumTrackingModelState, TatumTrackingGrammarModelState
from model.voice import VoiceSplittingModelState, VoiceSplittingGrammarModelState
from model.hierarchy import HierarchyModelState, HierarchyGrammarModelState
class MeterModel(MidiModel):
new_voice_states: DefaultDict[VoiceSplittingModelState, List[VoiceSplittingModelState]]
new_tatum_states: DefaultDict[TatumTrackingModelState, DefaultDict[Tuple[MusicNote], List[TatumTrackingModelState]]]
def __init__(self, hierarchy_state: HierarchyModelState = None,
voice_state: VoiceSplittingModelState = None,
tatum_state: TatumTrackingModelState = None,
beam_size: int = 200):
        super().__init__()
self.beam_size = -1 if beam_size is None else beam_size
self.started = False
self.new_voice_states = defaultdict(lambda: [])
self.new_tatum_states = defaultdict(lambda: defaultdict(lambda: []))
self.started_states: OrderedDictType[MeterModelState, None] = OrderedDict()
self.hypothesis_states: OrderedDictType[MeterModelState, None] = OrderedDict()
mms = MeterModelState(meter_model=self,
hierarchy_state=hierarchy_state,
voice_state=voice_state,
tatum_state=tatum_state)
self.hypothesis_states.update({mms: None})
def get_beam_size(self) -> int:
return self.beam_size
def is_beam_full(self) -> bool:
return (not self.beam_size <= -1) and (len(self.started_states) >= self.beam_size)
def transition(self, notes: List[MusicNote] = None) -> OrderedDictType[MeterModelState]:
return self.__transition_close_worker(notes=notes, none_as_close=False)
def close(self) -> OrderedDictType[MeterModelState]:
return self.__transition_close_worker(notes=None, none_as_close=True)
def __transition_close_worker(self, notes: List[MusicNote] = None, none_as_close=False) -> OrderedDictType[MeterModelState]:
new_states: OrderedDict[MeterModelState, None] = OrderedDict()
do_close = (notes is None and none_as_close)
self.started_states = OrderedDict()
self.new_tatum_states = defaultdict(lambda: defaultdict(lambda: []))
self.new_voice_states = defaultdict(lambda: [])
if not self.started:
self.started = True
for mms in self.hypothesis_states:
if do_close:
ts = mms.close()
else:
ts = mms.transition(notes)
for ns in ts.keys():
new_states.update({ns: None})
if ns.is_started():
self.started_states.update({ns: None})
        # TODO: fix beam handling and add logging for non-close transitions
self.hypothesis_states = new_states
return self.hypothesis_states
    def get_hypotheses(self) -> OrderedDictType[MidiModelState, None]:
        # Assumption: expose the current hypothesis states, per the return annotation.
        return self.hypothesis_states
def get_tatum_hypotheses(self) -> List[TatumTrackingModelState]:
tatum_hypotheses = []
for mms in self.hypothesis_states:
tatum_hypotheses.append(mms.tatum_tracking_state)
return tatum_hypotheses
class MeterGrammarModel(MeterModel):
def __init__(self, sequence: TimePointSequence, beam_size: int = 200):
hs: HierarchyModelState = HierarchyGrammarModelState(sequence=sequence)
vs: VoiceSplittingModelState = VoiceSplittingGrammarModelState(sequence=sequence)
ts: TatumTrackingModelState = TatumTrackingGrammarModelState(sequence=sequence)
super(MeterGrammarModel, self).__init__(hierarchy_state=hs, voice_state=vs, tatum_state=ts, beam_size=beam_size)
class MeterPredictionModel(MeterModel):
def __init__(self, sequence: TimePointSequence, beam_size: int = 200):
super(MeterPredictionModel, self).__init__(beam_size=beam_size)
pass
class MeterModelState(MidiModelState):
def __init__(self, meter_model: MeterModel,
hierarchy_state: HierarchyModelState,
voice_state: VoiceSplittingModelState = None,
tatum_state: TatumTrackingModelState = None) -> None:
        super().__init__()
tts_none = tatum_state is None
vss_none = voice_state is None
# voice_state and tatum_state must be both set or both None. One set and one None is not allowed.
if tts_none == vss_none:
self.meter_model: MeterModel = meter_model
self.voice_splitting_state: VoiceSplittingModelState = voice_state
self.hierarchy_state: HierarchyModelState = hierarchy_state
self.tatum_tracking_state: TatumTrackingModelState = tatum_state
# this branch can only be reached if both values have the same truth value
# therefore just checking one is enough
if tts_none:
self.voice_splitting_state = hierarchy_state.get_voice_splitting_state()
self.tatum_tracking_state = self.hierarchy_state.get_tatum_tracking_state().deep_copy()
self.hierarchy_state = self.hierarchy_state.deep_copy()
self.tatum_tracking_state.set_hierarchy_state(self.hierarchy_state)
else:
raise ValueError("Given value-combination is not supported")
def set_tatum_tracking_state(self, state: TatumTrackingModelState):
self.tatum_tracking_state = state
def set_voice_splitting_state(self, state: VoiceSplittingModelState):
self.voice_splitting_state = state
def set_hierarchy_state(self, state: HierarchyModelState):
self.hierarchy_state = state
def transition(self, notes: List[MusicNote] = None) -> OrderedDictType[MeterModelState]:
return self.__transition_close_worker(notes=notes, none_as_close=False)
def close(self) -> OrderedDictType[MeterModelState]:
return self.__transition_close_worker(notes=None, none_as_close=True)
def __transition_close_worker(self, notes: List[MusicNote] = None, none_as_close=False) -> OrderedDictType[MeterModelState, None]:
new_state: OrderedDictType[MeterModelState, None] = OrderedDict()
do_close = (notes is None and none_as_close)
beam_full = self.meter_model.is_beam_full()
if beam_full:
voice_outside_beam = self.get_score() < next(reversed(self.meter_model.started_states)).get_score()
if voice_outside_beam:
return new_state
new_voice_states: List[VoiceSplittingModelState] = self.meter_model.new_voice_states[self.voice_splitting_state]
if len(new_voice_states) == 0:
if do_close:
state_dict = self.voice_splitting_state.close()
else:
state_dict = self.voice_splitting_state.transition(notes)
new_voice_states = [*state_dict.keys()]
self.meter_model.new_voice_states.update({self.voice_splitting_state: new_voice_states})
new_tatum_states = []
new_notes_list = []
for v_state in new_voice_states:
if beam_full:
beat_score = v_state.get_score() + \
self.tatum_tracking_state.get_score() + \
self.tatum_tracking_state.get_hierarchy_state().get_score()
tatum_outside_beam = beat_score <= next(reversed(self.meter_model.started_states)).get_score()
if tatum_outside_beam:
if not do_close:
new_notes_list.append([])
new_tatum_states.append([])
continue
tatum_state_copy = self.tatum_tracking_state.deep_copy()
tatum_state_copy.set_hierarchy_state(self.tatum_tracking_state.hierarchy_state)
if do_close:
nts = tatum_state_copy.close()
nts = [*nts.keys()]
new_tatum_states.append(nts)
else:
new_notes: List[MusicNote] = []
for n in notes:
if self.voice_splitting_state.keep_note(n):
new_notes.append(n)
new_notes_list.append(new_notes)
if tatum_state_copy.is_started():
nts = tatum_state_copy.transition(notes)
nts = [*nts.keys()]
new_tatum_states.append(nts)
else:
tatums_map = self.meter_model.new_tatum_states[tatum_state_copy]
if len(tatums_map) == 0:
pass
branched_states = tatums_map[tuple(new_notes)]
if len(branched_states) == 0:
bs = tatum_state_copy.transition(new_notes)
branched_states = [*bs.keys()]
tatums_map.update({tuple(new_notes): branched_states})
new_tatum_states.append(branched_states)
for i in range(len(new_tatum_states)):
new_voice_state = new_voice_states[i]
tatum_states = new_tatum_states[i]
nnotes = new_notes_list[i] if not do_close else []
new_states_tmp = []
for tstate in tatum_states:
beat_score = new_voice_state.get_score() + \
tstate.get_score() + \
tstate.get_hierarchy_state().get_score()
if beam_full:
state_outside_beam = next(reversed(self.meter_model.started_states)).get_score() >= beat_score
if state_outside_beam:
# todo: Logging
continue
hierarchy_state_copy: HierarchyModelState = self.hierarchy_state.deep_copy()
hierarchy_state_copy.set_voice_splitting_state(new_voice_state)
hierarchy_state_copy.set_tatum_tracking_state(tstate)
tstate.set_hierarchy_state(hierarchy_state_copy)
if not do_close:
# todo: Special case
pass
if do_close:
new_hierarchy_states = hierarchy_state_copy.close()
else:
new_hierarchy_states = hierarchy_state_copy.transition(nnotes)
new_hierarchy_states = [*new_hierarchy_states.keys()]
for hms in new_hierarchy_states:
self.add_new_state(hms, new_states_tmp, check_duplicate=True)
for mms in new_states_tmp:
new_state.update({mms: None})
return new_state
def get_score(self) -> float:
vs_score = 0 if self.voice_splitting_state is None else self.voice_splitting_state.get_score()
tt_score = 0 if self.tatum_tracking_state is None else self.tatum_tracking_state.get_score()
h_score = 0 if self.hierarchy_state is None else self.hierarchy_state.get_score()
return vs_score + tt_score + h_score
    def add_new_state(self, hms: HierarchyModelState,
                      temp_states_list: Optional[List[MeterModelState]] = None, check_duplicate: bool = True):
        # Use None instead of a mutable default list, which would be shared across calls.
        if temp_states_list is None:
            temp_states_list = []
mms = MeterModelState(meter_model=self.meter_model, hierarchy_state=hms)
if not check_duplicate:
temp_states_list.append(mms)
else:
duplicated_state: Optional[MeterModelState] = None
for s in temp_states_list:
if s.is_duplicate_of(mms):
duplicated_state = s
break
if duplicated_state is not None:
if duplicated_state.get_score() < mms.get_score():
temp_states_list.append(mms)
temp_states_list.remove(duplicated_state)
# todo add logging for removal of duplicated_state
else:
# todo add logging for removal of mms
pass
else:
temp_states_list.append(mms)
    def is_duplicate_of(self, state: MeterModelState) -> bool:
        hierarchy_duplicate = self.hierarchy_state.is_duplicate_of(state.hierarchy_state)
        tatum_duplicate = self.tatum_tracking_state.is_duplicate_of(state.tatum_tracking_state)
        voice_duplicate = True
        # voice_duplicate may change later (todo at the MeterDetection phase):
        # voice_duplicate = self.voice_splitting_state.is_duplicate_of(state.voice_splitting_state)
        return hierarchy_duplicate and tatum_duplicate and voice_duplicate
def get_measure_count(self):
return self.tatum_tracking_state.get_measure_count()
def is_started(self) -> bool:
measure_count = self.get_measure_count()
return measure_count > 0
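# --- Illustrative sketch (not part of the original module): the score-based
# de-duplication in `add_new_state` above boils down to "keep the higher-scoring
# member of each duplicate pair". `dedup_key` and `get_score` are hypothetical
# stand-ins for the comparison actually used by MeterModelState.
def _keep_best_states(candidates):
    best = {}
    for state in candidates:
        key = state.dedup_key()
        if key not in best or best[key].get_score() < state.get_score():
            best[key] = state
    return list(best.values())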
| 42.803922 | 134 | 0.64506 | 12,580 | 0.960452 | 0 | 0 | 0 | 0 | 0 | 0 | 564 | 0.04306 |
ddb3b3248298a56481c0e14a355de5998e1c7be4
| 1,029 |
py
|
Python
|
hw2skeleton/find_features.py
|
hasuni-max/hw2-skeleton
|
498f5d250ec18042c1e21fac177a92f3c7d3da7c
|
[
"Apache-2.0"
] | null | null | null |
hw2skeleton/find_features.py
|
hasuni-max/hw2-skeleton
|
498f5d250ec18042c1e21fac177a92f3c7d3da7c
|
[
"Apache-2.0"
] | null | null | null |
hw2skeleton/find_features.py
|
hasuni-max/hw2-skeleton
|
498f5d250ec18042c1e21fac177a92f3c7d3da7c
|
[
"Apache-2.0"
] | null | null | null |
# Charged-residue lookup tables shared by the functions below.
# (A `global` statement at module level is a no-op, so plain assignments suffice.)
minus = ["ASP", "GLU"]
plus = ["ARG", "HIS", "LYS"]
def find_charge(residues):
"""
Takes a list of residues and returns the number of plus and
minus charged residues.
This function uses the global plus and minus variables
"""
global plus
global minus
plus_charge = sum([res in plus for res in residues])
minus_charge = sum([res in minus for res in residues])
return plus_charge, minus_charge
def calc_features(active_sites):
features = {}
for act in active_sites:
features[act.name] = []
number_of_residues = len(act.residues)
three_letter = [str(x)[0:3] for x in act.residues]
plus_charge, minus_charge = find_charge(three_letter)
number_of_chains = len(act.chains)
features[act.name].append(number_of_residues) #number of residues
features[act.name].append(plus_charge) #number of plus charges - done
features[act.name].append(minus_charge) #number of minus charges - done
features[act.name].append(number_of_chains) #number of chains - done
return features
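if __name__ == "__main__":
    # Illustrative spot-check of find_charge (residue names only, no PDB input):
    # ARG and LYS are positively charged, GLU is negatively charged, ALA is neutral.
    assert find_charge(["ARG", "GLU", "LYS", "ALA"]) == (2, 1)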
| 25.097561 | 73 | 0.737609 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 284 | 0.275996 |
ddb4c04468f28635f43140d7b43e540cbcb4c57d
| 632 |
py
|
Python
|
main.py
|
jalexray/CSV-Empty-Finder
|
69d545ec9a757d331dbd2b1b298842de2c079129
|
[
"MIT"
] | null | null | null |
main.py
|
jalexray/CSV-Empty-Finder
|
69d545ec9a757d331dbd2b1b298842de2c079129
|
[
"MIT"
] | null | null | null |
main.py
|
jalexray/CSV-Empty-Finder
|
69d545ec9a757d331dbd2b1b298842de2c079129
|
[
"MIT"
] | null | null | null |
# imports
import csv
# open the file
with open("example.csv") as file:
reader = csv.reader(file)
    # prep to store names of columns
    # (csv readers have no .next() method in Python 3; use the built-in next())
    titleRow = next(reader)
#rest = [row for row in reader]
columnList = {}
for row in reader:
iterator = 0
cellList = []
for cell in row:
if cell == "":
cellList.append(titleRow[iterator])
#print("Within " + row[0] + ", " + titleRow[iterator] + " has an empty")
iterator += 1
#print cellList
columnList[row[0]] = cellList
for item in sorted(columnList):
    # only report rows that actually contain empty cells
    if columnList[item]:
        print(item + ":" + str(columnList[item]))
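# Example: given a header "id,name,email" and a data row "3,,bob@x.com",
# the loop above prints:  3:['name']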
| 20.387097 | 76 | 0.629747 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 196 | 0.310127 |
ddb85f6c9f54c6a26a73cc1b1e07e1f705ce4e40
| 124 |
py
|
Python
|
test_suite/suite/test09/other_mod.py
|
joncatanio/cannoli
|
410f6bea362bf9e33eecc0e01fb080dadd14ef23
|
[
"MIT"
] | 755 |
2017-12-09T05:34:43.000Z
|
2022-03-26T09:15:56.000Z
|
test_suite/suite/test09/other_mod.py
|
joncatanio/cannoli
|
410f6bea362bf9e33eecc0e01fb080dadd14ef23
|
[
"MIT"
] | 8 |
2017-12-12T01:03:18.000Z
|
2020-06-29T01:41:03.000Z
|
test_suite/suite/test09/other_mod.py
|
joncatanio/cannoli
|
410f6bea362bf9e33eecc0e01fb080dadd14ef23
|
[
"MIT"
] | 23 |
2018-05-17T17:48:23.000Z
|
2022-03-26T09:15:57.000Z
|
import some_mod
def functione(b):
a = some_mod.some_class()
print(b)
print("othermod calling in " + str(a.hello))
| 17.714286 | 47 | 0.669355 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 22 | 0.177419 |
ddb88819c796db53b08989fe1a656955b84d1760
| 140 |
py
|
Python
|
application/blueprints/user/__init__.py
|
demetrius-mp/flask-template
|
2dbab372bf2d7d5ff60af430c4b69c95a41cd681
|
[
"MIT"
] | null | null | null |
application/blueprints/user/__init__.py
|
demetrius-mp/flask-template
|
2dbab372bf2d7d5ff60af430c4b69c95a41cd681
|
[
"MIT"
] | 2 |
2021-10-14T02:00:15.000Z
|
2021-10-14T02:19:44.000Z
|
application/blueprints/user/__init__.py
|
demetrius-mp/flask-template
|
2dbab372bf2d7d5ff60af430c4b69c95a41cd681
|
[
"MIT"
] | null | null | null |
from flask import Flask
from application.blueprints.user.routes import users
def init_app(app: Flask):
app.register_blueprint(users)
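# Hedged usage sketch (application-factory pattern; names are illustrative):
# app = Flask(__name__)
# init_app(app)  # registers the `users` blueprint on the app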
| 17.5 | 52 | 0.792857 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
ddb942c43951bea6ec9329f93418d8030cd886cd
| 249 |
py
|
Python
|
src/methods/defs.py
|
karlotness/nn-benchmark
|
65ddb2f3d9934db5718417fd800278a97f627ba2
|
[
"MIT"
] | 13 |
2021-08-04T21:07:50.000Z
|
2022-02-17T20:16:41.000Z
|
src/methods/defs.py
|
karlotness/nn-benchmark
|
65ddb2f3d9934db5718417fd800278a97f627ba2
|
[
"MIT"
] | null | null | null |
src/methods/defs.py
|
karlotness/nn-benchmark
|
65ddb2f3d9934db5718417fd800278a97f627ba2
|
[
"MIT"
] | 1 |
2021-10-03T00:37:05.000Z
|
2021-10-03T00:37:05.000Z
|
from collections import namedtuple
import torch
NONLINEARITIES = {
"tanh": torch.nn.Tanh,
"relu": torch.nn.ReLU,
}
TimeDerivative = namedtuple("TimeDerivative", ["dq_dt", "dp_dt"])
StepPrediction = namedtuple("StepPrediction", ["q", "p"])
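# Illustrative use of the containers above (shapes are arbitrary examples):
# deriv = TimeDerivative(dq_dt=torch.zeros(3), dp_dt=torch.zeros(3))
# act = NONLINEARITIES["tanh"]()  # instantiate the chosen activation module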
| 22.636364 | 65 | 0.698795 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 64 | 0.257028 |
ddba665d13fe8e2f5bc1b2bf2549c77f6e609bdd
| 1,213 |
py
|
Python
|
caosmedicallab.py
|
erichilarysmithsr/CAOSMedicalLabDb
|
9a7acee44f9f3680c0a01332797ce94a0895c2d1
|
[
"Apache-2.0"
] | null | null | null |
caosmedicallab.py
|
erichilarysmithsr/CAOSMedicalLabDb
|
9a7acee44f9f3680c0a01332797ce94a0895c2d1
|
[
"Apache-2.0"
] | 10 |
2021-04-03T14:00:28.000Z
|
2022-01-28T23:44:11.000Z
|
caosmedicallab.py
|
erichilarysmithsr/CAOSMedicalLabDb
|
9a7acee44f9f3680c0a01332797ce94a0895c2d1
|
[
"Apache-2.0"
] | null | null | null |
>>> S = 'Susceptible'
>>> print(S)
>>> E = 'Exposed'
>>> print(E)
>>> I = 'Infectious'
>>> print(I)
>>> R = 'Removed'
>>> print(R)
>>> N = 'Total Population'
>>> print(N)
>>> C = 'Living with COVID19'
>>> print(C)
>>> D = 'Living with Diabetes'
>>> print(D)
>>> susceptible = int(input('Enter number of Susceptible Individuals'))
>>> print(susceptible + 1)
>>> exposed = int(input('Enter number of Exposed Individuals'))
>>> print(exposed + 1)
>>> infectious = int(input('Enter number of Infectious Individuals'))
>>> print(infectious + 1)
>>> removed = int(input('Enter number of Removed Individuals'))
>>> print(removed + 1)
>>> total_population = int(input('Enter number of Total Population'))
>>> print(total_population + 1)
>>> living_with_covid19 = int(input('Enter number of Individuals Living with COVID19'))
>>> print(living_with_covid19 + 1)
>>> living_with_diabetes = int(input('Enter number of Individuals Living with Diabetes'))
>>> print(living_with_diabetes + 1)
>>> # Multi-word names like "Total Population" are not valid Python identifiers,
>>> # so the counts use snake_case, and input() is wrapped in int() up front.
>>> S = living_with_covid19 + living_with_diabetes - exposed - infectious - removed
>>> print(S)
| 31.921053 | 84 | 0.645507 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 392 | 0.323166 |
ddbaaba267f11c03c921ef7b0388970b8db8a6b9
| 2,396 |
py
|
Python
|
src/quacks/__init__.py
|
ariebovenberg/quacks
|
839d307b24f3f37d9a5318c16acb631b9a1153f0
|
[
"MIT"
] | 11 |
2021-12-12T20:51:15.000Z
|
2022-02-02T12:08:32.000Z
|
src/quacks/__init__.py
|
ariebovenberg/quacks
|
839d307b24f3f37d9a5318c16acb631b9a1153f0
|
[
"MIT"
] | 8 |
2021-12-14T12:53:51.000Z
|
2022-03-15T04:29:44.000Z
|
src/quacks/__init__.py
|
ariebovenberg/quacks
|
839d307b24f3f37d9a5318c16acb631b9a1153f0
|
[
"MIT"
] | 1 |
2021-12-15T16:50:34.000Z
|
2021-12-15T16:50:34.000Z
|
from typing import _GenericAlias # type: ignore
from typing import ClassVar
from typing_extensions import Protocol
# Single-sourcing the version number with poetry:
# https://github.com/python-poetry/poetry/pull/2366#issuecomment-652418094
try:
__version__ = __import__("importlib.metadata").metadata.version(__name__)
except ModuleNotFoundError: # pragma: no cover
__version__ = __import__("importlib_metadata").version(__name__)
__all__ = ["readonly"]
def readonly(cls: type) -> type:
"""Decorate a :class:`~typing.Protocol` to make it read-only.
Unlike default protocol attributes, read-only protocols will match
frozen dataclasses and other immutable types.
Read-only attributes are already supported in protocols with
``@property``, but this is cumbersome to do for many attributes.
The ``@readonly`` decorator effectively transforms all mutable attributes
into read-only properties.
Example
-------
.. code-block:: python
from quacks import readonly
@readonly
class User(Protocol):
id: int
name: str
is_premium: bool
# equivalent to:
class User(Protocol):
@property
def id(self) -> int: ...
@property
def name(self) -> str: ...
@property
def is_premium(self) -> bool: ...
Warning
-------
Subprotocols and inherited attributes are not supported yet.
"""
if not _is_a_protocol(cls):
raise TypeError("Readonly decorator can only be applied to Protocols.")
elif any(b is not Protocol and _is_a_protocol(b) for b in cls.__bases__):
raise NotImplementedError("Subprotocols not yet supported.")
for name, typ in getattr(cls, "__annotations__", {}).items():
if not _is_classvar(typ):
@property # type: ignore
def prop(self): # type: ignore
... # pragma: no cover
prop.fget.__name__ = name # type: ignore
prop.fget.__annotations__ = {"return": typ} # type: ignore
setattr(cls, name, prop)
return cls
def _is_a_protocol(t: type) -> bool:
# Only classes *directly* inheriting from Protocol are protocols.
return Protocol in t.__bases__
def _is_classvar(t: type) -> bool:
return type(t) is _GenericAlias and t.__origin__ is ClassVar
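# --- Illustrative effect (not part of the library; names are examples only):
# a frozen dataclass, whose fields are read-only, satisfies a @readonly
# protocol under a static type checker, while mutable attributes would not.
# from dataclasses import dataclass
#
# @dataclass(frozen=True)
# class FrozenUser:
#     id: int
#     name: str
#     is_premium: bool
#
# def greet(user: "User") -> str:  # `User` as in the docstring example above
#     return f"Hello, {user.name}!"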
| 29.95 | 79 | 0.647329 | 0 | 0 | 0 | 0 | 109 | 0.045492 | 0 | 0 | 1,436 | 0.599332 |
ddbb30413bba7d94f4e08a1b8b5b0f62d116a712
| 13,818 |
py
|
Python
|
gobbli/inspect/evaluate.py
|
RTIInternational/gobbli
|
d9ec8132f74ce49dc4bead2fad25b661bcef6e76
|
[
"Apache-2.0"
] | 276 |
2019-09-13T08:25:51.000Z
|
2022-03-05T13:07:55.000Z
|
gobbli/inspect/evaluate.py
|
RTIInternational/gobbli
|
d9ec8132f74ce49dc4bead2fad25b661bcef6e76
|
[
"Apache-2.0"
] | 15 |
2019-09-06T14:05:30.000Z
|
2022-01-01T20:15:06.000Z
|
gobbli/inspect/evaluate.py
|
RTIInternational/gobbli
|
d9ec8132f74ce49dc4bead2fad25b661bcef6e76
|
[
"Apache-2.0"
] | 24 |
2019-09-18T15:11:42.000Z
|
2021-12-23T18:59:55.000Z
|
from dataclasses import dataclass
from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union
import altair as alt
import pandas as pd
from sklearn.metrics import (
accuracy_score,
classification_report,
f1_score,
precision_score,
recall_score,
)
from gobbli.util import (
as_multiclass,
as_multilabel,
escape_line_delimited_text,
is_multilabel,
multilabel_to_indicator_df,
pred_prob_to_pred_label,
pred_prob_to_pred_multilabel,
truncate_text,
)
@dataclass
class ClassificationError:
"""
Describes an error in classification. Reports the original text,
the true label, and the predicted probability.
Args:
X: The original text.
y_true: The true label(s).
y_pred_proba: The model predicted probability for each class.
"""
X: str
y_true: Union[str, List[str]]
y_pred_proba: Dict[str, float]
@property
def y_pred(self) -> str:
"""
Returns:
The class with the highest predicted probability for this observation.
"""
return max(self.y_pred_proba, key=lambda k: self.y_pred_proba[k])
def y_pred_multilabel(self, threshold: float = 0.5) -> List[str]:
"""
Args:
threshold: The predicted probability threshold for predictions
Returns:
The predicted labels for this observation (predicted probability greater than
the given threshold)
"""
return pred_prob_to_pred_multilabel(self.y_pred_proba, threshold)
MetricFunc = Callable[[Sequence[str], pd.DataFrame], float]
"""
A function used to calculate some metric. It should accept a sequence of true labels (y_true)
and a dataframe of shape (n_samples, n_classes) containing predicted probabilities; it should
output a real number.
"""
DEFAULT_METRICS: Dict[str, MetricFunc] = {
"Weighted F1 Score": lambda y_true, y_pred: f1_score(
y_true, y_pred, average="weighted"
),
"Weighted Precision Score": lambda y_true, y_pred: precision_score(
y_true, y_pred, average="weighted"
),
"Weighted Recall Score": lambda y_true, y_pred: recall_score(
y_true, y_pred, average="weighted"
),
"Accuracy": lambda y_true, y_pred: accuracy_score(y_true, y_pred),
}
"""
The default set of metrics to evaluate classification models with. Users may want to extend
this.
"""
@dataclass
class ClassificationEvaluation:
"""
Provides several methods for evaluating the results from a classification problem.
Args:
labels: The set of unique labels in the dataset.
X: The list of texts that were classified.
y_true: The true labels for the dataset.
y_pred_proba: A dataframe containing a row for each observation in X and a
column for each label in the training data. Cells are predicted probabilities.
"""
labels: List[str]
X: List[str]
y_true: Union[List[str], List[List[str]]]
y_pred_proba: pd.DataFrame
metric_funcs: Optional[Dict[str, Callable[[Sequence, Sequence], float]]] = None
def __post_init__(self):
if not len(self.y_true) == self.y_pred_proba.shape[0]:
raise ValueError(
"y_true and y_pred_proba must have the same number of observations"
)
self.multilabel = is_multilabel(self.y_true)
@property
def y_true_multiclass(self) -> List[str]:
return as_multiclass(self.y_true, self.multilabel)
@property
def y_true_multilabel(self) -> pd.DataFrame:
return multilabel_to_indicator_df(
as_multilabel(self.y_true, self.multilabel), self.labels
)
@property
def y_pred_multiclass(self) -> List[str]:
"""
Returns:
Predicted class for each observation (assuming multiclass context).
"""
return pred_prob_to_pred_label(self.y_pred_proba)
@property
def y_pred_multilabel(self) -> pd.DataFrame:
"""
Returns:
Indicator dataframe containing a 0 if each label wasn't predicted and 1 if
it was for each observation.
"""
return pred_prob_to_pred_multilabel(self.y_pred_proba).astype("int")
def metrics(self) -> Dict[str, float]:
"""
Returns:
A dictionary containing various metrics of model performance on the test dataset.
"""
metric_funcs = self.metric_funcs
if metric_funcs is None:
metric_funcs = DEFAULT_METRICS
if self.multilabel:
y_true: Union[List[str], pd.DataFrame] = self.y_true_multilabel
y_pred: Union[List[str], pd.DataFrame] = self.y_pred_multilabel
else:
y_true = self.y_true_multiclass
y_pred = self.y_pred_multiclass
return {
name: metric_func(y_true, y_pred)
for name, metric_func in metric_funcs.items()
}
def metrics_report(self) -> str:
"""
Returns:
A nicely formatted human-readable report describing metrics of model performance
on the test dataset.
"""
metric_string = "\n".join(
f"{name}: {metric}" for name, metric in self.metrics().items()
)
if self.multilabel:
y_true: Union[pd.DataFrame, List[str]] = self.y_true_multilabel
y_pred: Union[pd.DataFrame, List[str]] = self.y_pred_multilabel
# Since these are indicator dataframes, the "labels" are indices
labels: Union[List[str], List[int]] = list(range(len(self.labels)))
else:
y_true = self.y_true_multiclass
y_pred = self.y_pred_multiclass
# Since these are lists of labels, the "labels" are the strings themselves
labels = self.labels
return (
"Metrics:\n"
"--------\n"
f"{metric_string}\n\n"
"Classification Report:\n"
"----------------------\n"
f"{classification_report(y_true, y_pred, labels=labels, target_names=self.labels)}\n"
)
def plot(self, sample_size: Optional[int] = None) -> alt.Chart:
"""
Args:
sample_size: Optional number of points to sample for the plot. Unsampled
plots may be difficult to save due to their size.
Returns:
An Altair chart visualizing predicted probabilities and true classes to visually identify
where errors are being made.
"""
# Since multilabel is a generalization of the multiclass paradigm, implement
# this visualization the same for multiclass and multilabel using the multilabel
# format
pred_prob_df = self.y_pred_proba
true_df = self.y_true_multilabel
if sample_size is not None:
# Avoid errors due to sample being larger than the population if the number
# of observations is smaller than the sample size
pred_prob_df = pred_prob_df.sample(
n=min(sample_size, pred_prob_df.shape[0])
)
true_df = true_df.iloc[pred_prob_df.index]
charts = []
if self.multilabel:
legend_label = "Has Label"
else:
legend_label = "Belongs to Class"
for label in self.labels:
# Plot the predicted probabilities for given label for all observations
plot_df = (
pred_prob_df[[label]]
.rename({label: "Predicted Probability"}, axis="columns")
.join(
true_df[[label]]
.astype("bool")
.rename({label: legend_label}, axis="columns")
)
)
charts.append(
alt.layer(
alt.Chart(plot_df, title=label, height=40)
.mark_circle(size=8)
.encode(
x=alt.X(
"Predicted Probability",
type="quantitative",
title=None,
scale=alt.Scale(domain=(0.0, 1.0)),
),
y=alt.Y(
"jitter",
type="quantitative",
title=None,
axis=alt.Axis(
values=[0], ticks=True, grid=False, labels=False
),
scale=alt.Scale(),
),
color=alt.Color(legend_label, type="nominal"),
)
.transform_calculate(
# Generate Gaussian jitter with a Box-Muller transform
jitter="sqrt(-2*log(random()))*cos(2*PI*random())/32"
)
.properties(height=40)
)
)
return alt.vconcat(*charts)
def errors_for_label(self, label: str, k: int = 10):
"""
Output the biggest mistakes for the given class by the classifier
Args:
label: The label to return errors for.
k: The number of results to return for each of false positives and false negatives.
Returns:
A 2-tuple. The first element is a list of the top ``k`` false positives, and the
second element is a list of the top ``k`` false negatives.
"""
pred_label = self.y_pred_multilabel[label].astype("bool")
true_label = self.y_true_multilabel[label].astype("bool")
# Order false positives/false negatives by the degree of the error;
# i.e. we want the false positives with highest predicted probability first
# and false negatives with lowest predicted probability first
# Take the top `k` of each
false_positives = (
self.y_pred_proba.loc[pred_label & ~true_label]
.sort_values(by=label, ascending=False)
.iloc[:k]
)
false_negatives = (
self.y_pred_proba.loc[~pred_label & true_label]
.sort_values(by=label, ascending=True)
.iloc[:k]
)
def create_classification_errors(
y_pred_proba: pd.DataFrame,
) -> List[ClassificationError]:
classification_errors = []
for ndx, row in y_pred_proba.iterrows():
classification_errors.append(
ClassificationError(
X=self.X[ndx],
y_true=self.y_true[ndx],
y_pred_proba=row.to_dict(),
)
)
return classification_errors
return (
create_classification_errors(false_positives),
create_classification_errors(false_negatives),
)
def errors(
self, k: int = 10
) -> Dict[str, Tuple[List[ClassificationError], List[ClassificationError]]]:
"""
Output the biggest mistakes for each class by the classifier.
Args:
k: The number of results to return for each of false positives and false negatives.
Returns:
A dictionary whose keys are label names and values are 2-tuples. The first
element is a list of the top ``k`` false positives, and the second element is a list
of the top ``k`` false negatives.
"""
errors = {}
for label in self.labels:
errors[label] = self.errors_for_label(label, k=k)
return errors
def errors_report(self, k: int = 10) -> str:
"""
Args:
k: The number of results to return for each of false positives and false negatives.
Returns:
A nicely-formatted human-readable report describing the biggest mistakes made by
the classifier for each class.
"""
errors = self.errors(k=k)
output = "Errors Report\n" "------------\n\n"
for label, (false_positives, false_negatives) in errors.items():
def make_errors_str(errors: List[ClassificationError]) -> str:
if self.multilabel:
return "\n".join(
(
f"Correct Value: {label in e.y_true}\n"
f"Predicted Probability: {e.y_pred_proba[label]}"
f"Text: {truncate_text(escape_line_delimited_text(e.X), 500)}\n"
)
for e in errors
)
else:
return "\n".join(
(
f"True Class: {e.y_true}\n"
f"Predicted Class: {e.y_pred} (Probability: {e.y_pred_proba[e.y_pred]})\n"
f"Text: {truncate_text(escape_line_delimited_text(e.X), 500)}\n"
)
for e in errors
)
false_positives_str = make_errors_str(false_positives)
if len(false_positives_str) == 0:
false_positives_str = "None"
false_negatives_str = make_errors_str(false_negatives)
if len(false_negatives_str) == 0:
false_negatives_str = "None"
header_name = "CLASS" if self.multilabel else "LABEL"
output += (
" -------\n"
f"| {header_name}: {label}\n"
" -------\n\n"
"False Positives\n"
"***************\n\n"
f"{false_positives_str}\n\n"
"False Negatives\n"
"***************\n\n"
f"{false_negatives_str}\n\n"
)
return output
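# --- Hedged end-to-end sketch (toy values, not from the library's docs):
# ev = ClassificationEvaluation(
#     labels=["neg", "pos"],
#     X=["bad movie", "good movie"],
#     y_true=["neg", "pos"],
#     y_pred_proba=pd.DataFrame({"neg": [0.8, 0.1], "pos": [0.2, 0.9]}),
# )
# print(ev.metrics_report())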
| 35.25 | 102 | 0.56506 | 12,424 | 0.899117 | 0 | 0 | 12,446 | 0.900709 | 0 | 0 | 5,334 | 0.386018 |
ddbc20a9147b17ccfb31328be56cce367423b65a
| 791 |
py
|
Python
|
victor_fake_hardware_interface/scripts/fake_grippers_node.py
|
MMintLab/kuka_iiwa_interface
|
0dd258641377263e7275bc63f37cf32eb12f3e56
|
[
"BSD-2-Clause"
] | 5 |
2021-01-11T09:00:26.000Z
|
2021-12-13T15:59:01.000Z
|
victor_fake_hardware_interface/scripts/fake_grippers_node.py
|
MMintLab/kuka_iiwa_interface
|
0dd258641377263e7275bc63f37cf32eb12f3e56
|
[
"BSD-2-Clause"
] | 35 |
2020-07-01T14:48:40.000Z
|
2021-07-13T18:38:53.000Z
|
victor_fake_hardware_interface/scripts/fake_grippers_node.py
|
MMintLab/kuka_iiwa_interface
|
0dd258641377263e7275bc63f37cf32eb12f3e56
|
[
"BSD-2-Clause"
] | 1 |
2021-01-08T23:39:17.000Z
|
2021-01-08T23:39:17.000Z
|
#!/usr/bin/env python
import rospy
from victor_fake_hardware_interface.minimal_fake_arm_interface import MinimalFakeGripperInterface
def main():
rospy.init_node("minimal_fake_arm_interface")
interfaces = {}
arm_names = ["left_arm", "right_arm"]
for arm in arm_names:
interfaces[arm + "/gripper"] = MinimalFakeGripperInterface(gripper_command_topic=arm + "/gripper_command",
gripper_status_topic=arm + "/gripper_status")
for arm in arm_names:
interfaces[arm + "/gripper"].start_feedback_threads()
rospy.loginfo("Publishing data...")
rospy.spin()
for arm in arm_names:
interfaces[arm + "/gripper"].join_feedback_threads()
if __name__ == '__main__':
main()
| 27.275862 | 114 | 0.652339 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 165 | 0.208597 |
ddbc90c6ac864e7ce62505e078e39b8bc44056dd
| 11,647 |
py
|
Python
|
python/bgraph.py
|
brunodferrari/bdp
|
d320add1e451c85b6777ae34901bbd6fd3797114
|
[
"Unlicense"
] | null | null | null |
python/bgraph.py
|
brunodferrari/bdp
|
d320add1e451c85b6777ae34901bbd6fd3797114
|
[
"Unlicense"
] | null | null | null |
python/bgraph.py
|
brunodferrari/bdp
|
d320add1e451c85b6777ae34901bbd6fd3797114
|
[
"Unlicense"
] | null | null | null |
# -*- coding: utf-8 -*-
import numpy as np
import matplotlib.pyplot as plt
import networkx as nx
import pandas as pd
import copy
from numba import njit
from numba.typed import Dict, List
from numba.core import types
from concurrent.futures import ThreadPoolExecutor
np.seterr(over='ignore')
def pi(setlist, i):
    try:
        # np.int was removed in NumPy 1.24; the built-in int behaves the same here
        return int(np.where(np.array(setlist) == i)[0])
    except FutureWarning:
        print(setlist, i)
    except TypeError:
        return -1
def crossing(G):
aux = G.edges().copy()
c = 0
while len(aux) > 0:
e1 = aux.pop(0)
i = e1[0]
k = e1[1]
for e2 in aux:
j = e2[0]
l = e2[1]
if (G.pi_1[i] < G.pi_1[j]) and (G.pi_2[k] > G.pi_2[l]) and (G.pi_1[i] * G.pi_1[j] * G.pi_2[k] * G.pi_2[l]):
c = c + 1
elif (G.pi_1[i] > G.pi_1[j]) and (G.pi_2[k] < G.pi_2[l]) and (G.pi_1[i] * G.pi_1[j] * G.pi_2[k] * G.pi_2[l]):
c = c + 1
return c
def _cross_n(G, e1, e2):
i = e1[0]
k = e1[1]
j = e2[0]
l = e2[1]
if (G.pi_1[i] < G.pi_1[j]) and (G.pi_2[k] > G.pi_2[l]) and (G.pi_1[i] * G.pi_1[j] * G.pi_2[k] * G.pi_2[l]):
return 1
elif (G.pi_1[i] > G.pi_1[j]) and (G.pi_2[k] < G.pi_2[l]) and (G.pi_1[i] * G.pi_1[j] * G.pi_2[k] * G.pi_2[l]):
return 1
return 0
def _cross_w(G, edgeslist):
e1 = edgeslist[0]
#with ThreadPoolExecutor(6) as ex:
output = list(map(lambda x: _cross_n(G, e1, x), edgeslist[1:]))
#output=0
#for e2 in edgeslist[1:]:
# output = output + _cross_n(G, e1, e2)
return sum(output)
def crossing2(G):
edgeslist = G.edges()
c = 0
with ThreadPoolExecutor(6) as ex:
output = list(ex.map(lambda x: _cross_w(G, x), [edgeslist[i:] for i in range(len(edgeslist))]))
return c + int(sum(output))
@njit
def _numba_cross(pi_1, pi_2, edgeslist):
c = 0
for s, e1 in enumerate(edgeslist):
i = e1[0]
k = e1[1]
for e2 in edgeslist[s+1:]:
j = e2[0]
l = e2[1]
if (pi_1[i] < pi_1[j]) and (pi_2[k] > pi_2[l]) and (pi_1[i] * pi_1[j] * pi_2[k] * pi_2[l]):
c = c + 1
elif (pi_1[i] > pi_1[j]) and (pi_2[k] < pi_2[l]) and (pi_1[i] * pi_1[j] * pi_2[k] * pi_2[l]):
c = c + 1
return c
def crossing3(G):
edgeslist = G.edges()
c = 0
pi_1 = Dict.empty(
key_type=types.int64,
value_type=types.int64,
)
pi_2 = Dict.empty(
key_type=types.int64,
value_type=types.int64,
)
pi_1.update(G.pi_1)
pi_2.update(G.pi_2)
output = _numba_cross(pi_1, pi_2, List(edgeslist))
return c + output
def bdp_lyt(G, size = 4/3, height = 100): ## Formata lyt adequado para o plot do grafo
import numpy as np
#G, center = _process_params(G, center=center, dim=2)
#if len(G) == 0:
# return {}
#center = np.zeros(2)
    top = G.v1()[::-1]
    bottom = G.v2()[::-1]
    # honor the height parameter instead of re-hardcoding 100
    width = size * height
offset = (width/2, height/2)
nodes = top + bottom
left_xs = np.repeat(0, len(top))
right_xs = np.repeat(width, len(bottom))
left_ys = np.linspace(0, height, len(top))
right_ys = np.linspace(0, height, len(bottom))
top_pos = np.column_stack([left_xs, left_ys]) - offset
bottom_pos = np.column_stack([right_xs, right_ys]) - offset
pos = np.concatenate([top_pos, bottom_pos])
#pos = rescale_layout(pos, scale=scale) + center
pos = dict(zip(nodes, pos))
return pos
#plot utilizando o lyt adequado
def plotBGraph(G, size = 4/3, height=100):
B = nx.Graph()
B.add_nodes_from(G.v1(), bipartite=1)
B.add_nodes_from(G.v2(), bipartite=2)
B.add_edges_from(G.edges())
pos = bdp_lyt(G, size, height)
nx.draw(B, pos)
nx.draw_networkx_labels(B, pos)
#plt.savefig("test.pdf")
#plt.show()
def bary_sort(barylist, nodelist):
aux = [(pos, v) for (pos, v) in zip(barylist, nodelist)]
aux.sort(key=lambda tup: tup[0])
return list(np.int0(np.array(aux)[:,1]))
#encontra baricentro do vertice
def bary(G, v, v_layer = None):
b = 0
    if v_layer is None:
        return
elif v_layer == 1:
pi_k = G.v2()
K = [x for x in pi_k if (((v, x) in G.edges()) or ((x, v) in G.edges())) and G.pi_2[x] > 0] #encontra os viznho do vertice v na 2a camada
if len(K) > 0:
b = G.perm_v2(K).mean()
elif v_layer == 2:
pi_k = G.v1()
#K = [x for x in pi_k if (x, v) in G.edges()] #encontra os viznho do vertice v na 1a camada
K = [x for x in pi_k if (((v, x) in G.edges()) or ((x, v) in G.edges())) and G.pi_1[x] > 0]
if len(K) > 0:
b = G.perm_v1(K).mean()
return b
@njit
def _deg(nodelist, subgraph, edges):
deg = Dict.empty(
key_type=types.int64,
value_type=types.int64,
)
for v in nodelist:
K = [x for x in subgraph if ((v, x) in edges) or ((x, v) in edges)]
deg[v] = len(K)
return deg
class BGraph:
    """Bipartite graph with per-layer vertex orderings (pi_1, pi_2)."""
def __init__(self):
self._set_v1 = []
self._set_v2 = []
self._set_edges = []
self.pi_1 = {}
self.pi_2 = {}
self._adj = {}
self._nodes = {}
@property
def adj(self):
return self._adj
def __getitem__(self, n):
return self.adj[n]
def edges(self, edgelist = None):
if edgelist != None:
self._set_edges = []
self._set_edges = edgelist
for e in edgelist:
u, v = e
if u not in self._nodes:
self._adj[u] = {}
self._nodes[u] = {}
if v not in self._nodes:
self._adj[v] = {}
self._nodes[v] = {}
datadict = self._adj[u].get(v, {})
self._adj[u][v] = datadict
self._adj[v][u] = datadict
else:
return self._set_edges
def v1 (self, setlist = None):
if setlist != None:
self._set_v1 = []
self._set_v1 = setlist
self.pi_1 = dict(zip(self._set_v1,self.perm_v1()))
for u in setlist:
if u not in self._nodes:
self._nodes[u] = {}
self._adj[u] = {}
else:
return self._set_v1
def v2 (self, setlist = None):
if setlist != None:
self._set_v2 = []
self._set_v2 = setlist
self.pi_2 = dict(zip(self._set_v2, self.perm_v2()))
for u in setlist:
if u not in self._nodes:
self._nodes[u] = {}
self._adj[u] = {}
else:
return self._set_v2
def perm_v1(self, pos = None):
if pos != None:
return np.vectorize(lambda i: self.pi_1[i])(pos)
else:
return np.vectorize(lambda i: pi(self._set_v1, i))(self._set_v1) + 1
def perm_v2(self, pos = None):
if pos != None:
return np.vectorize(lambda i: self.pi_2[i])(pos)
else:
return np.vectorize(lambda i: pi(self._set_v2, i))(self._set_v2) + 1
def order_v1(self):
#aux = [(pos, v) for (pos, v) in zip(self.pi_1, self. set_v1)]
aux = list( self.pi_1.items() )
aux.sort(key=lambda tup: tup[1])
self.v1(list(np.int0(np.array(aux)[:,0])))
return
def order_v2(self):
#aux = [(pos, v) for (pos, v) in zip(self.pi_2,self. set_v2)]
aux = list( self.pi_2.items() )
aux.sort(key=lambda tup: tup[1])
self.v2(list(np.int0(np.array(aux)[:,0])))
return
def add_v1(self, i, pos):
if pos != -1:
self._set_v1 = self.v1()[:pos] + [i] + self.v1()[pos:]
else:
self._set_v1 = self.v1()[:] + [i]
def add_v2(self, i, pos):
if pos != -1:
self._set_v2 = self.v2()[:pos] + [i] + self.v2()[pos:]
else:
self._set_v2 = self.v2()[:] + [i]
def n_v1(self):
#self.n_v1 = len(self.v1)
return len(self._set_v1)
def n_v2(self):
#self.n_v2 = len(self.v2)
return len(self._set_v2)
def n_edge(self):
return len(self._set_edges)
def n_v(self):
return len(self._nodes)
def density(self):
return len(self._set_edges) / (len(self._set_v1)*len(self._set_v2))
def n_cross(self):
return crossing3(self)
def bc(self, v, k):
return bary(self, v, k)
#@jit(nopython=True)
def degree(self, nodelist = None, subgraph = None):
#deg = {}
if nodelist is None:
nodelist = (self.v1() + self.v2())
if subgraph is None:
subgraph = (self.v1() + self.v2())
# for v in self.v1():
# K = [x for x in self.v2() if (v, x) in self.edges()]
# deg[v] = len(K)
# for v in self.v2():
# K = [x for x in self.v1() if (x, v) in self.edges()]
# deg[v] = len(K)
d = _deg(List(nodelist), List(subgraph), List(self.edges()))
return d
def move_v1(self, v, pos, inplace=False):
aux = self.pi_1.copy()
pos_v = aux.pop(v)
aux = np.array(list(aux.items()))
aux[ aux[:,1] >= pos_v, 1] = aux[ aux[:,1] >= pos_v, 1] - 1
aux[ aux[:,1] >= pos, 1] = aux[ aux[:,1] >= pos, 1] + 1
aux = dict(aux)
aux[v] = pos
if not inplace:
return aux
else:
self.pi_1 = aux
def move_v2(self, v, pos, inplace=False):
aux = self.pi_2.copy()
pos_v = aux.pop(v)
aux = np.array(list(aux.items()))
aux[ aux[:,1] >= pos_v, 1] = aux[ aux[:,1] >= pos_v, 1] - 1
aux[ aux[:,1] >= pos, 1] = aux[ aux[:,1] >= pos, 1] + 1
aux = dict(aux)
aux[v] = pos
if not inplace:
return aux
else:
self.pi_2 = aux
def plot(self, size=4/3, height=100, order=0):
if order:
self.order_v1()
self.order_v2()
plotBGraph(self, size=size, height=height)
def K(self, u, v):
i = u
j = v
#G.move_v1(i, j, True)
#G.move_v1(j, i, True)
c = 0
if u in self._set_v1:
pi = self.pi_2
elif u in self._set_v2:
pi = self.pi_1
#nodes_between = [v for v in G.pi_1 if G.pi_1[v] >= G.pi_1[i] and G.pi_1[v] <= G.pi_1[j]]
#while nodes_between:
# i = nodes_between.pop()
# for j in nodes_between:
#print(pi)
for k in self._adj[i]:
for l in self._adj[j]:
if (pi[k] > pi[l]):
c = c + 1
return c
    def find_pos(self, u, pos):
        if u in self._set_v1:
            pi = self.pi_1
        elif u in self._set_v2:
            pi = self.pi_2
        # a distinct loop variable avoids shadowing the parameter `u`
        return [v for v in pi if pi[v] == pos][0]
def copy(self):
return copy.deepcopy(self)
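# Hedged usage sketch (toy bipartite instance; commented out to keep the module
# import side-effect free):
# g = BGraph()
# g.v1([1, 2]); g.v2([3, 4])
# g.edges([(1, 4), (2, 3)])
# print(g.n_cross())  # the two edges cross exactly once -> 1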
| 28.06506 | 145 | 0.479608 | 6,411 | 0.550442 | 0 | 0 | 847 | 0.072723 | 0 | 0 | 1,218 | 0.104576 |
ddbf5e2f65d38e783f4768e0ca9abc2a32d54029
| 3,403 |
py
|
Python
|
src/odin/http/server.py
|
stfc-aeg/odin-control
|
71ab2e6e6e1a7c7ce322ea0df31a9d675f7b92bf
|
[
"Apache-2.0"
] | 4 |
2018-05-24T13:38:23.000Z
|
2021-08-18T08:32:54.000Z
|
src/odin/http/server.py
|
stfc-aeg/odin-control
|
71ab2e6e6e1a7c7ce322ea0df31a9d675f7b92bf
|
[
"Apache-2.0"
] | 20 |
2018-04-10T09:28:01.000Z
|
2022-03-17T11:43:59.000Z
|
src/odin/http/server.py
|
stfc-aeg/odin-control
|
71ab2e6e6e1a7c7ce322ea0df31a9d675f7b92bf
|
[
"Apache-2.0"
] | 3 |
2017-06-07T13:28:38.000Z
|
2019-07-16T10:02:21.000Z
|
"""odin.http.server - ODIN HTTP Server class.
This module provides the core HTTP server class used in ODIN, which handles all client requests,
handing off API requests to the appropriate API route and adapter plugins, and defining the
default route used to serve static content.
Tim Nicholls, STFC Application Engineering
"""
import logging
import tornado.gen
import tornado.web
import tornado.ioloop
from tornado.log import access_log
from odin.http.routes.api import ApiRoute
from odin.http.routes.default import DefaultRoute
class HttpServer(object):
"""HTTP server class."""
def __init__(self, debug_mode=False, access_logging=None,
static_path='./static', adapters=None):
"""Initialise the HttpServer object.
:param debug_mode: Set True to enable Tornado debug mode
:param static_path: Set the path to static file content rendered by default route
:param adapters: list of adapters to register with API route
"""
settings = {
"debug": debug_mode,
"log_function": self.log_request,
}
# Set the up the access log level
if access_logging is not None:
try:
level_val = getattr(logging, access_logging.upper())
access_log.setLevel(level_val)
except AttributeError:
logging.error(
"Access logging level {} not recognised".format(access_logging)
)
# Create an API route
self.api_route = ApiRoute()
# Register adapters with the API route and get handlers
for adapter in adapters:
self.api_route.register_adapter(adapters[adapter])
# Initialize adapters for all those that require inter adapter communication
self.api_route.initialize_adapters()
handlers = self.api_route.get_handlers()
# Create a default route for static content and get handlers
default_route = DefaultRoute(static_path)
handlers += default_route.get_handlers()
# Create the Tornado web application for these handlers
self.application = tornado.web.Application(handlers, **settings)
def listen(self, port, host=''):
"""Listen for HTTP client requests.
:param port: port to listen on
:param host: host address to listen on
"""
self.application.listen(port, host)
def log_request(self, handler):
"""Log completed request information.
This method is passed to the tornado.web.Application instance to override the
default request logging behaviour. In doing so, successful requests are logged
at debug level rather than info in order to reduce the rate of logging under
normal conditions.
:param handler: currently active request handler
"""
if handler.get_status() < 400:
log_method = access_log.debug
elif handler.get_status() < 500:
log_method = access_log.warning
else:
log_method = access_log.error
request_time = 1000.0 * handler.request.request_time()
log_method("%d %s %.2fms", handler.get_status(),
handler._request_summary(), request_time)
    def cleanup_adapters(self):
        """Clean up state of registered adapters."""
        self.api_route.cleanup_adapters()
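# Hedged launch sketch (port and adapter set are illustrative):
# server = HttpServer(debug_mode=True, static_path="./static", adapters={})
# server.listen(8888)
# tornado.ioloop.IOLoop.current().start()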
| 35.447917 | 96 | 0.658537 | 2,869 | 0.84308 | 0 | 0 | 0 | 0 | 0 | 0 | 1,591 | 0.467529 |
ddc0751188e1d1856d4d69064affd55e5821f001
| 1,382 |
py
|
Python
|
leetcode/ds_stack_valid_parentheses.py
|
ngovindaraj/Python
|
edbcd302533bef81aa0c01e902e6081df58f383c
|
[
"MIT"
] | null | null | null |
leetcode/ds_stack_valid_parentheses.py
|
ngovindaraj/Python
|
edbcd302533bef81aa0c01e902e6081df58f383c
|
[
"MIT"
] | null | null | null |
leetcode/ds_stack_valid_parentheses.py
|
ngovindaraj/Python
|
edbcd302533bef81aa0c01e902e6081df58f383c
|
[
"MIT"
] | null | null | null |
# @file Valid Parentheses
# @brief Given a string containing just the characters '(', ')', '{', '}',
# '[' and ']', determine if the input string is valid.
# https://leetcode.com/problems/valid-parentheses/
import collections
'''
Given a string containing just the characters '(', ')', '{', '}', '[' and ']',
determine if the input string is valid.
The brackets must close in the correct order, "()" and "()[]{}" are all valid
but "(]" and "([)]" are not.
'''
# time complexity : O(n)
# space complexity: O(n)
def isValid(self, s):
# Use a dictionary to match fwd and rev brackets
dct = {'(': ')', '{': '}', '[': ']'} # HashTable for fwd -> reverse braces
# For each fwd brace record corresponding reverse brace to match
stk = collections.deque()
for char in s:
if char in dct: # If char is fwd bracket
stk.append(dct[char]) # Append corresponding rev bracket
elif char in ')}]': # If char is rev bracket
if len(stk) == 0: # Ensure no extra rev brackets
return False
elif char != stk.pop(): # Verify rev bracket type
return False
else: # Found non fwd/rev bracket character
return False
return len(stk) == 0 # Ensure no extra fwd bracket
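if __name__ == '__main__':
    # Minimal sanity checks; the unused `self` parameter of this LeetCode-style
    # snippet is passed as None for illustration.
    assert isValid(None, "()[]{}")
    assert not isValid(None, "([)]")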
| 40.647059 | 79 | 0.552822 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 868 | 0.628075 |
ddc0a159fdea30685a3e6c6f67386c3bf2f75073
| 3,746 |
py
|
Python
|
2016/round_1b/technobabble.py
|
laichunpongben/CodeJam
|
a048229bce1bc680dc85c8a69ef395a2f049732a
|
[
"Apache-2.0"
] | null | null | null |
2016/round_1b/technobabble.py
|
laichunpongben/CodeJam
|
a048229bce1bc680dc85c8a69ef395a2f049732a
|
[
"Apache-2.0"
] | null | null | null |
2016/round_1b/technobabble.py
|
laichunpongben/CodeJam
|
a048229bce1bc680dc85c8a69ef395a2f049732a
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
from __future__ import print_function
from collections import deque
def parse_and_sort_topics(topics):
topic_deque = deque(tuple(topic.split(' ')) for topic in topics)
word0s = [item[0] for item in topic_deque]
word1s = [item[1] for item in topic_deque]
topic_deque = deque(sorted(list(topic_deque),
key=lambda x: (word0s.count(x[0]) + word1s.count(x[1]),
word1s.count(x[1]),
word0s.count(x[0]))))
sorted_topics = []
while len(topic_deque) > 0:
topic = topic_deque.popleft()
sorted_topics.append(topic)
added_word0s = [item[0] for item in sorted_topics]
added_word1s = [item[1] for item in sorted_topics]
topic_deque = deque(sorted(list(topic_deque),
key=lambda x: (added_word0s.count(x[0]) + added_word1s.count(x[1]),
added_word1s.count(x[1]),
added_word0s.count(x[0]),
word0s.count(x[0]) + word1s.count(x[1]),
word1s.count(x[1]),
word0s.count(x[0]))))
return sorted_topics
def count_fake(topics):
word0_dict = {}
word1_dict = {}
real_count = 0
fake_count = 0
sorted_topics = parse_and_sort_topics(topics)
print(sorted_topics)
for topic in sorted_topics:
word0, word1 = topic
        # setdefault replaces the try/except probes (the read values were unused)
        word0_dict.setdefault(word0, 0)
        word1_dict.setdefault(word1, 0)
if word0_dict[word0] > 0 and word1_dict[word1] > 0:
fake_count += 1
else:
real_count += 1
word0_dict[word0] += 1
word1_dict[word1] += 1
return fake_count
if __name__ == '__main__':
import os
samples = [
['HYDROCARBON COMBUSTION',
'QUAIL BEHAVIOR',
'QUAIL COMBUSTION'],
['CODE JAM',
'SPACE JAM',
'PEARL JAM'],
['INTERGALACTIC PLANETARY',
'PLANETARY INTERGALACTIC'],
['BOUNDARY GRAVITY',
'BOUNDARY HERMENEUTICS',
'BOUNDARY TRANSGRESSION',
'QUANTUM GRAVITY',
'QUANTUM HERMENEUTICS',
'QUANTUM TRANSGRESSION',
'TRANSFORMATIVE GRAVITY',
'TRANSFORMATIVE HERMENEUTICS',
'TRANSFORMATIVE TRANSGRESSION'],
['GF CH',
'RO GI',
'YB GI',
'TD HI',
'YG HI',
'IZ NB',
'BQ TA',
'GF TP',
'GR WG',
'IZ ZD']
]
for sample in samples:
print(count_fake(sample))
data_files = ['C-small-practice']
for f in data_files:
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)),
'{0}.in'.format(f)), 'r') as input_file:
lines = input_file.readlines()
        test_case_count = int(lines[0].replace('\n', ''))
test_cases = []
inputs = [line.replace('\n', '') for line in lines[1:]]
i = 0
while i < len(inputs):
n = int(inputs[i])
topics = inputs[i+1:i+n+1]
test_cases.append(topics)
i += n+1
i = 1
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)),
'{0}.out'.format(f)), 'w') as output_file:
for test_case in test_cases:
print(i)
output_file.write('Case #{0}: {1}\n'.format(i, count_fake(test_case)))
i += 1
| 30.455285 | 95 | 0.509343 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 525 | 0.140149 |
ddc2ab09f2aabd1f5e1c2e16308c14b4efc96586
| 101 |
py
|
Python
|
dask_xgboost/__init__.py
|
ksangeek/dask-xgboost
|
8ca1d69f21a6c666eaf581fb88b20cff2b6b05ef
|
[
"BSD-3-Clause"
] | null | null | null |
dask_xgboost/__init__.py
|
ksangeek/dask-xgboost
|
8ca1d69f21a6c666eaf581fb88b20cff2b6b05ef
|
[
"BSD-3-Clause"
] | null | null | null |
dask_xgboost/__init__.py
|
ksangeek/dask-xgboost
|
8ca1d69f21a6c666eaf581fb88b20cff2b6b05ef
|
[
"BSD-3-Clause"
] | null | null | null |
from .core import _train, train, predict, XGBClassifier, XGBRegressor # noqa
__version__ = '0.1.7'
| 25.25 | 77 | 0.742574 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 13 | 0.128713 |
ddc39e71f4d5f6b6a53e16b07decfbb4b7887488
| 12,963 |
py
|
Python
|
life360.indigoPlugin/Contents/Server Plugin/plugin.py
|
ryanbuckner/life360-plugin
|
3e64108b91c4ee0f4f85f6e7aa31fa7bd1b1d6fe
|
[
"MIT"
] | 1 |
2021-09-25T15:43:00.000Z
|
2021-09-25T15:43:00.000Z
|
life360.indigoPlugin/Contents/Server Plugin/plugin.py
|
ryanbuckner/life360-plugin
|
3e64108b91c4ee0f4f85f6e7aa31fa7bd1b1d6fe
|
[
"MIT"
] | null | null | null |
life360.indigoPlugin/Contents/Server Plugin/plugin.py
|
ryanbuckner/life360-plugin
|
3e64108b91c4ee0f4f85f6e7aa31fa7bd1b1d6fe
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
####################
# Copyright (c) 2021 ryanbuckner
# https://github.com/ryanbuckner/life360-plugin/wiki
#
# Based on neilk's Solcast plugin
################################################################################
# Imports
################################################################################
import indigo
import sys
from life360 import life360
import datetime
try:
    from geopy.geocoders import Nominatim
except ImportError:
    # No `self` exists at module scope, so report via the Indigo server log.
    indigo.server.log("Geopy python library is not found. Try reinstalling the Plugin")
################################################################################
# Globals
################################################################################
################################################################################
class Plugin(indigo.PluginBase):
########################################
# Class properties
########################################
########################################
def __init__(self, pluginId, pluginDisplayName, pluginVersion, pluginPrefs):
super(Plugin, self).__init__(pluginId, pluginDisplayName, pluginVersion, pluginPrefs)
self.debug = pluginPrefs.get("showDebugInfo", False)
self.deviceList = []
try:
self.authorization_token = self.pluginPrefs.get('authorizationtoken', 'cFJFcXVnYWJSZXRyZTRFc3RldGhlcnVmcmVQdW1hbUV4dWNyRUh1YzptM2ZydXBSZXRSZXN3ZXJFQ2hBUHJFOTZxYWtFZHI0Vg')
self.username = self.pluginPrefs.get('life360_username', None)
self.password = self.pluginPrefs.get('life360_password', None)
self.refresh_frequency = self.pluginPrefs.get('refresh_frequency', 30)
self.logger.debug("Success retriving preferences from Plugin config")
except:
self.logger.error("Error retrieving Plugin preferences. Please use Configure to set")
self.logger.info(u"")
self.logger.info(u"{0:=^130}".format("Starting Life360 Plugin Engine"))
self.logger.info(u"{0:<30} {1}".format("Plugin name:", pluginDisplayName))
self.logger.info(u"{0:<30} {1}".format("Plugin version:", pluginVersion))
self.logger.info(u"{0:<30} {1}".format("Plugin ID:", pluginId))
self.logger.info(u"{0:<30} {1}".format("Refresh Frequency:", str(self.refresh_frequency) + " seconds"))
self.logger.info(u"{0:<30} {1}".format("Indigo version:", indigo.server.version))
self.logger.info(u"{0:<30} {1}".format("Python version:", sys.version.replace('\n', '')))
self.logger.info(u"{0:<30} {1}".format("Python Directory:", sys.prefix.replace('\n', '')))
self.logger.info(u"{0:=^130}".format(""))
self.life360data = {}
self.member_list = {}
########################################
def deviceStartComm(self, device):
self.logger.debug("Starting device: " + device.name)
device.stateListOrDisplayStateIdChanged()
if device.id not in self.deviceList:
self.update(device)
self.deviceList.append(device.id)
########################################
def deviceStopComm(self, device):
self.logger.debug("Stopping device: " + device.name)
if device.id in self.deviceList:
self.deviceList.remove(device.id)
########################################
def runConcurrentThread(self):
self.logger.debug("Starting concurrent thread")
        try:
            pollingFreq = int(self.pluginPrefs['refresh_frequency'])
        except (KeyError, ValueError):
            pollingFreq = 60
self.logger.debug("Current polling frequency is: " + str(pollingFreq) + " seconds")
# Refresh device states immediately after restarting the Plugin
iterationcount = 1
try:
while True:
if (iterationcount > 1):
self.sleep(1 * pollingFreq)
self.get_new_life360json()
iterationcount += 1
for deviceId in self.deviceList:
# call the update method with the device instance
self.update(indigo.devices[deviceId])
self.updatedevicestates(indigo.devices[deviceId])
except self.StopThread:
pass
########################################
def update(self, device):
#self.logger.debug(device.name)
# # device.stateListOrDisplayStateIdChanged()
return
########################################
# UI Validate, Device Config
########################################
def validateDeviceConfigUi(self, valuesDict, typeId, device):
return (True, valuesDict)
# assigns the device.address to the value of the member.id
def menuChanged(self, valuesDict = None, typeId = None, devId = None):
self.create_member_list()
self.logger.debug(self.member_list)
if valuesDict['membername'] in self.member_list:
tempName = valuesDict['membername']
valuesDict['address'] = self.member_list[tempName] # m['id']
else:
valuesDict['address'] = "Unknown"
return valuesDict
#dump JSON to event log
def write_json_to_log(self):
if (len(self.life360data) == 0):
self.get_new_life360json()
self.logger.debug(self.life360data)
if (not self.debug):
indigo.server.log("Life360 data has been written to the debugLog. If you did not see it you may need to enable debugging in the Plugin Config UI")
return
########################################
# UI Validate, Plugin Preferences
########################################
def validatePrefsConfigUi(self, valuesDict):
if int(valuesDict['refresh_frequency']) < 15:
self.logger.error("Invalid entry for Refresh Frequency - must be greater than 15")
errorsDict = indigo.Dict()
errorsDict['refresh_frequency'] = "Invalid entry for Refresh Frequency - must be greater than 15"
return (False, valuesDict, errorsDict)
if (not valuesDict['life360_username']):
self.logger.error("Invalid entry for Life360 username - cannot be empty")
errorsDict = indigo.Dict()
errorsDict['life360_username'] = "Invalid entry for Life360 username - cannot be empty"
return (False, valuesDict, errorsDict)
if (valuesDict['life360_username'].find('@') == -1):
self.logger.error("Invalid entry for Life360 username - must be a valid email address")
errorsDict = indigo.Dict()
errorsDict['life360_username'] = "Invalid entry for Life360 username - must be a valid email address"
return (False, valuesDict, errorsDict)
if (valuesDict['life360_username'].find('.') == -1):
self.logger.error("Invalid entry for Life360 username - must be a valid email address")
errorsDict = indigo.Dict()
errorsDict['life360_username'] = "Invalid entry for Life360 username - must be a valid email address"
return (False, valuesDict, errorsDict)
if (not valuesDict['life360_password']):
self.logger.error("Invalid entry for Life360 password - cannot be empty")
errorsDict = indigo.Dict()
errorsDict['life360_password'] = "Invalid entry for Life360 password - cannot be empty"
return (False, valuesDict, errorsDict)
auth_result = self.validate_api_auth(valuesDict['life360_username'], valuesDict['life360_password'], valuesDict['authorizationtoken'])
if (not auth_result):
self.logger.error("Life360 API Authentication failed - check your username and password")
errorsDict = indigo.Dict()
errorsDict['life360_password'] = "Life360 API Authentication failed - check your username and password"
return (False, valuesDict, errorsDict)
self.debug = valuesDict['showDebugInfo']
self.logger.debug("Debug set to: " + str(self.debug))
return (True, valuesDict)
def validate_api_auth(self, username, password, authorization_token):
api = life360(authorization_token=authorization_token, username=username, password=password)
try:
if api.authenticate():
self.logger.debug("Validation of API was successful")
return True
else:
self.logger.debug("Validation of API FAILED")
return False
        except Exception as e:
            # exception objects have no .msg attribute; use str(e)
            self.logger.debug("Error authenticating: " + str(e))
return False
def get_member_list(self, filter="", valuesDict=None, typeId="", targetId=0):
if (len(self.member_list) == 0):
self.create_member_list()
retList = list(self.member_list.keys())
return retList
def get_new_life360json(self):
api = life360(authorization_token=self.authorization_token, username=self.username, password=self.password)
if api.authenticate():
try:
self.logger.debug("Attepting to get list of circles")
circles = api.get_circles()
id = circles[0]['id']
circle = api.get_circle(id)
self.life360data = circle
self.create_member_list()
            except Exception as e:
                # exceptions have no .message attribute in Python 3; use str(e)
                self.logger.error(str(e))
else:
self.logger.error("Error retrieving new Life360 JSON, Make sure you have the correct credentials in Plugin Config")
return
def create_member_list(self):
if len(self.life360data) == 0:
self.get_new_life360json()
self.member_list.clear()
for m in self.life360data['members']:
self.member_list[m['firstName']] = m['id']
return
def toggleDebugging(self):
if self.debug:
self.debug = False
self.logger.info(u"Turning off debug logging (Toggle Debugging menu item chosen).")
self.pluginPrefs['showDebugInfo'] = False
else:
self.debug = True
self.pluginPrefs['showDebugInfo'] = True
self.logger.debug(u"Turning on debug logging (Toggle Debugging menu item chosen).")
############################
# Action Method
#############################
def refresh_member_data(self,pluginAction, device):
self.get_new_life360json()
self.updatedevicestates(device)
return
def isDriving(self, speed_int):
if (round(speed_int) > 1):
return 1
else:
return 0
def mphSpeed(self, speed_int):
if speed_int < 2:
return str(speed_int)
else:
return str(round(2.2 * speed_int))
def updatedevicestates(self, device):
device_states = []
member_device = device.pluginProps['membername']
member_device_address = device.address
self.logger.debug("Updating device: " + member_device)
        try:
            geocoder = Nominatim(user_agent='life360')
        except Exception:
            self.logger.error("Error instantiating geocoder object")
            geocoder = None  # reverse lookups below then fall back to the error string
if self.life360data['members']:
for m in self.life360data['members']:
if ((m['id'] == member_device_address) and (m['location'])):
x = datetime.datetime.now()
cur_date_time = x.strftime("%m/%d/%Y %I:%M %p")
# the raw speed from Life360 is exstimated to be MPH/2.2
adjustedSpeed = self.mphSpeed(float(m['location']['speed']))
# the raw Life360 isDriving boolean always comes back 0. Let's use speed to determine isDriving for Indigo
adjustedDriving = self.isDriving(float(adjustedSpeed))
device_states.append({'key': 'member_id','value': m['id'] })
device_states.append({'key': 'member_avatar','value': m['avatar'] })
device_states.append({'key': 'member_first_name','value': m['firstName'] })
device_states.append({'key': 'member_last_name','value': m['lastName'] })
device_states.append({'key': 'member_phone_num','value': m['loginPhone']})
device_states.append({'key': 'member_email','value': m['loginEmail']})
device_states.append({'key': 'last_api_update','value': str(cur_date_time)})
device_states.append({'key': 'member_360_location','value': m['location']['name']})
device_states.append({'key': 'member_battery','value': m['location']['battery']})
device_states.append({'key': 'batteryLevel','value': int(float(m['location']['battery']))})
device_states.append({'key': 'member_wifi','value': m['location']['wifiState']})
device_states.append({'key': 'member_battery_charging','value': m['location']['charge']})
device_states.append({'key': 'member_in_transit','value': m['location']['inTransit']})
device_states.append({'key': 'member_driveSDKStatus','value': m['location']['driveSDKStatus']})
device_states.append({'key': 'member_lat','value': float(m['location']['latitude'])})
device_states.append({'key': 'member_long','value': float(m['location']['longitude'])})
device_states.append({'key': 'member_is_driving','value': adjustedDriving })
device_states.append({'key': 'member_speed','value': adjustedSpeed })
try:
# get address from lat long information
loclat = float(m['location']['latitude'])
loclng = float(m['location']['longitude'])
geoloc = geocoder.reverse((loclat, loclng))
currentaddress = geoloc
except Exception as g:
self.logger.debug(u"Geocoder error")
currentaddress = "-geocoder error-"
device_states.append({'key': 'member_closest_address','value': str(currentaddress) })
if (m['location']['since']):
sincedate = datetime.datetime.fromtimestamp(m['location']['since'])
sincedatestr = sincedate.strftime("%m/%d/%Y %I:%M %p")
device_states.append({'key': 'member_location_since_datetime','value': sincedatestr})
else:
device_states.append({'key': 'member_location_since_datetime','value': ''})
                if (m['location']['name'] == "Home"):
                    device.updateStateImageOnServer(indigo.kStateImageSel.MotionSensorTripped)
                else:
                    # `None` is a keyword in Python 3, so `.None` is a syntax error;
                    # fetch the enum attribute by name instead
                    device.updateStateImageOnServer(getattr(indigo.kStateImageSel, "None"))
device.updateStatesOnServer(device_states)
else:
pass
return
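# Spot-check of the speed heuristics above (illustrative raw values):
# mphSpeed treats the raw Life360 speed as roughly MPH / 2.2, so a raw
# reading of 10 becomes "22"; isDriving flags anything moving above ~1 MPH.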
| 36.931624 | 174 | 0.66011 | 12,121 | 0.935046 | 0 | 0 | 0 | 0 | 0 | 0 | 5,274 | 0.40685 |
ddc3d50c63fd6d2bc041dad2539a4d22872461ff
| 699 |
py
|
Python
|
accounts/views.py
|
airmoor/learnweb
|
5867eadaca45b847f6fba8fd0be8a8ccdaeceea0
|
[
"MIT"
] | null | null | null |
accounts/views.py
|
airmoor/learnweb
|
5867eadaca45b847f6fba8fd0be8a8ccdaeceea0
|
[
"MIT"
] | null | null | null |
accounts/views.py
|
airmoor/learnweb
|
5867eadaca45b847f6fba8fd0be8a8ccdaeceea0
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.decorators import login_required
from django.urls import reverse_lazy
from django.views import generic, View
class SignUp(generic.CreateView):
form_class = UserCreationForm
success_url = reverse_lazy('login')
template_name = 'signup.html'
class UserAccountView(View):
template_name='account/user_account.html'
def get(self, request, *args, **kwargs):
user = self.kwargs.get('user')
current_user=request.user
context={
'current_user':current_user
}
return render(self.request, self.template_name, context)
| 27.96 | 64 | 0.726753 | 468 | 0.669528 | 0 | 0 | 0 | 0 | 0 | 0 | 67 | 0.095851 |
ddc4e2961bdd997e8ed912766a3c871b4f8b1cc7
| 3,306 |
py
|
Python
|
openmapi/globales.py
|
IgnacioPardo/mAPI-Provincias
|
812fc12bcc72d6aa28ab2e39af2d64d0aa68c86b
|
[
"MIT"
] | 4 |
2020-08-02T06:51:04.000Z
|
2022-03-22T21:31:44.000Z
|
openmapi/globales.py
|
Creativity-Hub/Open-mAPI
|
b7e0ee9acda424aec0e84513d8e968aa6ff5d7c5
|
[
"MIT"
] | null | null | null |
openmapi/globales.py
|
Creativity-Hub/Open-mAPI
|
b7e0ee9acda424aec0e84513d8e968aa6ff5d7c5
|
[
"MIT"
] | 1 |
2022-03-24T22:20:47.000Z
|
2022-03-24T22:20:47.000Z
|
import requests
from bs4 import BeautifulSoup
class Pais:
    name = ''
    def __init__(self, name, infectados=None, recuperados=None, fallecidos=None, activos=None):
        self.name = name
        # Build a fresh dict per instance; a class-level `casos = {}` would be
        # shared (and silently overwritten) by every Pais object.
        self.casos = {
            'infectados': infectados,
            'recuperados': recuperados,
            'fallecidos': fallecidos,
            'activos': activos,
        }
#Updates case count for register
def update(self, registro, valor):
self.casos[registro] = valor
#Replaces all registers with new dict
def updateAll(self, new):
self.casos = new
#Returns register
def get(self, registro):
return self.casos[registro]
#Returns all registers
def getAll(self):
return self.casos
def __str__(self):
return self.name
def __repr__(self):
return self.name
def __iter__(self):
for registro in self.casos:
yield [registro, self.casos[registro]]
class Globales:
results = {}
registers_label = 'Empty'
def __init__(self):
self.results = {}
#Contabilizes countries.
def count(self):
self.registers_label = len(self.results.keys())+' registros.'
return len(self.results.keys())
def __str__(self):
return self.registers_label
def __repr__(self):
return self.registers_label
def __iter__(self):
for pais in self.results:
yield self.results[pais]
def load(self):
url = "https://en.wikipedia.org/wiki/COVID-19_pandemic_by_country_and_territory"
page = requests.get(url)
soup = BeautifulSoup(page.content, 'html.parser')
noise = soup.find('caption')
noise.decompose()
noise = soup.find_all('td', attrs={'style':'padding:0px 2px;'})
for td in noise:
td.decompose()
noise = soup.find_all('img')
for img in noise:
img.decompose()
noise = soup.find_all("tr", class_="sortbottom")
for tr in noise:
tr.decompose()
table = soup.find("table", class_="wikitable")
rows = table.find_all('tr')
cases, deaths, recov = [title.text.replace('\n', '').replace(',', '.') for title in rows[1].find_all('th')[1:6]][1:4]
active = int(cases.replace('.', '')) - (int(deaths.replace('.', ''))+int(recov.replace('.', '')))
self.results['world'] = Pais('world', cases, recov, deaths, active)
rows = rows[2:]
for row in rows:
country = row.find_all('th')[1].text.replace('\n', '')
if '[' in country:
country = country.split('[')[0]
res = [valor.text.replace('\n', '') for valor in row.find_all('td')[0:3]]
done = False
for i in range(len(res)):
if res[i] == 'No data':
self.results[country] = Pais(country, cases, recov, deaths, '-')
done = True
if ',' in res[i]:
res[i] = res[i].replace(',', '.')
if not done:
done = False
cases, deaths, recov = res
active = int(cases.replace('.', '')) - (int(deaths.replace('.', ''))+int(recov.replace('.', '')))
if active > 999:
active = '{:,}'.format(active).replace(',', '.')
self.results[country] = Pais(country, cases, recov, deaths, active)
self.count()
def getCountry(self, country):
self.load()
return self.results[country]
def getCountryInfo(self, country, info):
self.load()
return self.results[country].get(info)
def getCountryKeys(self):
self.load()
return list(self.results.keys())
def getInfoKeys(self):
return ['infectados', 'fallecidos', 'recuperados', 'activos']
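# Usage sketch (requires network access; 'world' is the totals row scraped in
# load() above):
#
#     g = Globales()
#     print(g.getCountryInfo('world', 'activos'))
#     print(g.getCountryKeys()[:5])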
| 23.614286 | 119 | 0.650333 | 3,257 | 0.985178 | 168 | 0.050817 | 0 | 0 | 0 | 0 | 533 | 0.161222 |
ddc738c6ed27c814c11c63a6fb453a793040af60 | 947 | py | Python | openpype/hosts/tvpaint/plugins/publish/validate_start_frame.py | jonclothcat/OpenPype | d1208cbebc0a7f378de0062ccd653295c6399195 | ["MIT"] | 1 | 2022-02-08T15:40:41.000Z | 2022-02-08T15:40:41.000Z | openpype/hosts/tvpaint/plugins/publish/validate_start_frame.py | zafrs/OpenPype | 4b8e7e1ed002fc55b31307efdea70b0feaed474f | ["MIT"] | 2 | 2022-03-18T01:46:03.000Z | 2022-03-18T01:46:16.000Z | openpype/hosts/tvpaint/plugins/publish/validate_start_frame.py | zafrs/OpenPype | 4b8e7e1ed002fc55b31307efdea70b0feaed474f | ["MIT"] | null | null | null |
import pyblish.api

from openpype.pipeline import PublishXmlValidationError
from openpype.hosts.tvpaint.api import lib


class RepairStartFrame(pyblish.api.Action):
    """Repair start frame."""

    label = "Repair"
    icon = "wrench"
    on = "failed"

    def process(self, context, plugin):
        lib.execute_george("tv_startframe 0")


class ValidateStartFrame(pyblish.api.ContextPlugin):
    """Validate start frame being at frame 0."""

    label = "Validate Start Frame"
    order = pyblish.api.ValidatorOrder
    hosts = ["tvpaint"]
    actions = [RepairStartFrame]
    optional = True

    def process(self, context):
        start_frame = lib.execute_george("tv_startframe")
        if start_frame == 0:
            return

        raise PublishXmlValidationError(
            self,
            "Start frame has to be frame 0.",
            formatting_data={
                "current_start_frame": start_frame
            }
        )
| 24.921053 | 57 | 0.636748 | 823 | 0.86906 | 0 | 0 | 0 | 0 | 0 | 0 | 209 | 0.220697 |
ddc806857072eff9c83a07e28e06781742f5341f | 1304 | py | Python | leds/rgbLed.py | JDRyder/stardust | a1b1de99e7d10c18f7243217a72aa5e7a3566e74 | ["MIT"] | null | null | null | leds/rgbLed.py | JDRyder/stardust | a1b1de99e7d10c18f7243217a72aa5e7a3566e74 | ["MIT"] | null | null | null | leds/rgbLed.py | JDRyder/stardust | a1b1de99e7d10c18f7243217a72aa5e7a3566e74 | ["MIT"] | null | null | null |
import board
import neopixel
import time

pixels = neopixel.NeoPixel(board.D21, 1)

# Colour tuples appear to assume GRB channel order (first value = green),
# which is consistent with all of the names below.
GREEN = (255, 0, 0)
RED = (0, 255, 0)
BLUE = (0, 0, 255)
YELLOW = (255, 255, 0)
CYAN = (255, 0, 255)
VIOLET = (0, 127, 255)
WHITE = (255, 255, 255)
OFF = (0, 0, 0)


def off():
    pixels[0] = OFF


def startup():
    pixels[0] = GREEN
    time.sleep(1)
    pixels[0] = RED
    time.sleep(1)
    pixels[0] = BLUE
    time.sleep(1)
    pixels[0] = YELLOW
    time.sleep(1)
    pixels[0] = CYAN
    time.sleep(1)
    pixels[0] = VIOLET
    time.sleep(1)
    pixels[0] = WHITE
    time.sleep(1)
    pixels[0] = OFF


def statusOk():
    pixels[0] = GREEN
    time.sleep(0.5)
    pixels[0] = OFF
    time.sleep(0.5)
    pixels[0] = GREEN
    time.sleep(0.5)
    pixels[0] = OFF
    time.sleep(0.5)


def bmpError():
    pixels[0] = BLUE
    time.sleep(1)
    pixels[0] = OFF
    time.sleep(1)
    pixels[0] = BLUE
    time.sleep(1)
    pixels[0] = OFF
    time.sleep(1)


def vemlError():
    pixels[0] = YELLOW
    time.sleep(1)
    pixels[0] = OFF
    time.sleep(1)
    pixels[0] = YELLOW
    time.sleep(1)
    pixels[0] = OFF
    time.sleep(1)


def samError():
    pixels[0] = RED
    time.sleep(1)
    pixels[0] = OFF
    time.sleep(1)
    pixels[0] = RED
    time.sleep(1)
    pixels[0] = OFF
    time.sleep(1)


def scd30Error():
    pixels[0] = CYAN
    time.sleep(1)
    pixels[0] = OFF
    time.sleep(1)
    pixels[0] = CYAN
    time.sleep(1)
    pixels[0] = OFF
    time.sleep(1)
| 15.162791 | 40 | 0.624233 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 | 0.006135 |
ddc862cde96df508b37a55b7bb12e12b0c12e813 | 3548 | py | Python | utils/Model_builder.py | Devwalkar/General_codebase | d52eee09248caa715d7e5e8b87f145d1989e278d | ["MIT"] | null | null | null | utils/Model_builder.py | Devwalkar/General_codebase | d52eee09248caa715d7e5e8b87f145d1989e278d | ["MIT"] | null | null | null | utils/Model_builder.py | Devwalkar/General_codebase | d52eee09248caa715d7e5e8b87f145d1989e278d | ["MIT"] | null | null | null |
import torch
import pretrainedmodels as PM
import torch.nn as nn
from .Mobilenet import MobileNetV2

device = 'cuda' if torch.cuda.is_available() else 'cpu'


def Model_builder(configer):

    model_name = configer.model['name']
    No_classes = configer.dataset_cfg["id_cfg"]["num_classes"]
    model_pretrained = configer.model['pretrained']
    model_dataparallel = configer.model["DataParallel"]
    model_gpu_replica = configer.model["Multi_GPU_replica"]
    gpu_ids = configer.train_cfg["gpu"]

    # Each branch loads an ImageNet-shaped backbone, then swaps the final
    # fully connected layer for one sized to the target class count.
    if model_name == "Inceptionv3":
        model = PM.inceptionv3(num_classes=1000, pretrained=model_pretrained)
        d = model.last_linear.in_features
        model.last_linear = nn.Linear(d, No_classes)
    elif model_name == "Xception":
        model = PM.xception(num_classes=1000, pretrained=model_pretrained)
        d = model.last_linear.in_features
        model.last_linear = nn.Linear(d, No_classes)
    elif model_name == "VGG_19":
        model = PM.vgg19(num_classes=1000, pretrained=model_pretrained)
        d = model.last_linear.in_features
        model.last_linear = nn.Linear(d, No_classes)
    elif model_name == "Resnet18":
        model = PM.resnet18(num_classes=1000, pretrained=model_pretrained)
        d = model.last_linear.in_features
        model.last_linear = nn.Linear(d, No_classes)
    elif model_name == "Resnet50":
        model = PM.resnet50(num_classes=1000, pretrained=model_pretrained)
        d = model.last_linear.in_features
        model.last_linear = nn.Linear(d, No_classes)
    elif model_name == "Resnet101":
        model = PM.resnet101(num_classes=1000, pretrained=model_pretrained)
        d = model.last_linear.in_features
        model.last_linear = nn.Linear(d, No_classes)
    elif model_name == "Resnet152":
        model = PM.resnet152(num_classes=1000, pretrained=model_pretrained)
        d = model.last_linear.in_features
        model.last_linear = nn.Linear(d, No_classes)
    elif model_name == "Resnet34":
        model = PM.resnet34(num_classes=1000, pretrained=model_pretrained)
        d = model.last_linear.in_features
        model.last_linear = nn.Linear(d, No_classes)
    elif model_name == "Densenet121":
        model = PM.densenet121(num_classes=1000, pretrained=model_pretrained)
        d = model.last_linear.in_features
        model.last_linear = nn.Linear(d, No_classes)
    elif model_name == "ResNeXt101-32":
        model = PM.resnext101_32x4d(num_classes=1000, pretrained=model_pretrained)
        d = model.last_linear.in_features
        model.last_linear = nn.Linear(d, No_classes)
    elif model_name == "ResNeXt101-64":
        model = PM.resnext101_64x4d(num_classes=1000, pretrained=model_pretrained)
        d = model.last_linear.in_features
        model.last_linear = nn.Linear(d, No_classes)
    elif model_name == "MobilenetV2":
        model = MobileNetV2(n_class=No_classes)
    else:
        raise ImportError("Model Architecture not supported")

    # Performing Data Parallelism if configured
    if model_dataparallel:
        model = torch.nn.DataParallel(model.to(device), device_ids=gpu_ids)
    elif model_gpu_replica:
        # rank must be smaller than world_size, so the only valid rank here is 0
        torch.distributed.init_process_group(backend='nccl', world_size=1, rank=0)
        model = torch.nn.DistributedDataParallel(model.to(device), device_ids=gpu_ids)
    else:
        model = model.to(device)

    print('---------- Model Loaded')

    return model
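# Hypothetical usage sketch: the field names below mirror exactly what
# Model_builder reads from its configer argument (values are illustrative):
#
#     configer.model = {'name': 'Resnet18', 'pretrained': 'imagenet',
#                       'DataParallel': False, 'Multi_GPU_replica': False}
#     configer.dataset_cfg = {'id_cfg': {'num_classes': 10}}
#     configer.train_cfg = {'gpu': [0]}
#     model = Model_builder(configer)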
| 37.744681 | 94 | 0.669391 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 336 | 0.094701 |
ddc91781c017fdef90c8f25f225a0256fda47415 | 828 | py | Python | examples/main.py | marcoaaguiar/erised | 26a304afb2058f532b07ecde6c6fc85d8864696c | ["MIT"] | null | null | null | examples/main.py | marcoaaguiar/erised | 26a304afb2058f532b07ecde6c6fc85d8864696c | ["MIT"] | 3 | 2021-03-15T00:51:37.000Z | 2021-03-15T01:01:30.000Z | examples/main.py | marcoaaguiar/erised | 26a304afb2058f532b07ecde6c6fc85d8864696c | ["MIT"] | null | null | null |
from erised.proxy import Proxy


class Dog:
    def bark(self, loud: bool):
        sound = "woof-woof"
        if loud:
            return sound.upper()
        return sound


class Person:
    def __init__(self, dog: Dog = None):
        self.dog = dog


if __name__ == "__main__":
    person = Person()
    person.dog = Dog()

    proxy = Proxy(obj=person)

    # call method remotely
    call_future = proxy.dog.bark(loud=True)
    print(call_future.result())

    # set attributes into remote object, even if they didn't exist originally
    proxy.dog.age = 3  # it generates a future that can't be retrieved

    # get attributes from remote object
    get_future = proxy.dog.age.retrieve()
    print(get_future.result())

    # if running multiprocessing mode (local=False), terminates child process
    proxy.terminate()
| 23.657143 | 77 | 0.649758 | 218 | 0.263285 | 0 | 0 | 0 | 0 | 0 | 0 | 271 | 0.327295 |
ddcc7c8aaeb73f494f7fe3439f603884d9bf5226 | 480 | py | Python | ontask/migrations/0056_auto_20190323_1122.py | pinheiroo27/ontask_b | 23fee8caf4e1c5694a710a77f3004ca5d9effeac | ["MIT"] | 33 | 2017-12-02T04:09:24.000Z | 2021-11-07T08:41:57.000Z | ontask/migrations/0056_auto_20190323_1122.py | pinheiroo27/ontask_b | 23fee8caf4e1c5694a710a77f3004ca5d9effeac | ["MIT"] | 189 | 2017-11-16T04:06:29.000Z | 2022-03-11T23:35:59.000Z | ontask/migrations/0056_auto_20190323_1122.py | pinheiroo27/ontask_b | 23fee8caf4e1c5694a710a77f3004ca5d9effeac | ["MIT"] | 30 | 2017-11-30T03:35:44.000Z | 2022-01-31T03:08:08.000Z |
# Generated by Django 2.1.7 on 2019-03-23 00:52

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('ontask', '0055_action_nrows_all_false'),
    ]

    operations = [
        migrations.AlterField(
            model_name='action',
            name='nrows_all_false',
            field=models.IntegerField(blank=True, default=None, null=True, verbose_name='Number of rows with all conditions false'),
        ),
    ]
| 25.263158 | 132 | 0.641667 | 387 | 0.80625 | 0 | 0 | 0 | 0 | 0 | 0 | 151 | 0.314583 |
ddcde07c3cbd2e093fb249312865d2348a9e3b73 | 6863 | py | Python | proteus/MeshAdaptPUMI/Checkpoint.py | acatwithacomputer/proteus | 80dfad95da6ab4d18a88a035f55c26b03540a864 | ["MIT"] | null | null | null | proteus/MeshAdaptPUMI/Checkpoint.py | acatwithacomputer/proteus | 80dfad95da6ab4d18a88a035f55c26b03540a864 | ["MIT"] | 13 | 2018-02-08T23:22:59.000Z | 2020-12-06T19:40:32.000Z | proteus/MeshAdaptPUMI/Checkpoint.py | acatwithacomputer/proteus | 80dfad95da6ab4d18a88a035f55c26b03540a864 | ["MIT"] | 1 | 2020-02-17T03:25:34.000Z | 2020-02-17T03:25:34.000Z |
from __future__ import division
from builtins import str
from builtins import range
import proteus
import sys
import numpy
from proteus import Profiling

# it should probably be associated with the PUMI domain somehow
# The current implementation assumes we're using NS, VOF, LS, RD, MCorr setup with lagging and Backwards Euler.
# Future work on this module should include creating an abstract class from which variations based on the models and numerical accuracy can be created
# Have the dictionary submodels be labeled by physical model names like "twp_navier_stokes"


class Checkpointer:
    "This class is meant to handle the checkpointing process for adapted meshes. Information that's needed to be loaded into hotstart needs to be output and then read in to be handled for data reconstruction"

    def __init__(self, NSobject, frequency=10):
        self.NSobject = NSobject
        self.counter = 0
        self.frequency = frequency

    def checkpoint(self, hotStartTime):
        self.transferInfo()
        self.saveMesh()
        modelListOld = self.EncodeModel(hotStartTime)
        # pickling is apparently unsafe so we use json to try storing modelListOld
        filename = "checkpointInfo" + str(self.counter)
        f = open(filename, 'w')
        import json
        # json.dump(modelListOld.__dict__, f)
        json.dump(modelListOld, f)
        f.close()

    def transferInfo(self):
        self.NSobject.PUMI_transferFields()

    def saveMesh(self):
        fileName = "checkpoint" + str(self.counter) + "_.smb"
        self.NSobject.pList[0].domain.PUMIMesh.writeMesh(fileName)

    def EncodeModel(self, hotStartTime):
        "Grab only necessary components from modelListOld so far consistent only with first-order time integrator"
        # def __init__(self, modelListOld, hotStartTime):
        modelListOld = self.NSobject.modelListOld
        saveModel = {}
        saveModel["tCount"] = self.NSobject.tCount + 1  # +1 just because of how indexing works in h5 file
        saveModel["counter"] = self.counter
        saveModel["numModels"] = len(modelListOld)
        saveModel["hotStartTime"] = hotStartTime
        saveModel["nAdapt"] = self.NSobject.pList[0].domain.PUMIMesh.nAdapt()
        saveModel["checkpoint_status"] = ""
        if (hasattr(self.NSobject, "tn") and (self.NSobject.systemStepController.t_system_last < self.NSobject.tn)):
            saveModel["checkpoint_status"] = "midsystem"
            saveModel["tCount"] = self.NSobject.tCount + 2  # don't know how to justify this yet but it's what is needed
        else:
            saveModel["checkpoint_status"] = "endsystem"

        saveModel["systemStepController"] = []
        controllerAttribute = {}
        controllerAttribute["dt_system"] = self.NSobject.systemStepController.dt_system
        controllerAttribute["dt_system_fixed"] = self.NSobject.systemStepController.dt_system_fixed
        controllerAttribute["t_system_last"] = self.NSobject.systemStepController.t_system_last
        controllerAttribute["t_system"] = self.NSobject.systemStepController.t_system
        saveModel["systemStepController"].append(controllerAttribute)

        saveModel["stepController"] = []
        saveModel["timeIntegration"] = []
        saveModel["shockCapturing"] = []
        saveModel["stabilization"] = []
        for i in range(0, len(modelListOld)):
            # step controller
            subModel = {}
            subModel["dt_model"] = modelListOld[i].stepController.dt_model
            subModel["t_model"] = modelListOld[i].stepController.t_model
            subModel["t_model_last"] = modelListOld[i].stepController.t_model_last
            subModel["substeps"] = modelListOld[i].stepController.substeps
            saveModel["stepController"].append(subModel)

            # time integration
            subModel = {}
            subModel["dt"] = modelListOld[i].levelModelList[0].timeIntegration.dt
            subModel["t"] = modelListOld[i].levelModelList[0].timeIntegration.t
            if (hasattr(modelListOld[i].levelModelList[0].timeIntegration, 'dtLast')):
                subModel["dtLast"] = modelListOld[i].levelModelList[0].timeIntegration.dtLast
            else:
                subModel["dtLast"] = None
            saveModel["timeIntegration"].append(subModel)

            # shock capturing
            subModel = {}
            if (modelListOld[i].levelModelList[0].shockCapturing is not None):
                subModel["nSteps"] = modelListOld[i].levelModelList[0].shockCapturing.nSteps
                subModel["nStepsToDelay"] = modelListOld[i].levelModelList[0].shockCapturing.nStepsToDelay
            # append even when empty so indices stay aligned with the model list
            saveModel["shockCapturing"].append(subModel)

        # Assuming the 0th model is RANS2P
        # stabilization
        subModel = {}
        subModel["nSteps"] = modelListOld[0].levelModelList[0].stabilization.nSteps
        saveModel["stabilization"].append(subModel)

        return saveModel

    def DecodeModel(self, filename):
        "create a modelListOld that can interact with the post-adapt restart capabilities"
        f = open(filename, 'r')
        import json
        previousInfo = json.load(f)
        f.close()
        systemStepController = previousInfo["systemStepController"][0]
        self.NSobject.systemStepController.dt_system = systemStepController["dt_system"]
        self.NSobject.systemStepController.dt_system_fixed = systemStepController["dt_system_fixed"]
        self.NSobject.systemStepController.t_system_last = systemStepController["t_system_last"]
        self.NSobject.systemStepController.t_system = systemStepController["t_system"]

        numModels = previousInfo["numModels"]
        stepController = previousInfo["stepController"]
        timeIntegration = previousInfo["timeIntegration"]
        shockCapturing = previousInfo["shockCapturing"]
        stabilization = previousInfo["stabilization"]
        self.counter = previousInfo["counter"] + 1
        for i in range(0, numModels):
            self.NSobject.modelList[i].stepController.dt_model = stepController[i]["dt_model"]
            self.NSobject.modelList[i].stepController.t_model = stepController[i]["t_model"]
            self.NSobject.modelList[i].stepController.t_model_last = stepController[i]["t_model_last"]
            self.NSobject.modelList[i].stepController.substeps = stepController[i]["substeps"]

            self.NSobject.modelList[i].levelModelList[0].timeIntegration.dt = timeIntegration[i]["dt"]
            self.NSobject.modelList[i].levelModelList[0].timeIntegration.t = timeIntegration[i]["t"]
            self.NSobject.modelList[i].levelModelList[0].timeIntegration.dtLast = timeIntegration[i]["dtLast"]

            if (self.NSobject.modelList[i].levelModelList[0].shockCapturing is not None):
                self.NSobject.modelList[i].levelModelList[0].shockCapturing.nSteps = shockCapturing[i]["nSteps"]
                self.NSobject.modelList[i].levelModelList[0].shockCapturing.nStepsToDelay = shockCapturing[i]["nStepsToDelay"]

        self.NSobject.modelList[0].levelModelList[0].stabilization.nSteps = stabilization[0]["nSteps"]
        self.NSobject.pList[0].domain.PUMIMesh.set_nAdapt(previousInfo["nAdapt"])
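# Round-trip sketch (added note): checkpoint() writes the adapted mesh
# ("checkpoint<N>_.smb") plus a "checkpointInfo<N>" JSON file; after a restart,
# DecodeModel("checkpointInfo<N>") pushes the saved controller and integrator
# state back onto the live NSobject model list.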
| 48.673759 | 208 | 0.722862 | 6,292 | 0.9168 | 0 | 0 | 0 | 0 | 0 | 0 | 1,931 | 0.281364 |
ddd0869ac7a679df101a618ba69ece08f889a431 | 596 | py | Python | rlunch/lunch/migrations/0002_talk_how_was_it.py | acdh-oeaw/django-generic-apps | 4af63a8a95826ede787347fc86951e933ccdbc6c | ["MIT"] | null | null | null | rlunch/lunch/migrations/0002_talk_how_was_it.py | acdh-oeaw/django-generic-apps | 4af63a8a95826ede787347fc86951e933ccdbc6c | ["MIT"] | null | null | null | rlunch/lunch/migrations/0002_talk_how_was_it.py | acdh-oeaw/django-generic-apps | 4af63a8a95826ede787347fc86951e933ccdbc6c | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2017-02-02 14:22
from __future__ import unicode_literals

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('vocabs', '0002_auto_20170202_1030'),
        ('lunch', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='talk',
            name='how_was_it',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='vocabs.SkosConcept'),
        ),
    ]
| 25.913043 | 129 | 0.644295 | 406 | 0.681208 | 0 | 0 | 0 | 0 | 0 | 0 | 162 | 0.271812 |
ddd0b7f89eb5fdc6f55d6efae895022ea00e5fd2 | 2634 | py | Python | ream/decode.py | chmlee/ream-python | 13f46596f59bb411308d1c9070b8d6f8a0afeb31 | ["MIT"] | null | null | null | ream/decode.py | chmlee/ream-python | 13f46596f59bb411308d1c9070b8d6f8a0afeb31 | ["MIT"] | null | null | null | ream/decode.py | chmlee/ream-python | 13f46596f59bb411308d1c9070b8d6f8a0afeb31 | ["MIT"] | null | null | null |
"""
REAM: REAM Ain't Markdown
~~~~~~~~~~~~~~~~~~~~~~~~~
This file is part of the ream package
:copyright: Copyright 2020 by Chih-Ming Louis Lee
:license: MIT, see LICENSE for details
"""
import sys
import os
import re
import json
import pandas as pd
from ream.transformer import Ream2Dict
from ream.grammar import REAM_RULE
def ream2dict(input_raw, output_file=None, debug=False, no_comment=False):
"""ream to json"""
if no_comment:
Ream2Dict.no_comment = True
else:
Ream2Dict.no_comment = False
input_tree = REAM_RULE.parse(input_raw)
output_raw = Ream2Dict().transform(input_tree)
if debug:
print(input_tree)
print("====================")
print(input_tree.pretty())
print("====================")
print(output_raw)
print("====================")
if output_file is None:
return output_raw
else:
with open(output_file, 'w') as file:
json.dump(output_raw, file)
print(json.dumps(output_raw, indent=4))
return None
def ream2list(input_raw):
data = ream2dict(input_raw, no_comment=True)
def flatten(d):
parent = []
children = []
for value in d.values():
if type(value) == list:
for subentry in value:
new = flatten(subentry)
if type(new[0]) == list:
for subsub in new:
children.append(subsub)
else:
children.append(new)
else:
parent.append(value)
if children:
result = [ parent + child for child in children ]
else:
result = parent
return result
return(flatten(data))
def ream2csv(input_raw, output_file):
list_raw = ream2list(input_raw)
with open(output_file, 'w') as file:
colname = ",".join([str(x) for x in range(len(list_raw[0]))])
file.write(colname)
file.write('\n')
for entry in list_raw:
file.write(",".join(entry))
file.write('\n')
def ream2df(data):
return pd.DataFrame(ream2list(data))
def main(input_raw, output_file, debug, no_comment):
"""
main function for decoding ream file
"""
output_ext = output_file.split('.')[-1]
# choose conversion function
if output_ext in ['json']:
ream2dict(input_raw, output_file, debug, no_comment)
elif output_ext in ['csv']:
ream2csv(input_raw, output_file)
else:
print("Output file formet not supported")
print("Complete")
| 24.849057 | 74 | 0.566059 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 431 | 0.163629 |
ddd0baa5f55beee804fd811c66a9f9297112106b | 444 | py | Python | snippets/3DEM/useful_bits/scratch_hdf5_2_nii.py | michielkleinnijenhuis/EM | f46a9b11298919b359e80d9f23a7e824df1356cb | ["Apache-2.0"] | null | null | null | snippets/3DEM/useful_bits/scratch_hdf5_2_nii.py | michielkleinnijenhuis/EM | f46a9b11298919b359e80d9f23a7e824df1356cb | ["Apache-2.0"] | null | null | null | snippets/3DEM/useful_bits/scratch_hdf5_2_nii.py | michielkleinnijenhuis/EM | f46a9b11298919b359e80d9f23a7e824df1356cb | ["Apache-2.0"] | null | null | null |
### get all the blocked raw datafiles from ARC and convert to nifti's ###
#rsync -avz [email protected]:/data/ndcn-fmrib-water-brain/ndcn0180/EM/M3/M3_S1_GNU/testblock/m000_?????-?????_?????-?????_?????-?????.h5 /Users/michielk/oxdata/P01/EM/M3/M3_S1_GNU/
for f in `ls m000_?????-?????_?????-?????_?????-?????.h5`; do
python $scriptdir/convert/EM_stack2stack.py ${f} ${f/.h5/.nii.gz} -i 'zyx' -l 'xyz' -e -0.0073 -0.0073 0.05 -u
done
| 74 | 191 | 0.614865 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 322 | 0.725225 |
ddd0c824e4d2d1eee0fd05d787911e01b80ce07a | 53 | py | Python | Week1/sumofdigits_teacher.py | CrazyDi/Python1 | 016dd77ace04fccfec61edf37ec5a990ead6c4ef | ["Unlicense"] | null | null | null | Week1/sumofdigits_teacher.py | CrazyDi/Python1 | 016dd77ace04fccfec61edf37ec5a990ead6c4ef | ["Unlicense"] | null | null | null | Week1/sumofdigits_teacher.py | CrazyDi/Python1 | 016dd77ace04fccfec61edf37ec5a990ead6c4ef | ["Unlicense"] | null | null | null |
import sys
print(sum([int(x) for x in sys.argv[1]]))
| 17.666667 | 41 | 0.660377 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
ddd11496dad00035846edb605bc410d5d8e9ecb5 | 1549 | py | Python | cyborg/accelerator/drivers/nic/base.py | NeCTAR-RC/cyborg | e0fab29181467c0c72667ea26a8b04c53b238ddb | ["Apache-2.0"] | 37 | 2017-03-23T02:10:35.000Z | 2021-11-25T07:57:36.000Z | cyborg/accelerator/drivers/nic/base.py | openstack/nomad | 8cd846a16c2da04c2f204b02b90db814e32acd63 | ["Apache-2.0"] | null | null | null | cyborg/accelerator/drivers/nic/base.py | openstack/nomad | 8cd846a16c2da04c2f204b02b90db814e32acd63 | ["Apache-2.0"] | 27 | 2017-07-14T02:26:24.000Z | 2022-01-19T07:55:10.000Z |
# Copyright 2020 Intel, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Cyborg NIC driver implementation.
"""

VENDOR_MAPS = {"0x8086": "intel"}


class NICDriver(object):
    """Base class for Nic drivers.

    This is just a virtual NIC drivers interface.
    Vendor should implement their specific drivers.
    """

    @classmethod
    def create(cls, vendor, *args, **kwargs):
        for sclass in cls.__subclasses__():
            vendor_name = VENDOR_MAPS.get(vendor, vendor)
            if vendor_name == sclass.VENDOR:
                return sclass(*args, **kwargs)
        raise LookupError("Could not find the NIC driver for vendor %s" % vendor)

    def discover(self):
        """Discover NIC information of current vendor(Identified by class).

        :return: List of NIC information dict.
        """
        raise NotImplementedError()

    @classmethod
    def discover_vendors(cls):
        """Discover NIC vendors of current node.

        :return: NIC vendor ID list.
        """
        raise NotImplementedError()
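# Minimal sketch of a vendor driver (hypothetical example, not part of this
# module): NICDriver.create() dispatches on a VENDOR class attribute, so a
# concrete driver only needs to declare it and implement discover():
#
#     class IntelNICDriver(NICDriver):
#         VENDOR = "intel"
#
#         def discover(self):
#             return []  # list of NIC information dicts
#
#     driver = NICDriver.create("0x8086")  # VENDOR_MAPS resolves this to "intel"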
| 29.788462 | 75 | 0.672692 | 891 | 0.57521 | 0 | 0 | 506 | 0.326662 | 0 | 0 | 1,022 | 0.659781 |
ddd29b6ed93109a393ac788b23f262d228d22a8b | 621 | py | Python | reviews/migrations/0016_auto_20180908_1733.py | UrbanBogger/horrorexplosion | 3698e00a6899a5e8b224cd3d1259c3deb3a2ca80 | ["MIT"] | null | null | null | reviews/migrations/0016_auto_20180908_1733.py | UrbanBogger/horrorexplosion | 3698e00a6899a5e8b224cd3d1259c3deb3a2ca80 | ["MIT"] | 4 | 2020-06-05T18:21:18.000Z | 2021-06-10T20:17:31.000Z | reviews/migrations/0016_auto_20180908_1733.py | UrbanBogger/horrorexplosion | 3698e00a6899a5e8b224cd3d1259c3deb3a2ca80 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2018-09-08 16:33
from __future__ import unicode_literals

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('reviews', '0015_auto_20180908_1626'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='movie',
            name='franchise_association',
        ),
        migrations.RemoveField(
            model_name='movie',
            name='is_a_remake',
        ),
        migrations.RemoveField(
            model_name='movie',
            name='is_a_sequel',
        ),
    ]
| 22.178571 | 48 | 0.574879 | 471 | 0.758454 | 0 | 0 | 0 | 0 | 0 | 0 | 175 | 0.281804 |
ddd324b790a9312a2fcc8cac11385ac3c12a277d | 2796 | py | Python | src/markup.py | alex-panda/PDFCompiler | 3454ee01a6e5ebb2d2bccdcdc32678bf1def895d | ["MIT"] | null | null | null | src/markup.py | alex-panda/PDFCompiler | 3454ee01a6e5ebb2d2bccdcdc32678bf1def895d | ["MIT"] | null | null | null | src/markup.py | alex-panda/PDFCompiler | 3454ee01a6e5ebb2d2bccdcdc32678bf1def895d | ["MIT"] | null | null | null |
from constants import ALIGNMENT, STRIKE_THROUGH, UNDERLINE


def assert_bool(val):
    # NOTE: isinstance(val, (bool, None)) raises TypeError because None is not
    # a type; test identity with None instead.
    assert val is None or isinstance(val, bool), f'Can only be True, False, or None. {val} was given instead.'


class Markup:
    """
    A Markup for a range of MarkedUpText.
    """
    def __init__(self):
        from placer.templates import TextInfo
        self._text_info = TextInfo()
        self._paragraph_break = None  # applied at MarkupStart
        self._second_pass_python = []
        self._callbacks = []  # must exist before add_callback() is used

    def set_paragraph_break(self, boolean):
        assert_bool(boolean)
        self._paragraph_break = boolean

    def paragraph_break(self):
        return self._paragraph_break

    def add_callback(self, function):
        """
        Functions that you want to be called when this Markup is reached.
        Nothing is passed to the Function.
        """
        self._callbacks.append(function)

    def markup_start_and_end(self):
        """
        Returns starting and ending markup objects for this markup.
        """
        me = MarkupEnd(self)
        ms = MarkupStart(self, me)
        return ms, me

    def copy(self):
        m = Markup()
        m._text_info = self._text_info.copy()
        m._paragraph_break = self._paragraph_break  # assign the field, not the method
        return m

    # --------------------------------
    # Methods for accessing fields

    # Enum Fields

    def text_info(self):
        return self._text_info

    def set_text_info(self, text_info):
        from placer.templates import TextInfo
        assert isinstance(text_info, TextInfo), f'Text info must be of type TextInfo, not {text_info}.'
        self._text_info = text_info

    # Other Fields

    def python(self):
        return self._second_pass_python

    def add_python(self, python_token):
        self._second_pass_python.append(python_token)

    def callbacks(self):
        return self._callbacks


class MarkupStart:
    __slots__ = ['markup', 'markup_end']

    def __init__(self, markup, markup_end=None):
        self.markup = markup  # A pointer to the Markup object
        self.markup_end = markup_end  # a pointer to the MarkupEnd that ends this markup or None if there is no End

    def copy(self):
        return MarkupStart(self.markup, self.markup_end)


class MarkupEnd:
    __slots__ = ['markup', 'undo_dict']

    def __init__(self, markup, undo_dict=None):
        self.markup = markup  # A pointer to the Markup object

        # A dictionary containing all the attributes that were changed and what
        # those attributes were changed from when the current document TextInfo
        # was changed.
        self.undo_dict = undo_dict

    def copy(self):
        return MarkupEnd(self.markup, self.undo_dict.copy() if self.undo_dict is not None else None)
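# Usage sketch (added note): a Markup brackets a span of MarkedUpText through
# its paired markers, e.g.:
#
#     m = Markup()
#     m.set_paragraph_break(True)
#     start, end = m.markup_start_and_end()  # place these around the span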
| 29.125 | 114 | 0.65093 | 2,601 | 0.930258 | 0 | 0 | 0 | 0 | 0 | 0 | 842 | 0.301144 |
ddd3332668c74ceeb6666a897a79187f953f120f | 10476 | py | Python | svirl/vars/params.py | microsoft/svirl | 8d0da6a03ad20315a690a3c65bb8b60c196c3f3d | ["MIT"] | 6 | 2020-12-21T20:11:13.000Z | 2022-03-21T07:55:33.000Z | svirl/vars/params.py | ivan-sadovsky/svirl | 523abe9fcf2a5e9d192782d7aeb7093c86ef4036 | ["MIT"] | 4 | 2021-07-15T20:12:55.000Z | 2021-08-07T22:11:18.000Z | svirl/vars/params.py | ivan-sadovsky/svirl | 523abe9fcf2a5e9d192782d7aeb7093c86ef4036 | ["MIT"] | 9 | 2020-12-22T06:06:16.000Z | 2022-03-25T17:26:55.000Z |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.

import numpy as np

import svirl.config as cfg
from svirl.storage import GArray

from . import FixedVortices


class Params(object):
    """This class contains setters and getters for parameters"""

    def __init__(self, mesh, vars):
        self.mesh = mesh
        self.vars = vars

        self.fixed_vortices = FixedVortices(self.mesh, self.vars)

        self.solveA = False
        self.linear_coefficient = cfg.linear_coefficient  # epsilon
        self.gl_parameter = cfg.gl_parameter  # kappa
        self.normal_conductivity = cfg.normal_conductivity  # sigma

        # homogeneous external magnetic field
        self._H = cfg.dtype(0.0)
        self.homogeneous_external_field_reset = cfg.homogeneous_external_field

        # x- and y- components of external vector potential for non-homogeneous external magnetic field
        self.ae, self.be = None, None

        # external and irregular vector potential
        # it should be kept self._vpei = (self.ae, self.be) + (ai, bi)
        self._vpei = None

        # non-homogeneous external magnetic field
        self.external_field = cfg.external_field

        self.order_parameter_Langevin_coefficient = cfg.order_parameter_Langevin_coefficient
        self.vector_potential_Langevin_coefficient = cfg.vector_potential_Langevin_coefficient

    def __del__(self):
        pass

    @property
    def linear_coefficient(self):
        """Sets/gets epsilon (linear coefficient)"""
        if self._epsilon.size == 1:
            return np.full((cfg.Nx, cfg.Ny), self._epsilon.get_h(), dtype=cfg.dtype)
        else:
            return self._epsilon.get_h()

    @linear_coefficient.setter
    def linear_coefficient(self, linear_coefficient):
        if callable(linear_coefficient):
            xg, yg = self.mesh.xy_grid  # was a bare `mesh`, which is a NameError
            lc = linear_coefficient(xg, yg)
        else:
            lc = linear_coefficient

        if np.isscalar(lc):
            lc = lc * np.ones(1)
        else:
            assert lc.shape == (cfg.Nx, cfg.Ny)

        self._epsilon = GArray(like=lc.astype(cfg.dtype))

    def linear_coefficient_h(self):
        if self._epsilon.size != 1:
            return self._epsilon.get_d_obj()
        return np.uintp(0)

    def linear_coefficient_scalar_h(self):
        if self._epsilon.size == 1:
            return self._epsilon.get_h()
        return cfg.dtype(0.0)

    @property
    def gl_parameter(self):
        """Sets/gets GL parameter"""
        return self._kappa

    @gl_parameter.setter
    def gl_parameter(self, gl_parameter):
        if gl_parameter is None or np.isnan(gl_parameter) or np.isinf(gl_parameter):
            gl_parameter = np.inf
        assert isinstance(gl_parameter, (np.floating, float, np.integer, int)) and (np.isposinf(gl_parameter) or gl_parameter > 0.0)
        self._kappa = cfg.dtype(gl_parameter)
        # plain Python bool; np.bool was removed in newer NumPy releases
        self.solveA = bool(not np.isposinf(self._kappa))

    def gl_parameter_squared_h(self):
        if self.solveA:
            return cfg.dtype(self.gl_parameter**2)
        return cfg.dtype(-1.0)

    @property
    def normal_conductivity(self):
        """Sets/gets normal conductivity"""
        return self._sigma

    @normal_conductivity.setter
    def normal_conductivity(self, normal_conductivity):
        assert isinstance(normal_conductivity, (np.floating, float, np.integer, int)) and normal_conductivity > 0.0
        self._sigma = cfg.dtype(normal_conductivity)
        self._rho = cfg.dtype(1.0 / normal_conductivity)

    @property
    def homogeneous_external_field(self):
        """
        Sets/gets homogeneous external field and
        does not update vector potential.
        """
        return self._H

    @homogeneous_external_field.setter
    def homogeneous_external_field(self, homogeneous_external_field):
        self._H = cfg.dtype(homogeneous_external_field)

    def _update_vector_potential(self, homogeneous_external_field, reset):
        assert isinstance(homogeneous_external_field, (np.floating, float, np.integer, int))

        if reset:
            self._H = cfg.dtype(homogeneous_external_field)

            # TODO: need a fill method in GArray
            # self.a.fill(0.0)
            # self.b.fill(0.0)
            a, b = self.vars._vp.get_vec_h()
            a.fill(0.0)
            b.fill(0.0)
            self.vars._vp.need_htod_sync()
            self.vars._vp.sync()

            delta_H = self._H
        else:
            delta_H = - self._H
            self._H = cfg.dtype(homogeneous_external_field)
            delta_H += self._H
            self.vars._vp.sync()

        # TODO: implement GPU version of ab initialization
        # Possible set of gauges, A = [g*y*H, (1-g)*x*H, 0] with any g, 0 <= g <= 1
        g = 0.5
        _, yg = self.mesh.xy_a_grid
        xg, _ = self.mesh.xy_b_grid

        a, b = self.vars._vp.get_vec_h()
        a -= g * (yg - 0.5*cfg.Ly) * delta_H
        b += (1.0 - g) * (xg - 0.5*cfg.Lx) * delta_H
        self.vars._vp.need_htod_sync()
        self.vars._vp.sync()
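    # Gauge check (added note, not in the original file): with Ax = -g*y*H and
    # Ay = (1-g)*x*H, the out-of-plane curl is dAy/dx - dAx/dy = (1-g)*H + g*H = H
    # for any g in [0, 1], so g = 0.5 above is just one convenient gauge choice.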
    def _homogeneous_external_field_delta(self, homogeneous_external_field):
        self._update_vector_potential(homogeneous_external_field, reset=False)

    homogeneous_external_field_delta = property(
        fset=_homogeneous_external_field_delta,
        doc="""Sets homogeneous external field, H, and adds to the vector
        potential deltaA, satisfying curl(deltaA) = deltaH, where
        deltaH = H - Hold and Hold is homogeneous external field
        before update.""")

    def _homogeneous_external_field_reset(self, homogeneous_external_field):
        self._update_vector_potential(homogeneous_external_field, reset=True)

    homogeneous_external_field_reset = property(
        fset=_homogeneous_external_field_reset,
        doc="""Sets homogeneous external field, H, and sets vector
        potential, A, satisfying curl(A) = H.""")

    def _update_gvpei(self):
        """Sets self.gvpei = (self.ae, self.be) + (ai, bi).
        To be executed in self.external_vector_potential and self.fixed_vortices setters."""
        assert (self.ae is None) == (self.be is None)

        ai, bi = None, None
        if self.fixed_vortices is not None and self.fixed_vortices._vpi is not None:
            ai, bi = self.fixed_vortices._vpi.get_vec_h()
        assert (ai is None) == (bi is None)

        vpei = None
        if self.ae is not None:
            if ai is not None:
                vpei = (self.ae + ai, self.be + bi)
            else:
                vpei = (self.ae, self.be)
        elif ai is not None:
            vpei = (ai, bi)

        # Guard both "nothing to combine" cases; previously a (None, None)
        # tuple could reach the allocation branch below and crash.
        if vpei is None:
            if self._vpei is not None:
                self._vpei.free()
                self._vpei = None
        else:
            # TODO: easier if GArray supports like for vector storage
            shapes = [vpei[0].shape, vpei[1].shape]
            self._vpei = GArray(shape=shapes, dtype=cfg.dtype)
            self._vpei.set_vec_h(vpei[0], vpei[1])
            self._vpei.sync()

    @property
    def external_vector_potential(self):
        """Sets/gets external vector potential."""
        assert (self.ae is None) == (self.be is None)
        if self.ae is not None:
            return self.ae, self.be
        return None

    @external_vector_potential.setter
    def external_vector_potential(self, external_vector_potential):
        if external_vector_potential is not None:
            Ax, Ay = external_vector_potential
            assert (Ax is None) == (Ay is None)
        else:
            Ax = None

        if Ax is not None:
            assert Ax.shape == (cfg.Nxa, cfg.Nya)
            assert Ay.shape == (cfg.Nxb, cfg.Nyb)
            self.ae = Ax
            self.be = Ay
        else:
            self.ae, self.be = None, None

        self._update_gvpei()

    @property
    def external_irregular_vector_potential(self):
        """Sets/gets external irregular vector potential"""
        if self._vpei is not None:
            return self._vpei.get_vec_h()
        return None

    def external_irregular_vector_potential_h(self):
        if self._vpei is not None:
            return self._vpei.get_d_obj()
        return np.uintp(0)

    @property
    def external_field(self):
        """
        Sets/gets external (non-homogeneous) magnetic field.
        Setter accepts only a number now.
        """
        # TODO: return curl(A) for non-homogeneous external_field
        A = self.external_vector_potential
        if A is not None:
            Ax, Ay = A
            # TODO: check expression below
            return (- np.diff(Ax, axis=1) * cfg.idy
                    + np.diff(Ay, axis=0) * cfg.idx)
        else:
            return None

    @external_field.setter
    def external_field(self, external_field):
        if external_field is not None:
            # NOTE: placeholder, accepts only a number now
            # TODO: solve equation curl(Aext) = Hext(r) for nonuniform field Hext(r)

            # Possible set of gauges, A = [g*y*H, (1-g)*x*H, 0] with any g, 0 <= g <= 1
            g = 0.5
            _, yg = self.mesh.xy_a_grid
            xg, _ = self.mesh.xy_b_grid

            Ax = - g * (yg - 0.5*cfg.Ly) * external_field
            Ay = (1.0 - g) * (xg - 0.5*cfg.Lx) * external_field

            self.external_vector_potential = (Ax, Ay)
        else:
            self.external_vector_potential = None

    @property
    def order_parameter_Langevin_coefficient(self):
        return self._psi_langevin_c

    @order_parameter_Langevin_coefficient.setter
    def order_parameter_Langevin_coefficient(self, order_parameter_Langevin_coefficient):
        assert isinstance(order_parameter_Langevin_coefficient, (np.floating, float, np.integer, int))
        self._psi_langevin_c = cfg.dtype(order_parameter_Langevin_coefficient)

    @property
    def vector_potential_Langevin_coefficient(self):
        return self._ab_langevin_c

    @vector_potential_Langevin_coefficient.setter
    def vector_potential_Langevin_coefficient(self, vector_potential_Langevin_coefficient):
        assert isinstance(vector_potential_Langevin_coefficient, (np.floating, float, np.integer, int))
        self._ab_langevin_c = cfg.dtype(vector_potential_Langevin_coefficient)
| 31.939024 | 132 | 0.616648 | 10,289 | 0.98215 | 0 | 0 | 5,097 | 0.486541 | 0 | 0 | 1,885 | 0.179935 |
ddd3cf7e6c6e22a81fc4f44dcb742ce19a9d4e7a | 1570 | py | Python | src/2_save_images.py | Irio/photoshopped-or-not | 70ae1a2e7e54003d916b501f8d9e020c13ca6c98 | ["MIT"] | 77 | 2016-07-13T13:36:55.000Z | 2022-02-25T07:49:38.000Z | src/2_save_images.py | goldservice2017/FakeImageDetection | e7f618989d004e24444854df63d9f1c408d0463f | ["MIT"] | 1 | 2017-07-11T10:28:36.000Z | 2017-07-11T10:28:36.000Z | src/2_save_images.py | goldservice2017/FakeImageDetection | e7f618989d004e24444854df63d9f1c408d0463f | ["MIT"] | 20 | 2016-10-23T14:57:19.000Z | 2022-03-21T13:32:45.000Z |
from hashlib import sha256
from helpers import load_dataset
import numpy as np
import os
import pandas as pd
import requests
import sys
import time
import urllib.request

CSV_PATH = sys.argv[1]
URL_COLUMN = sys.argv[2]
PATH = sys.argv[3]


def download_image(url, file_path):
    try:
        if 'imgur.com' in url:
            final_url = requests.get(url).url
            if ('//'.join(final_url.split('//')[1:])) == 'i.imgur.com/removed.png':
                raise IOError('HTTP Error 404: Not Found')
        urllib.request.urlretrieve(url, file_path)
        print('+ %s' % url)
    except IOError as e:
        print('%s - %s' % (url, e), file=sys.stderr)


def url_to_file_name(url):
    if url:
        file_name = sha256(url.encode('utf-8')).hexdigest()
        extension = url.split('.')[-1]
        if len(extension) > 4:
            return file_name
        else:
            return '%s.%s' % (file_name, extension)


if not os.path.exists(PATH):
    os.mkdir(PATH)

dataset = load_dataset(CSV_PATH)
# builtin str: the np.str alias was removed in newer NumPy releases
dataset[URL_COLUMN] = dataset[URL_COLUMN].astype(str).replace({'nan': None})
dataset['file_names'] = dataset[URL_COLUMN].map(url_to_file_name)
already_downloaded = dataset['file_names'].isin(os.listdir(PATH))
without_url = dataset[URL_COLUMN].isnull()
remaining_images = dataset[~(already_downloaded | without_url)]
print('Remaining: %i' % len(remaining_images))

for index, values in remaining_images.iterrows():
    url = dict(values)[URL_COLUMN]
    file_path = '%s/%s' % (PATH, url_to_file_name(url))
    time.sleep(1)
    download_image(url, file_path)
| 30.784314 | 83 | 0.66051 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 154 | 0.098089 |
ddd71b2e4c6346f49e71518346e30e3f595d4613 | 1169 | py | Python | lf3py/task/data.py | rog-works/lf3py | e89937f7aa133ed54d85764f06101ab9abf6b960 | ["CNRI-Python"] | null | null | null | lf3py/task/data.py | rog-works/lf3py | e89937f7aa133ed54d85764f06101ab9abf6b960 | ["CNRI-Python"] | 48 | 2020-12-19T13:47:26.000Z | 2021-01-07T22:27:56.000Z | lf3py/task/data.py | rog-works/lf3py | e89937f7aa133ed54d85764f06101ab9abf6b960 | ["CNRI-Python"] | null | null | null |
from abc import ABCMeta, abstractmethod
from dataclasses import dataclass
from typing import Any, List, Type, TypeVar

from lf3py.lang.dsn import DSN
from lf3py.serialization.serializer import DictSerializer, Serializer

T_OBJ = TypeVar('T_OBJ')


class Command(metaclass=ABCMeta):
    @property
    @abstractmethod
    def dsn(self) -> DSN:
        raise NotImplementedError()

    @abstractmethod
    def data(self, data_type: Type[T_OBJ]) -> T_OBJ:
        raise NotImplementedError()


class CommandQueue:
    def __init__(self) -> None:
        self._queue: List[Command] = []

    @property
    def has_next(self) -> bool:
        return len(self._queue) > 0

    def enqueue(self, *commands: Command):
        self._queue.extend(commands)

    def __iter__(self) -> 'CommandQueue':
        return self

    def __next__(self) -> Command:
        if not self.has_next:
            raise StopIteration()

        task = self._queue[0]
        self._queue = self._queue[1:]
        return task


@dataclass
class Result:
    _serializer: Serializer = DictSerializer()

    def serialize(self) -> Any:
        return self._serializer.serialize(self)


Ok = Result()
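# Usage sketch (hypothetical Command subclasses): CommandQueue drains FIFO via
# the iterator protocol:
#
#     queue = CommandQueue()
#     queue.enqueue(cmd_a, cmd_b)  # both implement Command
#     for command in queue:        # yields cmd_a, then cmd_b, then stops
#         handle(command)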
| 21.648148 | 69 | 0.662104 | 887 | 0.758768 | 0 | 0 | 424 | 0.362703 | 0 | 0 | 21 | 0.017964 |
ddd7b262ec09a987c21172c82cd032e817c1ba5b | 801 | py | Python | quapy/method/__init__.py | valgur/QuaPy | 6b1ba4886a1d64b086829306cbba689cdcfd60e8 | ["BSD-3-Clause"] | 34 | 2021-01-06T14:01:06.000Z | 2022-03-08T06:59:04.000Z | quapy/method/__init__.py | valgur/QuaPy | 6b1ba4886a1d64b086829306cbba689cdcfd60e8 | ["BSD-3-Clause"] | 4 | 2021-06-07T07:45:57.000Z | 2021-06-21T11:16:10.000Z | quapy/method/__init__.py | valgur/QuaPy | 6b1ba4886a1d64b086829306cbba689cdcfd60e8 | ["BSD-3-Clause"] | 6 | 2021-06-07T10:08:17.000Z | 2022-03-07T13:42:15.000Z |
from . import aggregative
from . import base
from . import meta
from . import non_aggregative

EXPLICIT_LOSS_MINIMIZATION_METHODS = {
    aggregative.ELM,
    aggregative.SVMQ,
    aggregative.SVMAE,
    aggregative.SVMKLD,
    aggregative.SVMRAE,
    aggregative.SVMNKLD
}

AGGREGATIVE_METHODS = {
    aggregative.CC,
    aggregative.ACC,
    aggregative.PCC,
    aggregative.PACC,
    aggregative.EMQ,
    aggregative.HDy,
    aggregative.X,
    aggregative.T50,
    aggregative.MAX,
    aggregative.MS,
    aggregative.MS2,
} | EXPLICIT_LOSS_MINIMIZATION_METHODS

NON_AGGREGATIVE_METHODS = {
    non_aggregative.MaximumLikelihoodPrevalenceEstimation
}

META_METHODS = {
    meta.Ensemble,
    meta.QuaNet
}

QUANTIFICATION_METHODS = AGGREGATIVE_METHODS | NON_AGGREGATIVE_METHODS | META_METHODS
| 18.627907 | 85 | 0.741573 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
ddd886a1ce049e2677d31fc3b30fe240938605cc | 208 | py | Python | day 5 2nd.py | shalini-22/Letsupgrade-Python-essentials | fda98097be08dbb9bfbba6e0622954c6eba0f7f3 | ["MIT"] | null | null | null | day 5 2nd.py | shalini-22/Letsupgrade-Python-essentials | fda98097be08dbb9bfbba6e0622954c6eba0f7f3 | ["MIT"] | null | null | null | day 5 2nd.py | shalini-22/Letsupgrade-Python-essentials | fda98097be08dbb9bfbba6e0622954c6eba0f7f3 | ["MIT"] | null | null | null |
def prime_num(n):
    if n > 1:
        for i in range(2, n):
            if n % i == 0:
                return False
        return True


lst_prime = list(filter(prime_num, range(1, 2500)))
print(len(lst_prime))
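# Note (added): prime_num returns None (falsy) for n <= 1, so filter() drops 1
# as intended; trial division up to n - 1 makes this O(n) per candidate.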
| 26 | 48 | 0.533654 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
ddd9ee0a17827daaf5df8b02b71f681e46b3a8a2 | 916 | py | Python | OT/test_subplots.py | pine2104/Python_for_Lab | 571398c2422711d8a74f9c95a746537859458557 | ["MIT"] | 5 | 2022-02-03T20:10:21.000Z | 2022-03-30T08:05:10.000Z | OT/test_subplots.py | pine2104/Python_for_Lab | 571398c2422711d8a74f9c95a746537859458557 | ["MIT"] | null | null | null | OT/test_subplots.py | pine2104/Python_for_Lab | 571398c2422711d8a74f9c95a746537859458557 | ["MIT"] | null | null | null |
from EM_Algorithm.gen_gauss import gen_gauss
from EM_Algorithm.gen_poisson import gen_poisson
import numpy as np
import matplotlib.pyplot as plt

x = gen_gauss([8], [2], [1000])
y = gen_poisson([1], [1000])

fig = plt.figure(figsize=(8, 8))
# Add a gridspec with two rows and two columns and a ratio of 2 to 7 between
# the size of the marginal axes and the main axes in both directions.
# Also adjust the subplot parameters for a square plot.
gs = fig.add_gridspec(2, 2, width_ratios=(7, 2), height_ratios=(2, 7),
                      left=0.1, right=0.9, bottom=0.1, top=0.9,
                      wspace=0.05, hspace=0.05)
ax = fig.add_subplot(gs[1, 0])
ax.scatter(x, y)
ax_histx = fig.add_subplot(gs[0, 0], sharex=ax)
ax_histy = fig.add_subplot(gs[1, 1], sharey=ax)
ax_histx.hist(x, bins=10, color='grey', edgecolor="white")
ax_histy.hist(y, bins=10, orientation='horizontal', color='grey', edgecolor="white")
| 35.230769 | 84 | 0.689956 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 238 | 0.259825 |
ddda6ce0c1f2ddd975f7aba52a0da244fa436a75 | 2114 | py | Python | code/examples/VsevolodTymofyeyev2/example.py | TrackerSB/MasterThesis | 2792203d28d6c7b62f54545344ee6772d2ec5b64 | ["MIT"] | null | null | null | code/examples/VsevolodTymofyeyev2/example.py | TrackerSB/MasterThesis | 2792203d28d6c7b62f54545344ee6772d2ec5b64 | ["MIT"] | null | null | null | code/examples/VsevolodTymofyeyev2/example.py | TrackerSB/MasterThesis | 2792203d28d6c7b62f54545344ee6772d2ec5b64 | ["MIT"] | null | null | null |
import os
from threading import Thread
from typing import List

from aiExchangeMessages_pb2 import SimulationID, TestResult


def _handle_vehicle(sid: SimulationID, vid: str, requests: List[str]) -> None:
    vid_obj = VehicleID()
    vid_obj.vid = vid
    i = 0
    while i < 3:
        i += 1
        print(sid.sid + ": Test status: " + service.get_status(sid))
        print(vid + ": Wait")
        sim_state = service.wait_for_simulator_request(sid, vid_obj)  # wait()
        if sim_state is SimStateResponse.SimState.RUNNING:
            print(vid + ": Request data")
            request = DataRequest()
            request.request_ids.extend(requests)
            data = service.request_data(sid, vid_obj, request)  # request()
            print(data)
            print(vid + ": Wait for control")
            control = Control()
            control.avCommand.accelerate = 1
            service.control(sid, vid_obj, control)
        else:
            print(sid.sid + ": The simulation is not running anymore (State: "
                  + SimStateResponse.SimState.Name(sim_state) + ").")
            print(sid.sid + ": Final result: " + service.get_result(sid))
            break

    sim_state = service.wait_for_simulator_request(sid, vid_obj)  # wait()
    if sim_state is SimStateResponse.SimState.RUNNING:  # fixed duplicated "sim_state is" typo
        result = TestResult()
        result.result = TestResult.Result.FAILED
        service.control_sim(sid, result)


if __name__ == "__main__":
    from AIExchangeService import get_service
    from aiExchangeMessages_pb2 import SimStateResponse, Control, SimulationID, VehicleID, DataRequest

    service = get_service()

    # Send tests
    sids = service.run_tests("test", "test", "xmls/criteriaA.dbc.xml", "xmls/environmentA.dbe.xml")
    # -> Response status: 500
    print("Tests sent")

    # Interact with a simulation
    if not sids:
        exit(1)
    sid = SimulationID()
    sid.sid = sids.sids[0]

    ego_requests = ["egoSpeed"]
    ego_vehicle = Thread(target=_handle_vehicle, args=(sid, "ego", ego_requests))
    ego_vehicle.start()
    ego_vehicle.join()
| 33.555556 | 102 | 0.638127 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 325 | 0.153737 |
dddd2ef86d68662ac04401af3b7b61b4ab5cb9ed | 1916 | py | Python | rfap.py | anabanami/RFAP | 09b434d115ae1872810d65126bcbc9d7af510e89 | ["MIT"] | null | null | null | rfap.py | anabanami/RFAP | 09b434d115ae1872810d65126bcbc9d7af510e89 | ["MIT"] | null | null | null | rfap.py | anabanami/RFAP | 09b434d115ae1872810d65126bcbc9d7af510e89 | ["MIT"] | null | null | null |
# PHS3350
# Week 2 - wave packet and RFAP -
# "what I cannot create I cannot understand" - R. Feynman.
# Ana Fabela Hinojosa, 13/03/2021
import os
from pathlib import Path
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import physunits
from scipy.fft import fft, ifft

plt.rcParams['figure.dpi'] = 200

folder = Path('wavepacket_time_evolution')
os.makedirs(folder, exist_ok=True)
os.system(f'rm {folder}/*.png')

# hbar = 1.0545718e-34 # [Js]
hbar = 1
m = 1
𝜎 = 1

x_max = 10
x = np.linspace(-x_max, x_max, 1024, endpoint=False)
n = x.size
x_step = x[1] - x[0]

# oscillations per unit of space
k0 = 2 * np.pi / x_max * 5
# For Fourier space
k = 2 * np.pi * np.fft.fftfreq(n, x_step)

wave = np.exp(- x**2 / (2*𝜎**2)) * np.exp(1j*k0*x)

# Square well potential
sw = np.zeros_like(x)
# depth
sw[0] = sw[-1] = 1000*k0**2


# Schrodinger equation (or first order time derivative)
def Schrodinger_eqn(t, Ψ):
    r = np.linspace(0, x_max, 1024, endpoint=False)
    # ifft(-(k**2) * fft(Ψ)) is the spectral second derivative of Ψ
    KΨ = -hbar**2 / (2 * m) * ifft(-(k**2) * fft(Ψ))
    VΨ = sw * Ψ
    # I dunno #+ (-1j / hbar) * 1j*x**3 * Ψ
    return (-1j / hbar) * (KΨ + VΨ)


def Runge_Kutta(t, delta_t, Ψ):
    k1 = Schrodinger_eqn(t, Ψ)
    k2 = Schrodinger_eqn(t + delta_t / 2, Ψ + k1 * delta_t / 2)
    k3 = Schrodinger_eqn(t + delta_t / 2, Ψ + k2 * delta_t / 2)
    k4 = Schrodinger_eqn(t + delta_t, Ψ + k3 * delta_t)
    return Ψ + (delta_t / 6) * (k1 + 2 * k2 + 2 * k3 + k4)
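# Sanity check (optional, not in the original script): this RK4 step should
# approximately conserve the wavefunction norm, i.e.
# np.sum(np.abs(wave)**2) * x_step should stay near its initial value.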
i = 0
t = 0
t_final = 5
delta_t = 0.0001

while t < t_final:
    if not i % 400:
        plt.plot(x, np.real(wave), label="real part")
        plt.plot(x, np.imag(wave), label="imaginary part")
        plt.xlim(-x_max, x_max)
        plt.legend()
        plt.xlabel("x")
        plt.title(f"wave packet t = {i}")
        plt.savefig(folder/f'{i:04d}.png')
        # plt.show()
        plt.clf()

    wave = Runge_Kutta(t, delta_t, wave)
    i += 1
    t += delta_t
| 23.95 | 64 | 0.598643 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 476 | 0.245868 |
dddf4b825918a14f6e6549a6e8c4a604f9609cd9 | 295 | py | Python | tests/test_pass.py | zaber-paul/base | 9c4d4e40db7a5059dcaa32d44be0146b6bb829c4 | ["Apache-2.0"] | null | null | null | tests/test_pass.py | zaber-paul/base | 9c4d4e40db7a5059dcaa32d44be0146b6bb829c4 | ["Apache-2.0"] | null | null | null | tests/test_pass.py | zaber-paul/base | 9c4d4e40db7a5059dcaa32d44be0146b6bb829c4 | ["Apache-2.0"] | null | null | null |
""" run with
nosetests -v --nocapture
or
nosetests -v
"""
from builtins import object
from cloudmesh_base.util import HEADING
class Test_pass(object):
def setup(self):
pass
def tearDown(self):
pass
def test_dummy(self):
HEADING()
assert True
| 11.8 | 39 | 0.630508 | 162 | 0.549153 | 0 | 0 | 0 | 0 | 0 | 0 | 61 | 0.20678 |
dde27c4c382b986590140f153b007830bdfd2e36 | 3725 | py | Python | tests/api/test_record_permissions.py | equadon/invenio-app-ils | 42ba282968d0aa28fb1bfc71d0709685165aaec4 | ["MIT"] | null | null | null | tests/api/test_record_permissions.py | equadon/invenio-app-ils | 42ba282968d0aa28fb1bfc71d0709685165aaec4 | ["MIT"] | null | null | null | tests/api/test_record_permissions.py | equadon/invenio-app-ils | 42ba282968d0aa28fb1bfc71d0709685165aaec4 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2018-2019 CERN.
#
# invenio-app-ils is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.

"""Test record permissions."""

from __future__ import unicode_literals

import uuid

import pytest
from flask_principal import RoleNeed, identity_loaded
from flask_security import login_user
from invenio_access.models import ActionRoles
from invenio_accounts.models import Role, User
from invenio_records.api import Record

from invenio_app_ils.records.permissions import RecordPermission, \
    create_records_action


@pytest.mark.parametrize(
    "access,action,is_allowed",
    [
        ({"foo": "bar"}, "read", True),
        ({"foo": "bar"}, "update", False),
        ({"_access": {"read": [1]}}, "read", True),
        ({"_access": {"read": [2]}}, "read", False),
        ({"_access": {"read": ["records-readers"]}}, "read", True),
        # permission for specific user to create
        ({"_access": {"update": [1]}}, "update", True),
        # checks if the access works for different actions
        ({"_access": {"update": [1]}}, "create", False),
        ({"_access": {"delete": [1]}}, "update", False),
        # delete access for user and librarian
        ({"_access": {"delete": [1, "librarian"]}}, "delete", True),
    ],
)
def test_record_generic_access(db, users, with_access, access, action,
                               is_allowed):
    """Test access control for records."""

    @identity_loaded.connect
    def mock_identity_provides(sender, identity):
        """Provide additional role to the user."""
        roles = [RoleNeed("records-readers")]
        # Gives the user additional roles, f.e. based on his groups
        identity.provides |= set(roles)

    def login_and_test(user_id):
        login_user(User.query.get(user_id))
        # Create record
        user = User.query.get(user_id)
        id = uuid.uuid4()
        record = Record.create(access, id_=id)
        factory = RecordPermission(record, action)

        if user.has_role("admin"):
            # super user can do EVERYTHING
            assert factory.can()
        elif user.has_role("librarian") and action != "delete":
            # librarian should be able to update, create, and read everything
            assert factory.can()
        else:
            assert factory.can() if is_allowed else not factory.can()

    # Test standard user
    login_and_test(users["patron1"].id)
    # Test librarian access
    login_and_test(users["librarian"].id)
    # Test superuser access
    login_and_test(users["admin"].id)


@pytest.mark.parametrize(
    "access,action,is_allowed",
    [
        ({"foo": "bar"}, "create", True),
        ({"foo": "bar"}, "update", False),
        ({"foo": "bar"}, "delete", False),
    ],
)
def test_record_patron_create(db, users, access, action, is_allowed):
    """Test patron create."""
    # create role to be able to create records
    role = Role(name="records-creators")
    db.session.add(role)
    db.session.commit()
    # assign role to the action "create-records"
    ar = ActionRoles.allow(create_records_action, role_id=role.id)
    db.session.add(ar)
    db.session.commit()

    @identity_loaded.connect
    def mock_identity_provides(sender, identity):
        """Provide additional role to the user."""
        roles = [RoleNeed(role.name)]
        # Gives the user additional roles, f.e. based on his groups
        identity.provides |= set(roles)

    login_user(users["patron1"])
    id = uuid.uuid4()
    record = Record.create(access, id_=id)
    factory = RecordPermission(record, action)
    assert factory.can() if is_allowed else not factory.can()
| 33.863636 | 77 | 0.633289 | 0 | 0 | 0 | 0 | 3,089 | 0.829262 | 0 | 0 | 1,317 | 0.353557 |
dde28d401374fdc16a1d1b838ec6fd235235e1cc | 945 | py | Python | src/Index.py | bhed01/bhed01.github.io | 132cf8e4afa05a00d71555afa2002a2d50c304c8 | ["MIT"] | 3 | 2020-10-16T12:26:31.000Z | 2022-02-03T18:06:35.000Z | src/Index.py | bhed01/bhed01.github.io | 132cf8e4afa05a00d71555afa2002a2d50c304c8 | ["MIT"] | null | null | null | src/Index.py | bhed01/bhed01.github.io | 132cf8e4afa05a00d71555afa2002a2d50c304c8 | ["MIT"] | null | null | null |
from .components.Head import Head
from .components.NavIcons import Hamburger
from .components.Screens import HomeScreen, AboutScreen, ProjectsScreen
from .components.Footer import Footer
from .utils import JSON_DIR
from json import load
import os


def Index():
    with open(os.path.join(JSON_DIR, 'home_data.json')) as hFile:
        home_data = load(hFile)

    with open(os.path.join(JSON_DIR, 'about_data.json')) as aFile:
        about_data = load(aFile)

    with open(os.path.join(JSON_DIR, 'projects_data.json')) as pFile:
        projects = load(pFile)['projects']

    with open(os.path.join(JSON_DIR, 'footer_data.json')) as fFile:
        footer_data = load(fFile)

    return Head(
        title='Portfolio - Bhed',
        children=f'''
{Hamburger()}
{HomeScreen(**home_data)}
{AboutScreen(**about_data)}
{ProjectsScreen(projects)}
{Footer(**footer_data)}'''
    )
| 32.586207 | 72 | 0.649735 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 270 | 0.285714 |
dde2faa4056b42852281bc2be32673929adfef2b | 5190 | py | Python | bisk/features/base.py | facebookresearch/bipedal-skills | edd424a8779e3a0121fb995cad00839d8226cf46 | ["MIT"] | 6 | 2021-11-05T16:57:58.000Z | 2022-03-16T10:34:46.000Z | bisk/features/base.py | facebookresearch/bipedal-skills | edd424a8779e3a0121fb995cad00839d8226cf46 | ["MIT"] | null | null | null | bisk/features/base.py | facebookresearch/bipedal-skills | edd424a8779e3a0121fb995cad00839d8226cf46 | ["MIT"] | 1 | 2021-11-05T16:57:47.000Z | 2021-11-05T16:57:47.000Z |
# Copyright (c) 2021-present, Facebook, Inc.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
#
import logging
from typing import Dict, List
import gym
import numpy as np
from dm_control import mujoco
from dm_control.mujoco.wrapper.mjbindings import enums as mjenums
from dm_control.mujoco.wrapper.mjbindings import mjlib
log = logging.getLogger(__name__)
class Featurizer:
    '''Base class mapping MuJoCo physics state to per-step observation features.'''
n_qpos: Dict[int, int] = { # qpos entries per joint type
mjenums.mjtJoint.mjJNT_FREE: 7,
mjenums.mjtJoint.mjJNT_BALL: 4,
mjenums.mjtJoint.mjJNT_SLIDE: 1,
mjenums.mjtJoint.mjJNT_HINGE: 1,
}
n_qvel: Dict[int, int] = { # qvel entries per joint type
mjenums.mjtJoint.mjJNT_FREE: 6,
mjenums.mjtJoint.mjJNT_BALL: 3,
mjenums.mjtJoint.mjJNT_SLIDE: 1,
mjenums.mjtJoint.mjJNT_HINGE: 1,
}
def __init__(
self,
p: mujoco.Physics,
robot: str,
prefix: str = 'robot',
exclude: str = None,
):
self.p = p
self.prefix = prefix
self.observation_space: gym.spaces.Box = None
def __call__(self) -> np.ndarray:
raise NotImplementedError()
def set_frame_of_reference(self):
raise NotImplementedError()
def feature_names(self) -> List[str]:
raise NotImplementedError()
def qpos_names(self) -> List[str]:
names = ['' for i in range(len(self.p.data.qpos))]
for jn in self.p.named.model.jnt_type.axes.row.names:
typ = self.p.named.model.jnt_type[jn]
adr = self.p.named.model.jnt_qposadr[jn]
if typ == 0:
names[adr + 0] = f'{jn}:px'
names[adr + 1] = f'{jn}:py'
names[adr + 2] = f'{jn}:pz'
names[adr + 3] = f'{jn}:ow'
names[adr + 4] = f'{jn}:ox'
names[adr + 5] = f'{jn}:oy'
names[adr + 6] = f'{jn}:oz'
elif typ == 1:
names[adr + 0] = f'{jn}:ow'
names[adr + 1] = f'{jn}:ox'
names[adr + 2] = f'{jn}:oy'
names[adr + 3] = f'{jn}:oz'
elif typ == 2 or typ == 3:
names[adr] = f'{jn}:p'
else:
raise ValueError(f'Unknown joint type {typ}')
return names
def qvel_names(self) -> List[str]:
names = ['' for i in range(len(self.p.data.qvel))]
for jn in self.p.named.model.jnt_type.axes.row.names:
typ = self.p.named.model.jnt_type[jn]
adr = self.p.named.model.jnt_dofadr[jn]
if typ == 0:
names[adr + 0] = f'{jn}:lvx'
names[adr + 1] = f'{jn}:lvy'
names[adr + 2] = f'{jn}:lvz'
names[adr + 3] = f'{jn}:avx'
names[adr + 4] = f'{jn}:avy'
names[adr + 5] = f'{jn}:avz'
elif typ == 1:
names[adr + 0] = f'{jn}:avx'
names[adr + 1] = f'{jn}:avy'
names[adr + 2] = f'{jn}:avz'
elif typ == 2 or typ == 3:
names[adr] = f'{jn}:v'
else:
raise ValueError(f'Unknown joint type {typ}')
return names
def cfrc_ext_names(self) -> List[List[str]]:
names: List[List[str]] = []
for cn in self.p.named.data.cfrc_ext.axes.row.names:
names.append(
[f'{cn}:c{n}' for n in ['rx', 'ry', 'rz', 'tx', 'ty', 'tz']]
)
return names
def sensor_names(self) -> List[str]:
names = ['' for i in range(len(self.p.data.sensordata))]
for sn in self.p.named.model.sensor_adr.axes.row.names:
typ = self.p.named.model.sensor_type[sn]
adr = self.p.named.model.sensor_adr[sn]
if typ == mjenums.mjtSensor.mjSENS_GYRO:
feats = ['avx', 'avy', 'avz']
elif (
typ == mjenums.mjtSensor.mjSENS_VELOCIMETER
or typ == mjenums.mjtSensor.mjSENS_SUBTREELINVEL
):
feats = ['lvx', 'lvy', 'lvz']
elif typ == mjenums.mjtSensor.mjSENS_ACCELEROMETER:
feats = ['lax', 'lay', 'laz']
elif (
typ == mjenums.mjtSensor.mjSENS_FRAMEPOS
or typ == mjenums.mjtSensor.mjSENS_SUBTREECOM
):
feats = ['px', 'py', 'pz']
elif typ == mjenums.mjtSensor.mjSENS_JOINTPOS:
feats = ['']
elif typ == mjenums.mjtSensor.mjSENS_JOINTVEL:
feats = ['']
elif typ == mjenums.mjtSensor.mjSENS_FORCE:
feats = ['fx', 'fy', 'fz']
elif typ == mjenums.mjtSensor.mjSENS_TORQUE:
feats = ['tx', 'ty', 'tz']
elif typ == mjenums.mjtSensor.mjSENS_RANGEFINDER:
feats = ['d']
elif typ == mjenums.mjtSensor.mjSENS_TOUCH:
feats = ['f']
else:
raise ValueError(f'Unsupported sensor type: {typ}')
for i, f in enumerate(feats):
names[adr + i] = f'{sn}:{f}'
return names
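Featurizer is abstract: subclasses must provide __call__, set_frame_of_reference and feature_names. A minimal concrete sketch (the subclass name and the choice to expose raw qpos/qvel are illustrative assumptions, not part of the package):

class JointValueFeaturizer(Featurizer):
    '''Hypothetical featurizer: observations are raw joint positions/velocities.'''

    def __init__(self, p: mujoco.Physics, robot: str, prefix: str = 'robot'):
        super().__init__(p, robot, prefix)
        n = len(p.data.qpos) + len(p.data.qvel)
        self.observation_space = gym.spaces.Box(
            low=-np.inf, high=np.inf, shape=(n,), dtype=np.float64)

    def __call__(self) -> np.ndarray:
        # Concatenate joint positions and velocities into one feature vector
        return np.concatenate([self.p.data.qpos, self.p.data.qvel])

    def set_frame_of_reference(self):
        pass  # no egocentric transform in this sketch

    def feature_names(self) -> List[str]:
        return self.qpos_names() + self.qvel_names()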
| 36.293706 | 76 | 0.511753 | 4,751 | 0.915414 | 0 | 0 | 0 | 0 | 0 | 0 | 691 | 0.133141 |
dde38c8eda97903a41ce2ff61801ff8773e4a599 | 1124 | py | Python | dns.py | bernd-wechner/OpenWRT-Tools | 4ca2eb0d0774e0d97b48a485fa18a4d4bbc3f108 | ["Unlicense"] | 3 | 2017-06-12T11:03:56.000Z | 2021-04-11T20:09:47.000Z | dns.py | bernd-wechner/OpenWRT-Tools | 4ca2eb0d0774e0d97b48a485fa18a4d4bbc3f108 | ["Unlicense"] | null | null | null | dns.py | bernd-wechner/OpenWRT-Tools | 4ca2eb0d0774e0d97b48a485fa18a4d4bbc3f108 | ["Unlicense"] | 1 | 2021-08-25T02:39:40.000Z | 2021-08-25T02:39:40.000Z |
#!/usr/bin/python
#
# identify the DNS servers used on the WAN interface.
#
# Tries to find the owner of the DNS as well.
#
# DNS spoofing is one of the entry points for malware. I haven't seen it since I dumped
# Windows at home but in the past have seen malware that would change the DNS config on
# the router. Kind of handy to see a name attached to the IP addresses then; most of
# us wouldn't recognize an IP address, but will recognize a name.
import json, subprocess, os
devnull = open(os.devnull, 'w')
def get_wan_status():
try:
status = subprocess.check_output(["ubus", "call", "network.interface.wan", "status"], stderr=devnull)
return json.loads(status)
    except (subprocess.CalledProcessError, OSError, ValueError):
return None
def get_owner(ip):
try:
return subprocess.check_output(["ip_owner", ip], stderr=devnull).strip()
    except (subprocess.CalledProcessError, OSError):
return None
wan_status = get_wan_status()
if wan_status:
dns_servers = wan_status["dns-server"]
    print("DNS Servers on WAN interface:")
n = 1
for dns in dns_servers:
owner = get_owner(dns)
        print("\tDNS %d: %s\t%s" % (n, dns, owner))
n += 1
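For reference, an illustrative fragment of the ubus JSON reply that get_wan_status() parses; only the "dns-server" key is relied on above, and the addresses here are invented documentation values:

{
    "up": true,
    "dns-server": ["192.0.2.53", "198.51.100.53"]
}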
| 30.378378 | 109 | 0.669929 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 554 | 0.492883 |
dde5915014c5c7fff2dcda09f7e0ecc75334cecc | 398 | py | Python | downloadHSfiles.py | McDowellLab/downloadNEON | b43d47d40cbf0e168dfa307969687025e3f5fa34 | ["MIT"] | null | null | null | downloadHSfiles.py | McDowellLab/downloadNEON | b43d47d40cbf0e168dfa307969687025e3f5fa34 | ["MIT"] | null | null | null | downloadHSfiles.py | McDowellLab/downloadNEON | b43d47d40cbf0e168dfa307969687025e3f5fa34 | ["MIT"] | null | null | null |
from hs_restclient import HydroShare, HydroShareAuthBasic
# Download LCZO sensor database from Hydroshare
# link to the Hydroshare resource https://www.hydroshare.org/resource/b38bc00887ec45ac9499f9dea45eb8d5/
auth = HydroShareAuthBasic(username="miguelcleon", password = "x")
hs = HydroShare(auth = auth)
hs.getResource('b38bc00887ec45ac9499f9dea45eb8d5', destination='./lczodata', unzip=True)
| 39.8 | 103 | 0.81407 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 212 | 0.532663 |
dde641d979074c8c01c9f5c1fbef8f55228ae8fe | 339 | py | Python | protonfixes/gamefixes/287260.py | bmaupin/protonfixes | 9fc87a9a487d7dfbd0c602a079f3b026f8a84638 | ["BSD-2-Clause"] | 213 | 2018-10-06T01:40:26.000Z | 2022-03-16T16:17:37.000Z | protonfixes/gamefixes/287260.py | bmaupin/protonfixes | 9fc87a9a487d7dfbd0c602a079f3b026f8a84638 | ["BSD-2-Clause"] | 88 | 2018-10-06T17:38:56.000Z | 2022-02-19T13:27:26.000Z | protonfixes/gamefixes/287260.py | bmaupin/protonfixes | 9fc87a9a487d7dfbd0c602a079f3b026f8a84638 | ["BSD-2-Clause"] | 67 | 2018-10-09T16:57:16.000Z | 2022-03-14T13:06:25.000Z |
""" Game fix for Toybox Turbos
"""
#pylint: disable=C0103
from protonfixes import util
from protonfixes.logger import log
def main():
    """ Disables esync to fix the infinite startup screen
    """
log('Applying fixes for Toybox Turbos')
# Fix infinite startup screen
util.set_environment('PROTON_NO_ESYNC', '1')
| 21.1875 | 65 | 0.707965 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 208 | 0.613569 |
dde74c584ce2a956ca4842502658de5be6e68e74 | 4035 | py | Python | systrade/models/strategies.py | pdghawk/systrade | 2200b950a3172f22a424c9e547aa6fa982f54c46 | ["BSD-3-Clause"] | 1 | 2022-02-09T20:16:51.000Z | 2022-02-09T20:16:51.000Z | systrade/models/strategies.py | pdghawk/systrade | 2200b950a3172f22a424c9e547aa6fa982f54c46 | ["BSD-3-Clause"] | null | null | null | systrade/models/strategies.py | pdghawk/systrade | 2200b950a3172f22a424c9e547aa6fa982f54c46 | ["BSD-3-Clause"] | null | null | null |
""" The strategies module provides utilities for designing trading strategies
Notes
------
All strategies should inherit from BaseStrategy, and provide a get_order_list
method. For details of the requirements of this method, see its docstring in
base/BaseStrategy, or in the method within SimpleStrategy in this module.
"""
import copy
import pandas as pd
import numpy as np
import time as timer
import matplotlib.pyplot as plt
from .base import BaseStrategy
# TODO: extend strategy types
# examples - how the signals are combined - could be done in many ways
# should check whether individual positions should be exited (been held at a loss too long)
# e.g. buy and simultaneously do a sell_limit for same_quantity or something
# portfolio re-optimization movements (modern portfolio theory), n.b. maximise
# expected returns whilst minimising the portfolio variance
class SimpleStrategy(BaseStrategy):
""" A simple strategy that sums signals in 5 minute intervals
Combines all signals provided, and sums over 5 minute intervals to generate
a meta-signal on which to place orders.
    A list of orders can be generated with get_order_list(), and that list of
orders can be historically traded with run_historical().
"""
def __init__(self,signal_dict,ticker_list,resampling=5):
# initialise the members of the BaseStrategy
if isinstance(resampling,(int,float)):
self.resampling = int(resampling)
else:
raise TypeError("resampling should be a number")
super().__init__(signal_dict,ticker_list)
def get_order_list(self,stocks_df):
""" generate a list of orders based on historical data
Args:
- stocks_df: dataframe of tickers values indexed by time
Returns:
- order_list: a list of trading.orders objects
"""
requests_dict = self.signal_requests(stocks_df)
order_list = []
for ticker in requests_dict:
# concat all the dataframes for this ticker into single dataframe
# where each signal will have a column
tmp_df = pd.concat(requests_dict[ticker],sort=False)
# now collate over time periods to get a 'master signal' from the
# various input signals
#resample into 5 min intervals
tmp_df = tmp_df.resample(str(self.resampling)+'T',
closed='right',
label='right').sum()
# sum over all signals
tmp_df['sum'] = tmp_df.sum(axis=1)
# drop the original signal columns
tmp_df = tmp_df.drop(columns=list(self.signal_dict.keys()))
# this is a simple selection - anywhere the sum over signals gave
# a positive or negative overall signal in that 5 minute period
tmp_df = tmp_df.loc[~(tmp_df==0).all(axis=1)]
for idx,row in tmp_df.iterrows():
if row['sum']>0:
# signals sum to positive request - buy
#order_list.append(orders.BuyMarketOrder(idx,ticker,1))
order_list.append({'type': 'buy_market',
'time': idx,
'ticker': ticker,
'quantity': 1})
elif row['sum']<0:
# signals sum to negative request - sell
#order_list.append(orders.SellMarketOrder(idx,ticker,1))
order_list.append({'type': 'sell_market',
'time': idx,
'ticker': ticker,
'quantity': 1})
else:
# zeros should have been removed above- error for safety
raise RuntimeError("0 encountered in collated signals")
return order_list
def __repr__(self):
return "Simple"+super().__repr__()
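A usage sketch for SimpleStrategy under stated assumptions: signal_dict is assumed to map signal names to callables that BaseStrategy.signal_requests() evaluates per ticker (that interface lives in .base and is not shown here), and the price data below is invented:

# Hypothetical minute-resolution price series for one ticker
index = pd.date_range('2020-01-02 09:30', periods=120, freq='T')
stocks_df = pd.DataFrame({'ACME': 100 + np.cumsum(np.random.randn(120))},
                         index=index)

def momentum(df):
    # Toy signal: +1 when the price rose minute-on-minute, -1 when it fell
    return df.diff().apply(np.sign)

strategy = SimpleStrategy({'momentum': momentum}, ['ACME'], resampling=5)
orders = strategy.get_order_list(stocks_df)
# orders is a list of dicts like
# {'type': 'buy_market', 'time': ..., 'ticker': 'ACME', 'quantity': 1}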
| 44.340659 | 91 | 0.601983 | 3,162 | 0.783643 | 0 | 0 | 0 | 0 | 0 | 0 | 2,183 | 0.541016 |
dde864bb1233daa956ab699eaa628d9606c4448f | 144 | py | Python | ParkFinder/Parks/migrations/__init__.py | Krause2023/CS224-ParkFinder-Web_App | ccfa2faf61d6adb300f319ae11dd983483451410 | ["MIT"] | 1 | 2021-12-22T16:00:36.000Z | 2021-12-22T16:00:36.000Z | ParkFinder/Parks/migrations/__init__.py | Krause2023/CS224-ParkFinder-Web_App | ccfa2faf61d6adb300f319ae11dd983483451410 | ["MIT"] | null | null | null | ParkFinder/Parks/migrations/__init__.py | Krause2023/CS224-ParkFinder-Web_App | ccfa2faf61d6adb300f319ae11dd983483451410 | ["MIT"] | null | null | null |
# Create your migrations here.
# WILL USE THIS LATER IF/WHEN YOU CREATE A DATABASE AND USER ACCOUNTS - THIS MAY BE IN A DIFFERENT APP AS WELL!!!
| 72 | 113 | 0.756944 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 143 | 0.993056 |
ddea0dbcc4c809d7b5a35e5b2781bf028ff2f764 | 140 | py | Python | tests/framework/Optimizers/Infinite/infinite.py | milljm/raven | 5f29fe81b75e2ffbeb54a55aa63647e7b2f6457b | ["Apache-2.0"] | 2 | 2019-10-11T15:59:10.000Z | 2021-04-08T18:23:57.000Z | tests/framework/Optimizers/Infinite/infinite.py | milljm/raven | 5f29fe81b75e2ffbeb54a55aa63647e7b2f6457b | ["Apache-2.0"] | 1 | 2018-03-27T13:06:00.000Z | 2018-03-27T13:06:00.000Z | tests/framework/Optimizers/Infinite/infinite.py | milljm/raven | 5f29fe81b75e2ffbeb54a55aa63647e7b2f6457b | ["Apache-2.0"] | 1 | 2017-08-29T16:09:13.000Z | 2017-08-29T16:09:13.000Z |
import numpy as np
def run(self,Inputs):
if self.x != 0.0:
self.ans = self.y/self.x
else:
self.ans = np.array([float('inf')])
| 15.555556 | 39 | 0.592857 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 | 0.035714 |
ddea413a66b41dee24d47cda24474b0ccba4f292 | 1955 | py | Python | pyinterfaces/enum/ENUM_USAGE.py | OaklandPeters/pyinterfaces | c60efaad92e8d2e1ec25df718dfb43f034a083bb | ["MIT"] | null | null | null | pyinterfaces/enum/ENUM_USAGE.py | OaklandPeters/pyinterfaces | c60efaad92e8d2e1ec25df718dfb43f034a083bb | ["MIT"] | null | null | null | pyinterfaces/enum/ENUM_USAGE.py | OaklandPeters/pyinterfaces | c60efaad92e8d2e1ec25df718dfb43f034a083bb | ["MIT"] | null | null | null |
"""
Provides example of how I would like `enum` to be used.
Implementation details:
(1) Uses Metaclass for two reasons:
(1.1) So that the subclasses can be iterable (we want class objects, not instance objects)
(1.2) To automatically collect Enumeratee
enum.Enumerator
EnumSet, Partition, Basis
Product of all possibilities...
An alternative term, mathematically correct, but misleading, would be 'Partition'
Another alternative term: Basis (as of, group of independent vectors). Note, a basis is a 'linearly independent spanning set'.
enum.Enumeratee
EnumCase, Dimension
@todo: Handle issue: How are the cases ordered?
In Python 3, there is a mechanism for preserving the order of metaclasses.
(see https://www.python.org/dev/peps/pep-3115/)
"""
import ENUM_STUBS as enum
class Beatles(enum.Enumerator):
John = enum.Enumeratee("John Lennon")
Paul = enum.Enumeratee("Paul McCartney")
George = enum.Enumeratee("George Harrison")
Ringo = enum.Enumeratee("Ringo Starr")
@classmethod
def __iter__(cls):
"""__iter__ provides the order cases are returned, while
cls.cases does not.
"""
# Return in alphabetic order, based on attribute name
        for name, case in sorted(cls.cases, key=lambda pair: pair[0]):
yield name, case
# Cases in Enumerator: Directly referencable via attribute name
str(Beatles.John) == "John Lennon"
# Cases: equality based on descriptor's return
Beatles.John == "John Lennon"
Beatles.Paul == "Paul McCartney"
Beatles.George == "George Harrison"
Beatles.Ringo == "Ringo Starr"
# Iterable: returns cases
iterator = iter(Beatles)
next(iterator) == Beatles.George
next(iterator) == Beatles.John
next(iterator) == Beatles.Paul
next(iterator) == Beatles.Ringo
# Cases: returns (attribute name, case) pairs
# ... no particular order imposed here
("John", Beatles.John) in Beatles.cases
("John", "John Lennon") in Beatles.cases
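ENUM_STUBS itself is not shown; a minimal sketch of how its Enumerator/Enumeratee pair could be realized along the lines of the docstring's metaclass plan (every name and detail below is an assumption about the stub module, not its actual contents):

class Enumeratee:
    '''Descriptor wrapping a single enumerated case value.'''
    def __init__(self, value):
        self.value = value

    def __get__(self, obj, objtype=None):
        return self.value

class _EnumeratorMeta(type):
    '''Collects Enumeratee descriptors into .cases and makes the *class*
    iterable (iterating over a class object requires __iter__ on its
    metaclass, per reason (1.1) in the docstring).'''
    def __new__(mcls, name, bases, namespace):
        cls = super().__new__(mcls, name, bases, namespace)
        cls.cases = tuple((attr, member.value)
                          for attr, member in namespace.items()
                          if isinstance(member, Enumeratee))
        return cls

    def __iter__(cls):
        return iter(cls.cases)

class Enumerator(metaclass=_EnumeratorMeta):
    pass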
| 29.621212 | 130 | 0.707417 | 521 | 0.266496 | 288 | 0.147315 | 305 | 0.15601 | 0 | 0 | 1,310 | 0.670077 |
ddebffcf3d40adc0208ac8b35c967b6d0551178a | 38079 | py | Python | port/platform/common/automation/u_utils.py | stephanboner/ubxlib | 64025c5760771ac2accd09f9f176693c7add2919 | ["Apache-2.0"] | null | null | null | port/platform/common/automation/u_utils.py | stephanboner/ubxlib | 64025c5760771ac2accd09f9f176693c7add2919 | ["Apache-2.0"] | null | null | null | port/platform/common/automation/u_utils.py | stephanboner/ubxlib | 64025c5760771ac2accd09f9f176693c7add2919 | ["Apache-2.0"] | null | null | null |
#!/usr/bin/env python
'''Generally useful bits and bobs.'''
import queue # For PrintThread and exe_run
from time import sleep, time, gmtime, strftime # For lock timeout, exe_run timeout and logging
import threading # For PrintThread
import os # For ChangeDir, has_admin
import stat # To help deltree out
from telnetlib import Telnet # For talking to JLink server
import socket
import shutil # To delete a directory tree
import signal # For CTRL_BREAK_EVENT
import subprocess
import platform # Figure out current OS
import serial # Pyserial (make sure to do pip install pyserial)
import psutil # For killing things (make sure to do pip install psutil)
import u_settings
# How long to wait for an install lock in seconds
INSTALL_LOCK_WAIT_SECONDS = u_settings.INSTALL_LOCK_WAIT_SECONDS #(60 * 60)
# The URL for Unity, the unit test framework
UNITY_URL = u_settings.UNITY_URL #"https://github.com/ThrowTheSwitch/Unity"
# The sub-directory that Unity is usually put in
# (off the working directory)
UNITY_SUBDIR = u_settings.UNITY_SUBDIR #"Unity"
# The path to DevCon, a Windows tool that allows
# USB devices to be reset, amongst other things
DEVCON_PATH = u_settings.DEVCON_PATH #"devcon.exe"
# The path to jlink.exe (or just the name 'cos it's on the path)
JLINK_PATH = u_settings.JLINK_PATH #"jlink.exe"
# The port number for SWO trace capture out of JLink
JLINK_SWO_PORT = u_settings.JLINK_SWO_PORT #19021
# The port number for GDB control of ST-LINK GDB server
STLINK_GDB_PORT = u_settings.STLINK_GDB_PORT #61200
# The port number for SWO trace capture out of ST-LINK GDB server
STLINK_SWO_PORT = u_settings.STLINK_SWO_PORT #61300
# The format string passed to strftime()
# for logging prints
TIME_FORMAT = u_settings.TIME_FORMAT #"%Y-%m-%d_%H:%M:%S"
# The default guard time waiting for a platform lock in seconds
PLATFORM_LOCK_GUARD_TIME_SECONDS = u_settings.PLATFORM_LOCK_GUARD_TIME_SECONDS #60 * 60
# The default guard time for downloading to a target in seconds
DOWNLOAD_GUARD_TIME_SECONDS = u_settings.DOWNLOAD_GUARD_TIME_SECONDS #60
# The default guard time for running tests in seconds
RUN_GUARD_TIME_SECONDS = u_settings.RUN_GUARD_TIME_SECONDS #60 * 60
# The default inactivity timer for running tests in seconds
RUN_INACTIVITY_TIME_SECONDS = u_settings.RUN_INACTIVITY_TIME_SECONDS #60 * 5
# The name of the #define that forms the filter string
# for which tests to run
FILTER_MACRO_NAME = u_settings.FILTER_MACRO_NAME #"U_CFG_APP_FILTER"
# The time for which to wait for something from the
# queue in exe_run(). If this is too short, in a
# multiprocessing world or on a slow machine, it is
# possible to miss things as the task putting things
# on the queue may be blocked from doing so until
# we've decided the queue has been completely emptied
# and moved on
EXE_RUN_QUEUE_WAIT_SECONDS = u_settings.EXE_RUN_QUEUE_WAIT_SECONDS #1
def subprocess_osify(cmd):
''' expects an array of strings being [command, param, ...] '''
if platform.system() == "Linux":
return [ ' '.join(cmd) ]
return cmd
def get_actual_path(path):
'''Given a drive number return real path if it is a subst'''
actual_path = path
# Get a list of substs
text = subprocess.check_output("subst",
stderr=subprocess.STDOUT,
shell=True) # Jenkins hangs without this
for line in text.splitlines():
# Lines should look like this:
# Z:\: => C:\projects\ubxlib_priv
# So, in this example, if we were given z:\blah
# then the actual path should be C:\projects\ubxlib_priv\blah
text = line.decode()
bits = text.rsplit(": => ")
if (len(bits) > 1) and (len(path) > 1) and \
(bits[0].lower()[0:2] == path[0:2].lower()):
actual_path = bits[1] + path[2:]
break
return actual_path
def get_instance_text(instance):
'''Return the instance as a text string'''
instance_text = ""
for idx, item in enumerate(instance):
if idx == 0:
instance_text += str(item)
else:
instance_text += "." + str(item)
return instance_text
def remove_readonly(func, path, exec_info):
'''Help deltree out'''
del exec_info
os.chmod(path, stat.S_IWRITE)
func(path)
def deltree(directory, printer, prompt):
'''Remove an entire directory tree'''
tries = 2
success = False
if os.path.isdir(directory):
# Retry this as sometimes Windows complains
# that the directory is not empty when it
        # really should be, some sort of internal
# Windows race condition
while not success and (tries > 0):
try:
                # Need the onerror bit on Winders, see
# this Stack Overflow post:
# https://stackoverflow.com/questions/1889597/deleting-directory-in-python
shutil.rmtree(directory, onerror=remove_readonly)
success = True
except OSError as ex:
printer.string("{}ERROR unable to delete \"{}\" {}: \"{}\"".
format(prompt, directory,
ex.errno, ex.strerror))
tries -= 1
else:
success = True
return success
# Check if admin privileges are available, from:
# https://stackoverflow.com/questions/2946746/python-checking-if-a-user-has-administrator-privileges
def has_admin():
'''Check for administrator privileges'''
admin = False
if os.name == 'nt':
try:
# only Windows users with admin privileges can read the C:\windows\temp
if os.listdir(os.sep.join([os.environ.get("SystemRoot", "C:\\windows"), "temp"])):
admin = True
except PermissionError:
pass
else:
# Pylint will complain about the following line but
# that's OK, it is only executed if we're NOT on Windows
# and there the geteuid() method will exist
if "SUDO_USER" in os.environ and os.geteuid() == 0:
admin = True
return admin
# Reset a USB port with the given Device Description
def usb_reset(device_description, printer, prompt):
''' Reset a device'''
instance_id = None
found = False
success = False
try:
# Run devcon and parse the output to find the given device
printer.string("{}running {} to look for \"{}\"...". \
format(prompt, DEVCON_PATH, device_description))
cmd = [DEVCON_PATH, "hwids", "=ports"]
text = subprocess.check_output(subprocess_osify(cmd),
stderr=subprocess.STDOUT,
shell=True) # Jenkins hangs without this
for line in text.splitlines():
# The format of a devcon entry is this:
#
# USB\VID_1366&PID_1015&MI_00\6&38E81674&0&0000
# Name: JLink CDC UART Port (COM45)
# Hardware IDs:
# USB\VID_1366&PID_1015&REV_0100&MI_00
# USB\VID_1366&PID_1015&MI_00
# Compatible IDs:
# USB\Class_02&SubClass_02&Prot_00
# USB\Class_02&SubClass_02
# USB\Class_02
#
# Grab what we hope is the instance ID
line = line.decode()
if line.startswith("USB"):
instance_id = line
else:
# If the next line is the Name we want then we're done
if instance_id and ("Name: " + device_description in line):
found = True
printer.string("{}\"{}\" found with instance ID \"{}\"". \
format(prompt, device_description,
instance_id))
break
instance_id = None
if found:
# Now run devcon to reset the device
printer.string("{}running {} to reset device \"{}\"...". \
format(prompt, DEVCON_PATH, instance_id))
cmd = [DEVCON_PATH, "restart", "@" + instance_id]
text = subprocess.check_output(subprocess_osify(cmd),
stderr=subprocess.STDOUT,
shell=False) # Has to be False or devcon won't work
for line in text.splitlines():
printer.string("{}{}".format(prompt, line.decode()))
success = True
else:
printer.string("{}device with description \"{}\" not found.". \
format(prompt, device_description))
except subprocess.CalledProcessError:
printer.string("{} unable to find and reset device.".format(prompt))
return success
# Open the required serial port.
def open_serial(serial_name, speed, printer, prompt):
'''Open serial port'''
serial_handle = None
text = "{}: trying to open \"{}\" as a serial port...". \
format(prompt, serial_name)
try:
return_value = serial.Serial(serial_name, speed, timeout=0.05)
serial_handle = return_value
printer.string("{} opened.".format(text))
    except (ValueError, serial.SerialException) as ex:
        printer.string("{}{} while accessing port {}: {}.".
                       format(prompt, type(ex).__name__,
                              serial_name, str(ex)))
return serial_handle
def open_telnet(port_number, printer, prompt):
'''Open telnet port on localhost'''
telnet_handle = None
text = "{}trying to open \"{}\" as a telnet port on localhost...". \
format(prompt, port_number)
try:
telnet_handle = Telnet("localhost", int(port_number), timeout=5)
if telnet_handle is not None:
printer.string("{} opened.".format(text))
else:
printer.string("{} failed.".format(text))
except (socket.error, socket.timeout, ValueError) as ex:
printer.string("{}{} failed to open telnet {}: {}.".
format(prompt, type(ex).__name__,
port_number, str(ex)))
return telnet_handle
def install_lock_acquire(install_lock, printer, prompt):
'''Attempt to acquire install lock'''
timeout_seconds = INSTALL_LOCK_WAIT_SECONDS
success = False
if install_lock:
printer.string("{}waiting for install lock...".format(prompt))
while not install_lock.acquire(False) and (timeout_seconds > 0):
sleep(1)
timeout_seconds -= 1
if timeout_seconds > 0:
printer.string("{}got install lock.".format(prompt))
success = True
else:
            printer.string("{}failed to acquire install lock.".format(prompt))
else:
printer.string("{}warning, there is no install lock.".format(prompt))
return success
def install_lock_release(install_lock, printer, prompt):
'''Release install lock'''
if install_lock:
install_lock.release()
printer.string("{}install lock released.".format(prompt))
def fetch_repo(url, directory, branch, printer, prompt):
'''Fetch a repo: directory can be relative or absolute'''
got_code = False
checked_out = False
success = False
printer.string("{}in directory {}, fetching"
" {} to directory {}".format(prompt, os.getcwd(),
url, directory))
if not branch:
branch = "master"
if os.path.isdir(directory):
# Update existing code
with ChangeDir(directory):
printer.string("{}updating code in {}...".
format(prompt, directory))
try:
text = subprocess.check_output(subprocess_osify(["git", "pull",
"origin", branch]),
stderr=subprocess.STDOUT,
shell=True) # Jenkins hangs without this
for line in text.splitlines():
printer.string("{}{}".format(prompt, line.decode()))
got_code = True
except subprocess.CalledProcessError as error:
printer.string("{}git returned error {}: \"{}\"".
format(prompt, error.returncode,
error.output))
else:
# Clone the repo
printer.string("{}cloning from {} into {}...".
format(prompt, url, directory))
try:
text = subprocess.check_output(subprocess_osify(["git", "clone", url, directory]),
stderr=subprocess.STDOUT,
shell=True) # Jenkins hangs without this
for line in text.splitlines():
printer.string("{}{}".format(prompt, line.decode()))
got_code = True
except subprocess.CalledProcessError as error:
printer.string("{}git returned error {}: \"{}\"".
format(prompt, error.returncode,
error.output))
if got_code and os.path.isdir(directory):
# Check out the correct branch and recurse submodules
with ChangeDir(directory):
printer.string("{}checking out branch {}...".
format(prompt, branch))
try:
text = subprocess.check_output(subprocess_osify(["git", "-c",
"advice.detachedHead=false",
"checkout",
"origin/" + branch]),
stderr=subprocess.STDOUT,
shell=True) # Jenkins hangs without this
for line in text.splitlines():
printer.string("{}{}".format(prompt, line.decode()))
checked_out = True
except subprocess.CalledProcessError as error:
printer.string("{}git returned error {}: \"{}\"".
format(prompt, error.returncode,
error.output))
if checked_out:
printer.string("{}recursing sub-modules (can take some time" \
" and gives no feedback).".format(prompt))
try:
text = subprocess.check_output(subprocess_osify(["git", "submodule",
"update", "--init",
"--recursive"]),
stderr=subprocess.STDOUT,
shell=True) # Jenkins hangs without this
for line in text.splitlines():
printer.string("{}{}".format(prompt, line.decode()))
success = True
except subprocess.CalledProcessError as error:
printer.string("{}git returned error {}: \"{}\"".
format(prompt, error.returncode,
error.output))
return success
def exe_where(exe_name, help_text, printer, prompt):
'''Find an executable using where.exe or which on linux'''
success = False
try:
printer.string("{}looking for \"{}\"...". \
format(prompt, exe_name))
# See here:
# https://stackoverflow.com/questions/14928860/passing-double-quote-shell-commands-in-python-to-subprocess-popen
# ...for why the construction "".join() is necessary when
# passing things which might have spaces in them.
# It is the only thing that works.
if platform.system() == "Linux":
cmd = ["which {}".format(exe_name)]
printer.string("{}detected linux, calling \"{}\"...".format(prompt, cmd))
else:
cmd = ["where", "".join(exe_name)]
printer.string("{}detected nonlinux, calling \"{}\"...".format(prompt, cmd))
text = subprocess.check_output(cmd,
stderr=subprocess.STDOUT,
shell=True) # Jenkins hangs without this
for line in text.splitlines():
printer.string("{}{} found in {}".format(prompt, exe_name,
line.decode()))
success = True
except subprocess.CalledProcessError:
if help_text:
printer.string("{}ERROR {} not found: {}". \
format(prompt, exe_name, help_text))
else:
printer.string("{}ERROR {} not found". \
format(prompt, exe_name))
return success
def exe_version(exe_name, version_switch, printer, prompt):
'''Print the version of a given executable'''
success = False
if not version_switch:
version_switch = "--version"
try:
text = subprocess.check_output(subprocess_osify(["".join(exe_name), version_switch]),
stderr=subprocess.STDOUT,
shell=True) # Jenkins hangs without this
for line in text.splitlines():
printer.string("{}{}".format(prompt, line.decode()))
success = True
except subprocess.CalledProcessError:
printer.string("{}ERROR {} either not found or didn't like {}". \
format(prompt, exe_name, version_switch))
return success
def exe_terminate(process_pid):
'''Jonathan's killer'''
process = psutil.Process(process_pid)
for proc in process.children(recursive=True):
proc.terminate()
process.terminate()
def read_from_process_and_queue(process, read_queue):
'''Read from a process, non-blocking'''
while process.poll() is None:
string = process.stdout.readline().decode()
if string:
read_queue.put(string)
def queue_get_no_exception(the_queue, block=True, timeout=None):
'''A version of queue.get() that doesn't throw an Empty exception'''
thing = None
try:
thing = the_queue.get(block=block, timeout=timeout)
except queue.Empty:
pass
return thing
def capture_env_var(line, env, printer, prompt):
'''A bit of exe_run that needs to be called from two places'''
# Find a KEY=VALUE bit in the line,
# parse it out and put it in the dictionary
# we were given
pair = line.split('=', 1)
if len(pair) == 2:
env[pair[0]] = pair[1].rstrip()
else:
printer.string("{}WARNING: not an environment variable: \"{}\"".
format(prompt, line))
# Note: if returned_env is given then "set"
# will be executed after the exe and the environment
# variables will be returned in it. The down-side
# of this is that the return value of the exe is,
# of course, lost.
def exe_run(call_list, guard_time_seconds, printer, prompt,
shell_cmd=False, set_env=None, returned_env=None):
'''Call an executable, printing out what it does'''
success = False
start_time = time()
flibbling = False
kill_time = None
read_time = start_time
if returned_env is not None:
# The caller wants the environment after the
# command has run, so, from this post:
# https://stackoverflow.com/questions/1214496/how-to-get-environment-from-a-subprocess
# append a tag that we can detect
# to the command and then call set,
# from which we can parse the environment
call_list.append("&&")
call_list.append("echo")
call_list.append("flibble")
call_list.append("&&")
call_list.append("set")
# I've seen output from set get lost,
# possibly because the process ending
# is asynchronous with stdout,
# so add a delay here as well
call_list.append("&&")
call_list.append("sleep")
call_list.append("2")
try:
# Call the thang
# Note: used to have bufsize=1 here but it turns out
# that is ignored 'cos the output is considered
# binary. Seems to work in any case, I guess
# Winders, at least, is in any case line-buffered.
process = subprocess.Popen(call_list,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
shell=shell_cmd,
env=set_env)
printer.string("{}{}, pid {} started with guard time {} second(s)". \
format(prompt, call_list[0], process.pid,
guard_time_seconds))
# This is over complex but, unfortunately, necessary.
# At least one thing that we try to run, nrfjprog, can
# crash silently: just hangs and sends no output. However
# it also doesn't flush and close stdout and so read(1)
# will hang, meaning we can't read its output as a means
# to check that it has hung.
# So, here we poll for the return value, which is normally
# how things will end, and we start another thread which
# reads from the process's stdout. If the thread sees
# nothing for guard_time_seconds then we terminate the
# process.
read_queue = queue.Queue()
read_thread = threading.Thread(target=read_from_process_and_queue,
args=(process, read_queue))
read_thread.start()
while process.poll() is None:
if guard_time_seconds and (kill_time is None) and \
((time() - start_time > guard_time_seconds) or
(time() - read_time > guard_time_seconds)):
kill_time = time()
printer.string("{}guard time of {} second(s)." \
" expired, stopping {}...".
format(prompt, guard_time_seconds,
call_list[0]))
exe_terminate(process.pid)
line = queue_get_no_exception(read_queue, True, EXE_RUN_QUEUE_WAIT_SECONDS)
read_time = time()
while line is not None:
line = line.rstrip()
if flibbling:
capture_env_var(line, returned_env, printer, prompt)
else:
if returned_env is not None and "flibble" in line:
flibbling = True
else:
printer.string("{}{}".format(prompt, line))
line = queue_get_no_exception(read_queue, True, EXE_RUN_QUEUE_WAIT_SECONDS)
read_time = time()
# Can't join() read_thread here as it might have
# blocked on a read() (if nrfjprog has anything to
# do with it). It will be tidied up when this process
# exits.
# There may still be stuff on the queue, read it out here
line = queue_get_no_exception(read_queue, True, EXE_RUN_QUEUE_WAIT_SECONDS)
while line is not None:
line = line.rstrip()
if flibbling:
capture_env_var(line, returned_env, printer, prompt)
else:
if returned_env is not None and "flibble" in line:
flibbling = True
else:
printer.string("{}{}".format(prompt, line))
line = queue_get_no_exception(read_queue, True, EXE_RUN_QUEUE_WAIT_SECONDS)
# There may still be stuff in the buffer after
# the application has finished running so flush that
# out here
line = process.stdout.readline().decode()
while line:
line = line.rstrip()
if flibbling:
capture_env_var(line, returned_env, printer, prompt)
else:
if returned_env is not None and "flibble" in line:
flibbling = True
else:
printer.string("{}{}".format(prompt, line))
line = process.stdout.readline().decode()
if (process.poll() == 0) and kill_time is None:
success = True
printer.string("{}{}, pid {} ended with return value {}.". \
format(prompt, call_list[0],
process.pid, process.poll()))
except ValueError as ex:
printer.string("{}failed: {} while trying to execute {}.". \
format(prompt, type(ex).__name__, str(ex)))
return success
class ExeRun():
'''Run an executable as a "with:"'''
def __init__(self, call_list, printer, prompt, shell_cmd=False, with_stdin=False):
self._call_list = call_list
self._printer = printer
self._prompt = prompt
self._shell_cmd = shell_cmd
        self._with_stdin = with_stdin
self._process = None
def __enter__(self):
if self._printer:
text = ""
for idx, item in enumerate(self._call_list):
if idx == 0:
text = item
else:
text += " {}".format(item)
self._printer.string("{}starting {}...".format(self._prompt,
text))
try:
# Start exe
if self._with_stdin:
self._process = subprocess.Popen(self._call_list,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
shell=self._shell_cmd,
creationflags=subprocess.CREATE_NEW_PROCESS_GROUP)
else:
self._process = subprocess.Popen(self._call_list,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
shell=self._shell_cmd,
creationflags=subprocess.CREATE_NEW_PROCESS_GROUP)
self._printer.string("{}{} pid {} started".format(self._prompt,
self._call_list[0],
self._process.pid))
except (OSError, subprocess.CalledProcessError, ValueError) as ex:
if self._printer:
self._printer.string("{}failed: {} to start {}.". \
format(self._prompt,
type(ex).__name__, str(ex)))
return self._process
def __exit__(self, _type, value, traceback):
del _type
del value
del traceback
# Stop exe
if self._printer:
self._printer.string("{}stopping {}...". \
format(self._prompt,
self._call_list[0]))
return_value = self._process.poll()
if not return_value:
retry = 5
while (self._process.poll() is None) and (retry > 0):
                # Try to stop with CTRL-BREAK
self._process.send_signal(signal.CTRL_BREAK_EVENT)
sleep(1)
retry -= 1
return_value = self._process.poll()
if not return_value:
# Terminate with a vengeance
self._process.terminate()
while self._process.poll() is None:
pass
self._printer.string("{}{} pid {} terminated".format(self._prompt,
self._call_list[0],
self._process.pid))
else:
self._printer.string("{}{} pid {} CTRL-C'd".format(self._prompt,
self._call_list[0],
self._process.pid))
else:
self._printer.string("{}{} pid {} already ended".format(self._prompt,
self._call_list[0],
self._process.pid))
return return_value
# Simple SWO decoder: only handles single bytes of application
# data at a time, i.e. what ITM_SendChar() sends.
class SwoDecoder():
'''Take the contents of a byte_array and decode it as SWO'''
def __init__(self, address, replaceLfWithCrLf=False):
self._address = address
self._replace_lf_with_crlf = replaceLfWithCrLf
self._expecting_swit = True
def decode(self, swo_byte_array):
'''Do the decode'''
decoded_byte_array = bytearray()
if swo_byte_array:
for data_byte in swo_byte_array:
# We're looking only for "address" and we also know
# that CMSIS only offers ITM_SendChar(), so packet length
# is always 1, and we only send ASCII characters,
# so the top bit of the data byte must be 0.
#
# For the SWO protocol, see:
#
# https://developer.arm.com/documentation/ddi0314/h/
# instrumentation-trace-macrocell/
# about-the-instrumentation-trace-macrocell/trace-packet-format
#
# When we see SWIT (SoftWare Instrumentation Trace
# I think, anyway, the bit that carries our prints
# off the target) which is 0bBBBBB0SS, where BBBBB is
# address and SS is the size of payload to follow,
# in our case 0x01, we know that the next
# byte is probably data and if it is ASCII then
# it is data. Anything else is ignored.
# The reason for doing it this way is that the
# ARM ITM only sends out sync packets under
# special circumstances so it is not a recovery
# mechanism for simply losing a byte in the
# transfer, which does happen occasionally.
if self._expecting_swit:
if ((data_byte & 0x03) == 0x01) and ((data_byte & 0xf8) >> 3 == self._address):
# Trace packet type is SWIT, i.e. our
# application logging
self._expecting_swit = False
else:
if data_byte & 0x80 == 0:
if (data_byte == 10) and self._replace_lf_with_crlf:
decoded_byte_array.append(13)
decoded_byte_array.append(data_byte)
self._expecting_swit = True
return decoded_byte_array
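# Illustrative decode call (not from the original file): the two bytes below
# are one SWIT header for stimulus address 0 followed by an ASCII 'A'.
#
#   decoder = SwoDecoder(0)
#   decoder.decode(bytearray([0x01, 0x41]))   # -> bytearray(b'A')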
class PrintThread(threading.Thread):
'''Print thread to organise prints nicely'''
def __init__(self, print_queue):
self._queue = print_queue
self._running = False
threading.Thread.__init__(self)
def stop_thread(self):
'''Helper function to stop the thread'''
self._running = False
def run(self):
'''Worker thread'''
self._running = True
while self._running:
try:
                my_string = self._queue.get(block=True, timeout=0.5)
print(my_string)
except queue.Empty:
pass
class PrintToQueue():
'''Print to a queue, if there is one'''
def __init__(self, print_queue, file_handle, include_timestamp=False):
self._queue = print_queue
self._file_handle = file_handle
self._include_timestamp = include_timestamp
def string(self, string, file_only=False):
'''Print a string'''
if self._include_timestamp:
string = strftime(TIME_FORMAT, gmtime()) + " " + string
if not file_only:
if self._queue:
self._queue.put(string)
else:
print(string)
if self._file_handle:
self._file_handle.write(string + "\n")
self._file_handle.flush()
# This stolen from here:
# https://stackoverflow.com/questions/431684/how-do-i-change-the-working-directory-in-python
class ChangeDir():
'''Context manager for changing the current working directory'''
def __init__(self, new_path):
self._new_path = os.path.expanduser(new_path)
self._saved_path = None
def __enter__(self):
'''CD to new_path'''
self._saved_path = os.getcwd()
os.chdir(self._new_path)
def __exit__(self, etype, value, traceback):
'''CD back to saved_path'''
os.chdir(self._saved_path)
class Lock():
'''Hold a lock as a "with:"'''
def __init__(self, lock, guard_time_seconds,
lock_type, printer, prompt):
self._lock = lock
self._guard_time_seconds = guard_time_seconds
self._lock_type = lock_type
self._printer = printer
self._prompt = prompt
self._locked = False
def __enter__(self):
if not self._lock:
return True
# Wait on the lock
if not self._locked:
timeout_seconds = self._guard_time_seconds
self._printer.string("{}waiting up to {} second(s)" \
" for a {} lock...". \
format(self._prompt,
self._guard_time_seconds,
self._lock_type))
count = 0
while not self._lock.acquire(False) and \
((self._guard_time_seconds == 0) or (timeout_seconds > 0)):
sleep(1)
timeout_seconds -= 1
count += 1
if count == 30:
self._printer.string("{}still waiting {} second(s)" \
" for a {} lock (locker is" \
" currently {}).". \
format(self._prompt, timeout_seconds,
self._lock_type, self._lock))
count = 0
if (self._guard_time_seconds == 0) or (timeout_seconds > 0):
self._locked = True
self._printer.string("{}{} lock acquired ({}).". \
format(self._prompt, self._lock_type,
self._lock))
return self._locked
def __exit__(self, _type, value, traceback):
del _type
del value
del traceback
if self._lock and self._locked:
try:
self._lock.release()
self._locked = False
self._printer.string("{}released a {} lock.".format(self._prompt,
self._lock_type))
except RuntimeError:
self._locked = False
self._printer.string("{}{} lock was already released.". \
format(self._prompt, self._lock_type))
def wait_for_completion(completion_list, purpose, guard_time_seconds,
                        printer, prompt):
    '''Wait for a completion list to empty'''
    completed = False
    if len(completion_list) > 0:
        timeout_seconds = guard_time_seconds
        printer.string("{}waiting up to {} second(s)" \
                       " for {} completion...". \
                       format(prompt, guard_time_seconds, purpose))
        count = 0
        while (len(completion_list) > 0) and \
              ((guard_time_seconds == 0) or (timeout_seconds > 0)):
            sleep(1)
            timeout_seconds -= 1
            count += 1
            if count == 30:
                list_text = ""
                for item in completion_list:
                    if list_text:
                        list_text += ", "
                    list_text += str(item)
                printer.string("{}still waiting {} second(s)" \
                               " for {} to complete (waiting" \
                               " for {}).". \
                               format(prompt, timeout_seconds,
                                      purpose, list_text))
                count = 0
        if len(completion_list) == 0:
            completed = True
            printer.string("{}{} completed.".format(prompt, purpose))
    return completed
def reset_nrf_target(connection, printer, prompt):
'''Reset a Nordic NRFxxx target'''
call_list = []
printer.string("{}resetting target...".format(prompt))
# Assemble the call list
call_list.append("nrfjprog")
call_list.append("--reset")
if connection and "debugger" in connection and connection["debugger"]:
call_list.append("-s")
call_list.append(connection["debugger"])
# Print what we're gonna do
tmp = ""
for item in call_list:
tmp += " " + item
printer.string("{}in directory {} calling{}". \
format(prompt, os.getcwd(), tmp))
# Call it
return exe_run(call_list, 60, printer, prompt)
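The helpers above are designed to be composed; a short sketch wiring the print machinery to exe_run() (the command run here is arbitrary, and queue is already imported at the top of this module):

print_queue = queue.Queue()
print_thread = PrintThread(print_queue)
print_thread.start()
printer = PrintToQueue(print_queue, None, include_timestamp=True)

# Run a guarded command, echoing its output through the print thread
success = exe_run(["git", "--version"], 30, printer, "demo: ")

print_thread.stop_thread()
print_thread.join()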
| 42.930101 | 120 | 0.536437 | 10,699 | 0.280969 | 0 | 0 | 0 | 0 | 0 | 0 | 10,774 | 0.282938 |