anchor
stringlengths
2
528
positive
stringlengths
4
6k
negative
stringlengths
6
6k
์ž์œ  ์—ฌ๋ฆ„์˜ ๋ชฉ์ ์€ ๋ฌด์—‡์ด์—ˆ์Šต๋‹ˆ๊นŒ?
์ž์œ  ์—ฌ๋ฆ„์€ ๋˜ํ•œ ์ „๊ตญ ๊ตํšŒ ํ˜‘์˜ํšŒ์˜ ์ง€์›์„ ๋ฐ›์•˜์œผ๋ฉฐ, ์—ฌ๋ฆ„ ๋™์•ˆ ์ธ๊ถŒ์„ ์œ„ํ•œ ์˜๋ฃŒ ์œ„์›ํšŒ์˜ ์ž์›๋ด‰์‚ฌ์ž๋“ค๊ณผ ๋‹ค์–‘ํ•œ ๋‹จ์ฒด์˜ ๋ณ€ํ˜ธ์‚ฌ๋“ค์ด ๋ฏธ์‹œ์‹œํ”ผ์—์„œ ํ™œ๋™ํ–ˆ๋‹ค. ์ž์œ  ์—ฌ๋ฆ„์˜ ์žฅ๊ธฐ์ ์ธ ๋ชฉํ‘œ๋Š” ๋ฏธ์‹œ์‹œํ”ผ์˜ ๊ถŒ๋ ฅ ๊ตฌ์กฐ๋ฅผ ๋ณ€ํ™”์‹œํ‚ค๋Š” ๊ฒƒ์ด์—ˆ๋‹ค.
์ž์œ ์˜ ์—ฌ๋ฆ„์€ ๋˜ํ•œ ์ „๊ตญ ๊ตํšŒ ํ˜‘์˜ํšŒ์˜ ์ง€์›์„ ๋ฐ›์•˜์œผ๋ฉฐ, ์—ฌ๋ฆ„ ๋™์•ˆ ์ธ๊ถŒ์„ ์œ„ํ•œ ์˜๋ฃŒ ์œ„์›ํšŒ์˜ ์ž์›๋ด‰์‚ฌ์ž๋“ค๊ณผ ๋‹ค์–‘ํ•œ ๋‹จ์ฒด์˜ ๋ณ€ํ˜ธ์‚ฌ๋“ค์ด ๋ฏธ์‹œ์‹œํ”ผ์—์„œ ํ™œ๋™ํ–ˆ์Šต๋‹ˆ๋‹ค. ์ž์œ ์˜ ์—ฌ๋ฆ„์˜ ์žฅ๊ธฐ์ ์ธ ๋ชฉํ‘œ๋Š” ๋ฏธ์‹œ์‹œํ”ผ์˜ ๊ถŒ๋ ฅ ๊ตฌ์กฐ๋ฅผ ๋ณ€ํ™”์‹œํ‚ค๋Š” ๊ฒƒ์ด์—ˆ์Šต๋‹ˆ๋‹ค.
End do_enable; called after state changes but before command acknowledged. This method connects to the HVAC server.
async def end_enable(self, id_data: salobj.BaseDdsDataType) -> None:
    """Hook run at the end of do_enable, before the command is acknowledged.

    Ensures the connection to the HVAC server is established, then
    delegates to the parent class implementation.
    """
    if not self.connected:
        await self.connect()
    await super().end_enable(id_data)
def on_enable(self):
    """Reset the accumulated error counter when the component is enabled."""
    self.i_err = 0
Returns a list of creation datetimes for the files in the given list of JPG file paths.
def get_creation_times(list_of_jpg):
    """Return EXIF creation datetimes for each path in ``list_of_jpg``.

    Parameters
    ----------
    list_of_jpg : iterable of str
        Paths to JPEG files.

    Returns
    -------
    list
        EXIF DateTimeOriginal values (tag 36867), or None for files that
        cannot be opened or carry no EXIF timestamp.
    """
    EXIF_DATETIME_ORIGINAL = 36867  # standard EXIF tag id for DateTimeOriginal
    creation_times = []
    for jpg in list_of_jpg:
        try:
            # Context manager guarantees the file handle is released even
            # when EXIF parsing fails (the original leaked open images).
            with Image.open(jpg) as image:
                creation_time = image.getexif().get(EXIF_DATETIME_ORIGINAL)
        except Exception:  # narrowed from bare except; keep best-effort behavior
            creation_time = None
        creation_times.append(creation_time)
    return creation_times
def image_timestamp_list(self) -> Dict[str, List[int]]:
    """Return, for the current log, a per-camera list of integer timestamps.

    Timestamps are parsed from the image file names (the trailing
    ``_<timestamp>`` token before the 4-character extension).  The full
    log -> camera -> timestamps mapping is built lazily and cached on
    first access.
    """
    assert self.image_list is not None
    assert self._image_list is not None
    if self._image_timestamp_list is None:
        cache = {}
        for log_name in self.log_list:
            per_camera = {}
            for camera in CAMERA_LIST:
                per_camera[camera] = [
                    int(path.split("/")[-1][:-4].split("_")[-1])
                    for path in self._image_list[log_name][camera]
                ]
            cache[log_name] = per_camera
        self._image_timestamp_list = cache
    return self._image_timestamp_list[self.current_log]
Add a new snapshot node. It adds an entry in self.snapshotsCached and creates a new Checkbox object
def __addNewSnapshot__(self, snapshotNode):
    """Register a new snapshot node.

    Caches (name, description, uploaded flag, checkbox) in
    ``self.snapshotsCached`` keyed by node id, adds a checked QCheckBox to
    the snapshots layout, and observes the node for later modifications.
    """
    nodeID = snapshotNode.GetID()
    print("Added new node " + nodeID)
    name = snapshotNode.GetName()
    description = snapshotNode.GetSnapshotDescription()
    checkbox = qt.QCheckBox()
    checkbox.checked = True
    checkbox.text = name
    checkbox.toolTip = "%s. Uploaded to Picasa: NO" % description
    # Show the checkbox in the snapshots panel.
    self.currentSnapshotsInnerLayout.addWidget(checkbox)
    # Cache entry layout: (Name, Description, Uploaded, Widget).
    self.snapshotsCached[nodeID] = [name, description, False, checkbox]
    # React to renames and other node modifications.
    self.__addModifiedObserver__(snapshotNode)
    # The list is no longer empty, so the placeholder label can go away.
    self.noItemsLabel.hide()
def push_snapshot(self):
Performs a commit A commit of known offsets for read partitions And subscription positions for untouched partitions
async def _do_commit(self):
    """Commit known offsets for read partitions plus the current
    subscription positions for partitions that were never touched.
    """
    committable = self._subscriptions.subscription.assignment.all_consumed_offsets()
    # Explicitly tracked offsets override the subscription positions.
    committable.update(self._offsets)
    if committable:
        await self._consumer.commit(committable)
def _do_update_update(self, data, offset):
    """Record which segments an in-place update of ``data`` at ``offset``
    touches, then ask for the servermap to be refreshed for that range.
    """
    assert IMutableUploadable.providedBy(data)
    assert self.is_mutable()
    # offset == self.get_size() is valid and means that we are
    # appending data to the file.
    assert offset <= self.get_size()

    segsize = self._version[3]
    # We'll need the segment that the data starts in, regardless of
    # what we'll do later.
    start_segment = offset // segsize
    # We only need the end segment if the data we append does not go
    # beyond the current end-of-file.
    end_segment = start_segment
    if offset + data.get_size() < self.get_size():
        end_data = offset + data.get_size()
        # The last byte we touch is the end_data'th byte, which is actually
        # byte end_data - 1 because bytes are zero-indexed.
        end_data -= 1
        end_segment = end_data // segsize

    self._start_segment = start_segment
    self._end_segment = end_segment

    # Now ask for the servermap to be updated in MODE_WRITE with
    # this update range.
    return self._update_servermap(update_range=(start_segment, end_segment))
Return a pandas dataframe Goes through all the events and tallies up the points of the students.
def parse_sheets(event_attendance, member_list):
    """Tally attendance points for every member across all events.

    Builds a DataFrame keyed by full name, walks each event sheet, and
    accumulates per-event and total "Spark Points".  New students are also
    registered in the module-level ``student_list`` dictionary.
    """
    attendance = pd.DataFrame()
    attendance["Name:"] = (
        member_list["First Name"].str.strip()
        + " "
        + member_list["Last Name"].str.strip()
    )
    attendance["Total Spark Points:"] = 0
    print(attendance)
    print(member_list)
    # Go through all events in the folder and check if the person attended.
    # If yes, add points.
    for index, member in attendance.iterrows():
        name = str(member["Name:"].strip()).lower()
        valid = student_list.get(name, "NA")
        print(valid)
        # Add a new student to the student_list dictionary if not there yet.
        if valid == "NA":
            print("Added student " + name)
            student_list[name.lower()] = {"EID": 0, "Points": 0}
        for event in event_attendance:
            eventdata = list(event.columns)
            # The last column carries the event name; its first cell carries
            # the point value for attending.
            event_name = eventdata[-1]
            points_to_add = event.iat[0, len(eventdata) - 1]
            attendance.at[index, event_name] = 0
            # If the person is in the database and at the meeting, sum up
            # each person's spark points and add to their total.
            if check_attendance(member["Name:"].lower(), event):
                attendance.at[index, event_name] += points_to_add
                attendance.at[index, "Total Spark Points:"] += points_to_add
    return attendance
def frame_info_to_df(obj_info_aggregated, frame_ind, camera_id, date_time):
    """Build a per-frame DataFrame of detected objects.

    Each row of ``obj_info_aggregated`` supplies the bounds, class label
    and confidence of one detection; frame/camera/upload metadata is
    attached as constant columns.
    """
    detections = pd.DataFrame(
        obj_info_aggregated,
        columns=["obj_bounds", "obj_classification", "confidence"],
    )
    detections["frame_id"] = frame_ind
    detections["camera_id"] = camera_id
    detections["video_upload_datetime"] = date_time
    return detections
์•„๋“œ๋ ˆ๋ž„์ด ์ธ๊ฐ„์—๊ฒŒ ๋ฏธ์น  ์ˆ˜ ์žˆ๋Š” ์˜ํ–ฅ์€ ๋ฌด์—‡์ธ๊ฐ€?
์•„๋“œ๋ ˆ๋ž„์€ ์•”ํŽ˜ํƒ€๋ฏผ์œผ๋กœ, ์ฃผ์˜๋ ฅ ๊ฒฐํ• ๊ณผ๋‹ค ํ–‰๋™ ์žฅ์• (ADHD)์™€ ๊ฐ™์€ ์žฅ์• ๋ฅผ ์น˜๋ฃŒํ•˜๋Š” ๋ฐ ์‚ฌ์šฉ๋˜๋Š” ์ค‘์ถ” ์‹ ๊ฒฝ๊ณ„ ์ž๊ทน์ œ์ž…๋‹ˆ๋‹ค. ๋ฏธ๊ตญ์—์„œ ์ผ๋ฐ˜์ ์œผ๋กœ ์‚ฌ์šฉ๋˜๊ณ  ์ฒ˜๋ฐฉ๋˜๋Š” ์•”ํŽ˜ํƒ€๋ฏผ์—๋Š” ๋ฑ์ŠคํŠธ๋กœ์•”ํŽ˜ํƒ€๋ฏผ์ด ํฌํ•จ๋˜์–ด ์žˆ์Šต๋‹ˆ๋‹ค. ์•„๋“œ๋ ˆ๋ž„์€ ์•”ํŽ˜ํƒ€๋ฏผ๊ณผ ๋ฑ์ŠคํŠธ๋กœ์•”ํŽ˜ํƒ€๋ฏผ์˜ ์กฐํ•ฉ์œผ๋กœ, ADHD์™€ ๊ธฐ๋ฉด์ฆ ์น˜๋ฃŒ์— ๋„๋ฆฌ ์ฒ˜๋ฐฉ๋˜๋Š” ์˜์•ฝํ’ˆ์ž…๋‹ˆ๋‹ค. ์•„๋“œ๋ ˆ๋ž„์ด ํƒˆ๋ชจ๋ฅผ ์œ ๋ฐœํ•ฉ๋‹ˆ๊นŒ? ๊ทธ๋ ‡๋‹ค๋ฉด ์–ด๋–ป๊ฒŒ ์œ ๋ฐœํ•ฉ๋‹ˆ๊นŒ? ์ผ๋ฐ˜์ ์œผ๋กœ ์•”ํŽ˜ํƒ€๋ฏผ์€ ์ธ์ฒด์—์„œ ์ž˜ ๊ฒฌ๋””์ง€๋งŒ ์ผ๋ถ€ ๋ถ€์ž‘์šฉ์ด ์žˆ์„ ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. ์žฅ๊ธฐ๊ฐ„ ์‚ฌ์šฉ ๋ฐ ์ค‘๋… ์‹œ ๋ถ€์ž‘์šฉ์ด ํ›จ์”ฌ ๋” ์ปค์ง€๋ฉฐ, ์—ฌ๊ธฐ์—๋Š” ๋ชจ๋ฐœ์ด ๊ฐ€๋Š˜์–ด์ง€๊ฑฐ๋‚˜ ํƒˆ๋ชจ๊ฐ€ ํฌํ•จ๋ฉ๋‹ˆ๋‹ค. ํƒˆ๋ชจ๋Š” ์ผ๋ฐ˜์ ์œผ๋กœ ๋‘ํ”ผ ์ „๋ฐ˜์— ๊ฑธ์ณ ํผ์ ธ ์žˆ์œผ๋ฉฐ ํŠน์ •ํ•œ ํ•œ ์ง€์—ญ์—๋งŒ ์ง‘์ค‘๋˜์ง€ ์•Š์Šต๋‹ˆ๋‹ค.
์• ๋“œ๋ ˆ๋ž„์—๋Š” ์ธ๊ฐ„ ์„ฑ์žฅ ํ˜ธ๋ฅด๋ชฌ์ด ํฌํ•จ๋˜์–ด ์žˆ์Šต๋‹ˆ๊นŒ? ์• ๋“œ๋ ˆ๋ž„๊ณผ ์ธ๊ฐ„ ์„ฑ์žฅ ํ˜ธ๋ฅด๋ชฌ์„ ํ•จ๊ป˜ ๋ณต์šฉํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๊นŒ? Treato์—์„œ ์• ๋“œ๋ ˆ๋ž„๊ณผ ์ธ๊ฐ„ ์„ฑ์žฅ ํ˜ธ๋ฅด๋ชฌ์— ๋Œ€ํ•œ 5๊ฐ€์ง€ ๋…ผ์˜ ๋‚ด ๋ถ๋งˆํฌ
์ „์„ค, ์†, ์ธ์–ด, ๋Œ€๋ถ€๋ถ„, ์‚ฌ๋žŒ, ์กด์žฌ, ๋ฌด์„ญ๋‹ค
์ „์„ค ์† ์ธ์–ด๋Š” ๋Œ€๋ถ€๋ถ„ ์ฃ„ ์—†๋Š” ์‚ฌ๋žŒ์„ ํ•ด์น˜๋Š” ๋ฌด์„œ์šด ์กด์žฌ์ด๋‹ค.
์˜›๋‚  ๋‹ค๊ณผ์ƒ ์ฐจ๋ฆผ์€ ๋…น์ฐจ, ๋ฌด์ง€๊ฐœ๋–ก, ๋ฐค๋‹ค์‹, ์ƒ๋ž€, ์œจ๋ž€, ์กฐ๋ž€ ๋“ฑ์ด๋‹ค.
Retrieves the pixel value from an image. Requested pixel coordinates are clamped to the bounds of the image.
def get_pixel(image, x, y):
    """Return the pixel at (x, y), clamping out-of-range coordinates.

    ``image`` is a dict with 'width', 'height' and a flat row-major
    'pixels' list; coordinates outside the image are clamped to the
    nearest edge rather than raising.
    """
    # Convert the row index into a flat-list offset before clamping, so the
    # bounds below are expressed in flat-offset units.
    row_offset = y * image["width"]
    col = min(max(x, 0), image["width"] - 1)
    max_row_offset = (image["height"] - 1) * image["width"]
    row_offset = min(max(row_offset, 0), max_row_offset)
    return image["pixels"][col + row_offset]
def get_input(self, idx):
    """Load sample ``idx`` from disk and return it as an RGB PIL image."""
    img_path = os.path.join(self.data_dir, self._input_array[idx])
    return Image.open(img_path).convert("RGB")
Loads a specific dataset file from the csv. Split refers to the EMNIST split in the dataset.
def load_split(self, split, stage):
    """Load one EMNIST csv (``emnist-<split>-<stage>.csv``) as a TensorDataset.

    Column 0 holds the label; the remaining 784 columns hold 28x28 pixel
    values in [0, 255].  Returns None (after printing a message) for an
    unknown split or stage.
    """
    if split not in self.splits:
        print(split, "is not a valid EMNIST split")
        return
    if stage not in self.stages:
        print(stage, "is not a valid stage")
        return
    csv_filename = self.data_folder + "/emnist-" + split + "-" + stage + ".csv"
    print("Loading", csv_filename)
    # BUG FIX: pixel values range over 0..255, so np.int8 (max 127) silently
    # corrupted bright pixels; uint8 holds both pixels and labels safely.
    data_frame = pd.read_csv(csv_filename, header=None, dtype=np.uint8)
    # Images: every column after the label, reshaped to (N, 1, 28, 28).
    tensor_x = torch.Tensor(
        data_frame.iloc[:, 1:].values.reshape((-1, 1, 28, 28))
    ).float()
    # Labels: first column, as int64 class indices.
    tensor_y = torch.Tensor(data_frame.iloc[:, 0].values).long()
    return torch.utils.data.TensorDataset(tensor_x, tensor_y)
def load_data(self):
    """Load the rugged-terrain GDP dataset and split it by continent flag.

    Reads the csv at ``self.URL``, keeps the africa indicator, ruggedness
    index and log-transformed year-2000 GDP, drops rows with non-finite
    GDP, and returns ``[[x, y]]`` pairs for non-Africa then Africa as
    numpy arrays.
    """
    df = pd.read_csv(self.URL, encoding="ISO-8859-1")
    df = df[["cont_africa", "rugged", "rgdppc_2000"]]
    df.rgdppc_2000 = np.log(df.rgdppc_2000)
    df = df[np.isfinite(df.rgdppc_2000)]
    splits = []
    for flag in (0, 1):
        subset = df[df.cont_africa == flag]
        splits.append([subset.rugged.to_numpy(), subset.rgdppc_2000.to_numpy()])
    return splits
Create a new variable in the model, annotated with the given term, and in the given units.
def _create_annotated_variable(self, prefixed_name, units):
    """Create a model variable annotated (via bqbiol:is) with ``prefixed_name``.

    The variable is added to the protocol component, in the given units,
    and returned.
    """
    # 1903 TODO: Be more careful to create unique local names and ids
    prefix, local_name = prefixed_name.split(':')
    variable = self.add_variable(
        self._get_protocol_component(),
        local_name,
        units,
        id=prefix + '_' + local_name,
    )
    variable.add_rdf_annotation(
        ('bqbiol:is', NSS['bqbiol']),
        (prefixed_name, self._protocol_namespaces[prefix]),
    )
    return variable
def add_term(self, term, cursor=None, loci_cursor=None):
    """Insert ``term`` (name, desc, attrs, loci) into the ontology database.

    Opens its own transactions on the term and loci databases unless
    explicit cursors are provided, in which case transaction management is
    left to the caller.  Loci unknown to the loci database are added first.
    """
    if not cursor:
        cur = self.m80.db.cursor()
        cur.execute("BEGIN TRANSACTION")
    else:
        cur = cursor
    if not loci_cursor:
        lcur = self.loci.m80.db.cursor()
        lcur.execute("BEGIN TRANSACTION")
    else:
        lcur = loci_cursor
    # NOTE(review): the loci transaction (lcur) is never explicitly ended
    # here — confirm the caller or loci.add_locus commits it.
    # Add the term id and description
    cur.execute(
        """
        INSERT OR ABORT INTO terms (name, desc)
        VALUES (?, ?)""",
        (term.name, term.desc),
    )
    (TID,) = cur.execute("SELECT last_insert_rowid()").fetchone()
    if TID is None:  # pragma: no cover
        # I dont know when this would happen without another exception
        # being thrown
        raise ValueError(f"{term} was not assigned a valid TID!")
    for key, val in term.attrs.items():
        cur.executemany(
            """
            INSERT INTO term_attrs (TID, key, val)
            VALUES (?,?,?)
            """,
            ((TID, key, xval) for xval in val),
        )
    # separate the new loci from the existing loci
    new_LIDs = []
    existing_LIDs = []
    for locus in term.loci:
        try:
            existing_LIDs.append(self.loci._get_LID(locus))
        except MissingLocusError:
            new_LIDs.append(self.loci.add_locus(locus, cur=lcur))
    for LID in new_LIDs + existing_LIDs:
        cur.execute(
            """
            INSERT INTO term_loci (TID,LID)
            VALUES (?,?)
            """,
            (TID, LID),
        )
    if not cursor:
        cur.execute("END TRANSACTION")
Attaches to the gdbserver, running locally or portforwarded. If |remote_address| is set, it is used for ssh.
def _attach_bare_metal_gdb(
        remote_address, plugin_pid, ssh_options, nacl_helper_nonsfi_path,
        gdb_type):
    """Attach gdb to the gdbserver, running locally or port-forwarded.

    If ``remote_address`` is set, it is used for ssh.
    """
    gdb_port = _get_bare_metal_gdb_port(plugin_pid)
    # Before launching 'gdb', wait until the target port is opened.
    _wait_by_busy_loop(
        lambda: _is_remote_port_open(_LOCAL_HOST, gdb_port))

    gdb_args = []
    if nacl_helper_nonsfi_path:
        gdb_args.append(nacl_helper_nonsfi_path)
    gdb_args.extend(
        ['-ex', 'target remote %s:%d' % (_LOCAL_HOST, gdb_port)])
    gdb_args.extend(get_gdb_python_init_args())
    library_path = os.path.abspath(build_common.get_load_library_path())
    gdb_args.extend(get_gdb_python_script_init_args(
        'bare_metal_support',
        arc_nexe=os.path.join(
            library_path,
            os.path.basename(build_common.get_runtime_main_nexe())),
        library_path=library_path,
        runnable_ld_path=os.path.join(library_path, 'runnable-ld.so'),
        lock_file=os.path.join(_BARE_METAL_GDB_LOCK_DIR, str(plugin_pid)),
        remote_address=remote_address,
        ssh_options=ssh_options))
    gdb_args.extend(['-ex', r'echo To start: c or cont\n'])
    _launch_plugin_gdb(gdb_args, gdb_type)
def create_server_socket(self, local_address, peer, remote_address):
    """Create a server socket bound to ``local_address`` and start its thread.

    The socket is registered in ``self.server_sockets`` keyed by the local
    address before its run thread is started.
    """
    sock = server_socket(self, local_address, peer, remote_address)
    self.server_sockets[local_address] = sock
    sock.run_thread.start()
ํ˜ธ์ฃผ๋Š” ์ž„์‹œ์ง ์‹œ๊ฐ„์ œ ๊ณต๋ฌด์›์—๊ฒŒ ๋ณธ๋ด‰ ์™ธ์˜ ๋ณด์ˆ˜๋ฅผ ์–ด๋–ป๊ฒŒ ์ฃผ๊ณ  ์žˆ์ง€
3) ํ˜ธ์ฃผ ํ˜ธ์ฃผ์˜ ๊ณต๋ฌด์› ์ž„์šฉ์€ ์ •๊ทœ์ง ์ „์ผ์ œ, ์ •๊ทœ์ง ์‹œ๊ฐ„์ œ ๊ณต๋ฌด์›, ์ž„์‹œ์ง ์ „์ผ์ œ, ์ž„์‹œ์ง ์‹œ๊ฐ„์ œ ๋“ฑ 4๊ฐ€์ง€๋กœ ๋ถ„๋ฅ˜๋œ๋‹ค. ์ „์ผ์ œ๋Š” ์ฃผ 35์‹œ๊ฐ„ ์ด์ƒ, ์‹œ๊ฐ„์ œ๋Š” ์ฃผ 35์‹œ๊ฐ„ ๋ฏธ๋งŒ ๊ทผ๋ฌดํ•˜๋Š” ๊ฒƒ์„ ์›์น™์œผ๋กœ ํ•˜๋ฉฐ, ์ •๊ทœ์ง ์‹œ๊ฐ„์ œ ๊ณต๋ฌด์›์˜ ๊ฒฝ์šฐ ๊ธ‰์—ฌ, ์ˆ˜๋‹น, ๋ณต์ง€ํ›„์ƒ, ํœด๊ฐ€ ๋“ฑ ์ •๊ทœ์ง ์ „์ผ์ œ๊ณต๋ฌด์›๊ณผ ๋™๋“ฑํ•˜๋‹ค. ์ฃผ๋‹น๊ทผ๋ฌด์‹œ๊ฐ„์ด ์ ์œผ๋ฏ€๋กœ ๊ธ‰์—ฌ๋‚˜ ์ˆ˜๋‹น ๋“ฑ์ด ์ „์ผ์ œ์— ๋น„ํ•ด ์ ๋‹ค. ์ž„์‹œ์ง ์‹œ๊ฐ„์ œ๊ณต๋ฌด์›์˜ ๊ฒฝ์šฐ ๋‹ค๋ฅธ ์ •๊ทœ์ง(์ „์ผ์ œ, ์‹œ๊ฐ„์ œ ํฌํ•จ) ๊ณต๋ฌด์›๊ณผ ์ž„์‹œ์ง ์ „์ผ์ œ๊ณต๋ฌด์›์˜ ์ž„๊ธˆ, ์ˆ˜๋‹น, ํœด๊ฐ€, ๊ณตํœด์ผ ๊ทœ์ •์ด ์ ์šฉ๋˜์ง€ ์•Š๋Š”๋‹ค. ๊ธฐ๋ณธ๊ธ‰ ์ด์™ธ์˜ ์ˆ˜๋‹น์ด ์ง€๊ธ‰๋  ๊ฒฝ์šฐ ๊ธฐ๋ณธ๊ธ‰์˜ 15% ๋‚ด์™ธ๋กœ ์ง€๊ธ‰๋œ๋‹ค. ์ „์ฒด ๊ณต๋ฌด์›์˜ 14%๊ฐ€ ์‹œ๊ฐ„์ œ๊ณต๋ฌด์›์ธ ๊ฒƒ์œผ๋กœ ๋‚˜ํƒ€๋‚˜๊ณ  ์žˆ๋‹ค.
โ–ก ๋˜ํ•œ ๊ณ ๊ฐ์œผ๋กœ๋ถ€ํ„ฐ ๋ณด์ˆ˜๋ฅผ ์ˆ˜์ทจํ•  ๋•Œ์—๋„ ์ž๋ฌธ์„œ๋น„์Šค๋ฅผ ์ œ๊ณตํ•˜๊ธฐ ์ด์ „์— ๋ณด์ƒ๊ตฌ์กฐ๋ฅผ ๋ช…์‹œํ•˜๋„๋ก ํ•˜๊ณ  ์ด๋ฅผ ๋ฐ”ํƒ•์œผ๋กœ ๊ณ ๊ฐ๊ณผ ํ˜‘์ƒํ•˜์—ฌ ์ž๋ฌธ๋ณด์ˆ˜๋ฅผ ๊ฒฐ์ •ํ•˜๋„๋ก ํ•จ. โ—‹ ์˜๊ตญ์˜ ๊ฒฝ์šฐ services and costs disclosure document๋‚˜ combined initial disclosure document ์ค‘ ์ ์ ˆํ•œ ๊ฒƒ์„ ์„ ํƒํ•˜์—ฌ ์„œ๋น„์Šค ์ œ๊ณต์ด์ „์— ๊ณ ๊ฐ์—๊ฒŒ ์ œ์‹œํ•˜๋„๋ก ํ•˜๊ณ  ์žˆ์Œ. โ—‹ ๋˜ํ•œ ์˜๊ตญ์€ ์ด์ž๋ฌธ๋น„์šฉ ๊ณต์‹œ์— ๋Œ€ํ•œ ์ค€์น™์„ ์ ํ•ฉ์„ฑ ๋ณด๊ณ ์„œ์— ์ œ๊ณตํ•˜๋„๋ก ๊ทœ์ œํ•˜๊ณ  ์žˆ์Œ. โ—‹ ๊ณ ๊ฐ์—๊ฒŒ ์ œ์‹œํ•œ ๋ณด์ƒ๊ตฌ์กฐ ์™ธ์˜ ์—ฌํƒ€ ๋น„์šฉ์„ ๋ถ€๊ณผํ•  ์ˆ˜ ์—†์œผ๋ฉฐ ๋ณด์ƒ๊ตฌ์กฐ๋ฅผ ์„ค๋ช…ํ•˜๋Š” ๋‚ด์šฉ์— ๊ฑฐ์ง“์ด ์—†์–ด์•ผ ํ•œ๋‹ค๋Š” ์ ๋„ ๊ทœ์ • โ—‹ ํ˜ธ์ฃผ์˜ ๊ฒฝ์šฐ์—๋„ ์ˆ˜์ˆ˜๋ฃŒ๋ฅผ ํฌํ•จํ•œ ๋ณด์ˆ˜๋‚˜ ๊ธˆ์ „์  ์ด์ต์— ๋Œ€ํ•œ ์ •๋ณด ๋“ฑ์„ Financial service guides(๊ณ ๊ฐ์ด ์ž๋ฌธ์—…์ž๋ฅผ ์„ ํƒํ•  ๋•Œ ์ œ๊ณต๋ฐ›๋Š” ์ž๋ฃŒ), Statement of Advice(์ž๋ฌธ์ด ์ œ๊ณต๋  ๋•Œ ์ œ๊ณต ๋ฐ›๋Š” ์ž๋ฃŒ)๋“ฑ์— ๊ณต์‹œํ•˜๋„๋ก ํ•จ. โ–ก ๋‚˜์•„๊ฐ€ ์œ ์ง€๊ด€๋ฆฌ์™€ ๊ด€๋ จ๋œ ๋ณด์ˆ˜์˜ ๊ฒฝ์šฐ, ์ž๋ฌธ์—…์ž๊ฐ€ ์„œ๋น„์Šค๋Š” ์ œ๋Œ€๋กœ ์ œ๊ณตํ•˜์ง€ ์•Š์œผ๋ฉด์„œ ๊ด€๋ จ ๋ณด์ˆ˜๋งŒ์„ ์ˆ˜์ทจํ•˜๋Š” ๊ฒฝ์šฐ๊ฐ€ ๋ฐœ์ƒํ•˜์ง€ ์•Š๋„๋ก ๊ด€๋ จ ๋‚ด์šฉ์„ ๋ช…ํ™•ํžˆ ๊ณต์‹œํ•˜๊ณ  ์„ค๋ช…ํ•˜๋„๋ก ๊ทœ์ •ํ•จ. โ—‹ ์˜๊ตญ์˜ ๊ฒฝ์šฐ ์œ ์ง€๊ด€๋ฆฌ ๊ด€๋ จ ๋ณด์ˆ˜(On going fee)๋ฅผ ์ƒ์‹œ์ ์œผ๋กœ ์ˆ˜์ทจํ•˜๋ ค๋ฉด ๊ณ ๊ฐ์—๊ฒŒ ์ œ๊ณต๋˜๋Š” ์„œ๋น„์Šค์˜ ์„ธ๋ถ€๋‚ด์—ญ๊ณผ ๊ด€๋ จ ์ˆ˜์ˆ˜๋ฃŒ๋ฅผ ๋งค์นญํ•˜์—ฌ ์•Œ๋ ค์ฃผ๊ณ  ๋งŒ์•ฝ ๊ณ ๊ฐ์ด ์„œ๋น„์Šค๋ฅผ ์ค‘๋„์— ์ทจ์†Œํ•˜๊ธธ ์›ํ•œ๋‹ค๋ฉด ์ง€๋ถˆ์„ ์ค‘๋‹จํ•  ์ˆ˜ ์žˆ๋Š” ๋ฐฉ๋ฒ•๋„ ์•Œ๋ ค์ฃผ๋„๋ก ๊ทœ์ œํ•จ. โ—‹ ๋˜ํ•œ ์œ ์ง€๊ด€๋ฆฌ๊ด€๋ จ ๋ณด์ˆ˜๊ฐ€ ์šด์šฉํŽ€๋“œ์˜ ์ผ์ • ๋น„์œจ๋กœ ๊ณ„์‚ฐํ•˜์—ฌ ์ˆ˜์ทจํ•˜๋Š” ๊ฒฝ์šฐ์—๋Š” ํŽ€๋“œ๊ธˆ์•ก์ด ์ฆ๊ฐ€ํ•จ์— ๋”ฐ๋ผ ๊ด€๋ จ ์ง€๋ถˆ๊ธˆ์•ก ์—ญ์‹œ์ฆ๊ฐ€ํ•  ์ˆ˜ ์žˆ๋‹ค๋Š” ์ ์„ ๊ณ ๊ฐ์—๊ฒŒ ๋ช…์‹œ์ ์œผ๋กœ ์•Œ๋ ค์ฃผ๋„๋ก ํ•จ. โ—‹ ํ˜ธ์ฃผ์˜ ๊ฒฝ์šฐ์—๋„ ์˜๊ตญ๊ณผ ์œ ์‚ฌํ•˜๊ฒŒ ์ œ๊ณต๋ฐ›๋Š” ์„œ๋น„์Šค์˜ ์ข…๋ฅ˜๋ฅผ ๋ช…์‹œํ•˜๊ณ  ์ทจ์†Œ๋ฐฉ๋ฒ• ๋“ฑ์„ ์•Œ๋ ค์ฃผ๋„๋ก ๊ทœ์ œํ•˜๊ณ  ์žˆ์Œ.
Sort the book based on the price (highest to lowest) and then the timestamp (earliest to latest).
def sort_orders(self):
    """Sort the book by price (highest first), ties broken by earliest timestamp.

    Relies on sort stability: sorting by timestamp first, then by price
    descending, leaves equal-priced orders in timestamp order.  Also
    refreshes the post-order book statistics.
    """
    by_time = sorted(self.orders, key=attrgetter('timestamp'))
    self.orders = sorted(by_time, key=attrgetter('price'), reverse=True)
    # Size of the order book after the order was added.
    self.total_size_post_order = self.check_size()
    # Price of the book (up to target_price shares) after the order.
    self.total_potential_price_post_trade = self.check_total_price()
def sortByRating(self):
    """Sort ``self.items`` ascending by the rating stored at index 3.

    Items whose rating cannot be parsed as a float leave the list order
    unchanged (best-effort, mirroring the original silent-failure intent),
    but the bare ``except`` is narrowed so unrelated errors surface.
    """
    try:
        self.items.sort(key=lambda item: float(item[3]), reverse=False)
    except (TypeError, ValueError, IndexError, KeyError):
        # Only data-shape/parse problems are expected and tolerated here.
        pass
๊ธฐํš์žฌ์ •๋ถ€๊ฐ€ ๋‹ฌ๊ฑ€๋ฅ˜ 8๊ฐ€์ง€ ํ•ญ๋ชฉ์— ๊ด€ํ•œ ํ• ๋‹น๊ด€์„ธ ๊ฐœ์ •์•ˆ์„ ๊ฒฐ์ •ํ•œ ๊ฑด ์–ธ์ œ์•ผ
๊ธฐ์žฌ๋ถ€๋Š” ๊ณ„๋ž€๋ฅ˜ 8๊ฐœ ํ’ˆ๋ชฉ 3๋งŒ6,000ํ†ค์— ๋Œ€ํ•œ ํ• ๋‹น๊ด€์„ธ ๊ทœ์ • ๊ฐœ์ •์•ˆ๋„ ์ด๋‚  ์˜๊ฒฐํ–ˆ๋‹ค.
ํ•˜์ง€๋งŒ ๊ฐœ์ •์•ˆ์ด ํ†ต๊ณผ๋ผ๋„ ์ผ๋Ÿฌ์•ผ 9์›”์—๋‚˜ ์‹œํ–‰ํ•  ์ˆ˜ ์žˆ๋Š” ๋ฐ๋‹ค, ์˜ˆ์‚ฐ์„ ์ฅ” ๊ธฐํš์žฌ์ •๋ถ€์˜ ๊ฐ•๋ ฅํ•œ ๋ฐ˜๋Œ€๊ฐ€ ์—ฌ์ „ํžˆ ๊ฑธ๋ฆผ๋Œ์ด๋‹ค.
๊ด‘์ „๋ฅ˜ ๋ด‰์šฐ๋ฆฌ๋ฅผ ์„ธ ๊ตฐ๋ฐ์—์„œ ๊ด€์ธกํ•  ์ˆ˜ ์žˆ๋Š” ์ด์œ ๊ฐ€ ๋ญ์•ผ?
๊ด‘์ „๋ฅ˜ ๋ด‰์šฐ๋ฆฌ๋Š” ์„ธ ๊ณณ์—์„œ ๊ด€์ธกํ•  ์ˆ˜ ์žˆ๋Š”๋ฐ ๊ทธ ์ด์œ ๋Š” \( \mathrm{CdGa}_{2} \mathrm{Se}_{4} \) ๋‹จ๊ฒฐ์ • ๋ฐ•๋ง‰์€ ์œก๋ฐฉ์ •๊ณ„ ๊ตฌ์กฐ๋กœ ์„ฑ์žฅ๋˜์–ด spin-orbit splitting๊ณผ non cubic crystalline field ์˜ ๋™์‹œํšจ๊ณผ์— ์˜ํ•˜์—ฌ band splitting ์ด ์ผ์–ด๋‚œ ๊ฒƒ์œผ๋กœ ๋ณผ ์ˆ˜ ์žˆ๋‹ค.
๊ด‘์ „๋ฅ˜ ์ŠคํŽ™ํŠธ๋Ÿผ์˜ ์ธก์ • ์—์„œ ์—๋„ˆ์ง€ ๊ฐญ์— ํ•ด๋‹น๋˜๋Š” ๊ฐ€์ „์ž๋Œ€์—์„œ ์ „๋„๋Œ€๋กœ ๋“ค๋œฌ ์ „์ž๋“ค์— ์˜ํ•œ ๊ด‘์ „๋ฅ˜ ๋ด‰์šฐ๋ฆฌ๋“ค๊ณผ ๋‹จํŒŒ์žฅ๋Œ€์—์„œ ๊ฐ€ ์ „์ž๋Œ€ splitting์— ์˜ํ•œ ๊ด‘์ „๋ฅ˜ ๋ด‰์šฐ๋ฆฌ๋“ค์ด ๊ด€์ธก๋˜์—ˆ๋‹ค.
์œ ๋Ÿฝ๋ถ€์–‘๊ทœ์•ฝ์€ ๊ฐœ์ธ์ •๋ณด ๋ณดํ˜ธ์˜ ์ด์ต์„ ์–ด๋–ค ๋ฐฉ์‹์œผ๋กœ ๊ฐ์•ˆํ•˜์˜€์–ด
์œ ๋Ÿฝ๋ถ€์–‘๊ทœ์•ฝ์€ ํ•œํŽธ์œผ๋กœ๋Š”, ์ค‘์•™๋‹น๊ตญ์ด ๋งˆ๋ จํ•ด ๋‘” ์ •๋ณด๋งŒ์„ ์ œ๊ณตํ•˜๋„๋ก ํ•˜๊ณ , ๋‹ค๋ฅธ ํ•œํŽธ์œผ๋กœ๋Š” ์ œ๊ณตํ•  ์ •๋ณด๋ฅผ ์ฃผ์†Œ, ์†Œ๋“, ์‚ฌ์šฉ์ž, ๊ณ„์ขŒ๋ณด์œ  ์—ฌ๋ถ€ ๋ฐ ์žฌ์‚ฐ ๋“ฑ์œผ๋กœ ์—ด๊ฑฐํ•˜๋Š” ๋ฐฉ์‹์„ ํ†ตํ•˜์—ฌ ๊ฐœ์ธ์ •๋ณด ๋ณดํ˜ธ์˜ ์ด์ต์„ ๊ณ ๋ คํ•˜์˜€๋‹ค. ์ด ๊ทœ์•ฝ์— ๋”ฐ๋ผ ๋…์ผ์€ ์™ธ๊ตญ์˜ ๋ถ€์–‘์ง‘ํ–‰๊ถŒ์›์— ๊ธฐํ•œ ๊ฐ•์ œ์ง‘ํ–‰๊ณผ ๊ด€๋ จํ•˜์—ฌ ๏ฝข๊ตญ์ œ ๋ถ€์–‘์ฒญ๊ตฌ๊ถŒ ํ–‰์‚ฌ์— ๊ด€ํ•œ๋ฒ•๋ฅ ๏ฝฃ ์ œ17์กฐ์— ์ค‘์•™๋‹น๊ตญ์˜ ์ •๋ณด์ฒญ๊ตฌ๊ถŒ๊ณผ ์ •๋ณด์˜๋ฌด๋ฅผ ๊ทœ์ •ํ•ด ๋†“๊ณ  ์žˆ๋‹ค. ์œ„ ๋ฒ•์ œ17์กฐ์˜ ๊ทœ์ • ๋‚ด์šฉ์€ ์ค‘์•™๋‹น๊ตญ์ด ๋ฒ•์  ์—ฐ๊ธˆ๋ณดํ—˜๊ธฐ๊ด€์— ํŠนํžˆ ๋‹น์‹œ์˜ ์‚ฌ์šฉ์ž์— ๊ด€ํ•œ ์ •๋ณด๋ฅผ ์กฐํšŒํ•  ์ˆ˜ ์žˆ๋„๋ก ํ•˜๊ณ , 4๋Œ€๋ณดํ—˜๊ธฐ๊ด€์— ๊ตฌ์ง์ž์˜ ๋…ธ๋ฌด์ œ๊ณต ํ˜„ํ™ฉ์„ ์กฐํšŒํ•˜๋„๋ก ํ•˜๋ฉฐ, ์—ฐ๋ฐฉ๊ตญ์„ธ์ฒญ์— ๊ตญ์„ธ๊ธฐ๋ณธ๋ฒ• ์ œ93b์กฐ ์ œ1ํ•ญ์˜ ๊ณ„์ขŒ์ •๋ณด๋ฅผ ์กฐํšŒํ•  ์ˆ˜ ์žˆ๋„๋ก ํ•˜๊ณ  ์žˆ๋‹ค. ์—ฌ๊ธฐ์— ์ž๋™์ฐจ์— ๋Œ€ํ•œ ์ •๋ณด๊นŒ์ง€ ๊ทœ์ •ํ•˜๊ณ  ์žˆ์–ด ๋Œ€์™ธ๋ถ€์–‘๋ฒ• ์ œ17์กฐ์˜ ๊ทœ์ •์€ ๋ฏผ์‚ฌ์†Œ์†ก๋ฒ•์ƒ์˜ ์žฌ์‚ฐ์กฐํšŒ์ œ๋„์™€ ์œ ์‚ฌํ•˜๋‹ค.
2. ์˜๊ตญ ๏ฝข์ต๋ช…ํ™” ์‹ค์ฒœ๊ทœ์•ฝ๏ฝฃ์˜ ์ฃผ์š”๋‚ด์šฉ โ–ก ์˜๊ตญ ์ •๋ณด๋ณดํ˜ธ์œ„์›ํšŒ ๏ฝข์ต๋ช…ํ™” ์‹ค์ฒœ๊ทœ์•ฝ๏ฝฃ์€ ์ผ์ข…์˜ ์•ˆ๋‚ด์„œ์ด๊ธฐ ๋•Œ๋ฌธ์—, ๊ทธ ์ž์ฒด๊ฐ€ ๋ฒ•์ ์ธ ๊ฐ•์ œ๋ ฅ์„ ๊ฐ€์ง€๋Š” ๊ฒƒ์€ ์•„๋‹ˆ์ง€๋งŒ, ์ด ์‹ค์ฒœ๊ทœ์•ฝ์—์„œ ๊ทœ์ •ํ•œ ์‚ฌํ•ญ๋“ค์„ ์ค€์ˆ˜ํ–ˆ๋‹ค๋Š” ์‚ฌ์‹ค์„ ์ •๋ณด๋ณดํ˜ธ์œ„์›ํšŒ๊ฐ€ ํ™•์ธํ•  ๊ฒฝ์šฐ, ์ด๋Š” ์ถ”ํ›„ ์ •๋ณด๋ณดํ˜ธ์œ„์›ํšŒ๊ฐ€ ๊ฐœ์ธ์ •๋ณด ๋ณดํ˜ธ์™€ ๊ด€๋ จํ•œ ๋ฒ• ์œ„๋ฐ˜ ์‚ฌ์‹ค์— ๋Œ€ํ•œ ์กฐ์‚ฌ๋‚˜ ๋ฒ•์ง‘ํ–‰์„ ํ•  ๊ฒฝ์šฐ ์ ๊ทน์ ์œผ๋กœ ๊ณ ๋ คํ•˜๊ฒŒ ๋œ๋‹ค๋Š” ์ ์—์„œ, ์ด ๊ทœ์•ฝ์€ ๊ฐ„์ ‘์ ์ธ ์˜ํ–ฅ๋ ฅ์„ ์ง€๋‹ˆ๋Š” ๊ฐ€์ด๋“œ๋ผ์ธ์˜ ํŠน์„ฑ์„ ์ง€๋‹ˆ๊ณ  ์žˆ๋‹ค๊ณ  ํ‰๊ฐ€ํ•  ์ˆ˜ ์žˆ์Œ โ–ก ์ด ์‹ค์ฒœ๊ทœ์•ฝ์€ ๊ฐœ์ธ์ •๋ณด์˜ ์ด์šฉ์— ์žˆ์–ด ๋ณดํ˜ธ์—…๋ฌด๋ฅผ ์ถฉ์‹คํžˆ ์ˆ˜ํ–‰ํ•˜๋Š” ๊ณผ์ •์—์„œ ์œ ๋ฐœ๋  ์ˆ˜ ์žˆ๋Š” ์œ„ํ—˜์„ ๊ด€๋ฆฌํ•˜๋Š” ๋ฐฉ์‹์œผ๋กœ ์ต๋ช…ํ™”๋ฅผ ์†Œ๊ฐœํ•˜๊ณ  ์žˆ์Œ โ—‹ ์šฐ์„  ์ด ๊ทœ์•ฝ์ด ์–ด๋–ค ๊ฒƒ์ด๋ฉฐ ์™œ ๋“ฑ์žฅํ–ˆ๊ณ  ๋ฒ•์ ์ธ ์ง€์œ„๊ฐ€ ์–ด๋– ํ•œ์ง€(์ œ1์žฅ)๋ฅผ ์„ค๋ช…ํ•˜๊ณ , ์ต๋ช…ํ™”๋ผ๋Š” ๊ฒƒ์€ ๊ฐœ์ธ์ •๋ณด์˜ ์ •์˜์™€ ๋ถ„๋ฆฌํ•˜๊ธฐ ์–ด๋ ค์šด ์กด์žฌ์ž„์„ ๋ฐํžŒ ํ›„(์ œ2์žฅ), ๊ฐœ์ธ์ •๋ณด์˜ ์ต๋ช…ํ™”๊ฐ€ ๊ฐœ์ธ์ •๋ณด ๋ณดํ˜ธ์— ํšจ๊ณผ์ ์ด๋ผ๋Š” ์ ์„ ์„ค๋ช…ํ•จ(์ œ3์žฅ) โ—‹ ์ด๋ฅผ ๋ฐ”ํƒ•์œผ๋กœ ์ต๋ช…ํ™”๋œ ์ •๋ณด๋ฅผ ์ƒ์‚ฐํ•˜๊ฑฐ๋‚˜ ๊ณต๊ฐœํ•  ๋•Œ์— ์–ธ์ œ๋“ ์ง€ ์ •๋ณด์ฃผ์ฒด์˜ ๋™์˜๊ฐ€ ํ•„์š”ํ•œ ๊ฒƒ์€ ์•„๋‹ˆ๋ผ๋Š” ์ (์ œ4์žฅ)๊ณผ ๊ณต๊ฐ„์ •๋ณด๋ฅผ ๊ฐœ์ธ์ •๋ณด์ฒ˜๋Ÿผ ์ทจ๊ธ‰ํ•ด์•ผ ํ• ์ง€๋ฅผ ๋‹ค๋ฃฌ ํ›„(์ œ5์žฅ), ๊ฐœ์ธ์ •๋ณด ๋ณดํ˜ธ๋ฒ•์ œ์— ๋”ฐ๋ผ์„œ๋Š” ๊ณต๊ฐœํ•ด๋„ ๋˜๋Š” ์ •๋ณด๋„ ์ธ๊ถŒ๋ฒ• ๊ฐ™์€ ๊ธฐํƒ€ ๋ฒ•๋ น์— ์˜ํ•ด ๊ณต๊ฐœํ•˜์ง€ ๋ง์•„์•ผ ํ•˜๋Š” ๋•Œ๊ฐ€ ์žˆ์Œ(์ œ6์žฅ)์„ ๋ฐํžˆ๊ณ  ์žˆ์Œ โ—‹ ๋˜ํ•œ ๊ฐœ์ธ์ •๋ณด ๋ณดํ˜ธ๋ฅผ ์œ„ํ•ด์„œ๋Š” ์ •๋ณด์˜ ์œ ํ˜•๋ณ„๋กœ ๋‹ฌ๋ฆฌ ์ทจ๊ธ‰๋  ํ•„์š”๊ฐ€ ์žˆ์œผ๋ฉฐ(์ œ7์žฅ), ์ •๋ณด๋ณดํ˜ธ๋ฅผ ์œ„ํ•œ ๊ฑฐ๋ฒ„๋„Œ์Šค๊ฐ€ ์ค‘์š”ํ•˜๋‹ค๋Š” ์ (์ œ8์žฅ) ๋ฐ ๋ณดํ˜ธ์™€ ์ด์šฉ์˜ ๊ท ํ˜•์„ ์œ„ํ•œ ์—ฐ๊ตฌ๋ชฉ์ ์ƒ ๊ฐœ์ธ์ •๋ณด ๋ณดํ˜ธ๋ฒ•์ œ์˜ ์ ์šฉ์˜ˆ์™ธ(์ œ9์žฅ)์— ๋Œ€ํ•ด ์„ค๋ช…ํ•˜๊ณ  ์žˆ์Œ
Add the InvertedIndex doc_index of the document doc_id to the multi-document InvertedIndex (inverted), using doc_id as the document identifier.
def inverted_index_add(inverted, doc_id, doc_index):
    """Merge a single document's index into a multi-document inverted index.

    For every word in ``doc_index`` (word -> locations), records the
    locations under ``inverted[word][doc_id]``.  Returns ``inverted``.
    """
    # BUG FIX: dict.iteritems() is Python 2 only; .items() works on Python 3.
    for word, locations in doc_index.items():
        inverted.setdefault(word, {})[doc_id] = locations
    return inverted
def _act_index_exact(fieldname, doc, value, context): doc.add_term(fieldname, value, 0)
์ดˆ๊ธฐ ๋‹จ๊ณ„ ์กฐ์น˜์— ๋”ฐ๋ผ ํ–ฅํ›„ ๋ช‡ ๊ฐœ์›”๊ฐ„ ๊ตญ์ œ์‚ฌํšŒ์˜ ๋Œ€์ด๋ž€ ์ œ์žฌ๊ฐ€ ์ผ๋ถ€ ์™„ํ™”๋  ์ˆ˜ ์žˆ์–ด
์šฐ๋ฆฌ๋‚˜๋ผ์— ๋Œ€ํ•œ 2012๋…„ ๋ฏธ๊ตญ ๊ตญ๋ฐฉ์ˆ˜๊ถŒ๋ฒ•์ƒ ๋Œ€์ด๋ž€์ œ์žฌ ์˜ˆ์™ธ ์ง€์œ„ ์—ฐ์žฅ 3. ํ•œํŽธ, โ€˜P5+1โ€™ ๊ตญ๊ฐ€๋“ค(์œ ์—” ์•ˆ๋ณด๋ฆฌ ์ƒ์ž„์ด์‚ฌ๊ตญ์ธ ๋ฏธ, ๋Ÿฌ, ์ค‘, ์˜, ํ”„ 5๊ฐœ๊ตญ ๋ฐ ๋…์ผ)๊ณผ ์ด๋ž€์ด 11.24(์ผ) ์ œ๋„ค๋ฐ”์—์„œ ํ•ฉ์˜ํ•œ โ€˜์ดˆ๊ธฐ ๋‹จ๊ณ„ ์กฐ์น˜โ€™์—๋Š”, ์ด๋ž€ ์ธก์˜ ์•ฝ์† ์ค€์ˆ˜๋ฅผ ์กฐ๊ฑด์œผ๋กœ ํ–ฅํ›„ 6๊ฐœ์›”๊ฐ„ ๊ตญ์ œ์‚ฌํšŒ์˜ ๋Œ€์ด๋ž€ ์ œ์žฌ๊ฐ€ ์ผ๋ถ€ ์™„ํ™”๋  ์ˆ˜ ์žˆ๋‹ค๊ณ  ๋˜์–ด ์žˆ๋‹ค. ใ…‡ ๋”ฐ๋ผ์„œ, ์•ž์œผ๋กœ์˜ ์ด๋ž€์‚ฐ ์›์œ  ์ˆ˜์ž… ๊ฐ์ถ• ๋ฌธ์ œ ๋“ฑ ์ œ์žฌ ์™„ํ™”์™€ ๊ด€๋ จํ•œ ๋‚ด์šฉ์€ ํ–ฅํ›„ ์ œ๋ฐ˜ ์ƒํ™ฉ์— ๋”ฐ๋ผ ๊ตฌ์ฒดํ™”๋˜์–ด ๊ฐˆ ๊ฒƒ์œผ๋กœ ๋ณด๋ฉฐ, ์ •๋ถ€๋Š” ๊ทธ ๊ณผ์ •์—์„œ ๊ด€๋ จ ๊ตญ๊ฐ€๋“ค๊ณผ ํ•„์š”ํ•œ ํ˜‘์˜๋ฅผ ํ•ด ๋‚˜๊ฐˆ ์˜ˆ์ •์ด๋‹ค. โ€ป ๋‹ค๋งŒ, ์ด๋ฒˆ ์ œ๋„ค๋ฐ” ํ•ฉ์˜์—๋„ ๋ถˆ๊ตฌํ•˜๊ณ , ๋ฏธ๊ตญ์˜ใ€Œ2013๋…„ ๊ตญ๋ฐฉ์ˆ˜๊ถŒ๋ฒ•ใ€์— ํฌํ•จ๋œ ์ด๋ž€์ž์œ ๋ฐ๋ฐ˜ํ™•์‚ฐ๋ฒ•(IFCA: Iran Freedom and Counterproliferation Act) ๋ฐ 2010๋…„ ํ†ตํ•ฉ์ด๋ž€์ œ์žฌ๋ฒ•(CISADA: Comprehensive Iran Sanctions, Accountability and Divestment Act) ๋“ฑ ๋‹ค๋ฅธ ๋ฏธ๊ตญ ๋ฒ•์— ๋”ฐ๋ฅธ ๋Œ€์ด๋ž€ ์ œ์žฌ๋Š” ์ง€์† ์ ์šฉ๋˜๋ฏ€๋กœ ์œ ์˜ ํ•„์š” 4. ์šฐ๋ฆฌ ์ •๋ถ€๋Š” ๊ตญ์ œ์‚ฌํšŒ์˜ ์ฑ…์ž„์„ ๋‹คํ•˜๋Š” ์ผ์›์œผ๋กœ์„œ ์ด๋ž€ํ•ต ๋ฌธ์ œ์˜ ํ‰ํ™”์  ํ•ด๊ฒฐ์„ ์œ„ํ•œ ๋…ธ๋ ฅ์— ๊ณ„์†ํ•ด์„œ ์ ๊ทน ๋™์ฐธํ•  ์˜ˆ์ •์ด๋ฉฐ, ์ด๋Ÿฌํ•œ ๊ณผ์ •์—์„œ ์šฐ๋ฆฌ ๊ธฐ์—…๋“ค์ด ๋ถˆํ•„์š”ํ•œ ํ”ผํ•ด๋ฅผ ์ž…์ง€ ์•Š๋„๋ก ์•ž์œผ๋กœ๋„ ์ตœ๋Œ€ํ•œ์˜ ๋…ธ๋ ฅ์„ ๊ธฐ์šธ์—ฌ ๋‚˜๊ฐˆ ๊ฒƒ์ด๋‹ค.
๊ธ€๋กœ๋ฒŒ ์„์œ ์‹œ์žฅ ๋ณ€ํ™”๋ฅผ ์›์œ ์ˆ˜์ž…๋‹ค๋ณ€ํ™” ๊ธฐํšŒ๋กœ 3. ์ตœ๊ทผ ์„์œ ์‹œ์žฅ ๋ณ€ํ™” ๋ฐ ๋Œ€์‘ [ ์ด๋ž€์ œ์žฌ ํ•ด์ œ๋Š” ๋„์ž…์„  ๋‹ค๋ณ€ํ™”์˜ ๊ธฐํšŒ ] โ–ก ์ „๋ฌธ๊ฐ€๋“ค์€ ๋‚ด๋…„๋„ ์ƒ๋ฐ˜๊ธฐ ์ด๋ž€ ๊ฒฝ์ œ์ œ์žฌ ํ•ด์ œ๋กœ ์ด๋ž€์‚ฐ ์›์œ ๊ฐ€ ์‹œ์žฅ์— ๊ณต๊ธ‰๋  ๊ฒฝ์šฐ ์œ ๊ฐ€ํ•˜๋ฝ์— ์˜ํ–ฅ์„ ์ค„ ๊ฒƒ์œผ๋กœ ์ „๋ง ใ…‡ ์˜ค๋žœ ์ œ์žฌ๊ธฐ๊ฐ„์œผ๋กœ ์ด๋ž€ ์„์œ ๊ณต๊ธ‰์ด ๋‹จ๊ธฐ๊ฐ„๋‚ด ์ œ์žฌ์ „ ์ˆ˜์ค€์œผ๋กœ ํšŒ๋ณต๋˜๊ธฐ๋Š” ์‰ฝ์ง€ ์•Š์œผ๋‚˜, 2016๋…„์— 50๋งŒ๏ฝž100๋งŒB/d ๊ทœ๋ชจ์˜ ์„์œ  ๊ณต๊ธ‰์ด ๊ฐ€๋Šฅํ•˜๊ณ , 3,000๋งŒB~6,000๋งŒB ๊ทœ๋ชจ์˜ ํ•ด์ƒ์žฌ๊ณ ๋ฅผ ๋ณด์œ ํ•˜๊ณ  ์žˆ์–ด ํ–ฅํ›„ ์œ ๊ฐ€ ํ•˜๋ฝ์š”์ธ์œผ๋กœ ์ž‘์šฉํ•  ๊ฒƒ์œผ๋กœ ์ „๋ง * ์ด๋ž€์ œ์žฌ ํ•ด์ œ๋กœ ์œ ๊ฐ€ ์ถ”๊ฐ€ํ•˜๋ฝ์„ ์ „๋งํ•˜๋Š” ๊ฒฌํ•ด, ์ด๋ฏธ ์œ ๊ฐ€์— ์„ ๋ฐ˜์˜๋˜์–ด ์žˆ๋‹ค๋Š” ๊ฒฌํ•ด๊ฐ€ ์žˆ์–ด ํ•ด์ œ์‹œ ์œ ๊ฐ€ํ•˜๋ฝ ๊ทœ๋ชจ์—๋Š” ๋ถˆํ™•์‹ค์„ฑ์ด ์กด์žฌ โ–ก ์ •์œ ์—…๊ณ„๋Š” ์ด๋ž€์ œ์žฌ ํ•ด์ œ์‹œ, ์ œ์žฌ์ „ ์ˆ˜์ค€์œผ๋กœ ์ด๋ž€ ์›์œ ์ˆ˜์ž…์„ ๋Š˜๋ฆด ๊ฒƒ์œผ๋กœ ์ „๋ง ใ…‡ ํ˜„์žฌ ์ด๋ž€์‚ฐ ์›์œ ์ˆ˜์ž…์€ ์ œ์žฌ ์ „๊ณผ ๋น„๊ตํ•˜์—ฌ ์ ˆ๋ฐ˜ ์ˆ˜์ค€์œผ๋กœ ๊ฐ์†Œํ•œ ์ƒํ™ฉ์œผ๋กœ, ํ–ฅํ›„ ์ปจ๋ด์„ธ์ดํŠธ๋ฅผ ์ค‘์‹ฌ์œผ๋กœ ์ด๋ž€์›์œ ์— ๋Œ€ํ•œ ์ˆ˜์š” ์ฆ๊ฐ€ ์ „๋ง
Method is defined by the implemented data loaders to yield the sample indexes. Only used in data_generator.
def sample_index_generator(self):
    """Yield sample indexes; concrete data loaders must override this.

    Only used in data_generator.
    """
    raise NotImplementedError
def get_sample(self, dataset, sample_idx):
    """Fetch one sample from ``dataset``; no-op placeholder for subclasses."""
    return None
Check if loop brackets are balanced; if not, exit. Assumes the program was already sanitized of comments.
def check_valid(program):
    """Abort the interpreter if loop brackets in ``program`` are unbalanced.

    Assumes comments were already stripped from the program.  Exits with
    status -1 (raising SystemExit) on the first unmatched ']' or, after the
    scan, on unclosed '[' brackets; returns None when balanced.
    """
    balance = 0
    for ch in program:
        if ch == '[':
            balance += 1
        elif ch == ']':
            balance -= 1
        if balance < 0:
            # A ']' appeared before any matching '['; no point scanning on.
            print("\nERROR: Loop brackets not balanced",
                  file=sys.stderr, flush=True)
            # sys.exit instead of the site-injected exit(): works under -S.
            sys.exit(-1)
    if balance > 0:
        print("\nERROR: Loop bracket not balanced, not enough closing",
              file=sys.stderr, flush=True)
        sys.exit(-1)
def is_balanced(string: str) -> bool:
    """Return True when every (), {} and [] in ``string`` nests correctly.

    Characters other than the six bracket symbols are ignored.
    """
    pairs = {')': '(', '}': '{', ']': '['}
    openers = set(pairs.values())
    pending = []
    for ch in string:
        if ch in openers:
            pending.append(ch)
        elif ch in pairs:
            # A closer must match the most recent unmatched opener.
            if pending and pending[-1] == pairs[ch]:
                pending.pop()
            else:
                return False
    return not pending
Return Relay vars from input shapes and create entries based on expected graph inputs to allow translation
def _get_relay_input_vars(graph, input_infos, prelude, is_module=True, default_dtype="float32"):
    """Return Relay vars for the inputs of ``graph`` described by ``input_infos``.

    ``input_infos`` is a list of (name, shape) or (name, (shape, dtype))
    tuples matching the (non-self) inputs of ``graph``.  The result maps
    each graph input's debug name to a Relay var of the translated type,
    allowing translation of the graph.
    """
    graph_inputs = list(graph.inputs())
    if is_module:
        # a module has "self" as first input, which we do not need/want
        graph_inputs = graph_inputs[1:]

    if not isinstance(input_infos, list):
        raise RuntimeError("Graph inputs input_infos should be a list")

    if len(graph_inputs) != len(input_infos):
        raise RuntimeError(
            f"PyTorch has {len(graph_inputs)} inputs and input_infos lists {len(input_infos)}."
        )

    def get_relay_ty(ishape, itype, pt_type):
        # Translate one (shape, dtype hint, torch type) triple to a Relay type.
        if pt_type.kind() == "TensorType":
            if not (_is_int_seq(ishape) or len(ishape) == 0):
                raise RuntimeError("Shape for Tensors must be lists of ints")
            if (pt_type.dim() is not None and pt_type.dim() != len(ishape)) or (
                pt_type.sizes() is not None
                and any([s1 != s2 for s1, s2 in zip(pt_type.sizes(), ishape)])
            ):
                raise RuntimeError("Shapes of input list and information in the graph do not match")
            if len(ishape) > 1 and any(dim <= 0 for dim in ishape[1:]):
                raise RuntimeError(
                    "Expected input's non-batch dimensions to have positive length, "
                    f"but input has a shape of {pt_type.sizes()}"
                )
            pt_dtype = pt_type.scalarType()
            if not pt_dtype and itype:
                pt_dtype = itype
            dtype = _convert_data_type(pt_dtype, default_dtype=default_dtype)
            return TensorType(ishape, dtype)
        elif pt_type.kind() == "TupleType":
            if not isinstance(ishape, tuple):
                raise RuntimeError("Shapes for tuples must be tuples")
            return TupleType(
                [get_relay_ty(elem, itype, pt_t) for elem, pt_t in zip(ishape, pt_type.elements())]
            )
        elif pt_type.kind() == "ListType":
            if not isinstance(ishape, list):
                raise RuntimeError("Shapes for lists must be lists")
            pt_elemtype = pt_type.getElementType()
            elem_tys = [get_relay_ty(s, itype, pt_elemtype) for s in ishape]
            if len(elem_tys) > 0 and not all(map(lambda ty: ty == elem_tys[0], elem_tys)):
                raise RuntimeError("List elements need have identical types")
            rlist, _, _ = prelude.mod.get_type("List")
            return rlist(elem_tys[0])
        elif pt_type.kind() == "OptionalType":
            # we do not support None yet, so we fill in the type
            return get_relay_ty(ishape, itype, pt_type.getElementType())
        # TODO: scalar inputs
        raise NotImplementedError("unsupported input type")

    input_vars = {}

    # Normalize every entry to the (name, (shape, dtype)) form.
    new_input_infos = []
    for num, inp in enumerate(input_infos):
        if not isinstance(inp, tuple):
            raise RuntimeError(f"Graph input {num} is not a tuple")
        if len(inp) != 2 or not isinstance(inp[0], str):
            raise RuntimeError(
                f"Graph input {inp} is not valid,"
                f" expected ('name', shape) or ('name', (shape, dtype))"
            )
        if not isinstance(inp[1], tuple) or len(inp[1]) == 0 or not isinstance(inp[1][-1], str):
            new_input_infos.append((inp[0], (inp[1], default_dtype)))
        else:
            new_input_infos.append(inp)

    input_types = [
        (name, get_relay_ty(info[0], info[1], gi.type()))
        for (name, info), gi in zip(new_input_infos, graph_inputs)
    ]

    ir_inputs = [i.debugName() for i in graph_inputs]
    for ir_input, (name, itype) in zip(ir_inputs, input_types):
        inp = _expr.var(name, type_annotation=itype)
        # Translate from graph input to user input name
        input_vars[ir_input] = inp

    return input_vars
def parseInputs():
    """Read the model's input csv files and return them as a tuple.

    Returns (data, emissionTax, nonEmissiveIncentive, penaltyValues,
    plantProductionRates), where ``data`` maps year -> DataFrame of
    monthly energy consumption (GWh) per zone.
    """
    # Import past year's data: {year: df}; df row indices are months 0-11,
    # columns are zones Z1-Z7, values are energy consumption in GWh.
    data = {}
    for year in const.DATA_YEARS:
        data[year] = pd.read_csv(
            f'{path}/{const.DATA_DIR}/NBTrend20{year}.csv',
            names=const.ZONES)

    # Import incentive rates ($/kWh); the two column headers carry the values.
    incentiveRates = pd.read_csv(f'{path}/{const.INFO_DIR}/IncentiveRates.csv')
    emissionTax, nonEmissiveIncentive = incentiveRates.columns

    # Import penalty values.
    penaltyValues = pd.read_csv(
        f'{path}/{const.INFO_DIR}/PenaltyValues.csv',
        names=const.PENALTY_ZONES)

    # Import plant production rates.
    plantProductionRates = pd.read_csv(
        f'{path}/{const.INFO_DIR}/PlantProductionRates.csv',
        names=const.PLANT_TYPES)

    return data, emissionTax, nonEmissiveIncentive, penaltyValues, plantProductionRates
This function creates a figure (chart) that is a Sankey Chart for the neighborhood that is input
def createSankeyChart(nbd):
    """Build a Sankey chart of listing counts from neighborhood to property type.

    ``nbd`` is a neighborhood name, or "All" to chart neighborhood groups.
    """
    nbd_col = (
        "neighbourhood_group_cleansed" if nbd == "All" else "neighbourhood_cleansed"
    )
    if nbd != "All":
        three_proptype_df = rental_df[
            rental_df["neighbourhood_group_cleansed"] == nbd
        ].copy()
    else:
        three_proptype_df = rental_df.copy()
    three_proptype_df = three_proptype_df[[nbd_col, "property_type_class"]].copy()
    # Limit types of property to the classes we chart.
    three_proptype_df = three_proptype_df[
        three_proptype_df["property_type_class"].str.contains(
            "House|Private Room|Shared Room|Condominium|Seviced apartment|Apartment|Townhouse"
        )
    ]
    # We are not interested in Houseboats in the Sankey chart.
    three_proptype_df = three_proptype_df[
        ~three_proptype_df["property_type_class"].str.contains("Houseboat")
    ]
    label_list = three_proptype_df[nbd_col].unique().tolist()
    label_list.sort()
    label_list += three_proptype_df["property_type_class"].unique().tolist()
    # Count listings per neighborhood (group) and property type.
    sankey_df = three_proptype_df.groupby([nbd_col, "property_type_class"]).agg(
        count_listings=("property_type_class", "count")
    )
    sankey_df = sankey_df.reset_index()
    # Map every node label to its index; the sankey links connect indices.
    label_idx_dict = {label: idx for idx, label in enumerate(label_list)}
    sankey_df["nbd_idx"] = sankey_df[nbd_col].map(label_idx_dict)
    sankey_df["prop_idx"] = sankey_df["property_type_class"].map(label_idx_dict)
    color_list = full_color_list[: len(three_proptype_df[nbd_col].unique().tolist())]
    group_color = dict(zip(list(sankey_df.groupby(nbd_col).groups.keys()), color_list))
    sankey_df["color_link"] = sankey_df[nbd_col].map(group_color)
    source = sankey_df["nbd_idx"].tolist()
    target = sankey_df["prop_idx"].tolist()
    values = sankey_df["count_listings"].tolist()
    # There are as many colors as nodes (17 + 3 in the "All" case).
    # NOTE(review): the original source is garbled here ("nodes = 17 + 3"
    # may have been code); treated as commentary since no such value is used.
    if nbd == "All":
        color_node = full_color_list + ["#befdb7", "#1B03A3", "#FEFCD7"]
    else:
        color_node = full_color_list[: len(label_list)]
    # For every neighborhood we use the same color for the link.
    color_link = sankey_df["color_link"].tolist()
    fig = go.Figure(
        data=[
            go.Sankey(
                node=dict(
                    pad=15,
                    thickness=20,
                    line=dict(color="black", width=0.5),
                    label=label_list,
                    color=color_node,
                    customdata=label_list,
                    hovertemplate="%{customdata} has %{value} listings<extra></extra>",
                ),
                link=dict(
                    source=source,
                    target=target,
                    value=values,
                    color=color_link,
                    hovertemplate="Link from %{source.customdata}<br />"
                    + "to %{target.customdata}<br />has %{value} listings<extra></extra>",
                ),
            )
        ]
    )
    fig.update_layout(
        title_text="Available houses and rooms",
        font_size=12,
        title_font_color=dashboard_colors["medium-blue-grey"],
        font=dict(size=12, color=dashboard_colors["medium-blue-grey"]),
    )
    # Set the theme.
    fig.layout.template = "custom_dark"
    return fig
def show_custom_graph(self):
    """Placeholder hook; subclasses override to draw a custom graph."""
    return None
์•ผ์™ธ ์บ๋…ธํ”ผ/ํŒŒํ‹ฐ์˜ค์˜ ๋น„์šฉ์€ ์–ผ๋งˆ์ธ๊ฐ€์š”?
์ผ๋ฐ˜์ ์ธ ์ฃผํƒ์˜ ์ผ๋ฐ˜์ ์ธ ์ฐฝ๋ฌธ์— ๋Œ€ํ•œ ์ฐจ์–‘์€ ํฌ๊ธฐ์— ๋”ฐ๋ผ 300๋‹ฌ๋Ÿฌ์—์„œ 700๋‹ฌ๋Ÿฌ์˜ ๋น„์šฉ์ด ๋“ค ๊ฒƒ์ž…๋‹ˆ๋‹ค. ์—ฌ๊ธฐ์—๋Š” ํ”„๋ ˆ์ž„๊ณผ ์ˆ˜๋™ ๋ฆฌํŠธ๋ž™์…˜ ๋ฉ”์ปค๋‹ˆ์ฆ˜, ์กฐ๋ฆฝ ํ•˜๋“œ์›จ์–ด ๋ฐ ์„ค์น˜ ๋ธŒ๋ž˜ํ‚ท์ด ํฌํ•จ๋ฉ๋‹ˆ๋‹ค. ๋น„๋ฆฌํŠธ๋ž™์…˜ ๊ธˆ์† ์ฐจ์–‘์€ ์‚ฌ์šฉํ•˜๊ธฐ๋กœ ๊ฒฐ์ •ํ•œ ๊ธˆ์†์— ๋”ฐ๋ผ 500๋‹ฌ๋Ÿฌ์—์„œ 800๋‹ฌ๋Ÿฌ์˜ ๋น„์šฉ์ด ๋“ค ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. ๊ณฐํŒก์ด ๋ฐฉ์ง€ ์ฒ˜๋ฆฌ๋ฅผ ์ ์šฉํ•˜๋ฉด ๋น„์šฉ์„ ์ ˆ๊ฐํ•  ์ˆ˜ ์žˆ์œผ๋ฉฐ, ๋ณ€์ƒ‰ ์ œ๊ฑฐ๋ฅผ ์œ„ํ•œ ์ฒญ์†Œ ๋น„์šฉ์€ 40๋‹ฌ๋Ÿฌ์ž…๋‹ˆ๋‹ค. ํŒŒํ‹ฐ์˜ค์šฉ ๋ชจํ„ฐํ™”๋œ ๋ฆฌํŠธ๋ž™์…˜ ์ฐจ์–‘์€ 2,500๋‹ฌ๋Ÿฌ์—์„œ 3,500๋‹ฌ๋Ÿฌ์˜ ๋น„์šฉ์ด ๋“ค ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค.
ํŒจ๋ธŒ๋ฆญ ์ฐจ์–‘์˜ ๋น„์šฉ์€ ์–ผ๋งˆ์ธ๊ฐ€์š”? ์ง€์—ญ ํŒŒํ‹ฐ์˜ค ๋ฐ ๊ฒฝ๋กœ ๊ณ„์•ฝ์ž์—๊ฒŒ ๋ฌด๋ฃŒ ๊ฒฌ์ ์„ ๋ฐ›์œผ์„ธ์š”. ์•ผ์™ธ ์ƒํ™œ ๊ณต๊ฐ„์—์„œ ๋ณด๋‚ด๋Š” ์‹œ๊ฐ„์„ ๊ทน๋Œ€ํ™”ํ•˜๊ธฐ ์œ„ํ•ด ๊ทธ๋Š˜์„ ์ถ”๊ฐ€ํ•˜๋Š” ๊ฒƒ์€ ์—ฌ๋ฆ„์˜ ๋”์œ„ ์†์—์„œ ๋ฌด์‹œ๋˜๋Š” ํŒŒํ‹ฐ์˜ค์™€ ์˜ค๋ฝ์— ์ ํ•ฉํ•œ ๋ถ„์ฃผํ•œ ํŒŒํ‹ฐ์˜ค์˜ ์ฐจ์ด๋ฅผ ๋งŒ๋“ค ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค.
Displays a text on the frame with a shadow behind it for better visualization on any background
def text_on_frame(frame, text, position, thickness, font_size=1, text_color=(255, 255, 255), shadow_color=(128, 128, 128), font_style=cv2.FONT_HERSHEY_SIMPLEX, line_style=cv2.LINE_AA):
    """Draw *text* on *frame* with a shadow behind it so it stays readable on
    any background.

    The shadow is rendered first with thickness+1 in *shadow_color*, then the
    text itself is drawn on top in *text_color*.  Mutates *frame* in place.

    :param frame: BGR image (numpy array) to draw on
    :param text: string to render
    :param position: (x, y) of the text's bottom-left corner
    :param thickness: stroke thickness of the foreground text
    """
    # Shadow pass: identical text, one pixel thicker, in the shadow color.
    cv2.putText(frame, text, position, font_style, font_size, shadow_color, thickness+1, line_style)
    # Foreground pass on top of the shadow.
    cv2.putText(frame, text, position, font_style, font_size, text_color, thickness, line_style)
def render_text(self, text, color, bg=None):
    """Render *text* with this object's font, antialiased, in *color*; *bg* is
    an optional background fill color (None = transparent background)."""
    return self.font.render(text, True, color, bg)  # type: pygame.SurfaceType
Return the image from the input ``images`` list that has the largest overlap with the ``refimage`` image.
def max_overlap_image(refimage, images, enforce_user_order):
    """Pop and return the image from ``images`` with the largest overlap
    with ``refimage``.

    :param refimage: image object exposing ``intersection_area(other)``
    :param images: list of candidate images; the chosen one is REMOVED
    :param enforce_user_order: when True, ignore overlap and return the
        first image (legacy tweakreg behavior)
    :return: the selected image, or None when ``images`` is empty
    """
    if not images:
        return None

    if enforce_user_order:
        # Legacy behavior: honor the order the user supplied.
        return images.pop(0)

    # Pick the index with the greatest intersection area; on ties the first
    # occurrence wins, matching np.argmax semantics of the earlier version.
    best_idx = max(range(len(images)),
                   key=lambda k: refimage.intersection_area(images[k]))
    return images.pop(best_idx)
def get_images_id_from_database(db, tags, img_type, use_count, limit):
    """Return up to ``limit`` image id strings that match every tag in ``tags``.

    First intersects the per-tag index collections against the ``images``
    collection; if the index does not yield enough ids, falls back to scanning
    the default image pool via the embedded ``tags.<tag>`` fields.

    :param db: pymongo database handle (one collection per tag plus ``images``)
    :param tags: iterable of tag names; results must match ALL of them
    :param img_type: required value of the ``img_type`` field
    :param use_count: only ids whose ``use_count`` is strictly greater qualify
    :param limit: maximum number of ids to return
    """
    # First find intersection of each tag index in the databases
    all_tags_list = []
    for tag in tags:
        coll = db[tag]
        raw_result = coll.find({})
        image = [image['_id'] for image in list(raw_result)]
        result = db.images.find(
            {"_id": {"$in": image},
             "img_type": img_type,
             "use_count": {"$gt": use_count}},
            {"_id": 1})
        images = [str(image['_id']) for image in list(result)]
        all_tags_list.append(images)

    # BUG FIX: set objects cannot be sliced; materialize the intersection as a
    # list so the ``id_list[:limit]`` below does not raise TypeError.
    id_list = list(set(all_tags_list[0]).intersection(*all_tags_list[1:]))

    # Second, check whether we have to fetch more images from default image pool
    if len(id_list) >= limit:
        print("Get images from index!")
        images_list = id_list[:limit]
    else:
        print("Not enough images from index!")
        print("Try to find more in the default image pool!")
        coll = db['images']
        # Then to find other images from the default image pool
        all_tags_list = []
        for tag in tags:
            tag_str = 'tags.%s' % tag
            result = coll.find(
                {tag_str: {"$exists": True},
                 "img_type": img_type,
                 "use_count": {"$gt": use_count}},
                {"_id": 1})
            images = [str(image['_id']) for image in list(result)]
            all_tags_list.append(images)
        images_list = list(set(all_tags_list[0]).intersection(*all_tags_list[1:]))
        if len(images_list) >= limit:
            images_list = images_list[:limit]
    return images_list[:limit]
ํ‰ํ‰ํ•œ ๊ธฐํŒ์ด๋ž€ ๋ฌด์—‡์ธ๊ฐ€?
์œ ๋ฆฌ ๊ธฐํŒ์€ ์•ก์ • ํ…”๋ ˆ๋น„์ „(LCD), ํ”Œ๋ผ์ฆˆ๋งˆ ํ…”๋ ˆ๋น„์ „(PDP), ๋…ธํŠธ๋ถ ๋””์Šคํ”Œ๋ ˆ์ด์™€ ๊ฐ™์€ ํ‰ํŒ ๋””์Šคํ”Œ๋ ˆ์ด(FPD)๋ฅผ ๋งŒ๋“œ๋Š” ์‹œํŠธ ์œ ๋ฆฌ์ž…๋‹ˆ๋‹ค. ์ด ์žฌ๋ฃŒ๋Š” ์ฐฝ์œ ๋ฆฌ์— ์‚ฌ์šฉ๋˜๋Š” ์‹œํŠธ ์œ ๋ฆฌ์™€ ๋‹ค๋ฆ…๋‹ˆ๋‹ค. ์•ก์ • ๋””์Šคํ”Œ๋ ˆ์ด์™€ ํ”Œ๋ผ์ฆˆ๋งˆ ๋””์Šคํ”Œ๋ ˆ์ด ๊ฐ„์—๋„ ์žฌ๋ฃŒ๊ฐ€ ๋‹ค๋ฆ…๋‹ˆ๋‹ค.
ํŠนํžˆ ์ค‘์š”ํ•˜๊ณ  ๋‰ด์Šค ๊ฐ€์น˜๊ฐ€ ์žˆ๋Š” ๊ฒƒ์€ ๊ฐค๋Ÿญ์‹œ S6 ์—ฃ์ง€(๋ฐ ๊ฐค๋Ÿญ์‹œ ๋…ธํŠธ ์—ฃ์ง€)์˜ ์œ ์—ฐํ•œ ํ”Œ๋ผ์Šคํ‹ฑ ๊ธฐํŒ์—์„œ์˜ OLED ๋””์Šคํ”Œ๋ ˆ์ด ์„ฑ๋Šฅ์ด ๊ฐค๋Ÿญ์‹œ S6์˜ ์ „ํ†ต์ ์ธ ํ‰๋ฉด ๋ฐ ๋‹จ๋‹จํ•œ ๊ธฐํŒ์—์„œ์˜ ์„ฑ๋Šฅ๊ณผ ๋ณธ์งˆ์ ์œผ๋กœ ๋™์ผํ•˜๋‹ค๋Š” ์ ์ž…๋‹ˆ๋‹ค. ์ด๋Š” 500ํ”ฝ์…€ ์ด์ƒ ๋ฐ 2560x1440 ํ•ด์ƒ๋„์—์„œ๋„ ๋งˆ์ฐฌ๊ฐ€์ง€์ž…๋‹ˆ๋‹ค.
Count `constraints` for part one. Lines where `char` occurs at least `low` and at most `high` times.
def part_one(constraints: t.List[Constraint]) -> int:
    """Count the constraints whose password contains ``char`` at least ``low``
    and at most ``high`` times (inclusive)."""
    # Booleans sum as 0/1, giving the count of satisfied constraints.
    return sum(
        low <= password.count(char) <= high
        for low, high, char, password in constraints
    )
def main(text, char_lower, char_upper, total_lower, total_upper):
    """Print a horizontal bar chart of letter frequencies in *text*.

    Lowercase letters are listed first (sorted), then uppercase.  Each count
    is drawn as groups of five dashes separated by '|', plus a remainder of
    single dashes.

    NOTE(review): the accumulator lists passed in as arguments are mutated in
    place, and the list comprehensions are used purely for their side effects
    — presumably intentional, but worth confirming with the caller.
    """
    answer_char, answer_total = [], []
    # Collect the distinct lowercase / uppercase characters of the text.
    _ = [char_lower.append(i) for i in text if i not in char_lower and i.islower()]
    _ = [char_upper.append(i) for i in text if i not in char_upper and i.isupper()]
    char_lower, char_upper = sorted(char_lower), sorted(char_upper)
    # Frequency of each character, in the same (sorted) order.
    _ = [total_lower.append(text.count(i)) for i in char_lower]
    _ = [total_upper.append(text.count(i)) for i in char_upper]
    _ = answer_char.extend(char_lower), answer_char.extend(char_upper)
    _ = answer_total.extend(total_lower), answer_total.extend(total_upper)
    for i in range(len(answer_char)):
        # total1 = full groups of five, total2 = remainder dashes.
        total1, total2 = answer_total[i] // 5, answer_total[i] % 5
        print("%s : %s" % (answer_char[i], (("-----|" * total1 + "-" * total2).rstrip("|"))))
This view should return a list of all the enrollments organized for the currently authenticated user's group.
def get_queryset(self):
    """Return the enrollments visible to the authenticated user.

    Superusers see every Enrollment; everyone else only sees enrollments
    whose organizer is one of the groups the user belongs to.
    """
    user = self.request.user
    if user.is_superuser:
        return Enrollment.objects.all()
    return Enrollment.objects.filter(organizer__in=user.groups.all())
def _GetEntitiesInEntityGroup(self, entity_group):
    """Abstract hook: return the entities belonging to *entity_group*.

    Concrete datastore implementations must override this.
    """
    raise NotImplementedError
Integration test with a real consul agent. Start a service, register it, close it, verify it's deregistered.
async def test_integration(self, deregister):
    """Integration test against a real local consul agent.

    Starts a metrics HTTP server registered through ConsulAgent, checks the
    registration data, closes the server, and verifies it is deregistered
    iff *deregister* is set.  Skipped when no consul agent is reachable.
    """
    tags = ("foo", "bar")
    # Random service id so parallel test runs don't collide.
    service_id = str(uuid.uuid4())
    con = _LocalConsulAgentClient(token=None)
    ca = ConsulAgent(
        name="test-metrics",
        service_id=service_id,
        tags=tags,
        deregister=deregister,
    )
    try:
        server = await aio.web.start_http_server(
            addr="127.0.0.1", service_discovery=ca
        )
    except aiohttp.ClientOSError:
        pytest.skip("Missing consul agent.")
    svc = (await con.get_services())[service_id]
    assert "test-metrics" == svc["Service"]
    assert sorted(tags) == sorted(svc["Tags"])
    assert server.socket.addr == svc["Address"]
    assert server.socket.port == svc["Port"]
    await server.close()
    services = await con.get_services()
    if deregister:
        # Assert service is gone iff we are supposed to deregister.
        assert service_id not in services
    else:
        assert service_id in services
        # Clean up behind ourselves.
        resp = await con.deregister_service(service_id)
        assert 200 == resp.status
def test_service_status_on_daemon_with_pid():
    """macOS service status should report True for an always-running daemon
    whose launchctl listing carries a PID.

    The launchctl output, service lookup, and always-running check are all
    patched, so only the PID-parsing path of ``mac_service.status`` is tested.
    """
    # Simulated `launchctl list` plist-style output containing "PID" = 218.
    mock_service_list = (
        '{\n\t"LimitLoadToSessionType" = "System";\n\t"Label" ='
        ' "com.salt";\n\t"OnDemand" = false;\n\t"LastExitStatus" = 0;\n\t"PID" ='
        ' 218;\n\t"Program" = "/opt/salt";\n\t\t"--disable-keepalive";\n\t);\n};'
    )
    salt_dict = {
        "service.list": MagicMock(return_value=mock_service_list),
    }
    with patch.dict(mac_service.__salt__, salt_dict):
        with patch.object(mac_service, "_launch_agent", lambda _: False):
            with patch.object(mac_service, "_get_service", lambda _: {"": ""}):
                with patch.object(
                    mac_service, "_always_running_service", lambda _: True
                ):
                    assert mac_service.status("com.salt") is True
Counts the number of trainable tf.Variables to get a rough idea of how complex this Model is
def count_num_trainable(self):
    """Count the scalar parameters of all trainable tf.Variables in the
    default graph — a rough complexity measure for this model.

    Stores the total on ``self.num_trainable_variables`` and returns it.
    (Uses the TF1-style ``tf.trainable_variables()`` global collection.)
    """
    self.num_trainable_variables = 0
    for variable in tf.trainable_variables():
        # shape is an array of tf.Dimension
        shape = variable.get_shape()
        # Product of the dimensions = number of scalars in this variable.
        variable_parameters = 1
        for dim in shape:
            variable_parameters *= dim.value
        self.num_trainable_variables += variable_parameters
    return self.num_trainable_variables
def param_size(model: torch.nn.Module) -> int:
    """Total number of parameter elements in *model*, excluding any parameter
    whose name contains "auxiliary" (e.g. auxiliary-head weights)."""
    total = 0
    for name, param in model.named_parameters():
        if "auxiliary" in name:
            continue
        total += param.numel()
    return total
This method accesses the get all profiles endpoint which takes a token since the route is authenticated
def get_all_profiles(self, token):
    """GET the list-all-profiles endpoint.

    The route is authenticated, so *token* is sent as a Bearer token in the
    Authorization header.  Returns the test client's response object.
    """
    return self.client.get(
        "/api/profiles/",
        HTTP_AUTHORIZATION='Bearer ' + token,
        format="json"
    )
def callback():
    """OAuth redirect endpoint: exchange the authorization code for an access
    token, then send the user on to the profile page."""
    app = _get_app()
    app.authorize_access_token()
    return redirect(url_for('.profile'))
Returns the luminance of a pixel, which indicates its subjective brightness. This implementation uses the NTSC formula.
def luminance(pixel):
    """Return the luminance (subjective brightness) of *pixel*, computed with
    the NTSC weights and rounded to the nearest integer."""
    red = GImage.getRed(pixel)
    green = GImage.getGreen(pixel)
    blue = GImage.getBlue(pixel)
    # NTSC formula: Y = 0.299 R + 0.587 G + 0.114 B
    weighted_sum = 0.299 * red + 0.587 * green + 0.114 * blue
    return round(weighted_sum)
def __call__(self, image: np.ndarray) -> np.ndarray:
    """Scale *image* so its maximum (luminance or raw value, depending on
    ``self.mode``) becomes 1.

    In "luminance" mode the scale factor is derived from the per-pixel
    luminance; otherwise from the maximum raw channel value.
    """
    if self.mode == "luminance":
        # NOTE(review): upstream helper name looks misspelled ("lumianance")
        # but that is the actual hdrpy API symbol being called.
        lum = hdrpy.get_lumianance(image)
        factor = 1. / np.amax(lum)
    else:
        factor = 1. / np.amax(image)
    return multiply_scalar(image, factor)
Run queued jobs. Since we've already sent the jobs to the PBS job system, don't do anything.
def run_queued_jobs(self):
    """Run queued jobs — intentionally a no-op.

    The jobs were already handed to the PBS job system at submission time,
    so there is nothing left to do here.
    """
    pass
def execute_queue(self):
    """Execute the queue by delegating to the legacy execution path."""
    self.legacy_queue_execution()
Test _check_cache_for_egg checks the cache for the egg, returns path if present locally, or None if not.
def test_03_check_cache_for_egg(self):
    """_check_cache_for_egg returns the local path when the egg is cached,
    and None when it is not.

    First fetches the egg remotely (priming the /tmp cache), then checks a
    missing egg and the cached one.
    """
    # Cleanup on isle one!
    launcher = ZMQEggDriverProcess("DUMMY_VAL")
    # Prime the cache by fetching the egg; it lands under /tmp.
    self.assertEqual(launcher._get_remote_egg(EGG), "/tmp/%s" % EGG)
    self.assertEqual(launcher._check_cache_for_egg("NOT_FOUND_EGG"), None)
    self.assertEqual(launcher._check_cache_for_egg(EGG), "/tmp/%s" % EGG)
def get(cache_dir=CACHE_DIR):
    """Return the first line of the cache file ``<cache_dir>/pyc``, or the
    string "None" when no such cache file exists."""
    cache_file = os.path.join(cache_dir, "pyc")
    if not os.path.isfile(cache_file):
        return "None"
    return read_file(cache_file)[0]
๋ถ€ํƒ„์€ ์ง€๋„์—์„œ ์–ด๋””์— ์œ„์น˜ํ•ด ์žˆ์Šต๋‹ˆ๊นŒ?
๋ถ€ํƒ„ ์ง€๋„๋Š” ๋ถ€ํƒ„์ด ๋Œ€ ํžˆ๋ง๋ผ์•ผ ์‚ฐ๋งฅ์˜ ๋™์ชฝ์— ์œ„์น˜ํ•œ ๋‚ด๋ฅ™ ๊ตญ๊ฐ€์ž„์„ ๋ณด์—ฌ์ค๋‹ˆ๋‹ค. ๋ถ€ํƒ„์€ ๋ถ์ชฝ์œผ๋กœ๋Š” ์ค‘๊ตญ๊ณผ, ๋‚จ์ชฝ์œผ๋กœ๋Š” ์ธ๋„์™€ ๊ตญ์ œ ๊ฒฝ๊ณ„๋ฅผ ๊ณต์œ ํ•˜๊ณ  ์žˆ์Šต๋‹ˆ๋‹ค.
๋ถ€ํƒ„ ๋ฐ ์ธ์ ‘ ๊ตญ๊ฐ€์˜ ์ƒ์„ธ ์ง€๋„. ๊ตฌ๊ธ€ ๋งต์Šค ๋ถ€ํƒ„ ์œ„์น˜ ๋ชฉ๋ก์— ์˜ค์‹  ๊ฒƒ์„ ํ™˜์˜ํ•ฉ๋‹ˆ๋‹ค. ๊ตฌ๊ธ€ ๋งต์Šค ๊ด€๊ด‘์ด ์˜๋ฏธ ์žˆ๋Š” ๊ณณ์— ์˜ค์‹  ๊ฒƒ์„ ํ™˜์˜ํ•ฉ๋‹ˆ๋‹ค! ์ข…ํ•ฉ์ ์ธ ๋ชฉ์ ์ง€ ์•ˆ๋‚ด์„œ์™€ ํ•จ๊ป˜ maplandia.com์€ ์ƒ์„ธํ•œ ์œ„์„ฑ ์ด๋ฏธ์ง€๋ฅผ ํ†ตํ•ด ๋ถ€ํƒ„์„ ํƒํ—˜ํ•  ์ˆ˜ ์žˆ๋„๋ก ํ•˜์—ฌ, ๊ทธ ์–ด๋А ๋•Œ๋ณด๋‹ค ๋น ๋ฅด๊ณ  ์‰ฝ๊ฒŒ ๊ฐ€๋Šฅํ•ฉ๋‹ˆ๋‹ค.
๋”๋•๊น€์น˜, ์žˆ๋‹ค, ์˜ค์ง•์–ด๋ฌด๋ง๋žญ์ด๊น€์น˜
๊ทธ๊ฒƒ์˜ ์ข…๋ฅ˜๋กœ๋Š” ๋”๋•๊น€์น˜, ์˜ค์ง•์–ด๋ฌด๋ง๋žญ์ด๊น€์น˜ ๋“ฑ์ด ์žˆ๋‹ค.
์ด๊ณณ์˜ ๋ฐฅ์ƒ์€ ๊น€์น˜์™€ ์šฐ๊ฑฐ์ง“๊ตญ, ๊ทธ๋ฆฌ๊ณ  ๋ฌด๋ง๋žญ์ด ๋ฒ„๋ฌด๋ฆฐ ๊ฒƒ๋ฟ์ด์—ˆ๋‹ค.
1985๋…„๋ถ€ํ„ฐ 1997๋…„๊นŒ์ง€ ๋Œ€ํ•œ์–‘๊ถํ˜‘ํšŒ์žฅ์€ ์ •๋ชฝ๊ตฌ ํ˜„๋Œ€์ฐจ๊ทธ๋ฃน ํšŒ์žฅ์ด ๋งก์•˜๊ณ , 2005๋…„๋ถ€ํ„ฐ ํ˜„์žฌ๊นŒ์ง€๋Š” ์ •์˜์„  ๋ถ€ํšŒ์žฅ์ด ๋งก๊ณ  ์žˆ๋‹ค.
1985๋…„๋ถ€ํ„ฐ 1997๋…„๊นŒ์ง€ ๋Œ€ํ•œ์–‘๊ถํ˜‘ํšŒ์žฅ์€ ์ •๋ชฝ๊ตฌ ํ˜„๋Œ€์ฐจ๊ทธ๋ฃน ํšŒ์žฅ์ด์—ˆ๊ณ  2005๋…„๋ถ€ํ„ฐ ์ง€๊ธˆ๊นŒ์ง€๋Š” ์ •์˜์„  ๋ถ€ํšŒ์žฅ์ด๋‹ค.
1983๋…„๋ถ€ํ„ฐ 1985๋…„๊นŒ์ง€ ์žฌ์ž„ํ•œ ์ดˆ๋Œ€ ์–‘๊ถํ˜‘ํšŒ์žฅ์€ ์ •๋ชฝ์ค€ ์•„์‚ฐ์žฌ๋‹จ ์ด์‚ฌ์žฅ์ด์—ˆ๊ณ  ๋’ค์ด์–ด ์ •๋ชฝ๊ตฌ ํšŒ์žฅ์ด ๋„ค ๋ฒˆ ํšŒ์žฅ์ด ๋˜์—ˆ๋‹ค.
Remove slave(s) from an existing zone (multiroom). Zone must already exist and slaves array cannot be empty.
def remove_zone_slave(self, slaves):
    """Remove slave devices from the multiroom zone mastered by this device.

    *slaves* is a list of entity wrappers; the current device (the master)
    is skipped.  Logs a warning when the list is empty.
    """
    if not slaves:
        _LOGGER.warning("Unable to find slaves to remove")
    else:
        _LOGGER.info(
            "Removing slaves from zone with master %s", self._device.config.name
        )
        # SoundTouch API seems to have a bug and won't remove slaves if there are
        # more than one in the payload. Therefore we have to loop over all slaves
        # and remove them individually
        for slave in slaves:
            # make sure to not try to remove the master (aka current device)
            if slave.entity_id != self.entity_id:
                self._device.remove_zone_slave([slave.device])
def get_redis_slave_nodes():
    """Return the Azure ACI container groups whose names match the Redis
    slave naming pattern (``_REDIS_SLAVE_REGEX``)."""
    az = _get_aci_client()
    return [cntr for cntr in az.container_groups.list() if _REDIS_SLAVE_REGEX.match(cntr.name)]
Stitch a series of images into an animation. Currently supports animated gifs, other formats coming as needed.
def stitch_to_animation(images, outpath=None, duration=0.5, palettesize=256,
                        verbose=True):
    """Stitch a series of image files into an animation.

    Currently supports animated GIFs (other formats as needed).

    :param images: list of image file paths, in frame order
    :param outpath: output path; defaults to the first image's name + '.gif'
    :param duration: seconds per frame
    :param palettesize: GIF palette size
    :param verbose: print timing information on success
    :return: the output path, or None when writing failed
    """
    # import imageio
    try:
        import imageio
    except ImportError:
        raise ImportError(
            'WrightTools.artists.stitch_to_animation requires imageio - https://imageio.github.io/')
    # parse filename
    if outpath is None:
        outpath = os.path.splitext(images[0])[0] + '.gif'
    # write
    try:
        t = wt_kit.Timer(verbose=False)
        with t, imageio.get_writer(outpath, mode='I', duration=duration,
                                   palettesize=palettesize) as writer:
            for p in images:
                image = imageio.imread(p)
                writer.append_data(image)
    except BaseException:
        # Best-effort: report the error type and signal failure with None.
        print('Error: {0}'.format(sys.exc_info()[0]))
        return None
    # finish
    if verbose:
        interval = np.round(t.interval, 2)
        print('gif generated in {0} seconds - saved at {1}'.format(interval, outpath))
    return outpath
def dream_seq(self):
    """Deep-dream a sequence of images with optical-flow-guided blending.

    The first frame is dreamed directly; every later frame is warped from
    the previous dreamed frame via optical flow, masked, and re-dreamed so
    the hallucinated features stay temporally coherent.

    NOTE(review): assumes ``self.img_list`` paths are in frame order and that
    ``get_opflow_image`` returns (warped features, background mask) — confirm
    against the helper's contract.
    """
    for i, path in enumerate(self.img_list[self.config["start_position"] :]):
        if i == 0:
            img1 = Image.open(path)
            d_img = self.deep_dream(
                self.transform(img1), self.model, i, seq="first"
            )
            self.save_img(d_img, "", i)
            d_img = convert(d_img)
            flow_iter = 0
            # the iterations needs to be reduced
            self.config["num_iterations"] -= 5
        if i > 0:
            img2 = Image.open(path)
            feature_img, background_masked = self.get_opflow_image(
                img1, d_img, img2
            )
            feature_img = np.clip(feature_img, 0, 255)
            # Blend weights shift with flow_iter (capped below at 5 frames).
            background_masked[background_masked > 0] = 1 - (flow_iter * 0.1)  # 0.5
            background_masked[background_masked == 0] = flow_iter * 0.1
            d_img = self.deep_dream(
                self.transform(np.uint8(feature_img)),
                self.model,
                i,
                seq="first",
                mask=background_masked,
            )
            # change position
            img1 = img2
            self.save_img(d_img, "", i)
            d_img = convert(d_img)
            flow_iter += 1
            flow_iter = 0 if flow_iter > 5 else flow_iter
Receives OCR text, extracts product. Text could contain garbage or wrong reads. >>> extract_product_from_text(''' ... d J
def extract_product_from_text(text):
    """Extract a product record from raw OCR *text*.

    The OCR output may contain garbage or misreads, so each field is pulled
    out with a tolerant regex via ``extract_ocr_pattern``.  Returns a dict
    with vendor constants plus the extracted item fields.
    """
    def field(pattern):
        # Delegate the fuzzy matching to the shared OCR helper.
        return extract_ocr_pattern(pattern, text)

    return {
        'VENDORID': 1,
        'VENDOR': 'example',
        'ITEMNO': field(r'ITEM ?ID(.*)DESC'),
        'DESCRIPTION': field(r'DESC(.*)UPC'),
        'UPC': field(r'UPC ?NO(.*)WEIGHT'),
        'COST': field(r'PRICE(.*?)\n'),
    }
def extract_text(self, input_file):
    """Parse *input_file* as XML and feed every element to
    ``extract_element_info``.

    On an XML syntax error: skip the file when ``self.force`` is set,
    otherwise exit the process with status -1.
    """
    logging.info("Extracting text from file " + input_file)
    try:
        tree = etree.parse(input_file)
    except etree.XMLSyntaxError:
        logging.error("XML syntaxerror in file " + input_file)
        if self.force:
            return
        else:
            sys.exit(-1)
    root = tree.getroot()
    for element in root.iter():
        try:
            self.extract_element_info(element, input_file)
        except KeyError:
            # Elements missing expected keys are silently skipped —
            # presumably deliberate best-effort extraction; confirm upstream.
            pass
๋Œ€๋งˆ์ดˆ๊ฐ€ ์†Œ๋ณ€์— ์–ผ๋งˆ๋‚˜ ์˜ค๋ž˜ ๋‚จ์•„ ์žˆ๋Š”์ง€
์†Œ๋ณ€์€ ๋งˆ๋ฆฌํ™”๋‚˜๋ฅผ ๋‹จ ํ•œ ๋ฒˆ ์‚ฌ์šฉํ•œ ํ›„ ์ตœ๋Œ€ 8์ผ ๋™์•ˆ THC ๋Œ€์‚ฌ์‚ฐ๋ฌผ์ธ THC-COOH์˜ ์กด์žฌ๋ฅผ ๋‚˜ํƒ€๋ƒ…๋‹ˆ๋‹ค. ์†Œ๋ณ€ ๊ฒ€์‚ฌ ์ •ํ™•๋„๋Š” ์•ฝ ์ผ์ฃผ์ผ ํ›„ ๊ฐ์†Œํ•˜์ง€๋งŒ, ๋งŽ์€ ๋นˆ๋ฒˆํ•˜๊ฑฐ๋‚˜ ๊ณผ๋„ํ•œ ๋งˆ๋ฆฌํ™”๋‚˜ ์‚ฌ์šฉ์ž๋“ค์€ ๊ฐ๊ฐ ์ตœ๋Œ€ 15์ผ ๋ฐ 90์ผ ๋™์•ˆ ์–‘์„ฑ ๋ฐ˜์‘์„ ๋ณด์ผ ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค.
THC๊ฐ€ ์†Œ๋ณ€์— ์–ผ๋งˆ๋‚˜ ์˜ค๋ž˜ ๋‚จ์•„ ์žˆ๋Š”์ง€๋Š” ๊ฐœ์ธ์— ๋”ฐ๋ผ ๋‹ค๋ฆ…๋‹ˆ๋‹ค. ์ฒด์ค‘๊ณผ ์ด์ „์˜ ๋งˆ๋ฆฌํ™”๋‚˜ ๋…ธ์ถœ์€ ํ…ŒํŠธ๋ผํ•˜์ด๋“œ๋กœ์นด๋‚˜๋น„๋†€(Tetrahydrocannabinol)์ด ์ฒด๋‚ด์— ์–ผ๋งˆ๋‚˜ ์˜ค๋ž˜ ๋‚จ์•„ ์žˆ์„ ์ˆ˜ ์žˆ๋Š”์ง€๋ฅผ ๊ฒฐ์ •ํ•˜๋Š” ์ฃผ์š” ์š”์ธ ์ค‘ ์ผ๋ถ€์ž…๋‹ˆ๋‹ค.
Setter method to set the next turn. Calls get turn and sets opposite player to next turn.
def set_next_turn(self):
    """Advance the turn to the opposite player: 'B' becomes 'W' and vice
    versa, based on the current turn from ``get_turn``."""
    self._turn = 'W' if self.get_turn() == 'B' else 'B'
def set_next(self,next_class: 'Class') -> None:
    """Link *next_class* as the class that follows this one."""
    self.next = next_class
Return the second element of the given list. If the list has no second element, return None.
def select_second(L):
    """Return the second element of the sequence *L*, or None when *L* has
    fewer than two elements."""
    try:
        return L[1]
    except IndexError:
        return None
def _return_elem_or_list(self, args):
    """Unwrap a single-element sequence to its element; return longer (or
    empty) sequences unchanged."""
    if len(args) == 1:
        return args[0]
    return args
Clear only parameter values, not the list of managed parameters. All parameters will be set to None.
def clear_parameter_values(self):
    """Reset every managed parameter's value to None.

    Only the values are cleared — the set of managed parameter names is
    kept intact.
    """
    params = self.managed_parameters
    for name in list(params):
        params[name] = None
def unfreeze_parameters(self):
    """Allow parameter values to be modified again by clearing the frozen
    flag."""
    self.are_parameters_frozen = False
์ฐธ์—ฌ์ž, ์ข…์ด, ๋ฉ”์‹œ์ง€, ๋‚จ๊ธฐ๋‹ค, ์‘์›
๋ชจ๋“  ์ฐธ์—ฌ์ž๋Š” ์ฃผ์–ด์ง„ ์ข…์ด์— ์‘์›์˜ ๋ฉ”์‹œ์ง€๋ฅผ ๋‚จ๊ธด๋‹ค.
์žฌ์ฃผ๋ผ๋Š” ๊ฒƒ์€ ์˜ˆ์ ˆ, ์Œ์•…, ํ™œ์˜๊ธฐ, ๊ธ€์“ฐ๊ธฐ, ๋งํƒ€๊ธฐ, ๊ณ„์‚ฐํ•˜๊ธฐ๋‹ค.
์• ํ”Œ TV ์—”ํ„ฐํ…Œ์ธ๋จผํŠธ ์ฑ„๋„์€ ๋ช‡ ๊ฐœ์ž…๋‹ˆ๊นŒ?
2007๋…„์— ์ฒ˜์Œ ์ถœ์‹œ๋œ ์ดํ›„, ์• ํ”Œ TV๋Š” ๋‹จ์ผ ๋ฐฉ์†ก ๋„คํŠธ์›Œํฌ์—์„œ๋ถ€ํ„ฐ ๊ฑฐ์˜ 20๊ฐœ์˜ ํ•œ๊ตญ TV ๋„คํŠธ์›Œํฌ๋ฅผ ํ•˜๋‚˜์˜ ์ฑ„๋„๋กœ ์ง‘๊ณ„ํ•œ ๊ฒƒ๊นŒ์ง€, ์•ฝ 50๊ฐœ์˜ "์ฑ„๋„" ๋˜๋Š” ์„ ํ˜ธํ•˜๋Š” ์šฉ์–ด์ธ ์•ฑ์„ ๋ณด์œ ํ•˜๊ณ  ์žˆ์Šต๋‹ˆ๋‹ค.
ํ˜ธ์ฃผ ๋‚ด ์ธ๋„ TV ๋ฐ ๋ผ๋””์˜ค ์ฑ„๋„. ์ธ๋„ ๋“œ๋ผ๋งˆ, ์Œ์•… ๋ฐ ์ฝ”๋ฏธ๋”” ์‡ผ ๋“ฑ์€ ๋ชจ๋“  ์ธ๋„์ธ์˜ ๋งˆ์Œ์— ๊ฐ€๊น์Šต๋‹ˆ๋‹ค. ์šฐ๋ฆฌ๋Š” ์ธ๋„ TV ์ฑ„๋„๊ณผ ๋ผ๋””์˜ค๋ฅผ ํ†ตํ•ด ์ธ๋„์™€ ์—ฐ๊ฒฐ๋˜์–ด ์žˆ๋‹ค๊ณ  ๋А๋‚๋‹ˆ๋‹ค. ์ธ๋„ ์—”ํ„ฐํ…Œ์ธ๋จผํŠธ์™€์˜ ์—ฐ๊ฒฐ์„ ์œ ์ง€ํ•˜๊ธฐ ์œ„ํ•ด ํ˜ธ์ฃผ์—์„œ ์ธ๋„ TV ์ฑ„๋„ ๋ฐ ๋ผ๋””์˜ค๋ฅผ ์–ป๋Š” ๋ฐฉ๋ฒ•์— ๋Œ€ํ•œ ํฌ๊ด„์ ์ธ ๋ชฉ๋ก์„ ํ™•์ธํ•˜์‹ญ์‹œ์˜ค: ์ธ๋„ TV ์ฑ„๋„ ์–ป๊ธฐ: Yupptv: ํ˜ธ์ฃผ์—์„œ YuppTV๋ฅผ ํ†ตํ•ด ์ œ๊ณต๋˜๋Š” ์ธ๋„ TV ์ฑ„๋„.
๊ฐ€๋ฉด๋ผ์ด๋” ๋””์ผ€์ด๋“œ์˜ ํ•„์‚ด๊ธฐ๊ฐ€ ๋ญ์•ผ
๋˜ํ•œ ์ปดํ”Œ๋ฆฌํŠธ ํผ ์ƒํƒœ์—์„œ ๋‹ค๋ฅธ ๋ผ์ด๋”๋ฅผ ์†Œํ™˜ํ•œ ๋’ค ์ด ์นด๋“œ๋ฅผ ์‚ฌ์šฉํ•˜๋ฉด, ๋””์ผ€์ด๋“œ๋Š” ๋ผ์ด๋“œ ๋ถ์ปค๋ฅผ ํ™œ์šฉํ•ด์„œ ์ž์‹ ๊ณผ ๋˜‘๊ฐ™์ด ์›€์ง์ด๋Š”, ์†Œํ™˜๋œ ๋ผ์ด๋”์™€ ํ•จ๊ป˜, ์†Œํ™˜๋œ ๋ผ์ด๋”์˜ ์ตœ์ข… ํ˜•ํƒœ๊ฐ€ ์‚ฌ์šฉํ•˜๋Š” ํ•„์‚ด๊ธฐ๋ฅผ ์‹œ์ „ํ•œ๋‹ค. ๋””์ผ€์ด๋“œ๋Š” ๋””๋ฉ˜์…˜ ํ‚ฅ ์ด๋ผ๋Š” ๋ผ์ด๋” ํ‚ฅ์„ ํ•„์‚ด๊ธฐ๋กœ ์‚ฌ์šฉํ•œ๋‹ค. ๋ผ์ด๋“œ ๋ถ์ปค ์†Œ๋“œ ๋ชจ๋“œ์—์„œ๋Š” ์ ์„ ํž˜๊ป ๋ฒ ๋Š” ๋””๋ฉ˜์…˜ ์Šฌ๋ž˜์‹œ ๋ฅผ ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ๋‹ค. ๋˜ํ•œ ๊ฑด ๋ชจ๋“œ์—์„œ๋Š” ์ ์—๊ฒŒ ๊ฐ•๋ ฅํ•œ ์—๋„ˆ์ง€ํƒ„์„ ์˜๋Š” ๋””๋ฉ˜์…˜ ๋ธ”๋ž˜์ŠคํŠธ ๋ฅผ ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ๋‹ค. ๊ฐ ํ•„์‚ด๊ธฐ๋ฅผ ์‚ฌ์šฉํ•  ๋•Œ๋งˆ๋‹ค ๋””์ผ€์ด๋“œ์˜ ํŒŒ์ด๋„ ์–ดํƒ ๋ผ์ด๋“œ ์นด๋“œ๋ฅผ ํ˜•์ƒํ™”ํ•œ ํ™˜์˜๋“ค์ด ์ ์„ ํ–ฅํ•ด ์ „๊ฐœ๋œ๋‹ค. ํŒŒ์ด๋„ ๊ฐ€๋ฉด๋ผ์ด๋” ๋‹จ๋ง K-ํ„ฐ์น˜ ๋Š” ํ„ฐ์น˜(ํ„ฐ์น˜์Šคํฌ๋ฆฐ)ํฐ(ํœด๋Œ€ํฐ)์˜ ์ผ์ข…์œผ๋กœ, ๊ฐ€๋ฉด๋ผ์ด๋” ๋””์ผ€์ด๋“œ์˜ ๊ฐ•ํ™” ๋ณ€์‹  ์žฅ๋น„์ด๋‹ค. ๋„ค๊ฑฐ๋น„ํŠธ์˜ ์„ธ๊ณ„(๊ฐ€๋ฉด๋ผ์ด๋” ๋””์ผ€์ด๋“œ์˜ ์ƒˆ๋กœ์šด ์„ธ๊ณ„์˜ ๋“ฑ์žฅ ์ธ๋ฌผ#๋„ค๊ฑฐํ‹ฐ๋ธŒ์˜ ์„ธ๊ณ„)์—์„œ ์ธ ์นด์‚ฌ๊ฐ€ ์†์— ๋„ฃ์—ˆ๋‹ค.
์œ ํ‚ค ์กฐ์ง€๊ฐ€ ๋ณ€์‹ ํ•˜๋Š” ๋ผ์ด๋”๋งจ์€ ๊ทน์žฅํŒ์—์„œ ๋ชจ๋“  ๋ผ์ด๋”๋“ค์ด ๋‚˜ํƒ€๋‚  ๋•Œ ๊ฐ™์ด ๋“ฑ์žฅํ–ˆ์ง€๋งŒ, ๋ณ€์‹ ํ•˜๋Š” ์žฅ๋ฉด์€ ๋ฌ˜์‚ฌ๋˜์ง€ ์•Š์•˜๋‹ค. ์œ ํ‚ค ์กฐ์ง€์˜ ์ธ๊ฐ„ ๋ชจ์Šต์€ ๊ฐํŠธ๊ฐ€ ์—ฐ๊ธฐํ–ˆ์œผ๋ฉฐ, "The Next Decade"์—์„œ๋„ ์—ฐ๊ธฐํ–ˆ๋‹ค. ๋Œ€์‡ผ์ปค๋Š” ์ˆ˜๋งŽ์€ ์•…์˜ ๊ฒฐ์‚ฌ๊ฐ€ ์ง‘ํ•ฉํ•œ ๊ฑฐ๋Œ€ํ•œ ์•…์˜ ์กฐ์ง์ด๋‹ค. ๋กœ๊ณ ๋Š” ์›๋ž˜์˜ ์‡ผ์ปค ๋กœ๊ณ ์— ๋””์ผ€์ด๋“œ๋ฅผ ์ƒ์ง•ํ•˜๋Š” "DCD"๊ฐ€ ์ถ”๊ฐ€๋œ ํ˜•ํƒœ์ด๋‹ค. ์‚ฌ์‹  ๋ฐ•์‚ฌ(๊ฐ€๋ฉด๋ผ์ด๋” ๋””์ผ€์ด๋“œ์˜ ๋“ฑ์žฅ ์ธ๋ฌผ#ํžˆ์นด๋ฆฌ ์—์ด์ง€๋กœ), ์•„ํด๋กœ ๊ฐ€์ด์ŠคํŠธ(๊ฐ€๋ฉด๋ผ์ด๋” ๋””์ผ€์ด๋“œ์˜ ๋“ฑ์žฅ ์ธ๋ฌผ#์•„ํด๋กœ ๊ฐ€์ด์ŠคํŠธ) ์™ธ ์—ญ๋Œ€ ๊ฐ€๋ฉด๋ผ์ด๋” ์‹œ๋ฆฌ์ฆˆ์˜ ์•…๋‹น๊ณผ ๊ดด์ธ๋“ค์ด ๋ชจ์—ฌ ์žˆ๋‹ค. ์ง€์˜ฅ๋Œ€์‚ฌ๋Š” ๊ฐ€๋ฉด๋ผ์ด๋”์˜ ์„ธ๊ณ„(๊ฐ€๋ฉด๋ผ์ด๋”)์—์„œ ์˜จ, ์‡ผ์ปค(์‡ผ์ปค (๊ฐ€๋ฉด๋ผ์ด๋”))์˜ ๊ฐ„๋ถ€์ด๋‹ค. ๋ฐฉ์šธ๋ฑ€ ํ˜•ํƒœ์˜ ๊ฐ€๋ผ๊ฐ€๋ž€๋‹ค๋ผ๋Š” ๊ดด์ธ์œผ๋กœ ๋ณ€์‹ ํ•  ์ˆ˜ ์žˆ๋‹ค.
Public method to get the word at the current position. Returns the word at that current position (string).
def getCurrentWord(self):
    """
    Public method to get the word at the current cursor position.

    @return the word at the current position (string)
    """
    line, index = self.getCursorPosition()
    return self.getWord(line, index)
def echoPreviousWord(self, obj, offset=None):
    """Speak the word that was just completed in *obj*'s text.

    Called after a word delimiter is typed: walks backwards from *offset*
    (default: one before the caret) to find the word boundaries, then speaks
    the word with a voice chosen by context (hyperlink / uppercase / default).
    Returns silently when the object has no text interface or no previous
    word exists.  (Python 2 code — note the ``word.decode("UTF-8")``.)
    """
    try:
        text = obj.queryText()
    except NotImplementedError:
        return
    if not offset:
        offset = text.caretOffset - 1
    if (offset < 0):
        return
    [char, startOffset, endOffset] = \
        text.getTextAtOffset( \
            offset, pyatspi.TEXT_BOUNDARY_CHAR)
    if not self.utilities.isWordDelimiter(char):
        return
    # OK - we seem to be cool so far. So...starting with what
    # should be the last character in the word (caretOffset - 2),
    # work our way to the beginning of the word, stopping when
    # we hit another word delimiter.
    #
    wordEndOffset = offset - 1
    wordStartOffset = wordEndOffset
    while wordStartOffset >= 0:
        [char, startOffset, endOffset] = \
            text.getTextAtOffset( \
                wordStartOffset, pyatspi.TEXT_BOUNDARY_CHAR)
        if self.utilities.isWordDelimiter(char):
            break
        else:
            wordStartOffset -= 1
    # If we came across a word delimiter before hitting any
    # text, we really don't have a previous word.
    #
    # Otherwise, get the word. Remember we stopped when we
    # hit a word delimiter, so the word really starts at
    # wordStartOffset + 1. getText also does not include
    # the character at wordEndOffset, so we need to adjust
    # for that, too.
    #
    if wordStartOffset == wordEndOffset:
        return
    else:
        word = self.utilities.\
            substring(obj, wordStartOffset + 1, wordEndOffset + 1)
        if self.utilities.linkIndex(obj, wordStartOffset + 1) >= 0:
            voice = self.voices[settings.HYPERLINK_VOICE]
        elif word.decode("UTF-8").isupper():
            voice = self.voices[settings.UPPERCASE_VOICE]
        else:
            voice = self.voices[settings.DEFAULT_VOICE]
        word = self.utilities.adjustForRepeats(word)
        speech.speak(word, voice)
Wraps an agent's program function and prints the inputs/outputs, so you can watch it in its environment.
def trace_agent(agent:'agent') -> 'agent':
    """Wrap *agent*'s program so every percept/action pair is printed,
    letting you watch the agent operate in its environment.

    Mutates and returns the same agent object.
    """
    inner_program = agent.program

    def traced_program(percept):
        # Delegate to the original program, then report what happened.
        action = inner_program(percept)
        print('{} perceives {} and does {}'.format(agent, percept, action))
        return action

    agent.program = traced_program
    return agent
def foo(args):
    """Demo command handler: emits "Foo!" via ``output`` and returns a
    success exit code (0).  *args* is accepted but unused."""
    output("Foo!")
    return 0
Returns the index ID of the column. The structure of the CSV is defined in the configuration.
def __get_column_id(element: str) -> Optional[int]:
    """Return the index of column *element* in the configured CSV header row.

    The CSV structure is defined in the configuration
    (``config.csv_headers``).

    :param element: header name to look up
    :return: zero-based column index, or None when the header is absent
    """
    # enumerate() replaces the hand-rolled counter of the original; the
    # implicit None fall-through is also made explicit.
    for idx, col in enumerate(config.csv_headers):
        if col == element:
            return idx
    return None
def ts_id_col(self):
    """Return the configured time-series id column(s); fall back to the
    default ``["ts_col_1"]`` when none was set (falsy)."""
    if self._ts_id_col:
        return self._ts_id_col
    return ["ts_col_1"]
Representative content for a taxon. Will throw an error if the file it needs does not exist
def representative_content_for_taxon(taxon):
    """Load the representative content for *taxon* as a list of combined-text
    strings.

    Raises FileNotFoundError when the pre-generated CSV for the taxon does
    not exist.
    """
    # TODO: What to do about test/train content?
    representative_content_path = RepresentativeContent.path_for_representative_content(taxon)
    if os.path.exists(representative_content_path) and os.path.isfile(representative_content_path):
        most_representative_content = pd.read_csv(representative_content_path, low_memory=False)
        return most_representative_content['combined_text'].to_list()
    else:
        raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), f"Representative content file for {taxon.unique_title()} has not been generated, should be at: {representative_content_path}")
def usage():
    """Return the contents of the ``./usage.txt`` help file as a string."""
    with open('./usage.txt') as usage_file:
        return usage_file.read()
๊ทธ๋ฆฐ์Šค๋ณด๋กœ์˜ ๋‚ ์”จ๋Š” ์–ด๋–ค๊ฐ€์š”?
๋…ธ์Šค์บ๋กค๋ผ์ด๋‚˜ ๋‚ ์”จ > ๊ทธ๋ฆฐ์Šค๋ณด๋กœ ๋‚ ์”จ. ๊ทธ๋ฆฐ์Šค๋ณด๋กœ, NC์˜ ๊ธฐํ›„๋Š” ์—ฌ๋ฆ„์— ๊ธฐ์˜จ์ด 70๋„๋Œ€์— ์ด๋ฅด๋Š” ๋”ฐ๋œปํ•œ ๋‚ ์”จ๋ฅผ ๋ณด์ด๋ฉฐ, ๊ฒจ์šธ์—๋Š” ๊ธฐ์˜จ์ด 30๋„๋Œ€์— ์ด๋ฅด๋Š” ๋งค์šฐ ์ถ”์šด ๋‚ ์”จ๋ฅผ ๋ณด์ž…๋‹ˆ๋‹ค.
๊ทธ๋ฆฐ์Šค๋ณด๋กœ ๋‚ ์”จ ํ‰๊ท  ๋ฐ ๊ธฐํ›„ ๊ทธ๋ฆฐ์Šค๋ณด๋กœ, ๋…ธ์Šค์บ๋กค๋ผ์ด๋‚˜. ์›”๋ณ„ ์˜จ๋„, ๊ฐ•์ˆ˜๋Ÿ‰ ๋ฐ ์ผ์กฐ ์‹œ๊ฐ„. ๊ฐ•์ˆ˜๋Ÿ‰ ๋ฐ์ดํ„ฐ, ์˜จ๋„ ๋ฐ ์ •์ƒ๊ฐ’์„ ๋ณด์—ฌ์ฃผ๋Š” ๊ธฐํ›„ ๊ทธ๋ž˜ํ”„. ํ‰๊ท  ๋‚ ์”จ ๊ทธ๋ฆฐ์Šค๋ณด๋กœ, NC.
Tests that newly created story cells decompose into concept cells, matching the concepts appearing in the text.
def test_cells_concept_extraction(self):
    """Newly created story cells decompose into concept cells matching the
    concepts that appear in the story text, and the concepts record their
    recipients."""
    # verify that the expected concepts do not exist
    self.assertEquals(0, ConceptCell.objects.filter(name="EveryBlock").count(),
                      "Expected that the concept EveryBlock does not exist in the test fixtures")
    self.assertEquals(0, ConceptCell.objects.filter(name="Glenn").count(),
                      "Expected that the concept Glenn Ford does not exist in the test fixtures")
    # add agents
    self.society_cell.add_agent("Bill", "Bill", DATASOURCE_TYPE_TWITTER)
    self.society_cell.add_agent("Al", "Al", DATASOURCE_TYPE_TWITTER)
    # fetch agents
    query = AgentCell.objects.filter(user__user_name="Bill")
    self.assertTrue(query.count() > 0, "Agent wasn't created")
    bill = query[0]
    query = AgentCell.objects.filter(user__user_name="Al")
    self.assertTrue(query.count() > 0, "Agent wasn't created")
    al = query[0]
    # add stories
    al.add_read_story("RT @yahoo RE YQL, EveryBlock API: were working \w senator Glenn Ford on all the details", [bill.user])
    self.society_cell.process()
    # look up the concepts: YQL, EveryBlock, Glenn Ford
    self.assertEquals(1, ConceptCell.objects.filter(name="EveryBlock").count(),
                      "Expected that the concept EveryBlock would be extracted from the added story")
    self.assertEquals(1, ConceptCell.objects.filter(name="Glenn").count(),
                      "Expected that the concept Glenn Ford would be extracted from the added story")
    # get the concepts
    everyblock = ConceptCell.objects.filter(name="EveryBlock")[0]
    glenn = ConceptCell.objects.filter(name="Glenn")[0]
    # test concept recipient
    self.assertTrue(everyblock.recipients.all().count() > 0,
                    "Expected to find reciipients for the new concepts")
    self.assertEquals(al.user, everyblock.recipients.all()[0],
                      "Expected to find a specific recipient of the concept")
    self.assertTrue(glenn.recipients.all().count() > 0,
                    "Expected to find reciipients for the new concepts")
    self.assertEquals(al.user, glenn.recipients.all()[0],
                      "Expected to find a specific recipient of the concept")
def test_trait_count_12(self):
    """The table's trait_count cell shows the formatted number of source
    traits attached to a row's dataset (here 12)."""
    things = self.model_factory.create_batch(20, source_study_version__i_is_deprecated=False)
    table = self.table_class(things)
    row = table.rows[0]
    n_traits = 12
    factories.SourceTraitFactory.create_batch(
        n_traits, source_dataset=row.record)
    # Remake the table, to update trait counts.
    table = self.table_class(things)
    row = table.rows[0]
    self.assertEqual(row.get_cell('trait_count'), '{:,}'.format(n_traits))
Given an m x n matrix zero the rows and columns that have a 0 in it.
def zero_rows_columns(matrix):
    """Zero out, in place, every row and column of the m x n *matrix* that
    contains at least one 0.  Returns the mutated matrix."""
    # First pass: record which rows/columns contain a zero.
    hit_rows = set()
    hit_cols = set()
    for r in range(len(matrix)):
        for c in range(len(matrix[0])):
            if matrix[r][c] == 0:
                hit_rows.add(r)
                hit_cols.add(c)
    # Second pass: blank out every cell in a hit row or column.
    for r in range(len(matrix)):
        for c in range(len(matrix[0])):
            if r in hit_rows or c in hit_cols:
                matrix[r][c] = 0
    return matrix
def find_empty(board):
    """Return the (row, col) of the first 0 cell in *board*, scanning
    row-major, or None when no cell is empty."""
    for row_idx, row in enumerate(board):
        for col_idx in range(len(board[0])):
            if row[col_idx] == 0:
                return row_idx, col_idx
    return None
๋ฏธ๊ตญ ๊ณต๊ตฐ ์‚ฌ๊ด€ํ•™๊ต๋Š” ์–ด๋””์— ์žˆ์Šต๋‹ˆ๊นŒ?
ํฌ๋ธŒ์Šค ๋ฆฌ์ŠคํŠธ. ํ”„๋กœํ•„. ๋ฏธ๊ตญ ๊ณต๊ตฐ ์‚ฌ๊ด€ํ•™๊ต๋Š” ์ฝœ๋กœ๋ผ๋„ ์Šคํ”„๋ง์Šค, CO์— ์œ„์น˜ํ•œ ํ•™์ƒ ์‚ฌ๊ด€์ƒ๋“ค์„ ์œ„ํ•œ ๊ตฐ์‚ฌ ์„œ๋น„์Šค ์•„์นด๋ฐ๋ฏธ๋กœ, ๊ณต๊ตฐ ๊ธฐ์ง€๋กœ๋„ ๊ธฐ๋Šฅํ•ฉ๋‹ˆ๋‹ค. ๊ณต๊ตฐ์€ ๊ณตํ•™ ํ”„๋กœ๊ทธ๋žจ์œผ๋กœ ์œ ๋ช…ํ•˜๋ฉฐ, ์ด ํ•™๊ต๋Š” ๊ตญ๊ฐ€ ์•ˆ๋ณด ์—ฐ๊ตฌ์†Œ์™€ ํ˜‘๋ ฅํ•˜์—ฌ ๊ด‘๋ฒ”์œ„ํ•œ ์—ฐ๊ตฌ ๊ธฐํšŒ์™€ ๋ณด์กฐ๊ธˆ์„ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค.
๋ฏธ๊ตญ ๊ณต๊ตฐ ์‚ฌ๊ด€ํ•™๊ต. ๋ฏธ๊ตญ ๊ณต๊ตฐ ์‚ฌ๊ด€ํ•™๊ต(USAFA ๋˜๋Š” ๊ณต๊ตฐ ์‚ฌ๊ด€ํ•™๊ต๋กœ๋„ ์•Œ๋ ค์ง)๋Š” ๋ฏธ๊ตญ ๊ณต๊ตฐ์˜ ์žฅ๊ต ํ›„๋ณด์ƒ์„ ์œ„ํ•œ ๊ตฐ์‚ฌ ํ•™๊ต์ž…๋‹ˆ๋‹ค. ์ด ์บ ํผ์Šค๋Š” ๋ฏธ๊ตญ ์„œ๋ถ€ ์ฝœ๋กœ๋ผ๋„์— ์œ„์น˜ํ•˜๊ณ  ์žˆ์œผ๋ฉฐ, ์—˜ํŒŒ์†Œ ์นด์šดํ‹ฐ์˜ ์ฝœ๋กœ๋ผ๋„ ์Šคํ”„๋ง์Šค ๋ฐ”๋กœ ๋ถ์ชฝ์— ์žˆ์Šต๋‹ˆ๋‹ค.
Disable Intan impedance testing Make sure DAQ is running, e.g. by calling startStreaming_boardSamples(), before calling this function.
def disableZCheck(): """ cmdData_DACconfig = ((0x1 << 24) | (0xFF << 16) | (0b10000101 << 8) | 0) # clear register cmdData_DACchan = ((0x1 << 24) | (0xFF << 16) | (0b10000111 << 8) | 0) # clear register """ #### cmds = [] #cmds.append(DC.reg_write(DC.MOD_DAQ, DC.DAQ_CHIP_CMD, cmdData_DACconfig)) cmds.append(_intanRegWrite(address=5, data=0)) #cmds.append(DC.reg_write(DC.MOD_DAQ, DC.DAQ_CHIP_CMD, cmdData_DACchan)) cmds.append(_intanRegWrite(address=7, data=0)) #cmds.append(DC.reg_write(DC.MOD_DAQ, DC.DAQ_CHIP_CMD, 0)) # clear the CMD register cmds.append(_intanRegWrite(clear=True)) resps = _controlCmdWrapper(cmds)
def test_reportexclude(self):
    """reportexclude defaults to None, serializes valid option letters into
    the command string, and rejects numbers / unknown letters.

    (Python 2 code — note the use of ``xrange``.)
    """
    # default
    self.assertIsNone(self.settings.reportexclude)
    # turn things off
    options = random.randrange(1, 4)
    value = ''.join([random.choice('CDMSV') for option in xrange(options)])
    self.settings.reportexclude = value
    self.assertEqual(' --reportexclude {0}'.format(value),
                     str(self.settings))
    # bad exclusion (number)
    with self.assertRaises(CameraobscuraError):
        self.settings.reportexclude = random.randrange(100)
    # invalid option
    with self.assertRaises(CameraobscuraError):
        self.settings.reportexclude = random.choice('ABEFGHIJKLNOP')
    return
๋งˆ์ฐฐ๋ ฅ์ด ๊ฐ•ํ•ด์ ธ ์›๋ฐ˜์ด ์—‘์Šค์„ ์„ ๋ฐฉ์ถœํ• ์ˆ˜ ์žˆ๊ฒŒ ๋œจ๊ฑฐ์›Œ์ง€๋ฉด, ์—‘์Šค์„ ์„ ๋ฌด์—‡์œผ๋กœ ๊ด€์ฐฐ ๊ฐ€๋Šฅํ•œ๊ฐ€์š”?
๋ธ”๋ž™ํ™€ ๊ฐ•์ฐฉ์„ ์ผ์œผํ‚ค๋Š” ์ฒœ์ฒด๊ฐ€ ์ค‘์„ฑ์ž๋ณ„์ด๋‚˜ ๋ธ”๋ž™ํ™€์ผ ๊ฒฝ์šฐ, ๊ฐ•์ฐฉ์›๋ฐ˜ ์•ˆ์ชฝ์˜ ๊ฐ€์Šค๋Š” ๋ฐ€์ง‘์„ฑ ๋ณธ์ฒด์— ๋งค์šฐ ๊ฐ€๊นŒ์›Œ์ ธ ์—„์ฒญ๋‚œ ๊ณ ์†์œผ๋กœ ํšŒ์ „ํ•  ๊ฒƒ์ด๋‹ค. ๊ทธ๋Ÿฌ๋ฉด ๋งˆ์ฐฐ๋ ฅ์ด ๋„ˆ๋ฌด ๊ฐ•ํ•ด์ ธ์„œ ์›๋ฐ˜์ด ์ „์ž๊ธฐ ๋ณต์‚ฌ(์ฃผ๋กœ ์—‘์Šค์„ )๋ฅผ ๋ฐฉ์ถœํ•  ๋งŒํผ ๋œจ๊ฑฐ์›Œ์ง„๋‹ค. ์ด๋ ‡๊ฒŒ ๋ฐ์€ ์—‘์Šค์„ ์›๋“ค์€ ๋ง์›๊ฒฝ์œผ๋กœ ๊ด€์ธก์ด ๊ฐ€๋Šฅํ•˜๋‹ค. ๊ฐ•์ฐฉ์›๋ฐ˜์˜ ์—‘์Šค์„  ๋ฐฉ์ถœ์€ ์ง€๊ธˆ๊นŒ์ง€ ์•Œ๋ ค์ง„ ๊ฐ€์žฅ ์—๋„ˆ์ง€ ํšจ์œจ์ด ๋†’์€ ๊ณผ์ •์— ์†ํ•œ๋‹ค. ํ•ญ์„ฑ ๋‚ด๋ถ€ ํ•ต์œตํ•ฉ์ด ์งˆ๋Ÿ‰์˜ 0.7%๋ฅผ ์—๋„ˆ์ง€๋กœ ๋ฐฉ์ถœํ•˜๋Š” ๋ฐ˜๋ฉด, ๊ฐ•์ฐฉ์›๋ฐ˜์€ ์งˆ๋Ÿ‰์˜ 40%๋ฅผ ๋ณต์‚ฌ ์—๋„ˆ์ง€๋กœ ๋ฐฉ์ถœ์‹œํ‚จ๋‹ค. ๋งŽ์€ ๊ฒฝ์šฐ ๊ฐ•์ฐฉ์›๋ฐ˜์€ ๋ฐ€์ง‘์„ฑ์˜ ์–‘๊ทน ๋ฐฉํ–ฅ์œผ๋กœ ๋ถ„์ถœ๋˜๋Š” ์—„์ฒญ๋‚œ ์—๋„ˆ์ง€์˜ ์ƒ๋Œ€๋ก ์  ์ œํŠธ๋ฅผ ์ˆ˜๋ฐ˜ํ•œ๋‹ค. ์ด ์ œํŠธ๊ฐ€ ๋งŒ๋“ค์–ด์ง€๋Š” ๊ณผ์ •์€ ํ˜„์žฌ๋กœ์„œ ์™„์ „ํžˆ ์ดํ•ด๋˜์ง€ ๋ชปํ•˜๊ณ  ์žˆ๋‹ค.
๋ธ”๋ž™ํ™€ ์ฐฐ์Šค ํ† ๋จธ์Šค ๋ณผํŠผ, ๋ฃจ์ด์Šค ์›น์Šคํ„ฐ(Louise Webster), ํŒŒ์šธ ๋ฌด๋ฅด๋”˜(Paul Murdin)์ด 1972๋…„์— ์ตœ์ดˆ์˜ ๊ฐ•๋ ฅํ•œ ๋ธ”๋ž™ํ™€ ํ›„๋ณด ์ฒœ์ฒด ๋ฐฑ์กฐ์ž๋ฆฌ X-1๋ฅผ ์ด ๋ฐฉ์‹์œผ๋กœ ๋ฐœ๊ฒฌํ•ด๋ƒˆ๋‹ค. ๊ทธ๋Ÿฌ๋‚˜ ๋™๋ฐ˜์„ฑ์ด ๋ธ”๋ž™ํ™€ ํ›„๋ณด๋ณด๋‹ค ๋” ๋ฌด๊ฑฐ์šธ ์ˆ˜ ์žˆ๊ธฐ์— ์ด ๋ฐฉ๋ฒ•์—๋Š” ๋‹ค์†Œ์˜ ๋ถˆํ™•์‹ค์„ฑ์ด ๋‚จ์•„ ์žˆ๋‹ค. ํ˜„์žฌ๋Š” ์—‘์Šค์„  ์Œ์„ฑ๊ณ„ ์ค‘์—์„œ๋„ ์ผ์‹œ์  ์—ฐ์—‘์Šค์„ (X-ray transients)์œผ๋กœ ๋ถ„๋ฅ˜๋˜๋Š” ๊ณ„๋“ค์—์„œ ๋ธ”๋ž™ํ™€ ํ›„๋ณด๋“ค์ด ๋ณด๋‹ค ๋” ์ž˜ ๋ฐœ๊ฒฌ๋œ๋‹ค. ์ด ์ข…๋ฅ˜์˜ ์Œ์„ฑ๊ณ„์—์„œ๋Š” ๋ฐ€์ง‘์„ฑ์˜ ๋™๋ฐ˜์„ฑ์˜ ์งˆ๋Ÿ‰์ด ์ƒ๋Œ€์ ์œผ๋กœ ์ž‘์œผ๋ฉฐ, ๊ทธ ๋•Œ๋ฌธ์— ๋ธ”๋ž™ํ™€์˜ ์งˆ๋Ÿ‰์„ ๋ณด๋‹ค ์ •ํ™•ํ•˜๊ฒŒ ์ถ”์‚ฐํ•  ์ˆ˜ ์žˆ๋‹ค. ๋˜ํ•œ ์ด๋“ค ๊ณ„๋Š” 10 ~ 50๋…„ ๋™์•ˆ ๋ถˆ๊ณผ ์ˆ˜ ๊ฐœ์›” ๋™์•ˆ๋งŒ ์—‘์Šค์„ ์„ ํ™œ๋ฐœํ•˜๊ฒŒ ๋ฐฉ์ถœํ•œ๋‹ค. ์—‘์Šค์„  ๋ฐฉ์ถœ์ด ์ €์กฐํ•ด์ง€๋Š” ์‹œ๊ธฐ๋ฅผ ํœด๋ฉด๊ธฐ(quiescence)๋ผ ํ•˜๋ฉฐ, ์ด ๋•Œ ๊ฐ•์ฐฉ์›๋ฐ˜์ด ๊ทน๋„๋กœ ์–ด๋‘์›Œ์ ธ ๋™๋ฐ˜์„ฑ์„ ์„ธ๋ฐ€ํ•˜๊ฒŒ ๊ด€์ธกํ•  ์ˆ˜ ์žˆ๊ฒŒ ๋œ๋‹ค. ์ด๋Ÿฌํ•œ ๋ธ”๋ž™ํ™€ ํ›„๋ณด ์ค‘ ๊ฐ€์žฅ ์ ์ ˆํ•œ ์˜ˆ๊ฐ€ ๋ฐฑ์กฐ์ž๋ฆฌ V404์ด๋‹ค.
Test that a ValueError gets raised when a required SignatureVerifyResponsePayload field is missing when encoding the struct.
def test_write_missing_validity_indicator(self):
    """Encoding a SignatureVerifyResponsePayload without the required
    validity indicator field raises ValueError."""
    payload = signature_verify.SignatureVerifyResponsePayload(
        unique_identifier='49a1ca88-6bea-4fb2-b450-7e58802c3038'
    )
    stream = utils.BytearrayStream()
    args = (stream, )
    self.assertRaisesRegex(
        ValueError,
        "Payload is missing the validity indicator field.",
        payload.write,
        *args
    )
def test_some_missing_data(self, client):
    """POSTing a registration payload with missing required fields yields a
    Bad Request response carrying the schema's missing-field message."""
    response = client.post(
        "/api/v1/register",
        data=json.dumps(some_missing),
        content_type='application/json;charset=utf-8')
    res_data = json.loads(response.get_data(as_text=True))
    assert 'Bad Request' in res_data['status']
    assert 'Missing data for required field.' in str(res_data['Message'])
Apply on a scalar field. If the argument is not a scalar field an error is raised.
def __call__(self, scalar_field):
    """Apply the commutator [v1, v2] to *scalar_field*:
    v1(v2(f)) - v2(v1(f)).

    The argument must be a scalar field; otherwise the underlying
    vector-field applications raise an error.
    """
    forward = self.v1(self.v2(scalar_field))
    backward = self.v2(self.v1(scalar_field))
    return forward - backward
def alter_field(self, model, old_field, new_field, strict=False):
    """No-op schema alteration hook: field changes are currently ignored.

    Data migration for changed fields is not implemented yet.
    """
    # TODO : Logic to "upgrade" old data
    pass
solve_tsp: solves the traveling salesman problem. Start with the assignment model and add cuts until there are no subcycles.
def solve_tsp(V, c):
    """Solve the traveling salesman problem with Gurobi.

    Starts from the LP relaxation of the 2-matching (assignment) model and
    repeatedly adds subtour-elimination cuts until the solution contains no
    subcycles; once no cuts are needed, the variables are made binary and
    the MIP is solved.

    :param V: list of node ids
    :param c: dict of edge costs keyed by (i, j)
    :return: (optimal objective value, list of selected edges)
    """
    # Subtour-elimination cut generation.
    def addcut(edges):
        G = nx.Graph()
        G.add_nodes_from(V)
        for (i, j) in edges:
            G.add_edge(i, j)
        Components = list(nx.connected_components(G))
        # A single component means the tour is connected: no cut needed.
        if len(Components) == 1:
            return False
        for S in Components:
            # Each subtour S may use at most |S|-1 of its internal edges.
            model.addConstr(quicksum(x[i, j] for i in S for j in S if j > i) <= len(S) - 1)
        return True

    model = Model("tsp")
    x = {}
    for i in V:
        for j in V:
            x[i, j] = model.addVar(ub=1)
    model.update()
    # Degree constraints: every node is incident to exactly two tour edges.
    for i in V:
        model.addConstr(quicksum(x[j, i] for j in V if j < i) + quicksum(x[i, j] for j in V if j > i) == 2)
    model.setObjective(quicksum(c[i, j] * x[i, j] for i in V for j in V if j > i), GRB.MINIMIZE)
    EPS = 1.e-6
    while True:
        model.optimize()
        edges = []
        for (i, j) in x:
            if x[i, j].X > EPS:
                edges.append((i, j))
        if not addcut(edges):
            # No subtours left: if we are already solving the MIP, we're done;
            # otherwise switch the relaxation's variables to binary and re-solve.
            if model.IsMIP:
                break
            for (i, j) in x:
                x[i, j].VType = "B"
            model.update()
    return model.ObjVal, edges
def post_calc(self):
    """Classify each trajectory point into a rarefaction regime by Knudsen
    number and return [mfp, Kn, Re].

    Regime bands (by Kn):
        Continuum:       0 < Kn < 0.001
        Slip:            0.001 <= Kn < 0.1
        Transition:      0.1 <= Kn < 10
        Free molecular:  10 < Kn

    NOTE(review): self.mfp / self.Kn / self.Re appear to be computed by the
    commented-out block below (or elsewhere) — confirm they are set before
    this method runs.  Also note the final ``else`` of the continuum check
    overwrites ``index_cont_slip`` rather than a continuum index; looks like
    a copy/paste slip worth confirming upstream.
    """
    # self.mfp = fcl.mean_free_path(self.solver_T, self.solver_p,
    #                               self.atmosphere.d)
    # self.Kn = self.mfp / self.spacecraft.L
    ## self.Re = fcl.KnReMa(self.atmosphere.k, Kn=self.Kn,
    ##                      Ma=self.Ma)
    # self.Re = fcl.Reynolds(self.solver_rho, self.V, self.spacecraft.L,
    #                        self.solver_mu)

    # Continuum: 0 < Kn < 0.001
    # Slip: 0.001 <= Kn < 0.1
    # Transition: 0.1 <= Kn < 10
    # Free molecular: 10 < Kn
    self.regimes = placeholder()
    if len(np.argwhere(self.Kn > 10)) != 0:
        self.index_tran_freemol = np.argwhere(self.Kn > 10)[-1]
        self.regimes.free_molecular = np.argwhere(self.Kn >= 10)
    else:
        self.index_tran_freemol = None
    if len(np.argwhere(self.Kn > 0.1)) != 0:
        self.index_slip_tran = np.argwhere(self.Kn > 0.1)[-1]
        self.regimes.transition = np.argwhere((self.Kn < 10) & (self.Kn >= 0.1))
    else:
        self.index_slip_tran = None
    if len(np.argwhere(self.Kn > 0.001)) != 0:
        self.index_cont_slip = np.argwhere(self.Kn > 0.001)[-1]
        self.regimes.slip = np.argwhere((self.Kn < 0.1) & (self.Kn >= 0.001))
    else:
        self.index_cont_slip = None
    if len(np.argwhere((self.Kn > 0) & (self.Kn <= 0.001))) != 0:
        self.regimes.continuum = np.argwhere((self.Kn < 0.001) & (self.Kn >= 0))
    else:
        self.index_cont_slip = None
    return [self.mfp, self.Kn, self.Re]
Get the auth time for the security context. Gets the auth time for the established security context.
def krb5_extract_authtime_from_sec_context(
    context: "SecurityContext",
) -> int:
    """Get the auth time for the established security context.

    :param context: the established security context to query
    :return: the auth time as an int (epoch seconds — TODO confirm against
        the underlying krb5 binding)
    """
def getServerTimeForOTP(self) -> datetime.datetime: # This method seems to be useless to generate the OTP(One Time Password), but I leave it there if not self.__connected: raise ConnectionError r = self.__s.get(f"{BASE_URL}/nvsecurityapi/rest/enrollments/time") httpTime = r.headers["Date"] time = datetime.datetime.strptime( httpTime, '%a, %d %b %Y %H:%M:%S GMT') return time
Shows ann error if the user specifies an overly complicated system that requires too much memory allocation.
def show_size_error(self): messagebox.showerror(title = 'Memory Error', message = 'The parameters ' + 'you have specified require too much memory to be ' + 'computed. Please decrease the simulation time ' + 'and/or the transport rates to rectify this issue.') self.frame.quit()
def test_memory_usage(self): manager = ApacheManager() # Make sure there are Apache workers alive that have handled a couple of requests. for i in range(10): manager.fetch_status_page(manager.text_status_url) assert sum(manager.memory_usage) > 0 # TODO Create a WSGI process group so we can perform a useful test here? assert isinstance(manager.wsgi_process_groups, dict)
Adds an occupant to its list of occupants to keep track of as well as the rlt
def add(self, o): self.__occupants.append(o[0]) self.__position_rlt[o[0]] = (o[1], o[2])
def increment_room_occupancy(room, occupancy_additional): with sqlite3.connect(database) as c: c.execute('''CREATE TABLE IF NOT EXISTS rooms (name text UNIQUE, capacity integer, occupancy integer, noiseLevel integer);''') validate_room(room) current_occupancy = c.execute('''SELECT occupancy FROM rooms WHERE name=?;''', (room,)).fetchone()[0] new_occupancy = current_occupancy + occupancy_additional c.execute('''UPDATE rooms SET occupancy=? WHERE name=?;''', (new_occupancy, room))
Writes output matches to a file in the subfolder "Matches". It supports any iterable as output matches.
def file_output(matches: list, output_file_name: str = 'matches.txt'): with open("test/Matches/" + output_file_name, 'w') as f: for match in matches: for event in match.events: f.write("%s\n" % event.payload) f.write("\n")
def write(afile, seqs): for s in seqs : writeseq(afile, s)
Return whether finding group list for a specified node is supported by the resolver (in optional namespace).
def has_node_groups(self, namespace=None): try: return bool(self._source(namespace).reverse_upcall) except GroupResolverSourceError: return False
def is_in_group(user, group_name): return user.groups.filter(name__exact=group_name).exists()
A callback method that is called when the desired properties of the devices device twin are updated.
def on_device_twin_desired_updated(self): return self._on_device_twin_desired_updated
def set_update_callback(self): pass
๊ตฐ์ฒญ, ์ง์›, ์ Š๋‹ค, ์ƒ๋ƒฅํ•˜๋‹ค
๊ตฐ์ฒญ์˜ ์ Š์€ ์ง์›์ด ์œ ๋… ๋ฌด์ฒ™ ์ƒ๋ƒฅํ•œ ๊ฒƒ ๊ฐ™๋‹ค.
์žฌ์ฃผ๋Š” ์˜ˆ์ ˆ, ์Œ์•…, ํ™œ์˜๊ธฐ, ๊ธ€์“ฐ๊ธฐ, ๋งํƒ€๊ธฐ, ๊ณ„์‚ฐํ•˜๊ธฐ ๋“ฑ๋“ฑ ์ด๋‹ค.
inherits the __init__ from Number if just normal number or uses new method if its a list.
def __init__(self, number=None): if type(number) != list: print("hei") super().__init__(float(number)) elif type(number) == list: self.number = [] for i in number: self.number.append(i) else: raise TypeError("number added inn does not fill any of the type categories ยดint/float/listยด")
def __new__(*args, **kwargs): # real signature unknown pass
Iteratively zoom in on the minimum position in an image until the deltapeak value is below `mindiff`
def iterative_zoom(image, mindiff=1., zoomshape=[10,10], return_zoomed=False, zoomstep=2, verbose=False, minmax=np.min, ploteach=False, return_center=True): image_zoom = image argminmax = np.argmin if "min" in minmax.__name__ else np.argmax zf = 1. # "zoom factor" initialized to 1 for the base shift measurement offset = np.array([0]*image.ndim,dtype='float') # center offset delta_image = (image_zoom - minmax(image_zoom)) xaxzoom = np.indices(image.shape) if ploteach: ii = 1 pl.figure(ii) pl.clf() pl.pcolor(np.arange(image.shape[0]+1)-0.5,np.arange(image.shape[1]+1)-0.5, image) minpos = np.unravel_index(argminmax(image_zoom), image_zoom.shape) pl.plot(minpos[1],minpos[0],'wx') # check to make sure the smallest *nonzero* difference > mindiff while np.abs(delta_image[np.abs(delta_image)>0]).min() > mindiff: minpos = np.unravel_index(argminmax(image_zoom), image_zoom.shape) center = xaxzoom[0][minpos],xaxzoom[1][minpos] offset = xaxzoom[0][minpos]-(image.shape[0]-1)/2,xaxzoom[1][minpos]-(image.shape[1]-1)/2 zf *= zoomstep xaxzoom, image_zoom = zoom.zoom_on_pixel(image, center, usfac=zf, outshape=zoomshape, return_xouts=True) delta_image = image_zoom-minmax(image_zoom) # base case: in case you can't do any better... # (at this point, you're all the way zoomed) if np.all(delta_image == 0): if verbose: print("Can't zoom any further. 
zf=%i" % zf) break if verbose: print(("Zoom factor %6i, center = %30s, offset=%30s, minpos=%30s, min|diff|=%15g" % (zf, ",".join(["%15g" % c for c in center]), ",".join(["%15g" % c for c in offset]), ",".join(["%5i" % c for c in minpos]), np.abs(delta_image[np.abs(delta_image)>0]).min() ))) if ploteach: ii += 1 pl.figure(ii) pl.clf() pl.pcolor(centers_to_edges(xaxzoom[1][0,:]),centers_to_edges(xaxzoom[0][:,0]),image_zoom) pl.contour(xaxzoom[1],xaxzoom[0],image_zoom-image_zoom.min(),levels=[1,5,15],cmap=pl.cm.gray) pl.plot(center[1],center[0],'wx') minpos = np.unravel_index(argminmax(image_zoom), image_zoom.shape) pl.plot(xaxzoom[1][minpos], xaxzoom[0][minpos], 'w+') pl.arrow(center[1],center[0],xaxzoom[1][minpos]-center[1],xaxzoom[0][minpos]-center[0],color='w', head_width=0.1/zf, linewidth=1./zf, length_includes_head=True) pl.figure(1) #pl.contour(xaxzoom[1],xaxzoom[0],image_zoom-image_zoom.min(),levels=[1,5,15],cmap=pl.cm.gray) pl.arrow(center[1],center[0],xaxzoom[1][minpos]-center[1],xaxzoom[0][minpos]-center[0],color='w', head_width=0.1/zf, linewidth=1./zf, length_includes_head=True) if return_center: result = center else: result = offset if return_zoomed: return image_zoom,zf,result else: return result
def initial_guess(cts, xmin, xmax, size_window_background_left, size_window_background_right): # Automatic finding peak parameters number_of_image = len(cts) # nbre of images in the scan add_x0 = 0 add_IM = 0 add_H = 0 add_A = 0 add_B = 0 for ii in range(number_of_image): number_of_pixel = len(cts[ii]) x = np.arange(0, number_of_pixel, 1) step = x[1] - x[0] # Obtaining and add x0, IM and H add_x0 += (xmin + list_manipulation.index(cts[ii][xmin:xmax], max(cts[ii][xmin:xmax]))) add_IM += max(cts[ii][xmin:xmax]) add_H += (trapeze_method(cts[ii][xmin:xmax], step) / max(cts[ii][xmin:xmax])) # Obtaining A and B background_left = cts[ii][xmin:xmin + size_window_background_left] background_right = cts[ii][xmax - size_window_background_right:xmax] left_point_value = np.mean(background_left) right_point_value = np.mean(background_right) left_point_absc = xmin + size_window_background_left / 2 right_point_absc = xmax - size_window_background_right / 2 B_temp = (left_point_value - right_point_value) / (left_point_absc - right_point_absc) A_temp = left_point_value - B_temp * left_point_absc add_A += A_temp add_B += B_temp x0 = add_x0 / number_of_image IM = add_IM / number_of_image H = add_H / number_of_image A = add_A / number_of_image B = add_B / number_of_image return x0, IM, H, A, B
BOOL SQLInstallTranslatorEx( LPCTSTR lpszTranslator, LPCTSTR lpszPathIn, LPTSTR lpszPathOut, WORD cbPathOutMax, WORD pcbPathOut, WORD fRequest, LPDWORD lpdwUsageCount )
def odbccp32_SQLInstallTranslatorEx(jitter, get_str, set_str): ret_ad, args = jitter.func_args_stdcall(["lpszTranslator", "lpszPathIn", "lpszPathOut", "cbPathOutMax", "pcbPathOut", "fRequest", "lpdwUsageCount"]) raise RuntimeError('API not implemented') jitter.func_ret_stdcall(ret_ad, ret_value)
def odbccp32_SQLInstallTranslator(jitter, get_str, set_str): ret_ad, args = jitter.func_args_stdcall(["lpszInfFile", "lpszTranslator", "lpszPathIn", "lpszPathOut", "cbPathOutMax", "pcbPathOut", "fRequest", "lpdwUsageCount"]) raise RuntimeError('API not implemented') jitter.func_ret_stdcall(ret_ad, ret_value)
Helper function that returns the AmpLayout.Layout name for a given index. See amp.validator.AmpLayout.Layout in validator.proto for details.
def GetLayout(validator_pb2, layout_index): amp_layout = validator_pb2.DESCRIPTOR.message_types_by_name['AmpLayout'] layouts = amp_layout.fields_by_name['supported_layouts'].enum_type.values return layouts[layout_index].name
def get_label(index): if index == 0: return "Sepal Length" if index == 1: return "Sepal Width" if index == 2: return "Petal Length" if index == 3: return "Petal Width"
when removing a room we wan't to be able to remove it from the cache as well
def remove_room_exists(self, channel_id, room_id):
def rm_from_cache(self, *args, **kwargs): try: del self._cache[self._prehash( bind_callargs(self._signature, *args, **kwargs) )] except KeyError: pass
Generates a trajectory given a path. Does it using splines for each of the x, y, z
def create_trajectory(self, path): # Added this last pose because in some cases the drone would stop at the before-last pose. # Couldn't figure out the reason for this, I plotted the splines and they seemed fine. It # looks like some bug in the crazyflie firmware. path.add_pose(path.poses[-1], path.times[-1] + EPS) self.__duration = path.times[-1] # Generates splines for x, y and z, using the t array as knots. x_spline = CubicSpline(path.times, [p.position().x for p in path.poses], bc_type='natural') y_spline = CubicSpline(path.times, [p.position().y for p in path.poses], bc_type='natural') z_spline = CubicSpline(path.times, [p.position().z for p in path.poses], bc_type='natural') # Passing to the structure ros will read. Note that the polynomials must have 8 constants. for i in range(1, len(path.times)): x_coef = np.concatenate((x_spline.c[:, i - 1][::-1], [0] * 4)) y_coef = np.concatenate((y_spline.c[:, i - 1][::-1], [0] * 4)) z_coef = np.concatenate((z_spline.c[:, i - 1][::-1], [0] * 4)) p = Polynomial4D(path.times[i] - path.times[i - 1], x_coef, y_coef, z_coef, [0] * 8) self.__polynomials.append(p)
def generate_spiral_path_dtu(poses, n_frames=120, n_rots=2, zrate=.5, perc=60): # Get radii for spiral path using 60th percentile of camera positions. positions = poses[:, :3, 3] radii = np.percentile(np.abs(positions), perc, 0) radii = np.concatenate([radii, [1.]]) # Generate poses for spiral path. render_poses = [] cam2world = poses_avg(poses) up = poses[:, :3, 1].mean(0) z_axis = focus_pt_fn(poses) for theta in np.linspace(0., 2. * np.pi * n_rots, n_frames, endpoint=False): t = radii * [np.cos(theta), -np.sin(theta), -np.sin(theta * zrate), 1.] position = cam2world @ t render_poses.append(viewmatrix(z_axis, up, position, True)) render_poses = np.stack(render_poses, axis=0) return render_poses
Unlike a given repo on the Hub (e.g. remove from favorite list). See also [`like`] and [`list_liked_repos`].
def unlike( self, repo_id: str, *, token: Optional[str] = None, repo_type: Optional[str] = None, ) -> None: if repo_type is None: repo_type = REPO_TYPE_MODEL response = get_session().delete( url=f"{self.endpoint}/api/{repo_type}s/{repo_id}/like", headers=self._build_hf_headers(token=token) ) hf_raise_for_status(response)
def repository(full_name): session = db.get_session() try: repository = session.query(Repository) \ .filter(Repository.full_name == full_name) \ .one() commit_shas = session.query(Commit.sha) \ .join( commit_repository, commit_repository.c.repository_clone_url == repository.clone_url, ) \ .filter(commit_repository.c.commit_sha == Commit.sha) \ .all() commit_shas = [c[0] for c in commit_shas] if commit_shas: session.query(Commit) \ .filter(Commit.sha.in_(commit_shas)) \ .delete(synchronize_session=False) session.query(Repository) \ .filter(Repository.full_name == full_name) \ .delete() session.commit() finally: session.close()
๊ตญ์™ธ๋กœ ๋‚˜๊ฐ€์ง€ ์•Š๊ณ ๋„ ๋ฒ ๋ฅผ๋ฆฐ ํ•„ํ•˜๋ชจ๋‹‰ ์˜ค์ผ€์ŠคํŠธ๋ผ๊ฐ€ ์šด์˜ํ•˜๋Š” ๊ณต์—ฐ ์‹คํ™ฉ์ค‘๊ณ„ ์‹œ์Šคํ…œ์„ ํ†ตํ•ด ๊ณต์—ฐ์„ ๊ด€๋žŒํ•  ์ˆ˜ ์žˆ๋‹ค.
๋ฒ ๋ฅผ๋ฆฐ ํ•„ํ•˜๋ชจ๋‹‰ ์˜ค์ผ€์ŠคํŠธ๋ผ๊ฐ€ ๊ณต์—ฐ ์‹คํ™ฉ์ค‘๊ณ„ ์‹œ์Šคํ…œ์„ ๊ตฌ์ถ•ํ•จ์œผ๋กœ ์ธํ•ด ๊ตญ๋‚ด์—์„œ๋„ ์ธํ„ฐ๋„ท์„ ํ†ตํ•ด ๊ณต์—ฐ ๊ด€๋žŒ์ด ๊ฐ€๋Šฅํ•˜๋‹ค.
์„œ๋กœ๊ฐ€ ํ•˜๋Š” ๊ณต์—ฐ ๊ด€๋žŒ์„ ์„œ์šธ, ๋ถ€์‚ฐ์„ ์˜ค๊ฐ€๋ฉฐ ํ–ˆ๊ณ  ๊ณต์—ฐ์„ ๋งˆ์น˜๋ฉด ํ•จ๊ป˜ ์†Œ์ฃผ๋ฅผ ๋งˆ์…จ๋‹ค.
The number of seconds connection draining is active. Acceptable values are from 1 second to 3600 seconds.
def drain_timeout_in_sec(self) -> int: return pulumi.get(self, "drain_timeout_in_sec")
def KeepAliveTimeout(self): return self._get_attribute('keepAliveTimeout')
๊ฑ”๊ฐ€ ํ•œ ๋‹ฌ์— ๋ฒ„๋Š” ๋ˆ์€ ์–ผ๋งˆ์•ผ
๊ฑ”๊ฐ€ ํ•œ ๋‹ฌ์— ๋ฒ„๋Š” ๋ˆ์ด ๋˜๋Š”๋ฐ?
๊ทธ๋Ÿฌ๋ฉด ๊ฐˆ ๋•Œ๋งˆ๋‹ค ์–ผ๋งˆ์”ฉ ๊ฑท๋Š” ๊ฑฐ์•ผ ์•„๋‹ˆ๋ฉฐ ๋งค๋‹ฌ ์–ผ๋งˆ์”ฉ ๊ฑท๋Š” ๊ฑฐ์•ผ?
Goes through all functions to calculate all important numbers for the tree Calculates num_child, num_grand, tree_size, depth
def calc_all(self): self.calc_num_child() # calculate number of children self.calc_num_grand() # calculate number of grandkids self.calc_tree_size() # calculate number of tree size self.calc_depth() self.calc_max_depth()
def calc_num_child(self): self.num_child = len(self.children) # count num of kids for child in self.children: # Go thru all kids child.calc_num_child() # Ask each kid to take stock also
Fit a quadratic around a point and find its local minimum/maximum Center should be in row,column format
def fitquadratic(im,center,n): w = int((n-1)/2) start = center - w patch = im[start[0]:(start[0]+n), start[1]:(start[1]+n)] x0 = (n+1)/2 x,y = np.meshgrid(range(n),range(n)) x = x.flatten()+1-x0 y = y.flatten()+1-x0 # Constants for coefficient vector [f a b c d e] A = np.vstack([np.ones(n*n), x**2, y**2, x*y, x, y]).T coef = np.linalg.lstsq(A, patch.flatten())[0] # Find minimum coordinates B = np.array([[2*coef[1], coef[3]], [coef[3], 2*coef[2]]]) rhs = np.array([[-coef[4]],[-coef[5]]]) # [2a c; c 2b][x;y]=-[d;e] xymin = np.linalg.lstsq(B,rhs)[0] xymin += np.array([[center[1]],[center[0]]]) return xymin, coef
def closest_point_to(self, x): min_pt, min_dist = None, np.inf for s in self.segments: s_min_dist, s_min_pt = s.closest_point_to(x) if s_min_dist < min_dist: min_dist = s_min_dist min_pt = s_min_pt sd = np.copysign(min_dist, -1 if self.is_inside(x) else 1) return sd, min_pt
Return an instance of BlackjackMDP where peeking is the optimal action at least 10% of the time.
def peekingMDP(): # BEGIN_YOUR_CODE (our solution is 2 lines of code, but don't worry if you deviate from this) return BlackjackMDP(cardValues=[4, 5, 10, 11], multiplicity=1, threshold=20, peekCost=1) # END_YOUR_CODE
def _greedy_policy(self, state): raise NotImplementedError
์ธํ๋ฒ ์ดํ„ฐ์—์„œ ๋ณ‘์•„๋ฆฌ ์•Œ์„ ๋ถ€ํ™”ํ•˜๋Š” ๋ฐ ์–ผ๋งˆ๋‚˜ ๊ฑธ๋ฆฝ๋‹ˆ๊นŒ?
๊ทธ๋ ‡๋‹ค๋ฉด ์ˆ˜์ •๋œ ๋‹ญ ์•Œ์ด ๋ถ€ํ™”ํ•˜๋Š” ๋ฐ ์–ผ๋งˆ๋‚˜ ๊ฑธ๋ฆด๊นŒ์š”? ๊ทธ์— ๋Œ€ํ•œ ๋‹ต์€ ์ •ํ™•ํžˆ 21์ผ์ž…๋‹ˆ๋‹ค. ๊ทธ๋Ÿฌ๋‚˜ ์—ฌ๋Ÿฌ ๊ฐ€์ง€ ์ƒํ™ฉ์— ๋”ฐ๋ผ ๋‹ฌ๋ผ์งˆ ์ˆ˜ ์žˆ๋‹ค๋Š” ์ ์„ ์—ผ๋‘์— ๋‘์–ด์•ผ ํ•ฉ๋‹ˆ๋‹ค. ์–ด๋–ค ์ด์œ ๋กœ ์ธํ•ด ์˜จ๋„๊ฐ€ ์ ์ • ์˜จ๋„ ์ดํ•˜๋กœ ๋–จ์–ด์ง„ ๊ฒฝ์šฐ, ์ผ๋ถ€ ์•Œ์€ 22์ผ, 23์ผ, ๋˜๋Š” ์‹ฌ์ง€์–ด 24์ผ์ด๋‚˜ 25์ผ์— ๋ถ€ํ™”ํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. ์ˆ˜์ •๋œ ๋‹ญ ์•Œ์€ ์ผ๋ฐ˜์ ์œผ๋กœ ์ •ํ™•ํžˆ 21์ผ์ด ๊ฑธ๋ฆฌ๋Š” ๋ฐ˜๋ฉด, ๋Œ€๋ถ€๋ถ„์˜ ์˜ค๋ฆฌ ์•Œ์€ 28์ผ์ด ๊ฑธ๋ฆฌ๋ฉฐ, ๋ฌด์Šค์ฝ”๋น„ ์˜ค๋ฆฌ๋ฅผ ์ œ์™ธํ•ฉ๋‹ˆ๋‹ค.
๋‹ญ ์•Œ์€ ์ผ๋ฐ˜์ ์œผ๋กœ ๋ถ€ํ™”ํ•˜๊ธฐ ์ „์— ์•ฝ 21์ผ ๋™์•ˆ ์ธํ๋ฒ ์ด์…˜๋ฉ๋‹ˆ๋‹ค.
๋ฏผ์ค€์ด, ์—ฌ๋ฆ„, ํœด๊ฐ€, ๊ธฐ์ฐจ, ์—ฌํ–‰, ๋– ๋‚˜๋‹ค, ํ•˜๋‹ค
๋ฏผ์ค€์ด๋Š” ์—ฌ๋ฆ„ ํœด๊ฐ€๋กœ ํ˜ผ์ž ๊ธฐ์ฐจ ์—ฌํ–‰์„ ๋– ๋‚˜๊ธฐ๋กœ ํ–ˆ๋‹ค.
์žฌ์ฃผ๋Š” ์˜ˆ์ ˆ, ์Œ์•…, ํ™œ์˜๊ธฐ, ๊ธ€์“ฐ๊ธฐ, ๋งํƒ€๊ธฐ, ๊ณ„์‚ฐํ•˜๊ธฐ ๋“ฑ๋“ฑ ์ด๋‹ค.
์ „์ž ํ™˜์›๋ฐ˜์‘์„ ํ†ตํ•ด ์„ธํฌ์™€ ์กฐ์ง์— ๋…์„ฑ์„ ์ผ์œผํ‚ค๋Š”๊ฑด ๋ญ์•ผ?
\( \mathrm{O}_{2}^{-} \)์€ ์ „์ž ํ™˜์›๋ฐ˜์‘์„ ํ†ตํ•ด ์„ธํฌ์™€ ์กฐ์ง์— ๋…์„ฑ์„ ์ผ์œผ์ผœ ์งˆ๋ณ‘์„ ์œ ๋ฐœ์‹œํ‚ค๋ฉฐ, ์ด๋Š” ๋…ธํ™”์™€ ๋ฐ€์ ‘ํ•œ ๊ด€๋ จ์ด ์žˆ๋Š” ๊ฒƒ์œผ๋กœ ์•Œ๋ ค์ ธ ์žˆ๋‹ค.
์ด๋Š” ๋ณธ ์—ฐ๊ตฌ์—์„œ ๋†ํ™”๋ฐฐ์–‘๋œ ํ˜ผํ•ฉ๋ฏธ์ƒ๋ฌผ์ด ํ™˜์›์ „๊ทน ๋ฐ˜์‘๊ธฐ ๋‚ด์—์„œ ์ „๊ทน์œผ๋กœ ๋ถ€ํ„ฐ ์ „์ž๋ฅผ ๋ฐ›์•„ \( \mathrm{CO}_{2} \) ๋ฅผ ์•„์„ธํŠธ์‚ฐ์œผ๋กœ ํ™˜์›ํ•˜๋Š” ์ƒ๋ฌผ์ „๊ธฐํ™œ์„ฑ๋ฐ˜์‘์„ ์ผ์œผํ‚ค๋Š” ๊ฒƒ์„ ๋‚˜ํƒ€๋‚ธ๋‹ค.
์‚ฌ์šฉ๋˜๋‹ค, ์น˜๋ฃŒ์ œ, ํ†ต์ฆ, ๋ด‰๋…
๋ด‰๋…์€ ์ฃผ๋กœ ํ†ต์ฆ ์น˜๋ฃŒ์ œ๋กœ ์‚ฌ์šฉ๋˜์–ด ์™”๋‹ค.
์ƒ๋Ÿฌ๋“œ์˜ ์–‘๋…์œผ๋กœ ์ฃผ๋กœ ์†Œ๊ธˆ, ํ›„์ถ”, ํ–ฅ๋ฃŒ, ์ผ€์ดํผ ๋“ฑ์ด ์‚ฌ์šฉ๋˜๊ณ  ์žˆ๋‹ค.
Return a dictionary of css dictionaries representing this scheme. This can be fed to the ly.colorize.format_stylesheet() function.
def css_scheme(self): scheme = {} # base/default styles d = scheme[None] = {} for name, fmt in self.defaultStyles.items(): d[name] = fmt2css(fmt) # mode/group styles for mode, styles in self.allStyles.items(): d = scheme[mode] = {} for name, fmt in styles.items(): d[name] = fmt2css(fmt) return scheme
def stylers(self): if self._stylers is None: self._stylers = styler_factory(self.project_config().get('style', {})) return self._stylers
Verifies that the report is not accredited if it contains a sample application that is not accredited.
def test_get_report_accreditation_false(report_api_mip_dna, mip_analysis_api, case_mip_dna): # GIVEN a list of samples when one of them is not accredited mip_metadata = mip_analysis_api.get_latest_metadata(case_mip_dna.internal_id) samples = report_api_mip_dna.get_samples_data(case_mip_dna, mip_metadata) samples[0].application.accredited = False # WHEN retrieving the report accreditation accredited = report_api_mip_dna.get_report_accreditation(samples) # THEN check that the report is not accredited assert not accredited
def test_get_sample_bad_sample(cli_runner: CliRunner, base_context: CGConfig): # GIVEN an empty database # WHEN getting a sample name = "dummy_name" result = cli_runner.invoke(get, ["sample", name], obj=base_context) # THEN it should warn about missing sample id instead of getting a sample # it will not fail since the API accepts multiple samples assert result.exit_code == 0
Search for documents based on type filters, search_text or order_by and return a queryset of document objects
def search_documents(self, types=None, search_text=None, order_by=None): documents = self if types and isinstance(types, list) and types[0] != 'all': documents = documents.filter(type__in=types) if search_text: documents = documents.filter(Q(name__icontains=search_text) | Q(description__icontains=search_text) | Q(search__icontains=search_text)) if order_by: # TODO: Validate that order_by is a valid sort parameter documents = documents.order_by(order_by) return documents
def search(self, query_string, fields=None, backend='default'): search_backend = get_search_backend(backend) return search_backend.search(query_string, self, fields=None)
Performs a single read from the socket and hands the data off to the h2 connection object.
def _single_read(self): # Begin by reading what we can from the socket. # # Concurrency # # Synchronizes reading the data # # I/O occurs while the lock is held; waiting threads will see a delay. with self._lock: if self._sock is None: raise ConnectionError('tried to read after connection close') self._sock.fill() data = self._sock.buffer.tobytes() self._sock.advance_buffer(len(data)) with self._conn as conn: events = conn.receive_data(data) stream_ids = set(getattr(e, 'stream_id', -1) for e in events) stream_ids.discard(-1) # sentinel stream_ids.discard(0) # connection events self.recent_recv_streams |= stream_ids for event in events: if isinstance(event, h2.events.DataReceived): self._adjust_receive_window(event.flow_controlled_length) self.streams[event.stream_id].receive_data(event) elif isinstance(event, h2.events.PushedStreamReceived): if self._enable_push: self._new_stream(event.pushed_stream_id, local_closed=True) self.streams[event.parent_stream_id].receive_push(event) else: # Servers are forbidden from sending push promises when # the ENABLE_PUSH setting is 0, but the spec leaves the # client action undefined when they do it anyway. So we # just refuse the stream and go about our business. self._send_rst_frame(event.pushed_stream_id, 7) elif isinstance(event, h2.events.ResponseReceived): self.streams[event.stream_id].receive_response(event) elif isinstance(event, h2.events.TrailersReceived): self.streams[event.stream_id].receive_trailers(event) elif isinstance(event, h2.events.StreamEnded): self.streams[event.stream_id].receive_end_stream(event) elif isinstance(event, h2.events.StreamReset): if event.stream_id not in self.reset_streams: self.reset_streams.add(event.stream_id) self.streams[event.stream_id].receive_reset(event) elif isinstance(event, h2.events.ConnectionTerminated): # If we get GoAway with error code zero, we are doing a # graceful shutdown and all is well. Otherwise, throw an # exception. 
self.close() # If an error occured, try to read the error description from # code registry otherwise use the frame's additional data. if event.error_code != 0: try: name, number, description = errors.get_data( event.error_code ) except ValueError: error_string = ( "Encountered error code %d" % event.error_code ) else: error_string = ( "Encountered error %s %s: %s" % (name, number, description) ) raise ConnectionError(error_string) else: log.info("Received unhandled event %s", event) self._send_outstanding_data(tolerate_peer_gone=True, send_empty=False)
def read(self): assert self.status in (WAIT_LEN, WAIT_MESSAGE) if self.status == WAIT_LEN: self._read_len() # go back to the main loop here for simplicity instead of # falling through, even though there is a good chance that # the message is already available elif self.status == WAIT_MESSAGE: read = self.socket.recv(self.len - len(self.message)) if len(read) == 0: logging.error("can't read frame from socket (get %d of %d bytes)" % (len(self.message), self.len)) self.close() return self.message += read if len(self.message) == self.len: self.status = WAIT_PROCESS
Verify that a generated quote came from a trusted TPM and matches the previously obtained PCR values
def quote_verify(data, validation, aik, pcrvalues): select = 0 maxpcr = 0 # Verify that the validation blob was generated by a trusted TPM pubkey = aik.get_pubkey() n = m2.bin_to_bn(pubkey) n = m2.bn_to_mpi(n) e = m2.hex_to_bn("010001") e = m2.bn_to_mpi(e) rsa = M2Crypto.RSA.new_pub_key((e, n)) m = hashlib.sha1() m.update(data) md = m.digest() try: ret = rsa.verify(md, str(validation), algo='sha1') except M2Crypto.RSA.RSAError: return False # And then verify that the validation blob corresponds to the PCR # values we have values = bytearray() for pcr in sorted(pcrvalues): values += pcrvalues[pcr] select |= (1 << pcr) maxpcr = pcr if maxpcr < 16: header = struct.pack('!H', 2) header += struct.pack('@H', select) header += struct.pack('!I', len(values)) else: header = struct.pack('!H', 4) header += struct.pack('@I', select) header += struct.pack('!I', len(values)) pcr_blob = header + values m = hashlib.sha1() m.update(pcr_blob) pcr_hash = m.digest() if pcr_hash == data[8:28]: return True else: return False
def test_change_secret(self): sig1 = self.notary.compute_signature(self.nb) self.notary.secret = b"different" sig2 = self.notary.compute_signature(self.nb) self.assertNotEqual(sig1, sig2)
this function returns the list of animals with a new session within the last few ('days_passed') days
def get_current_animals(root: Root, days_passed: int = 4): now = datetime.datetime.now() all_animals = root.get_all_animals() if all_animals == []: logging.warning('No animal found!') return [] animalList = [] for animal in all_animals: animalTag = File(root, animal) sessionList = animalTag.get_all_sessions() if not sessionList: continue lastSessionDate = animalTag.get_session_date(sessionList[-1]) if (now - lastSessionDate).days <= days_passed: animalList.append(animal) return animalList
def ended_recently(self): return self.filter( status=Contest.CLOSED, created_at__gt=timezone.now()-(SUBMISSIONS_LENGTH+VOTING_LENGTH+RECENT_LENGTH) ).order_by("-created_at")
Activated at the end of the with statement. It automatically releases the lock if it isn't locked.
def __exit__(self, exc_type, exc_val, exc_tb): if self.is_locked: self.release()
def unlock(self): utilities.lock.unlock(self.lockfd)
Returns the mean squared error for model (a polynomial of the specified degree) on X and y.
def mse(X, y, degree, model): # calculate MSE for X and y and return both MSE_x = np.square(np.subtract(X, model[0])).mean()/len(X) MSE_y = np.square(np.subtract(y, model[1])).mean()/len(y) return MSE_x, MSE_y
def error(self, x: np.array, y: np.array) -> float: yh = self.predict(x) return np.mean(y != yh)
์žˆ๋‹ค, ์‹ค์†, ๋•…, ๋•…, ๊ฐ€์น˜, ๊ท€ํ•˜๋‹ค
๊ทธ ๋•…์€ ์‹ค์†์ด ์žˆ๊ณ  ๊ฒฝ์ œ์  ๊ฐ€์น˜๊ฐ€ ๊ท€ํ•œ ๋•…์ด๋‹ค.
์ˆ˜๋ฐ•, ์ฐธ์™ธ, ์กฐ๋กฑ๋ฐ•, ์ˆ˜์„ธ๋ฏธ, ์˜ค์ด, ๊ฐ€์ง€๋ฅผ ์ •์„ฑ๊ป ์‹ฌ์–ด ๋ฌด๋Ÿญ๋ฌด๋Ÿญ ํ‚ค์› ๋‹ค.
ํ†ต์˜์‹œ์™€ ํ†ต์˜ ํ์กฐ์„ ์†Œ ์žฌ์ƒ์‚ฌ์—… ํ˜‘์•ฝ์„ ์ฒด๊ฒฐํ•œ ๊ฑด ์–ด๋””์ง€?
ํ•œ๊ตญํ† ์ง€์ฃผํƒ๊ณต์‚ฌ(LH)๋Š” ํ†ต์˜์‹œ ์†Œ์žฌ ์‹ ์•„sb์กฐ์„ ์†Œ์—์„œ ๊ฒฝ์ƒ๋‚จ๋„, ํ†ต์˜์‹œ์™€ ํ†ต์˜ ํ์กฐ์„ ์†Œ ์žฌ์ƒ์‚ฌ์—… ๊ธฐ๋ณธํ˜‘์•ฝ์„ ์ฒด๊ฒฐํ–ˆ๋‹ค๊ณ  30์ผ ๋ฐํ˜”๋‹ค
ํ•œ๊ตญํ† ์ง€์ฃผํƒ๊ณต์‚ฌ(LH)๋Š” ํ†ต์˜ ํ์กฐ์„ ์†Œ ์žฌ์ƒ์‚ฌ์—… ๋งˆ์Šคํ„ฐํ”Œ๋žœ ๊ตญ์ œ๊ณต๋ชจ ๋‹น์„ ์ž‘์œผ๋กœ ํฌ์Šค์ฝ”์—์ด์•ค์”จ ์ปจ์†Œ์‹œ์—„์˜ ํ†ต์˜ ์บ ํ”„ ๋งˆ๋ ˆ๋ฅผ ์ตœ์ข…์„ ์ •ํ–ˆ๋‹ค๊ณ  10์ผ ๋ฐํ˜”๋‹ค
Given a RefID and an optional name, create a profile and then return that newly created profile.
def new_profile_by_refid(self, refid: Optional[str], name: Optional[str], pid: Optional[int]) -> ValidatedDict: if refid is None: return None if name is None: name = 'ใชใ—' if pid is None: pid = 51 userid = self.data.remote.user.from_refid(self.game, self.version, refid) defaultprofile = ValidatedDict({ 'name': name, 'pid': pid, 'settings': { 'flags': 223 # Default to turning on all optional folders }, }) self.put_profile(userid, defaultprofile) profile = self.get_profile(userid) return profile
def post_create_user(sender, instance, created, raw, **kwargs): if created and not raw: user = instance (user_profile, created) = UserProfile.objects.get_or_create( user=user )
Try to match country name with its code. Name of the city helps when country_name is "Korea".
def match_country_name_to_its_code(country_name, city=''): if country_name: country_name = country_name.upper().replace('.', '').strip() if country_to_iso_code.get(country_name): return country_to_iso_code.get(country_name) elif country_name == 'KOREA': if city.upper() in south_korean_cities: return 'KR' else: for c_code, spellings in countries_alternative_spellings.items(): for spelling in spellings: if country_name == spelling: return c_code return None
def find_code(city_name): try: code_2 = pycountry.subdivisions.lookup(city_name) code_3 = pycountry.countries.get(alpha_2=code_2.country_code) return code_3.alpha_3 except LookupError: print(f'Sorry, could not find a code for {city_name}', file=sys.stderr) return 'XXX'
takes a path to a folder and write all filenames recursively (files of all sub folders to)
def get_all_file_names(folderpath, out="../output.txt"): print("folder pathh: ", folderpath) file_list = os.listdir(folderpath) print("file_list:") print(file_list) with open(out, "a") as file2: for object_name in file_list: file2.write(str(object_name) + "\n") dir_path = os.path.join(folderpath, str(object_name)) if os.path.isdir( dir_path ): # Return True if path is an existing directory. This follows symbolic links, so both islink() and isdir() can be true for the same path.) print("Is dir", dir_path) get_all_file_names(dir_path, out)
def get_files_in_folder(folder): filenames = os.listdir(folder) # os.path.join combines paths while dealing with /s and \s appropriately full_filenames = [os.path.join(folder, filename) for filename in filenames] return full_filenames
์ฝ˜ํฌ๋ฆฌํŠธ ์Šฌ๋ž˜๋ธŒ ๋น„์šฉ
์šฐ๋ฆฌ์˜ ์ฝ˜ํฌ๋ฆฌํŠธ ์Šฌ๋ž˜๋ธŒ ์ž์žฌ ์ถ”์ •์— ๋”ฐ๋ฅด๋ฉด, ์ฝ˜ํฌ๋ฆฌํŠธ ์Šฌ๋ž˜๋ธŒ์˜ ํ‰๊ท  ์ตœ์†Œ ๋น„์šฉ์€ ํ‰๋ฐฉํ”ผํŠธ๋‹น 1.36๋‹ฌ๋Ÿฌ์ด๋ฉฐ, ์ตœ๋Œ€ ๋น„์šฉ์€ 1.88๋‹ฌ๋Ÿฌ์ž…๋‹ˆ๋‹ค. ๋น„๊ตํ•˜์ž๋ฉด, ์‹œ๋ฉ˜ํŠธ ์Šฌ๋ž˜๋ธŒ์˜ ํ‰๋ฐฉํ”ผํŠธ๋‹น ๊ฐ€๊ฒฉ์€ 5.50๋‹ฌ๋Ÿฌ์ž…๋‹ˆ๋‹ค.
์ฝ˜ํฌ๋ฆฌํŠธ ์Šฌ๋ž˜๋ธŒ์™€ ํฌ์žฅ์žฌ์˜ ๋น„์šฉ ๋น„๊ต. ์ผ๋ฐ˜์ ์œผ๋กœ ํ‘œ์ค€ ์ฝ˜ํฌ๋ฆฌํŠธ ์Šฌ๋ž˜๋ธŒ๋Š” ๋Œ€์•ˆ๋ณด๋‹ค ์ œ๊ณฑํ”ผํŠธ๋‹น ๋น„์šฉ์ด ๋‚ฎ์Šต๋‹ˆ๋‹ค. ์ผ๋ฐ˜์ ์œผ๋กœ ํ‘œ์ค€ ์ฝ˜ํฌ๋ฆฌํŠธ ์Šฌ๋ž˜๋ธŒ ๋Œ€์‹  ํฌ์žฅ์„์„ ์„ ํƒํ•  ๊ฒฝ์šฐ 10%-15% ๋” ์ง€๋ถˆํ•˜๊ฒŒ ๋ฉ๋‹ˆ๋‹ค. ์Šคํƒฌํ”„ ์ฝ˜ํฌ๋ฆฌํŠธ๋กœ ์—…๊ทธ๋ ˆ์ด๋“œํ•˜๊ธฐ๋กœ ๊ฒฐ์ •ํ•˜๋ฉด, ๋Œ€๋ถ€๋ถ„์˜ ๊ฒฝ์šฐ ํฌ์žฅ์„์˜ ๋น„์šฉ์ด ๋™์ผํ•˜๊ฑฐ๋‚˜ ๋” ์ €๋ ดํ•  ๊ฐ€๋Šฅ์„ฑ์ด ๋†’์Šต๋‹ˆ๋‹ค.
Processes a range of filenames and labels in the MPII dataset corresponding to the given thread index.
def _process_image_files_single_thread(coder, thread_index, ranges, mpii_dataset): if FLAGS.is_train: base_name = 'train' else: base_name = 'test' shards_per_thread = FLAGS.train_shards/FLAGS.num_threads shard_ranges = pose_util.get_n_ranges(ranges[thread_index][0], ranges[thread_index][1], shards_per_thread) for shard_index in range(len(shard_ranges)): tfrecord_index = int(thread_index*shards_per_thread + shard_index) tfrecord_filename = '{}{}.tfrecord'.format(base_name, tfrecord_index) tfrecord_filepath = os.path.join(FLAGS.train_dir, tfrecord_filename) options = tf.python_io.TFRecordOptions( compression_type=tf.python_io.TFRecordCompressionType.ZLIB) with tf.python_io.TFRecordWriter(path=tfrecord_filepath, options=options) as writer: shard_start = shard_ranges[shard_index][0] shard_end = shard_ranges[shard_index][1] for img_index in range(shard_start, shard_end): with tf.gfile.FastGFile(name=mpii_dataset.img_filenames[img_index], mode='rb') as f: image_jpeg = f.read() _write_example(coder, image_jpeg, mpii_dataset.people_in_imgs[img_index], writer)
def parallel_search_range(data, query_range, n_processor): results = [] pool = Pool(processes=n_processor) ### START CODE HERE ### # Perform data partitioning first partition_result=h_partition(data,n_processor) for query in range(query_range[0],query_range[1]+1): hash_value=s_hash(query,n_processor) working_dataset=partition_result[hash_value] indices=pool.apply(linear_search,[working_dataset,query]) for index in indices: results.append(working_dataset[index]) ### END CODE HERE ### return results