text
stringlengths
29
850k
#!/usr/bin/env python # npyscreenreactory.py # Inspired by pausingreactor.py and xmmsreactor.py # npyscreen modifications # Copyright (c) 2015 Mark Tearle <[email protected]> # See LICENSE for details. # vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4 """ This module provides npyscreen event loop support for Twisted. In order to use this support, simply do the following:: | import npyscreenreactor | npyscreenreactor.install() Then, when your root npyscreenApp has been created:: | from twisted.internet import reactor | reactor.registerNpyscreenApp(yourApp) | reactor.run() Then use twisted.internet APIs as usual. Stop the event loop using reactor.stop() Maintainer: Mark Tearle """ from twisted.python import log, runtime from twisted.internet import selectreactor import npyscreen class NpyscreenReactor(selectreactor.SelectReactor): """ npyscreen reactor. npyscreen drives the event loop """ def doIteration(self, timeout): # Executing what normal reactor would do... self.runUntilCurrent() selectreactor.SelectReactor.doIteration(self, timeout) # push event back on the npyscreen queue self.npyscreenapp.queue_event(npyscreen.Event("_NPYSCREEN_REACTOR")) def registerNpyscreenApp(self, npyscreenapp): """ Register npyscreen.StandardApp instance with the reactor. """ self.npyscreenapp = npyscreenapp # push an event on the npyscreen queue self.npyscreenapp.add_event_hander("_NPYSCREEN_REACTOR", self._twisted_events) def _twisted_events(self, event): self.doIteration(0) def _stopNpyscreen(self): """ Stop the Npsycreen event loop if it hasn't already been stopped. Called during Twisted event loop shutdown. """ if hasattr(self, "npyscreenapp"): self.npyscreenapp.setNextForm(None) def run(self,installSignalHandlers=True): """ Start the reactor. """ # Executing what normal reactor would do... self.startRunning(installSignalHandlers=installSignalHandlers) # do initial iteration and put event on queue to do twisted things self.doIteration(0) # add cleanup events: self.addSystemEventTrigger("after", "shutdown", self._stopNpyscreen) # self.npyscreenapp.run() def install(): """ Configure the twisted mainloop to be run inside the npyscreen mainloop. """ reactor = NpyscreenReactor() from twisted.internet.main import installReactor installReactor(reactor) return reactor __all__ = ['install']
Thursday night was another wonderful evening thanks to Newpark Town Center‘s great community development program. In addition to their dozens of great events (see their Facebook page to get an idea of them all), part of the program is a great live music series from the Mountain Town Music non-profit organization that brings wonderful artists to our neighborhood every Thursday night starting at 6PM throughout the summer. Several hundred people showed up for the concert last night – it was a complete blast. Maxwell’s East Coast Eatery serves up great pizza and beverage options, or you are welcome to bring your own food & drinks and chairs & blankets just are you are welcome to do for many of the live music events in Park City throughout the summer. Brian Richards, of Mountain Town Music, mentioned that they program over 200 live music shows in 90 days throughout the summer. It’s wonderful being in Park City!
# coding=utf-8 # Copyright 2020 The Google Research Team Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Functions for logging debug info for use during model dev cycle.""" from absl import logging import data def is_int_list(value): """Checks if a value's type is a list of integers.""" return value and isinstance(value, list) and isinstance(value[0], int) def is_unicode_list(value): """Checks if a value's type is a list of Unicode strings.""" if value and isinstance(value, list): return isinstance(value[0], str) return False def is_valid_unicode(text): """Check if a string is valid unicode. Did we slice on an invalid boundary?""" try: text.decode("utf-8") return True except UnicodeDecodeError: return False def log_debug_info(filename, line_no, entry, debug_info, reverse_vocab_table): """Logs `debug_info` for debugging purposes.""" # Enable when debugging experimental new things. extremely_verbose = False def sanitize_char(c): """Optionally normalize chars we don't want in log messages.""" # Don't like having too many newlines in your debugging log output? # Change this. remove_newlines = False if c == "\r": if remove_newlines: return " " return "\r" if c == "\n": if remove_newlines: return " " return "\n" return c def sanitize(s): return "".join(sanitize_char(c) for c in s) doc = entry["plaintext"] if "json" in debug_info: json_elem = debug_info["json"] else: json_elem = None logging.info("No 'json' key in `debug_info`.") if "tydi_example" in debug_info: tydi_example = debug_info["tydi_example"] else: tydi_example = None logging.info("No 'tydi_example' key in `debug_info`.") offset_to_wp = None doc_wp = None logging.info("=== Logging example %s:%d ===", filename, line_no) window = 20 for i in range(0, data.byte_len(entry["contexts"]), window): span_text = data.byte_slice( entry["contexts"], i, i + window, errors="replace") doc_offsets = entry["context_to_plaintext_offset"][i:i + window] # Now double-check that those doc offsets actually match the text we expect. recovered_doc = [ data.byte_slice(doc, i, i + 1, errors="replace") for i in doc_offsets if i != -1 ] if extremely_verbose: logging.info("context_to_doc: %d: %s (%s) %s", i, sanitize(span_text), " ".join(str(x) for x in doc_offsets), sanitize(recovered_doc)) for key, value in debug_info.items(): if key == "offset_to_wp": offset_to_wp = value continue # Convert wordpiece vocab IDs back into readable text. if is_int_list(value) and "wp_ids" in key: value = [reverse_vocab_table[word_id] for word_id in value] # Convert Unicode escapes to readable text. if is_unicode_list(value): value = [word.encode("utf-8") for word in value] if key == "all_doc_wp_ids": doc_wp = value # Represent lists as plaintext. if isinstance(value, list): value = " ".join(str(item) for item in value) value = str(value) logging.info("%s: %s", key, value) if offset_to_wp is not None: for i in range(0, data.byte_len(entry["contexts"]), window): wp_slice = [] for byte_offset in range(i, i + window): if byte_offset in offset_to_wp: wp_offset = offset_to_wp[byte_offset] wp_slice.append(doc_wp[wp_offset]) else: wp_slice.append("-1") context_slice = data.byte_slice( entry["contexts"], i, i + window, errors="replace") logging.info("context_to_wp: %d: %s (%s)", i, sanitize(context_slice), " ".join(str(x) for x in wp_slice)) if "searched_offset_to_wp" in debug_info: logging.info("searched_offset_to_wp: %s", " ".join(str(i) for i in debug_info["searched_offset_to_wp"])) if json_elem: logging.info( "json.annotations[0].minimal_answer.plaintext_start_byte: %d", json_elem["annotations"][0]["minimal_answer"]["plaintext_start_byte"]) logging.info( "json.annotations[0].minimal_answer.plaintext_end_byte: %d", json_elem["annotations"][0]["minimal_answer"]["plaintext_end_byte"]) min_ans_sp = json_elem["annotations"][0]["minimal_answer"] min_ans_text = data.byte_slice( json_elem["document_plaintext"], min_ans_sp["plaintext_start_byte"], min_ans_sp["plaintext_end_byte"], errors="replace") min_ans_text_in_context = data.byte_slice( json_elem["document_plaintext"], min_ans_sp["plaintext_start_byte"] - 100, min_ans_sp["plaintext_end_byte"] + 100, errors="replace") logging.info("minimal answer text (from json): %s", min_ans_text) logging.info("minimal answer text in context: %s", min_ans_text_in_context) logging.info("entry.answer.span_start: %d", entry["answer"]["span_start"]) logging.info("entry.answer.span_end: %d", entry["answer"]["span_end"]) logging.info("entry.answer.span_text: %s", entry["answer"]["span_text"]) if tydi_example: # Non-train examples may not have offsets. if tydi_example.start_byte_offset: logging.info("tydi_example.start_byte_offset: %d", tydi_example.start_byte_offset) logging.info("tydi_example.end_byte_offset: %d", tydi_example.end_byte_offset) tydi_example_min_ans_text = data.byte_slice( entry["contexts"], tydi_example.start_byte_offset, tydi_example.end_byte_offset, errors="replace") logging.info( "minimal answer text (from TyDiExample byte offsets in `contexts`): %s", tydi_example_min_ans_text) logging.info("^^^ End example ^^^")
Have you ever felt like you were made for something? Created and designed specifically for a reason? Most of my life I have felt that my purpose here on earth was to to be a wife and a mother, to love on and care for my babies. The Lord gave me these desires for a reason and they are strong. I met my now husband in the third grade and we went on our first date when we were fifteen. During our dating years we were patient and knew how we wanted to wait and prepare for being husband and wife. Logan and I entered into our marriage open and excited about the possibility of welcoming a baby into our lives. I will never forget standing on the altar with my almost husband saying our vows. It is so vivid in my mind when we were asked "Are you prepared to accept children lovingly from God and to bring them up according to the law of Christ and his Church?”, while I softly said “I am”, Logan proudly stated “absolutely!” My heart fell in love with him once again. "I will never forget standing on the altar with my almost-husband saying our vows. It is so vivid in my mind when we were asked 'Are you prepared to accept children lovingly from God and to bring them up according to the law of Christ and his Church?', while I softly said “I am", Logan proudly stated 'absolutely!'" A mothers heart with no children. Oh how I long for the day. As the months passed and our desires only grew stronger I started to feel lost. I thought this was my purpose? I thought I had everything figured out in this part of my life. Going through college I really only looked at school as something I needed to get through to marry my best friend at the end. I cared about looking for jobs and have been searching for that perfect career, but have fallen short in finding that solution to my plan. At the end of the day that all didn’t matter to me as much though because hey, I was going to be a mom and that was my purpose. I quickly realized how I have gone through so many stages of my life waiting for that next step, the next moment that I really wanted, where I truly thought I should be. I hardly got involved in areas of my life that were great opportunities to meet new people, focus on the now and live in the moment. Yeah, sure I felt present, but was I really? I was meant to be a mother and a wife I told myself, but I was only twenty and not yet married, so I would just get through. Try to enjoy what my life looked like then, but not fully invest. I was preparing my heart for marriage and motherhood, but missed out on caring for what my heart needed then. Openness and acceptance on school not going the way I planned or not getting engaged as soon as I thought. This is where I needed healing and attention. Instead, I brushed my anxieties and worries away because those moments didn’t matter because they were not the big picture, my main purpose. "I quickly realized how I have gone through so many stages of my life waiting for that next step, the next moment that I really wanted, where I truly thought I should be." But whoever said you have one purpose? Who told me that we are all put here to fulfill our purpose, then return back home to our Creator? I got this idea in my head at such a young age that I was meant for motherhood. I know I am called to be a wife and a mother. I know this is my vocation. I have had such a draw to children and mothers hearts for all my life. But this is all “my plan." Yes, I am being open and honest to what my Father is leading me towards. I have prayed to feel confident in my choices and asked for guidance in this calling. But how do I feel like I am failing this big purpose of mine by not being able to conceive a child yet? Why am I missing all of the moments to connect with other women in their struggles and desires? How am I making date nights with my husband turn into tears at the end of the night because I am really longing to be at home giving our child a bath? These times of sadness and heartache matter. They are real. But these are also times of learning, joy and alone time. Yes, I feel I have a purpose here on earth, but does this have to be one thing? Can I have purpose in being a newlywed enjoying the time I have with my husband? Can I have purpose in being a good friend and sister, spending time with those who bring me joy? Being in this moment, now. Having intentional relationships and deep conversations instead of superficial interactions because those just yield signs to what I am waiting for. These are the lessons I am learning. These are how I am finding my purpose in being a married woman with no children yet. "Being in this moment, now. Having intentional relationships and deep conversations instead of superficial interactions because those just yield signs to what I am waiting for. These are the lessons I am learning." During this time of waiting I have been reminded to be gentle on myself, I don’t need to be a mom to have a mother's heart. I have purpose in being tender and loving to my husband, my friends and my family. Sharing that true, deep, caring smile and friendly conversation to the stranger at the post office is my purpose. Hugging and kissing on my nieces and nephew is my purpose. My mother's heart is real. I feel it. But it is not only meant for my children. What is your purpose? Do you even feel you have one? You may have thought you were meant to go to that school, have that career, marry that guy, buy that house, but in the end it wasn’t where the good Lord was leading you. Be gentle on yourself dear sister. You have purpose here. For all I know I could be pregnant next month and would be so excited to share the good news with you. It would be easy for me to feel silly about writing this and how I should have just suffered through it alone and not made a big fuss, but that is not the truth. The enemy wants us to feel isolated in our battles, to tell yourself you are silly and small, what you are going through wont matter or affect others. But I know my God never wants us to feel alone, community and companionship are gifts that we can choose to accept. If I cannot share with you my struggles, how can I share with you my joy? No one's life is all that they want it to be or what it appears to be. Whatever pain you are going through I hope you know that you are worth being listened to, loved on and cared for. Prayers for you all dear, sweet sisters. Hello! I am Hannah Russell, 26 years old, from Bulverde, Texas. I am a pre-school teacher to 11 huggable two year olds. I have been married for a year and a half to my high school sweetheart. I have a children's clothing and decor shop called Russell Sprouts Kids, full of ethical and fair trade goodness. My biggest hobby is traveling or working on our tiny house when home. I love to design and decorate spaces and dream to make this a bigger part of my life someday. I always overthink what I say. I am most comfortable with a child in my arms. I have a major sweet tooth. My parents both come from big catholic families and we are all very close. I am an extroverted introvert. I have not taken the Enneagram Test yet. My vocation on a day-to-day basis looks like always giving my husband a kiss and blessing from bed as he leaves while it is still dark outside. Tag teaming chores... he cooks and vacuums/swiffers, and I do dishes and laundry. We spend time praying for each other and together. Tell us about Russell Sprout Kids! How has this played a role in your life as you wait for a child? Russell Sprouts Kids! A dream I chased after! Owning a children's clothing and decor shop has come with a roller coaster of emotions as I wait for a child of my own. There are days that I spend all my time brainstorming new ideas and looking at baby clothes, while other days I take a break from it all. The best part about RSK that keeps me going is getting to work with my friends and family and their children. I love having a job where I can spend time with the kiddos I love most and witness the amazing mothers my friends have become. Are there any spiritual resources that have helped you along this journey? About two years ago I made the decision if I was going to spend time on social media I was really going to sensor who/what I am following. I wanted to learn, connect and engage. Through this I have found many places on the internet that have been a spiritual resource for me during my time as an engaged woman, a newlywed and as I wait for motherhood. Mass and adoration have helped me along this journey. As well as family and friends openly praying for my husband and I. We have been able to have so many real and honest conversations opening up about our journey that has allowed us to connect and lead us to beautiful communities. How can we, as millennial Catholic women, be more sensitive to and supportive of those who haven’t been able to have children yet? First off, I want women with children to know what an inspiration they are. The millennial Catholic mothers I am surrounded by are a huge part of what makes my desire to have children so strong. I am in awe of the beauty of motherhood. Seeing women have such a gentle and strong love while caring for their children draws me to my vocation even more. I think it is so important for women to understand that we are all suffering. My heart hurts when I have had the conversations with my sisters who have conceived about them feeling guilty for sharing their own struggles with me. We cannot compare our crosses, yet accept and support one another through them. If I can't be there for these women how do I expect them to journey along with me? I love advice, but as with any pain, it is not always what someone wants or needs to hear. I am a fixer and when I see someone hurting all I want is to make it better. So much of this is out of our control and I have learned through my own experience how just walking side by side with someone during their pain can be the biggest blessing. It is okay to simply just be there and acknowledge their suffering. We don't always have to know the answer or what to say. "I have learned through my own experience how just walking side by side with someone during their pain can be the biggest blessing. It is okay to simply just be there and acknowledge their suffering. We don't always have to know the answer or what to say." I mostly use instagram. My personal account is @_hannruss and my shop is @russellsproutskids (Website russellsproutskids.com). I am outside in the evening enjoying the sunset with loved ones.
#!/usr/bin/env python # # Copyright (C) 2014, Cumulus Networks www.cumulusnetworks.com # # DOCUMENTATION = ''' --- module: cl_quagga_ospf author: Cumulus Networks short_description: Configure basic OSPFv2 parameters and interfaces using Quagga description: - Configures basic OSPFv2 global parameters such as \ router id and bandwidth cost, or OSPFv2 interface configuration \ like point-to-point settings or enabling OSPFv2 on an interface. \ Configuration is applied to single OSPFv2 instance. \ Multiple OSPFv2 instance configuration is currently not supported. \ It requires Quagga version 0.99.22 and higher with the non-modal Quagga CLI \ developed by Cumulus Linux. For more details go to the Routing User Guide @ \ http://cumulusnetworks.com/docs/2.2/ and Quagga Docs @ http://www.nongnu.org/quagga/ options: router_id: description: - Set the OSPFv2 router id required: true reference_bandwidth: description: - Set the OSPFv2 auto cost reference bandwidth default: 40000 saveconfig: description: - Boolean. Issue write memory to save the config choices: ['yes', 'no'] default: ['no'] interface: description: - define the name the interface to apply OSPFv2 services. point2point: description: - Boolean. enable OSPFv2 point2point on the interface choices: ['yes', 'no'] require_together: - with interface option area: description: - defines the area the interface is in required_together: - with interface option cost: description: - define ospf cost. required_together: - with interface option passive: description: - make OSPFv2 interface passive choices: ['yes', 'no'] required_together: - with interface option state: description: - Describes if OSPFv2 should be present on a particular interface.\ Module currently does not check that interface is not associated \ with a bond or bridge. \ User will have to manually clear the configuration of the interface \ from the bond or bridge. \ This will be implemented in a later release choices: [ 'present', 'absent'] default: 'present' required_together: - with interface option requirements: ['Cumulus Linux Quagga non-modal CLI, Quagga version 0.99.22 and higher'] ''' EXAMPLES = ''' Example playbook entries using the cl_quagga_ospf module tasks: - name: configure ospf router_id cl_quagga_ospf: router_id=10.1.1.1 - name: enable OSPFv2 on swp1 and set it be a point2point OSPF \ interface with a cost of 65535 cl_quagga_ospf: interface=swp1 point2point=yes cost=65535 - name: enable ospf on swp1-5 cl_quagga_ospf: interface={{ item }} with_sequence: start=1 end=5 format=swp%d - name: disable ospf on swp1 cl_quagga_ospf: interface=swp1 state=absent ''' def run_cl_cmd(module, cmd, check_rc=True, split_lines=True): try: (rc, out, err) = module.run_command(cmd, check_rc=check_rc) except Exception, e: module.fail_json(msg=e.strerror) # trim last line as it is always empty if split_lines: ret = out.splitlines() else: ret = out return ret def check_dsl_dependencies(module, input_options, dependency, _depend_value): for _param in input_options: if module.params.get(_param): if not module.params.get(dependency): _param_output = module.params.get(_param) _msg = "incorrect syntax. " + _param + " must have an interface option." + \ " Example 'cl_quagga_ospf: " + dependency + "=" + _depend_value + " " + \ _param + "=" + _param_output + "'" module.fail_json(msg=_msg) def has_interface_config(module): if module.params.get('interface') is not None: return True else: return False def get_running_config(module): running_config = run_cl_cmd(module, '/usr/bin/vtysh -c "show run"') got_global_config = False got_interface_config = False module.interface_config = {} module.global_config = [] for line in running_config: line = line.lower().strip() # ignore the '!' lines or blank lines if len(line.strip()) <= 1: if got_global_config: got_global_config = False if got_interface_config: got_interface_config = False continue # begin capturing global config m0 = re.match('router\s+ospf', line) if m0: got_global_config = True continue m1 = re.match('^interface\s+(\w+)', line) if m1: module.ifacename = m1.group(1) module.interface_config[module.ifacename] = [] got_interface_config = True continue if got_interface_config: module.interface_config[module.ifacename].append(line) continue if got_global_config: m3 = re.match('\s*passive-interface\s+(\w+)', line) if m3: ifaceconfig = module.interface_config.get(m3.group(1)) if ifaceconfig: ifaceconfig.append('passive-interface') else: module.global_config.append(line) continue def get_config_line(module, stmt, ifacename=None): if ifacename: pass else: for i in module.global_config: if re.match(stmt, i): return i return None def update_router_id(module): router_id_stmt = 'ospf router-id ' actual_router_id_stmt = get_config_line(module, router_id_stmt) router_id_stmt = 'ospf router-id ' + module.params.get('router_id') if router_id_stmt != actual_router_id_stmt: cmd_line = "/usr/bin/cl-ospf router-id set %s" %\ (module.params.get('router_id')) run_cl_cmd(module, cmd_line) module.exit_msg += 'router-id updated ' module.has_changed = True def update_reference_bandwidth(module): bandwidth_stmt = 'auto-cost reference-bandwidth' actual_bandwidth_stmt = get_config_line(module, bandwidth_stmt) bandwidth_stmt = bandwidth_stmt + ' ' + \ module.params.get('reference_bandwidth') if bandwidth_stmt != actual_bandwidth_stmt: cmd_line = "/usr/bin/cl-ospf auto-cost set reference-bandwidth %s" %\ (module.params.get('reference_bandwidth')) run_cl_cmd(module, cmd_line) module.exit_msg += 'reference bandwidth updated ' module.has_changed = True def add_global_ospf_config(module): module.has_changed = False get_running_config(module) if module.params.get('router_id'): update_router_id(module) if module.params.get('reference_bandwidth'): update_reference_bandwidth(module) if module.has_changed is False: module.exit_msg = 'No change in OSPFv2 global config' module.exit_json(msg=module.exit_msg, changed=module.has_changed) def check_ip_addr_show(module): cmd_line = "/sbin/ip addr show %s" % (module.params.get('interface')) result = run_cl_cmd(module, cmd_line) for _line in result: m0 = re.match('\s+inet\s+\w+', _line) if m0: return True return False def get_interface_addr_config(module): ifacename = module.params.get('interface') cmd_line = "/sbin/ifquery --format json %s" % (ifacename) int_config = run_cl_cmd(module, cmd_line, True, False) ifquery_obj = json.loads(int_config)[0] iface_has_address = False if 'address' in ifquery_obj.get('config'): for addr in ifquery_obj.get('config').get('address'): try: socket.inet_aton(addr.split('/')[0]) iface_has_address = True break except socket.error: pass else: iface_has_address = check_ip_addr_show(module) if iface_has_address is False: _msg = "interface %s does not have an IP configured. " +\ "Required for OSPFv2 to work" module.fail_json(msg=_msg) # for test purposes only return iface_has_address def enable_or_disable_ospf_on_int(module): ifacename = module.params.get('interface') _state = module.params.get('state') iface_config = module.interface_config.get(ifacename) if iface_config is None: _msg = "%s is not found in Quagga config. " % (ifacename) + \ "Check that %s is active in kernel" % (ifacename) module.fail_json(msg=_msg) return False # for test purposes found_area = None for i in iface_config: m0 = re.search('ip\s+ospf\s+area\s+([0-9.]+)', i) if m0: found_area = m0.group(1) break if _state == 'absent': for i in iface_config: if found_area: cmd_line = '/usr/bin/cl-ospf clear %s area' % \ (ifacename) run_cl_cmd(module, cmd_line) module.has_changed = True module.exit_msg += "OSPFv2 now disabled on %s " % (ifacename) return False area_id = module.params.get('area') if found_area != area_id: cmd_line = '/usr/bin/cl-ospf interface set %s area %s' % \ (ifacename, area_id) run_cl_cmd(module, cmd_line) module.has_changed = True module.exit_msg += "OSPFv2 now enabled on %s area %s " % \ (ifacename, area_id) return True def update_point2point(module): ifacename = module.params.get('interface') point2point = module.params.get('point2point') iface_config = module.interface_config.get(ifacename) found_point2point = None for i in iface_config: m0 = re.search('ip\s+ospf\s+network\s+point-to-point', i) if m0: found_point2point = True break if point2point: if not found_point2point: cmd_line = '/usr/bin/cl-ospf interface set %s network point-to-point' % \ (ifacename) run_cl_cmd(module, cmd_line) module.has_changed = True module.exit_msg += 'OSPFv2 point2point set on %s ' % (ifacename) else: if found_point2point: cmd_line = '/usr/bin/cl-ospf interface clear %s network' % \ (ifacename) run_cl_cmd(module, cmd_line) module.has_changed = True module.exit_msg += 'OSPFv2 point2point removed on %s ' % \ (ifacename) def update_passive(module): ifacename = module.params.get('interface') passive = module.params.get('passive') iface_config = module.interface_config.get(ifacename) found_passive = None for i in iface_config: m0 = re.search('passive-interface', i) if m0: found_passive = True break if passive: if not found_passive: cmd_line = '/usr/bin/cl-ospf interface set %s passive' % \ (ifacename) run_cl_cmd(module, cmd_line) module.has_changed = True module.exit_msg += '%s is now OSPFv2 passive ' % (ifacename) else: if found_passive: cmd_line = '/usr/bin/cl-ospf interface clear %s passive' % \ (ifacename) run_cl_cmd(module, cmd_line) module.has_changed = True module.exit_msg += '%s is no longer OSPFv2 passive ' % \ (ifacename) def update_cost(module): ifacename = module.params.get('interface') cost = module.params.get('cost') iface_config = module.interface_config.get(ifacename) found_cost = None for i in iface_config: m0 = re.search('ip\s+ospf\s+cost\s+(\d+)', i) if m0: found_cost = m0.group(1) break if cost != found_cost and cost is not None: cmd_line = '/usr/bin/cl-ospf interface set %s cost %s' % \ (ifacename, cost) run_cl_cmd(module, cmd_line) module.has_changed = True module.exit_msg += 'OSPFv2 cost on %s changed to %s ' % \ (ifacename, cost) elif cost is None and found_cost is not None: cmd_line = '/usr/bin/cl-ospf interface clear %s cost' % \ (ifacename) run_cl_cmd(module, cmd_line) module.has_changed = True module.exit_msg += 'OSPFv2 cost on %s changed to default ' % \ (ifacename) def config_ospf_interface_config(module): enable_int_defaults(module) module.has_changed = False # get all ospf related config from quagga both globally and iface based get_running_config(module) # if interface does not have ipv4 address module should fail get_interface_addr_config(module) # if ospf should be enabled, continue to check for the remaining attrs if enable_or_disable_ospf_on_int(module): # update ospf point-to-point setting if needed update_point2point(module) # update ospf interface cost if needed update_cost(module) # update ospf interface passive setting update_passive(module) def saveconfig(module): if module.params.get('saveconfig') is True and\ module.has_changed: run_cl_cmd(module, '/usr/bin/vtysh -c "wr mem"') module.exit_msg += 'Saving Config ' def enable_int_defaults(module): if not module.params.get('area'): module.params['area'] = '0.0.0.0' if not module.params.get('state'): module.params['state'] = 'present' def check_if_ospf_is_running(module): if not os.path.exists('/var/run/quagga/ospfd.pid'): _msg = 'OSPFv2 process is not running. Unable to execute command' module.fail_json(msg=_msg) def main(): module = AnsibleModule( argument_spec=dict( reference_bandwidth=dict(type='str', default='40000'), router_id=dict(type='str'), interface=dict(type='str'), cost=dict(type='str'), area=dict(type='str'), state=dict(type='str', choices=['present', 'absent']), point2point=dict(type='bool', choices=BOOLEANS), saveconfig=dict(type='bool', choices=BOOLEANS, default=False), passive=dict(type='bool', choices=BOOLEANS) ), mutually_exclusive=[['reference_bandwidth', 'interface'], ['router_id', 'interface']] ) check_if_ospf_is_running(module) check_dsl_dependencies(module, ['cost', 'state', 'area', 'point2point', 'passive'], 'interface', 'swp1') module.has_changed = False module.exit_msg = '' if has_interface_config(module): config_ospf_interface_config(module) else: # Set area to none before applying global config module.params['area'] = None add_global_ospf_config(module) saveconfig(module) if module.has_changed: module.exit_json(msg=module.exit_msg, changed=module.has_changed) else: module.exit_json(msg='no change', changed=False) # import module snippets from ansible.module_utils.basic import * import re import os import socket # incompatible with ansible 1.4.4 - ubuntu 12.04 version # from ansible.module_utils.urls import * if __name__ == '__main__': main()
This spacious and clean 3 bedroom, 2 1/2 bathroom townhome is priced to sell. Care-free living at its best, with low monthly maintenance fee providing roof and exterior building maintenance and insurance. This home features laminate flooring throughout, ceramic tile in foyer, kitchen and bathrooms, and a nice sized yard. The living area is large and opens to the dining and kitchen spaces which makes entertaining easy. There is a convenient half bathroom located on the first floor as well. All bedrooms are upstairs, along with a small computer/study nook and the utility room. The master bedroom is opposite the secondary bedrooms, which allows for more privacy. The master bathroom has double sinks and separate tub and shower. Commute will be a breeze with the easy access to Beltway 8/Sam Houston Tollway.
""" This pattern provides different behaviours based on the internal object state An implementation examples based on the test execution life cycle will be provided """ import abc from src.utils import get_selenium_driver class Manager: """ State machine manager. Acting as an interface to the client and providing the actual state of the object """ def __init__(self, state): """ :param state: current object state """ self._state = state def get_state(self): """ :return: state getter """ self._state.run() class State(metaclass=abc.ABCMeta): """ Interface definition for behaviour encapsulation """ def __init__(self): self._driver = get_selenium_driver('chrome') def get_driver(self): return self._driver @abc.abstractmethod def run(self): pass class StartTest(State): """ Prepare the test execution environment """ def run(self): print(" Start test state!!! ") self.get_driver().get('https://en.wikipedia.org/') class ExecuteTest(State): """ Run run different test steps """ SEARCH_BUTTON = 'searchButton' def run(self): print(" Execute test steps state!!! ") if self.get_driver().find_element_by_id(ExecuteTest.SEARCH_BUTTON).is_displayed(): print("Search button available") self._driver.find_element_by_id(ExecuteTest.SEARCH_BUTTON).click() else: print("Search button not available") class StopTest(State): """ Close the testing session """ def run(self): print(" Stop test state!!! ") self.get_driver().quit() if __name__ == '__main__': start = StartTest() execute = ExecuteTest() stop = StopTest() for test_state in [start, execute, stop]: manager = Manager(test_state) manager.get_state()
Nemours, one of the nation’s premier pediatric health systems, today announces the public opening of the Nemours Children’s Hospital (NCH) Preview Center which is located at 9145 Narcoossee Road, Suite A102 in Orlando, FL. The facility will introduce the new hospital to the community and showcase the transformation of family-centered pediatric care delivery in the 21st century. The NCH Preview Center will also serve as a test site for technology, equipment and furnishings, as well as a place to recruit quality professionals and support the training and orientation of new hospital personnel. The NCH Preview Center illustrates the integration between Nemours’ family-centered model of care and the hospital/clinic architecture that will support the delivery of education, research and advocacy. In family-centered care, parents are deeply involved in their child’s care by partnering with doctors and nurses, eliminating the old model of authority driven decision making. The innovative designs and technology in the NCH Preview Center support this partnership and create an environment that enhances communication between the family and caregiver, improves the quality and safety of patient care, and helps parents and children feel comfortable and in control. Visitors to the new facility will be able to explore full-scale hospital replicas of an outpatient exam room, Emergency Department (ED) room, Intensive Care Unit (ICU) inpatient room and a Medical Surgical inpatient room. Beginning February 18, the NCH Preview Center will be open to the public Tuesday through Saturday between 8:00 a.m. and 5:00 p.m. Nemours is currently building a top-tier pediatric health care system in Orlando that began in June 2009. The 630,000-square-foot children’s hospital located in Lake Nona’s Medical City will open in 2012 with 95 beds and potential capacity of up to 139 beds, a children’s clinic, ED, diagnostic and ambulatory programs along with education and research centers.
#!/usr/bin/python # -*- coding: utf-8 -*- import time from sqlalchemy import desc from forms import * from flask.ext.security import roles_accepted, roles_required, login_required, Security, utils, current_user from . import app, db, r from .utils.table import Table from flask import request, g, render_template, redirect, url_for, session, send_from_directory, flash from models import * from admin import init_admin # initilize flask-security security = Security(app, user_datastore, register_form=ExtendedRegisterForm) # page render time @app.before_request def before_request(): g.request_start_time = time.time() g.request_time = lambda: "%.5fs" % (time.time() - g.request_start_time) # search engine things @app.route('/robots.txt') @app.route('/sitemap.xml') def static_from_root(): return send_from_directory(app.static_folder, request.path[1:]) @app.route('/') @app.route('/index') def index(): news = News.query.all() return render_template('index.html', title='Anasayfa', news=news) @app.route('/about') def about(): return render_template('about.html', title=u'Hakkında') @app.route('/news/<slug>') def news(slug): post = News.query.filter_by(slug=slug).first_or_404() return render_template('news.html', title=post.title, post=post) @app.route('/problems/') @app.route('/problems/<int:page>') def problem_list(page=1): problems = sort(Problem, Problem.query, problem_sort_list).paginate( page=page, per_page=app.config["PRODUCTS_PER_PAGE"], ) problems_table = Table(problem_sort_list, problem_column_list, problems) return render_template('problem_list.html', title='Problem Listesi', problems_table=problems_table) @app.route('/problem/<slug>', methods=['GET', 'POST']) def problem(slug): problem = Problem.query.filter_by(slug=slug).first_or_404() form = SubmissionForm() if form.validate_on_submit(): try: newS = Submission(problem_id=problem.id, user_id=current_user.id, code=form.code.data) db.session.add(newS) db.session.commit() # publish to redis r.publish('submissions', str(newS.id)) flash(u'Tebrikler kodunuz eklendi, kodlarım sayfasından görebilirsiniz', 'success') except: db.session.rollback() flash(u'Bir hata oluştu lütfen daha sonra deneyin', 'error') return render_template('problem.html', title=problem.title, problem=problem, form=form) @app.route('/problem/<slug>/solution') @login_required def problem_solution(slug): problem = Problem.query.filter_by(slug=slug).first_or_404() return render_template('problem_solution.html', title=problem.title, problem=problem) @app.route('/problem/<slug>/suggestion') @login_required def problem_suggestion(slug): problem = Problem.query.filter_by(slug=slug).first_or_404() return render_template('problem.html', title=problem.title, problem=problem) @app.route('/author/profile/<username>/') @app.route('/author/profile/<username>/<int:page>') def author_profile(username, page=1): author = User.query.filter_by(username=username).first_or_404() problems = sort(Problem, author.problems, problem_sort_list).paginate( page=page, per_page=app.config["PRODUCTS_PER_PAGE"], ) problems_table = Table(problem_sort_list, problem_column_list, problems) return render_template('author_profile.html', title=author.username, author=author, problems_table=problems_table) @app.route('/tag/<name>/') @app.route('/tag/<name>/<int:page>') def tag(name, page=1): tag = Tag.query.filter_by(name=name).first_or_404() problems = sort(Problem, tag.problems, problem_sort_list).paginate( page=page, per_page=app.config["PRODUCTS_PER_PAGE"], ) problems_table = Table(problem_sort_list, problem_column_list, problems) return render_template('tag.html', title=tag.name, tag=tag, problems_table=problems_table) @app.route('/user/<username>') def user_profile(username): user = User.query.filter_by(username=username).first_or_404() # order submissions by timestamp submissions = user.submissions.order_by(Submission.timestamp.desc()) return render_template('user_profile.html', title=user.username, user=user, submissions=submissions) @login_required @app.route('/submission/<int:id>') def user_submission(id): submission = Submission.query.filter_by(id=id).first_or_404() return render_template('user_submission.html', title=u"Submision", submission=submission) @app.route('/author/panel/add', methods=['GET', 'POST']) @login_required @roles_accepted('author', 'admin') def author_panel_add(): form = ProblemForm() if form.validate_on_submit(): try: newp = Problem(title=form.title.data, body=form.body.data, solution=form.solution.data) newp.tags = form.tags.data db.session.add(newp) current_user.problems.append(newp) db.session.commit() flash(u'Tebrikler Probleminiz eklendi, Problemler sayfasından görebilirsiniz', 'success') except: db.session.rollback() flash(u'Bir hata oluştu lütfen daha sonra deneyin', 'error') return render_template('author_panel_add.html', title=u'Yeni soru ekle', form=form) problem_sort_list = {'id', 'title', 'count', 'difficulty'} problem_column_list = [('id', u'id'), ('title', u'başlık'), ('tags', u'İlgili konular'), ('count', u'Çözüm sayısı'), ( 'difficulty', u'Zorluk')] def sort(model, query, sort_list): """ sort query with url args :param model: db model name :param query: sql alchemy query :param sort_list: allowed sort url args :return: sorted query if fails return query """ sort = request.args.get('sort', 'id') sort_desc = request.args.get('desc', 0, type=int) if sort not in sort_list: return query if sort_desc == 1: return query.order_by(desc(getattr(model, sort))) else: return query.order_by(getattr(model, sort)) init_admin()
Bites Desserts is a full service dessert catering company, out to make your next event the sweetest one yet. At least, if you live in California. They're serving up sweets to satisfy everyone's sweet tooth in Los Angeles, Orange County, San Francisco and beyond, making California a sweeter place, bite by bite. It's in their namesake, after all. Just what kind of sweet things does Bites Desserts have to offer? Well, if you ask sweetly, pretty much any kind of sweet you can imagine. If you and your guests are screaming for ice cream, Bites Desserts answers with an ice cream bar, with customizable sundaes and kid friendly push pops of their own design and flavor. But should you desire a dessert put through the fire, Bites Desserts has a s'mores bar with individual flames for roasting those marshmallows to a golden brown crisp. And we'd be remiss not to mention their customizable candy apple bar, and their customizable hot chocolate bar. What we're saying is, Bites Desserts got a whole lot of customizable dessert bars to sweeten the deal. And the deal is this: if you're looking to sweeten up your next event, you'll find Bites Desserts got just the sweets to eat. Trust us. They're the sweet treat connoisseurs.
# -*- coding: utf-8 -*- # Copyright (C) 2017 by Pedro Mendes, Virginia Tech Intellectual # Properties, Inc., University of Heidelberg, and University of # of Connecticut School of Medicine. # All rights reserved. # Copyright (C) 2010 - 2016 by Pedro Mendes, Virginia Tech Intellectual # Properties, Inc., University of Heidelberg, and The University # of Manchester. # All rights reserved. # Copyright (C) 2008 - 2009 by Pedro Mendes, Virginia Tech Intellectual # Properties, Inc., EML Research, gGmbH, University of Heidelberg, # and The University of Manchester. # All rights reserved. # Copyright (C) 2006 - 2007 by Pedro Mendes, Virginia Tech Intellectual # Properties, Inc. and EML Research, gGmbH. # All rights reserved. import COPASI import unittest from types import * class Test_CFunctionParameter(unittest.TestCase): def setUp(self): self.functions=COPASI.CRootContainer.getFunctionList() self.function=self.functions.findFunction("Iso Uni Uni") self.assert_(self.function!=None) self.assert_(self.function.__class__==COPASI.CFunction) self.parameters=self.function.getVariables() self.assert_(self.parameters!=None) self.assert_(self.parameters.__class__==COPASI.CFunctionParameters) index=self.parameters.findParameterByName("Keq",COPASI.CFunctionParameter.FLOAT64) self.parameter=self.parameters.getParameter(index) self.assert_(self.parameter!=None) self.assert_(self.parameter.__class__==COPASI.CFunctionParameter) def test_getKey(self): key=self.parameter.getKey() self.assert_(type(key)==StringType) def test_getType(self): b=self.parameter.getType() self.assert_(type(b)==IntType) self.assert_(b==COPASI.CFunctionParameter.FLOAT64) def test_setType(self): t=COPASI.CFunctionParameter.INT32 self.parameter.setType(t) self.assert_(self.parameter.getType()==t) def test_getUsage(self): b=self.parameter.getUsage() self.assert_(type(b)==IntType) self.assert_(b==COPASI.CFunctionParameter.PARAMETER) def test_setUsage(self): t=COPASI.CFunctionParameter.VOLUME self.parameter.setUsage(t) self.assert_(self.parameter.getUsage()==t) def suite(): tests=[ "test_getKey" ,"test_getType" ,"test_setType" ,"test_getUsage" ,"test_setUsage" ] return unittest.TestSuite(map(Test_CFunctionParameter,tests)) if(__name__ == '__main__'): unittest.TextTestRunner(verbosity=2).run(suite())
Wedding Address Labels: Print your own wedding invitation address labels with this editable text address template. This address label features a beautiful ornate design in black. *PLEASE NOTE: This is not compatible with mail merge. Please try the free sample before purchasing to see how it works. Avery is a copyright of Avery Products Corporation.
import sys import shlex from sqlalchemy import literal_column, cast, desc, Unicode from sqlalchemy.dialects.postgresql import array from skylines.database import db from skylines.lib.types import is_unicode PATTERNS = [ (u'{}', 5), # Matches token exactly (u'{}%', 3), # Begins with token (u'% {}%', 2), # Has token at word start (u'%{}%', 1), # Has token ] def search_query(cls, tokens, weight_func=None, include_misses=False, ordered=True): # Read the searchable columns from the table (strings) columns = cls.__searchable_columns__ # Convert the columns from strings into column objects columns = [getattr(cls, c) for c in columns] # The model name that can be used to match search result to model cls_name = literal_column('\'{}\''.format(cls.__name__)) # Filter out id: tokens for later ids, tokens = process_id_option(tokens) # If there are still tokens left after id: token filtering if tokens: # Generate the search weight expression from the # searchable columns, tokens and patterns if not weight_func: weight_func = weight_expression weight = weight_func(columns, tokens) # If the search expression only included "special" tokens like id: else: weight = literal_column(str(1)) # Create an array of stringified detail columns details = getattr(cls, '__search_detail_columns__', None) if details: details = [cast(getattr(cls, d), Unicode) for d in details] else: details = [literal_column('NULL')] # Create a query object query = db.session.query( cls_name.label('model'), cls.id.label('id'), cls.name.label('name'), array(details).label('details'), weight.label('weight')) # Filter out specific ids (optional) if ids: query = query.filter(cls.id.in_(ids)) # Filter out results that don't match the patterns at all (optional) if not include_misses: query = query.filter(weight > 0) # Order by weight (optional) if ordered: query = query.order_by(desc(weight)) return query db.Model.search_query = classmethod(search_query) def combined_search_query(models, tokens, include_misses=False, ordered=True): models, tokens = process_type_option(models, tokens) # Build sub search queries queries = [model.search_query( tokens, include_misses=include_misses, ordered=False) for model in models] # Build combined search query query = queries[0] if len(queries) > 1: query = query.union(*queries[1:]) # Order by weight (optional) if ordered: query = query.order_by(desc('weight')) return query def process_type_option(models, tokens): """ This function looks for "type:<type>" in the tokens and filters the searchable models for the requested types. Returns the filtered list of models. """ # Filter for type: and types: tokens types, new_tokens = __filter_prefixed_tokens('type', tokens) # Filter the list of models according to the type filter new_models = [model for model in models if model.__name__.lower() in types] # Return original models list if there are no matching models if len(new_models) == 0: return models, new_tokens # Return filtered models and tokens return new_models, new_tokens def process_id_option(tokens): """ This function looks for "id:<id>" in the tokens, removes them from the token list and returns a list of ids. """ # Filter for id: and ids: tokens ids, new_tokens = __filter_prefixed_tokens('id', tokens) # Convert ids to integers def int_or_none(value): try: return int(value) except ValueError: return None ids = [int_or_none(id) for id in ids] ids = [id for id in ids if id is not None] # Return ids and tokens return ids, new_tokens def __filter_prefixed_tokens(prefix, tokens): len_prefix = len(prefix) # The original tokens without the prefixed tokens new_tokens = [] # The contents that were found after the prefixed tokens contents = [] # Iterate through original tokens to find prefixed tokens for token in tokens: _token = token.lower() if _token.startswith(prefix + ':'): contents.append(_token[(len_prefix + 1):]) elif _token.startswith(prefix + 's:'): contents.extend(_token[(len_prefix + 2):].split(',')) else: new_tokens.append(token) # Strip whitespace from the types contents = map(str.strip, contents) return contents, new_tokens def text_to_tokens(search_text): assert is_unicode(search_text) try: if sys.version_info[0] == 2: return [str.decode('utf8') for str in shlex.split(search_text.encode('utf8'))] else: return shlex.split(search_text) except ValueError: return search_text.split(' ') def escape_tokens(tokens): # Escape % and _ properly tokens = [t.replace(u'%', u'\\%').replace(u'_', u'\\_') for t in tokens] # Use * as wildcard character tokens = [t.replace(u'*', u'%') for t in tokens] return tokens def weight_expression(columns, tokens): expressions = [] # Use entire search string as additional token if len(tokens) > 1: tokens = tokens + [u' '.join(tokens)] for column in columns: for token in tokens: len_token = len(token) for pattern, weight in PATTERNS: # Inject the token in the search pattern token_pattern = pattern.format(token) # Adjust the weight for the length of the token # (the long the matched token, the greater the weight) weight *= len_token # Create the weighted ILIKE expression expression = column.weighted_ilike(token_pattern, weight) # Add the expression to list expressions.append(expression) return sum(expressions) def process_results_details(models, results): return [process_result_details(models, result._asdict()) for result in results] def process_result_details(models, result): models = {m.__name__: m for m in models} model = models.get(result['model'], None) if not model: return result details = getattr(model, '__search_detail_columns__', [None]) if len(details) != len(result['details']): return result for key, value in zip(details, result['details']): if isinstance(key, str): result[key] = value return result
USA Payday Loans Fort Washington PA - Get Cash Quick No Fax Needed! Are you from Fort Washington Pennsylvania? We would like to help you! You can get a cash advance loan from $100 to $1000 in few minutes if you are resident of Fort Washington Pennsylvania. Best of all, you can apply for it at home or office. Please fill out the form Apply Now on our site, enter Fort Washington and PA as your city/state. In few minutes we will check our wide network of respectable lenders for the loan you need.
import json from typing import Dict, Any, List, Optional from slack_sdk.scim.v1.group import Group from slack_sdk.scim.v1.internal_utils import _to_snake_cased from slack_sdk.scim.v1.user import User class Errors: code: int description: str def __init__(self, code: int, description: str) -> None: self.code = code self.description = description def to_dict(self) -> dict: return {"code": self.code, "description": self.description} class SCIMResponse: url: str status_code: int headers: Dict[str, Any] raw_body: str body: Dict[str, Any] snake_cased_body: Dict[str, Any] errors: Optional[Errors] @property def snake_cased_body(self) -> Dict[str, Any]: if self._snake_cased_body is None: self._snake_cased_body = _to_snake_cased(self.body) return self._snake_cased_body @property def errors(self) -> Optional[Errors]: errors = self.snake_cased_body.get("errors") if errors is None: return None return Errors(**errors) def __init__( self, *, url: str, status_code: int, raw_body: str, headers: dict, ): self.url = url self.status_code = status_code self.headers = headers self.raw_body = raw_body self.body = ( json.loads(raw_body) if raw_body is not None and raw_body.startswith("{") else None ) self._snake_cased_body = None def __repr__(self): dict_value = {} for key, value in vars(self).items(): dict_value[key] = value.to_dict() if hasattr(value, "to_dict") else value if dict_value: # skipcq: PYL-R1705 return f"<slack_sdk.scim.v1.{self.__class__.__name__}: {dict_value}>" else: return self.__str__() # --------------------------------- # Users # --------------------------------- class SearchUsersResponse(SCIMResponse): users: List[User] @property def users(self) -> List[User]: return [User(**r) for r in self.snake_cased_body.get("resources")] def __init__(self, underlying: SCIMResponse): self.underlying = underlying self.url = underlying.url self.status_code = underlying.status_code self.headers = underlying.headers self.raw_body = underlying.raw_body self.body = underlying.body self._snake_cased_body = None class ReadUserResponse(SCIMResponse): user: User @property def user(self) -> User: return User(**self.snake_cased_body) def __init__(self, underlying: SCIMResponse): self.underlying = underlying self.url = underlying.url self.status_code = underlying.status_code self.headers = underlying.headers self.raw_body = underlying.raw_body self.body = underlying.body self._snake_cased_body = None class UserCreateResponse(SCIMResponse): user: User @property def user(self) -> User: return User(**self.snake_cased_body) def __init__(self, underlying: SCIMResponse): self.underlying = underlying self.url = underlying.url self.status_code = underlying.status_code self.headers = underlying.headers self.raw_body = underlying.raw_body self.body = underlying.body self._snake_cased_body = None class UserPatchResponse(SCIMResponse): user: User @property def user(self) -> User: return User(**self.snake_cased_body) def __init__(self, underlying: SCIMResponse): self.underlying = underlying self.url = underlying.url self.status_code = underlying.status_code self.headers = underlying.headers self.raw_body = underlying.raw_body self.body = underlying.body self._snake_cased_body = None class UserUpdateResponse(SCIMResponse): user: User @property def user(self) -> User: return User(**self.snake_cased_body) def __init__(self, underlying: SCIMResponse): self.underlying = underlying self.url = underlying.url self.status_code = underlying.status_code self.headers = underlying.headers self.raw_body = underlying.raw_body self.body = underlying.body self._snake_cased_body = None class UserDeleteResponse(SCIMResponse): def __init__(self, underlying: SCIMResponse): self.underlying = underlying self.url = underlying.url self.status_code = underlying.status_code self.headers = underlying.headers self.raw_body = underlying.raw_body self.body = underlying.body self._snake_cased_body = None # --------------------------------- # Groups # --------------------------------- class SearchGroupsResponse(SCIMResponse): groups: List[Group] @property def groups(self) -> List[Group]: return [Group(**r) for r in self.snake_cased_body.get("resources")] def __init__(self, underlying: SCIMResponse): self.underlying = underlying self.url = underlying.url self.status_code = underlying.status_code self.headers = underlying.headers self.raw_body = underlying.raw_body self.body = underlying.body self._snake_cased_body = None class ReadGroupResponse(SCIMResponse): group: Group @property def group(self) -> Group: return Group(**self.snake_cased_body) def __init__(self, underlying: SCIMResponse): self.underlying = underlying self.url = underlying.url self.status_code = underlying.status_code self.headers = underlying.headers self.raw_body = underlying.raw_body self.body = underlying.body self._snake_cased_body = None class GroupCreateResponse(SCIMResponse): group: Group @property def group(self) -> Group: return Group(**self.snake_cased_body) def __init__(self, underlying: SCIMResponse): self.underlying = underlying self.url = underlying.url self.status_code = underlying.status_code self.headers = underlying.headers self.raw_body = underlying.raw_body self.body = underlying.body self._snake_cased_body = None class GroupPatchResponse(SCIMResponse): def __init__(self, underlying: SCIMResponse): self.underlying = underlying self.url = underlying.url self.status_code = underlying.status_code self.headers = underlying.headers self.raw_body = underlying.raw_body self.body = underlying.body self._snake_cased_body = None class GroupUpdateResponse(SCIMResponse): group: Group @property def group(self) -> Group: return Group(**self.snake_cased_body) def __init__(self, underlying: SCIMResponse): self.underlying = underlying self.url = underlying.url self.status_code = underlying.status_code self.headers = underlying.headers self.raw_body = underlying.raw_body self.body = underlying.body self._snake_cased_body = None class GroupDeleteResponse(SCIMResponse): def __init__(self, underlying: SCIMResponse): self.underlying = underlying self.url = underlying.url self.status_code = underlying.status_code self.headers = underlying.headers self.raw_body = underlying.raw_body self.body = underlying.body self._snake_cased_body = None
Want to be notified of new releases in RackhamLeNoir/gamebuino-jezzball? GameBuino implementation of the Jezzball game. This is a tribute to the TI-89 implementation I used to play for hours.
import datetime, logging import csv import pyproj def load_csv(data): rows = iter(data) header = next(rows) latitude_idx = next(i for i,v in enumerate(header) if v == "latitude") longitude_idx = next(i for i,v in enumerate(header) if v == "longitude") speed_idx = next(i for i,v in enumerate(header) if v == "speed") altitude_idx = next(i for i,v in enumerate(header) if v == "altitude") time_idx = next(i for i,v in enumerate(header) if v == "recorded_at") hort_acc_idx = next(i for i,v in enumerate(header) if v == "hort_accuracy") vert_acc_idx = next(i for i,v in enumerate(header) if v == "vert_accuracy") src_node_idx = next(i for i,v in enumerate(header) if v == "src") dst_node_idx = next(i for i,v in enumerate(header) if v == "dst") src_proj = pyproj.Proj(init='epsg:4326') dst_proj = pyproj.Proj(init='epsg:2950') observations = [] accuracy = [] link = [] previous_id = -1 previous_time = None for row in data: current_id = row[0] if current_id != previous_id: if observations: logging.info("loading %s", previous_id) yield { 'observations': observations, 'accuracy': accuracy, 'id': previous_id, 'link': link } observations = [] accuracy = [] link = [] previous_id = current_id previous_time = None #if current_id != "9160": # continue current_time = datetime.datetime.strptime(row[time_idx], '%Y-%m-%d %H:%M:%S') while (previous_time is not None and previous_time + datetime.timedelta(seconds=1) < current_time): observations.append(None) accuracy.append(None) link.append(None) previous_time += datetime.timedelta(seconds=1) previous_time = current_time try: coord = pyproj.transform(src_proj, dst_proj, float(row[longitude_idx]), float(row[latitude_idx])) except RuntimeError: previous_id = -1 continue obs = [coord[0], coord[1], float(row[speed_idx])] quantile = 1.96 acc = [float(row[hort_acc_idx])/quantile, float(row[vert_acc_idx])/quantile] observations.append(obs) accuracy.append(acc) link.append((int(row[src_node_idx]), int(row[dst_node_idx]))) def load_all(files, max_count): for filepath in files: with open(filepath) as csvfile: data = csv.reader(csvfile) for trajectory in load_csv(data): yield trajectory max_count -= 1 if max_count == 0: return
At New York University, student clubs declared a boycott of the school’s Tel Aviv campus. At the University of Michigan, two professors refused to recommend students seeking to study at Tel Aviv University. On Thursday, the college council at Pitzer College, a private liberal arts school in California, voted to take away the opportunity for students to study together with Jewish and Arab Israelis at the University of Haifa. The debate at Pitzer, part of the Claremont college system, and the developments in New York and Michigan stem from an effort by the anti-Israel Boycott, Divestment, and Sanctions Movement (BDS). While the movement describes itself as a nonviolent campaign aimed at getting Israel to adhere to certain demands, including a withdrawal from what it calls “Arab lands,” its leadership openly seeks the elimination of Israel as a Jewish state. The campaign also includes an academic component. “Ending study abroad programs provides a real opportunity to get some tangible wins,” he said. While the BDS movement gained traction after the Second Intifada, it has met with the most success on college campuses, where it is framed as a social justice opportunity, enabling students to fight for an oppressed minority. In most cases where the BDS movement has taken hold, student governments have passed divestment resolutions with no teeth and pro-Palestinian student groups have protested pro-Israel speakers. When the student senate declined to voice opposition to the faculty, the issue went to the college council – a recommending body made up of students, faculty and staff. Nearly two-thirds of the council voted on Thursday to end the program. Almost immediately, Oliver vetoed the recommendation. “By singling out Israel, the recommendation itself is prejudiced,” he said. Similarly, at NYU, the administration also stated its opposition to any academic boycott of Israel. And the University of Michigan vowed to discipline both professors who withheld their letters of recommendation for students based on their political views. Hurwitz said university presidents, provosts and chancellors have become strong allies in the fight for academic freedom and against BDS. For many years, AJC Project Interchange has sent delegations of university administrators on tours of Israel to meet with educators, religious leaders, and government officials. Marvin Krislov, president of Pace University, traveled to Israel with Project Interchange in 2016. He said although campus policies can vary, guidelines for determining a study abroad destination typically focus on the health and safety of students and the educational value of a program. Ron Robin, president of the University of Haifa, said students are at risk of missing out on a vital learning opportunity. The Haifa partnership offers Pitzer students courses in a variety of subjects taught in English at University of Haifa’s International School, intensive Hebrew and Arabic language classes before the study abroad experience, and an internship program. Every student should consider the program, including those in favor of BDS, he said. At the University of Haifa, 35 percent of the students are Arab Israelis, Robin said. Jewish and Arab students work together seamlessly on extracurricular activities and community service projects, he said. That reality counters the BDS narrative that Israeli institutions deserve to be boycotted because Israel marginalizes its minority populations, he said. Hurwitz said study abroad administrators should be concerned about excluding a destination that offers an opportunity for Jews, Muslims, and Christians, Israelis and Palestinians to study under the same roof. “You’re harming the academic community when you’re limiting the free exchange of ideas,” he said. He expects Pitzer to be the first of many schools where this debate unfolds. Indeed, earlier this week, a banner calling for an end to study abroad in Israel went up on campus at the University of California, Santa Cruz. Miriam F. Elman, an associate professor of political science at Syracuse University, soon will become the executive director of the Academic Engagement Network, an organization of American university and college professors that oppose the BDS movement. She said it’s important to recognize what’s happening at Pitzer is part of a coordinated campaign by the BDS movement. She points to a seven-page toolkit for campus organizers that includes familiar ideas such as denying letters of recommendation or terminating study abroad programs in Israel. While the bedrock principle of academic freedom allows professors and students to advocate for BDS, she said, it does not allow instructors’ political perspectives to trump students’ opportunities to learn.
import os import time import tempfile import functools from . import metafile def rate_limit(interval): """ Rate limiting decorator which allows the wrapped function to be called at most once per `interval`. """ def decorator(fn): last_called = [0.0] # This is a list because primitives are constant within the closure. @functools.wraps(fn) def wrapper(*args, **kwargs): elapsed = time.time() - last_called[0] remaining = interval - elapsed if remaining > 0: time.sleep(remaining) last_called[0] = time.time() return fn(*args, **kwargs) return wrapper return decorator def locate(root, match_function, ignore_dotfiles=True): ''' Yields all filenames within `root` for which match_function returns True. ''' for path, dirs, files in os.walk(root): for filename in (os.path.abspath(os.path.join(path, filename)) for filename in files if match_function(filename)): if ignore_dotfiles and os.path.basename(filename).startswith('.'): pass else: yield filename def ext_matcher(*extensions): ''' Returns a function which checks if a filename has one of the specified extensions. ''' return lambda f: os.path.splitext(f)[-1].lower() in set(extensions) def _add_source(meta): meta['info']['source'] = 'PTH' def make_torrent(path, passkey, output_dir=None): ''' Creates a torrent suitable for uploading to PTH. - `path`: The directory or file to upload. - `passkey`: Your tracker passkey. - `output_dir`: The directory where the torrent will be created. If unspecified, {} will be used. '''.format(tempfile.tempdir) if output_dir is None: output_dir = tempfile.tempdir torrent_path = tempfile.mktemp(dir=output_dir, suffix='.torrent') torrent = metafile.Metafile(torrent_path) announce_url = 'https://please.passtheheadphones.me/{}/announce'.format(passkey) torrent.create(path, [announce_url], private=True, callback=_add_source) return torrent_path
White 135F (57C) Cover Plate Assembly for Viking Mirage Fire Sprinklers provides aesthetically-pleasing concealment to select Viking concealed fire sprinklers with rated temperatures of 135F (57C) to 205F (96C). White 135F (57C) Cover Plate Assembly for Viking Mirage Fire Sprinklers activates at 135F (57C) to facilitate timely activation of fire sprinkler heads with temperatures rated at 135F (57C) to 205F (96C). This White 135F (57C) Cover Plate Assembly for Viking Mirage Fire Sprinklers is listed by UL, FM, and/or other organizations depending on the sprinkler. When this white fire sprinkler cover plate reaches a specified temperature the solder holding the plate's decorative exterior on the frame melts, and the two separate. Cover plates in stock finishes other than white are available on our site, and custom-finished may be available by special order - call us at 888-361-6662 or click here to contact us. Click here to view the data sheet for this White 135F (57C) Cover Plate Assembly for Viking Mirage Fire Sprinklers.
#! /usr/bin/env python3 # -*- coding: utf-8 -*- # server_gui.py ############################################################################# # Copyright (C) Labomedia February 2015 # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software Foundation, # Inc., 51 Franproplin Street, Fifth Floor, Boston, MA 02110-1301, USA. # ############################################################################# import pygame import sys pygame.init() clock = pygame.time.Clock() BLACK = 0, 0, 0 WHITE = 255, 255, 255 CIEL = 0, 200, 255 RED = 255, 0, 0 ORANGE = 255, 100, 0 GREEN = 0, 255, 0 class Button: '''Ajout d'un bouton avec un texte sur img Astuce: ajouter des espaces dans les textes pour avoir une même largeur de boutons dx, dy décalage du bouton par rapport au centre action si click Texte noir ''' def __init__(self, fond, text, color, font, dx, dy): self.fond = fond self.text = text self.color = color self.font = font self.dec = dx, dy self.state = False # enable or not self.title = self.font.render(self.text, True, BLACK) textpos = self.title.get_rect() textpos.centerx = self.fond.get_rect().centerx + self.dec[0] textpos.centery = self.dec[1] self.textpos = [textpos[0], textpos[1], textpos[2], textpos[3]] self.rect = pygame.draw.rect(self.fond, self.color, self.textpos) self.fond.blit(self.title, self.textpos) def update_button(self, fond, action=None): self.fond = fond mouse_xy = pygame.mouse.get_pos() over = self.rect.collidepoint(mouse_xy) if over: action() if self.color == RED: self.color = GREEN self.state = True elif self.color == GREEN: # sauf les + et -, pour que ce soit toujours vert if len(self.text) > 5: # 5 char avec les espaces self.color = RED self.state = False # à la bonne couleur self.rect = pygame.draw.rect(self.fond, self.color, self.textpos) self.fond.blit(self.title, self.textpos) def display_button(self, fond): self.fond = fond self.rect = pygame.draw.rect(self.fond, self.color, self.textpos) self.fond.blit(self.title, self.textpos) class Game: def __init__(self): self.screen = pygame.display.set_mode((640, 480)) self.level = 1 self.loop = True # Définition de la police self.big = pygame.font.SysFont('freesans', 48) self.small = pygame.font.SysFont('freesans', 36) self.create_fond() self.create_button() def update_textes(self): self.textes = [ ["Buggy Server", ORANGE, self.big, 0, 50], ["Level", BLACK, self.small, 0, 150], [str(self.level), BLACK, self.small, 0, 200]] def create_fond(self): # Image de la taille de la fenêtre self.fond = pygame.Surface(self.screen.get_size()) # En bleu self.fond.fill(CIEL) def create_button(self): self.reset_button = Button(self.fond, " Reset ", RED, self.small, 0, 300) self.start_button = Button(self.fond, " Start ", RED, self.small, 0, 360) self.quit_button = Button(self.fond, " Quit ", RED, self.small, 0, 420) self.moins_button = Button(self.fond, " - ", GREEN, self.small, -100, 200) self.plus_button = Button(self.fond, " + ", GREEN, self.small, 100, 200) def display_text(self, text, color, font, dx, dy): '''Ajout d'un texte sur fond. Décalage dx, dy par rapport au centre. ''' mytext = font.render(text, True, color) # True pour antialiasing textpos = mytext.get_rect() textpos.centerx = self.fond.get_rect().centerx + dx textpos.centery = dy self.fond.blit(mytext, textpos) def plus(self): self.level += 1 if self.level == 6: self.level = 5 def moins(self): self.level += -1 if self.level == 0: self.level = 1 def infinite_loop(self): while self.loop: self.create_fond() # Boutons self.reset_button.display_button(self.fond) self.start_button.display_button(self.fond) self.quit_button.display_button(self.fond) self.moins_button.display_button(self.fond) self.plus_button.display_button(self.fond) for event in pygame.event.get(): if event.type == pygame.MOUSEBUTTONDOWN: self.reset_button.update_button(self.fond, action=reset) self.start_button.update_button(self.fond, action=start) self.quit_button.update_button(self.fond, action=gamequit) self.moins_button.update_button(self.fond, action=self.moins) self.plus_button.update_button(self.fond, action=self.plus) self.update_textes() for text in self.textes: self.display_text(text[0], text[1], text[2], text[3], text[4]) # Ajout du fond dans la fenêtre self.screen.blit(self.fond, (0, 0)) # Actualisation de l'affichage pygame.display.update() # 10 fps clock.tick(10) def reset(): print("reset") def start(): print("start") def gamequit(): print("Quit") pygame.quit() sys.exit() if __name__ == '__main__': game = Game() game.infinite_loop()
Your JOIKIN comes alive! Play with your JOIKIN now! Name your Joikin, meet your friend’s Joikin, play and build your Parade. The campaign is opened to all signed up users who enrolled and agreed to the campaign's T&C. Employees or agencies of Pixajoy Sdn. Bhd., its group companies, their family members or anyone else connected with the campaign may not enter the campaign. Entrants of the campaign shall be deemed to have accepted these Terms and Conditions. By submitting personal information, entrants agree to receive emails from Pixajoy containing offers and developments that we think may interest you. You will be given the opportunity to unsubscribe on every email that we sent. The campaign is opened from 00:00 18th June 2018 GMT+8 and the closing date of the campaign is 23:59 on 22nd July 2018 GMT+8. Entries received after this period will not be considered. All actions required must be completed and orders of any products on Pixajoy's website must be placed between the campaign period to be eligible for the chance tickets. Each entrant may submit multiple entries but he/she cannot win more than once. The more chance tickets collected, the greater chances of winning. Grand Prize Winners and Special Prize Winners will be chosen from a random draw of entries received in accordance with these Terms and Conditions. The draw will be performed by a random computer process. The draw will take place on 26th July 2018 GMT+8. The announcement of the draw will be published on Pixajoy's Facebook page and Instagram on 26th July 2018 GMT+8. Special Prize: 30 Winners with USD100 worth of Pixajoy's product vouchers with the bundle of 1 unit of 40 pages Cool Image Wrap 11"x 8" Hardcover Photobook, 1 unit of Photo White Mug and 1 unit of Photo Pillow. Pixajoy retains the right to substitute the prize with another prize of similar value in the event when the original prize offered is not available. The winner will be notified by email on or before 26th July 2018 GMT+8 and must provide a relevant information to claim the prize. If the winner does not respond to Pixajoy within 10 days of being notified, then the winner's prize will be forfeited and Pixajoy shall be entitled to select another winner in accordance with the process described above (and that winner will have to respond to the notification of winning within 10 days or else their prize will also be forfeited). If a winner rejected their prize or the entry is invalid or in breach of these Terms and Conditions, the winner's prize will be forfeited and Pixajoy shall be entitled to select another winner. 1 Winner each week of Pixajoy's product voucher worth of USD150 with 1 unit of 30 pages Stylish Layflat 11"x8" Hardcover Photobook with Lamination and Winner's unique Joikin in LARGE size. The weekly winner will be notified by email within 48 hours after each week announcement and must provide a relevant information to claim the prize. If a winner does not respond to Pixajoy within 10 days of being notified, then the winner's prize will be forfeited and Pixajoy shall be entitled to select another winner in accordance with the process described above (and that winner will have to respond to notification of winning within 10 days or else their prize will also be forfeited). If a winner rejected their prize or the entry is invalid or in breach of these Terms and Conditions, the winner's prize will be forfeited and Pixajoy shall be entitled to select another winner. Joikin collection is eligible only upon voucher redemption and order placed during the campaign period (Voucher purchased will not be entitled to collect Joikin). A unique Joikin will ship together with the order placed during the campaign period on 18th June 2018 to 22nd July 2018. The winner may be required to take part in promotional activity related to the campaign and the winner shall participate in such activity on Pixajoy's reasonable request. The winner consents to the use by Pixajoy and its related companies, both before and after the closing date of the campaign for an unlimited time, of the winner's voice, image, photograph and name for publicity purposes (in any medium, including still photographs and films, on the Internet including any websites hosted by Pixajoy and its related companies) and in advertising, marketing or promotional material without additional compensation or prior notice and, in entering the campaign, all entrants consent to the same. Judges’ and draw decision are final, conclusive and no further correspondence shall be entertained. Pixajoy accepts no responsibility for any costs associated with the prize and not specifically included in the prize (including, without limitation, shipping fees, taxation incurred for the cash prize). All the personal information obtained during the campaign is protected under PDPA and governed under Malaysia Law. Pixajoy accepts no responsibility for any damage, loss, liabilities, injury or disappointment incurred or suffered by entrants as a result of entering the campaign or accepting the prize. Pixajoy further disclaims liability for any injury or damage to entrants or any other person's computer relating to or resulting from participation in or downloading any materials in connection with the campaign. The Pixajoy Management reserves the rights to change these Terms & Conditions with or without prior notice. For enquiries, please contact Pixajoy at [email protected] or call +604-4426650.
# Main class # Coded in Python 2.7.10 with PyGame # by Brett Burley-Inners # Update :: 11/19/2015 import pygame, time, random, sys import player, skyChunk def main(): # Initial setup pygame.init() font = pygame.font.SysFont("monospace", 15) pygame.key.set_repeat(1, 5) clock = pygame.time.Clock() # clock object for fps/ticks display_width = 320 # default width (pixels) display_height = 240 # default height (pixels) gameScreen = pygame.display.set_mode((display_width, display_height)) pygame.display.set_caption("The Sky is Falling") # Colors white = (255, 255, 255) darkGray = (50, 50, 50) darkerGray = (25, 25, 25) lightGray = (150, 150, 150) rLightGray = (200, 200, 200) rrLightGray = (220, 220, 220) black = (0, 0, 0) darkRed = (150, 0, 0) lightBlue = (55, 210, 225) # Keep the game loop running RUNNING = True notPlaying = True # for the menu loop skyIsFalling = True # for the loop to make stuff fall # Initialize a few variables tickCounter = 0 # count the number of ticks score = 0 xChange = 0 # change in x-coordinate to move player along x-axis xPosition = display_width / 2 # player start location size = 20 # size of player fallingSkies = [] # list of falling sky objects on the screen # The Player! thePlayer = player.Player(gameScreen, 15, xPosition, display_height - 35, lightGray, display_width) # to display Play, Quit, and Score messages def message(text, color, x, y): messageToDisplay = font.render(text, True, color) gameScreen.blit(messageToDisplay, [x, y]) # Game loop while RUNNING: clock.tick(30) # number of times the screen refreshes each second while notPlaying: gameScreen.fill(darkerGray) message("'RETURN' to Play.", rLightGray, 5, 5) message("'Q' to Quit.", rLightGray, 5, 20) pygame.display.update() for event in pygame.event.get(): if event.type == pygame.KEYDOWN: if event.key == pygame.K_q: pygame.key.set_repeat() return if event.key == pygame.K_RETURN: notPlaying = False skyIsFalling = True for event in pygame.event.get(): if event.type == pygame.KEYDOWN: if event.key == pygame.K_LEFT and not isOverLeftBound: xChange -= 10 if event.key == pygame.K_RIGHT and not isOverRightBound: xChange += 10 elif event.key == pygame.K_q: pygame.key.set_repeat() return gameScreen.fill(darkerGray) # Score display message(("Score: " + str(score)), rLightGray, 10, display_height - 18) # Movement logic: xPosition += xChange # add the change in direction to current position thePlayer.redrawPlayer(xPosition) # redraw Player at new position isOverLeftBound = thePlayer.isOverLeftBound() # check left bound isOverRightBound = thePlayer.isOverRightBound() # check right bound xChange = 0 # set change back to 0 (stops accelerating effect) tickCounter += 1 # Sky fall loop (appends FallingSky object every 10 ticks) if skyIsFalling and tickCounter > 10: # Append FallingSky objects to the list fallingSkies.append(skyChunk.SkyChunk(gameScreen, random.randrange(5, 15), random.randrange(1, display_width), -5, lightBlue, random.randrange(1, 2), score, display_height, fallingSkies)) tickCounter = 0 # Using the list of FallingSky objects for i in fallingSkies: i.fall() # makes them move score += i.returnScore() if len(fallingSkies) > 1000: del fallingSkies[0] # remove first item if list is too large if i.collideWithPlayer(thePlayer.getPlayerX(), thePlayer.getPlayerY(), thePlayer.getPlayerSize()): skyIsFalling = False del fallingSkies[:] # clear the entire list notPlaying = True score = 0 # reset the score # *screen tick* pygame.display.update() # That's all, folks! if __name__ == "__main__": main()
On his part, the Pope Francis has exposed the different temptations faced by the pastoral workers and has given a hint to overcome such temptations by imitating the incarnation aspect of God. He also shows the charity services in the history of Christianism: The Church and the Christians have engaged in the many charity services to show immense love for humanity inspired by God who became man. Such edifications, in terms of going out of oneself and one’s personal comforts, shown by the Christians and by their joyful sacrifice of their lives and time for others is yet another witness to overcome one’s selfishness and give more fully oneself in the service for humanity. The Christians of this generation, no exception to Sengol, tend to define the signs of time as problems that barricade the advancement in pastoral works and create a situation where there is nothing possible at all. Finally, they get discouraged and lose the missionary enthusiasm. Pope Francis innovatively highlights the temptations faced by the pastoral workers as those which need to be traversed in the light of the Gospel and by the Christian faith. He further encourages every Christian saying that there are temptations to be traversed but not to hinder the Christians in the journey of salvation.
"""Module containing compute environment layer management code Todo: * Add convert tool to config * Use verify module * Skip if exists * Alphabetize """ import os import subprocess from time import sleep import numpy from osgeo import gdal from LmCommon.common.lmconstants import (LMFormat, DEFAULT_NODATA, ENCODING) from LmCompute.common.lmconstants import ( CONVERT_JAVA_CMD, CONVERT_TOOL, ME_CMD) WAIT_SECONDS = 30 # ............................................................................. def convert_and_modify_ascii_to_tiff(asc_file_name, tiff_file_name, scale=None, multiplier=None, nodata_value=127, data_type='int'): """Converts an ASCII file to a GeoTiff. Args: asc_file_name (str): The file name of the existing ASCII grid to convert. tiff_file_name (str): The file path for the new tiff file. scale (None or tuple): If provided, must be a tuple of the scale minimum and maximum values. multiplier (numeric): If provided, multiply all data values in teh grid by this number. nodata_value: The no data value to use for the new value-adjusted layer. data_type: The data type for the resulting raster. """ if data_type.lower() == 'int': np_type = numpy.int8 gdal_type = gdal.GDT_Byte else: raise Exception('Unknown data type') src_ds = gdal.Open(asc_file_name) band = src_ds.GetRasterBand(1) band.GetStatistics(0, 1) in_nodata_value = band.GetNoDataValue() data = src_ds.ReadAsArray(0, 0, src_ds.RasterXSize, src_ds.RasterYSize) # If scale if scale is not None: scale_min, scale_max = scale lyr_min = band.GetMinimum() lyr_max = band.GetMaximum() def scale_func(cell_value): """Function to scale layer values. """ if cell_value == in_nodata_value: return nodata_value return (scale_max - scale_min) * ( (cell_value - lyr_min) / (lyr_max - lyr_min)) + scale_min data = numpy.vectorize(scale_func)(data) # If multiply elif multiplier is not None: def multiply_func(cell_value): """Function to multiply layer values. """ if cell_value == in_nodata_value: return nodata_value return multiplier * cell_value data = numpy.vectorize(multiply_func)(data) data = data.astype(np_type) driver = gdal.GetDriverByName('GTiff') dst_ds = driver.Create( tiff_file_name, src_ds.RasterXSize, src_ds.RasterYSize, 1, gdal_type) dst_ds.GetRasterBand(1).WriteArray(data) dst_ds.GetRasterBand(1).SetNoDataValue(nodata_value) dst_ds.GetRasterBand(1).ComputeStatistics(True) dst_ds.SetProjection(src_ds.GetProjection()) dst_ds.SetGeoTransform(src_ds.GetGeoTransform()) driver = None dst_ds = None src_ds = None # ............................................................................. def convert_ascii_to_mxe(lyr_dir): """Converts a directory of ASCII files to MXEs. lyr_dir: A directory containing ASCII grids that should be converted. """ # Run Maxent converter me_convert_cmd = '{0} {1} {2} -t {3} asc {3} mxe'.format( CONVERT_JAVA_CMD, ME_CMD, CONVERT_TOOL, lyr_dir) convert_proc = subprocess.Popen(me_convert_cmd, shell=True) while convert_proc.poll() is None: print('Waiting for layer conversion (asc to mxe) to finish...') sleep(WAIT_SECONDS) # ............................................................................. def convert_layers_in_dir(layer_dir): """Converts all layers in directory from tiffs to asciis and mxes Args: layer_dir (str):The directory to traverse through looking for layers to convert """ mxe_dirs = set([]) for my_dir, _, files in os.walk(layer_dir): for file_name in files: tiff_file_name = os.path.join(my_dir, file_name) basename, ext = os.path.splitext(tiff_file_name) if ext.lower() == LMFormat.GTIFF.ext: ascii_file_name = '{}{}'.format(basename, LMFormat.ASCII.ext) mxe_file_name = '{}{}'.format(basename, LMFormat.MXE.ext) if not os.path.exists(ascii_file_name): print('Converting: {}'.format(tiff_file_name)) convert_tiff_to_ascii(tiff_file_name, ascii_file_name) if not os.path.exists(mxe_file_name): mxe_dirs.add(my_dir) for lyr_dir in mxe_dirs: print('Converting ASCIIs in {} to MXEs'.format(lyr_dir)) convert_ascii_to_mxe(lyr_dir) # ............................................................................. def convert_tiff_to_ascii(tiff_file_name, asc_file_name, header_precision=6): """Converts an existing GeoTIFF file into an ASCII grid. Args: tiff_file_name (str): The path to an existing GeoTIFF file asc_file_name (str): The output path for the new ASCII grid header_precision (int): The number of decimal places to keep in the ASCII grid headers. Setting to None skips. Note: Headers must match exactly for Maxent so truncating them eliminates floating point differences Todo: Evaluate if this can all be done with GDAL. """ # Use GDAL to generate ASCII Grid drv = gdal.GetDriverByName('AAIGrid') ds_in = gdal.Open(tiff_file_name) # Get header information from tiff file left_x, x_res, _, ul_y, _, y_res = ds_in.GetGeoTransform() left_y = ul_y + (ds_in.RasterYSize * y_res) cols = ds_in.RasterXSize rows = ds_in.RasterYSize # Force a NODATA value if missing from TIFF before copying to ASCII nodata = ds_in.GetRasterBand(1).GetNoDataValue() if nodata is None: ds_in.GetRasterBand(1).SetNoDataValue(DEFAULT_NODATA) nodata = DEFAULT_NODATA # If header precision is not None, round vlaues if header_precision is not None: left_x = round(left_x, header_precision) left_y = round(left_y, header_precision) x_res = round(x_res, header_precision) options = ['FORCE_CELLSIZE=True'] drv.CreateCopy(asc_file_name, ds_in, 0, options) ds_in = None # Rewrite ASCII header with tiff info output = [] output.append('ncols {}\n'.format(cols)) output.append('nrows {}\n'.format(rows)) output.append('xllcorner {}\n'.format(left_x)) output.append('yllcorner {}\n'.format(left_y)) output.append('cellsize {}\n'.format(x_res)) output.append('NODATA_value {}\n'.format(int(nodata))) past_header = False with open(asc_file_name, 'r', encoding=ENCODING) as asc_in: for line in asc_in: low_line = line.lower() if not past_header and any([ low_line.startswith(test_str) for test_str in [ 'ncols', 'nrows', 'xllcorner', 'yllcorner', 'cellsize', 'dx', 'dy', 'nodata_value']]): pass else: past_header = True output.append(line) # Rewrite ASCII Grid with open(asc_file_name, 'w', encoding=ENCODING) as asc_out: for line in output: asc_out.write(line) # ............................................................................. def process_layers_json(layer_json, sym_dir=None): """Process layer JSON and return file names. Args: layer_json (json): A JSON object with an entry for layers (list) and a mask. Each layer should be an object with an identifier and / or url. sym_dir: If provided, symbolically link the layers in this directory. Note: Assumes that layer_json is an object with layers and mask """ layers = [] for lyr_obj in layer_json['layer']: layers.append(lyr_obj['path']) lyr_ext = os.path.splitext(layers[0])[1] if sym_dir is not None: new_layers = [] for i, layer_i in enumerate(layers): new_file_name = os.path.join( sym_dir, "layer{}{}".format(i, lyr_ext)) if not os.path.exists(new_file_name): os.symlink(layer_i, new_file_name) new_layers.append(new_file_name) return new_layers return layers
Alloy wheels have become popular, nowadays. These kinds of wheels appear to be attractive to a wide selection of car owners as they offer numerous advantages. Nevertheless, they also have a couple disadvantages, as well. The very first advantage is that they’re not much heavier than steel wheels. This lightness offers a much easier plunging experience for automobile owners. Light wheels also translate into a much greater fuel economy as the auto will likely be pulling lesser weight, hence directly enhancing the fuel economy of the car. This really is an enormous plus for almost any automobile owner nowadays. Lighter wheels are an effective way as it reduces the amount for money that Alloy wheel refurb Leeds must be spent on care for the automobile’s tires to extend the lifespan of the car’s tires also, another plus for all auto owners. The next edge that wheels made from alloy offers is amazing appearances. We all want our cars to stand out in the remaining cars which are driven by our neighbors, in the end, and with such wheels you’ll have the ability to raise the style part of your vehicle by leaps and bounds. Common wheel problems including corrosion and rust can impact the fitness of the wheels. Fortunately, these issues whatsoever not affect wheels made from aluminum alloy. However, it’s also essential to keep the drawbacks in mind. Price is the primary drawback you will discover. Alloy wheels are extremely expensive to create. In comparison to steel, if you need these wheels in your own car, you’ll need to cover a substantial sum. Steel is the most frequently encountered kind of alloy used in wheels. While certainly very powerful and permanent, as previously mentioned above, alloy isn’t as durable or quite as strong as steel. If the automobile suffers injury that is major, afterward alloy rims can bend. However, if one is still determined to work with alloy wheels in their own car, then it is essential to allow them to understand specialists who can take good care of all kinds of alloy wheel repair. All things considered, nobody wishes to end up broken down on the road because they don’t know the wheel repair technicians. So, there are frequently many different types of dilemmas that the car might face. A car owner can discover the appropriate experts for maintaining and fixing such wheels with extensive research plus some Internet searching.
# Copyright 2016 Jochen Kursawe. See the LICENSE file at the top-level directory # of this distribution and at https://github.com/kursawe/MCSTracker/blob/master/LICENSE. """In this the main tracking functions are defined """ import sys import os from .maximum_common_subgraph_finder import * import mesh from mesh.in_out import _natural_keys import glob import copy import warnings from networkx.algorithms.components.connected import connected_component_subgraphs def track(mesh_one, mesh_two): """Find a mapping between the cell ids in both frames and assigns the global ids accordingly. Parameters ---------- mesh_one : Mesh type First mesh mesh_two : Mesh type Second mesh Returns ------- mapped_ids : the ids of elements that were identified in both meshes """ subgraph_finder = LocalisedSubgraphFinder(mesh_one, mesh_two) subgraph_finder.find_maximum_common_subgraph() post_processor = PostProcessor(mesh_one, mesh_two, subgraph_finder.largest_mappings) post_processor.index_global_ids_from_largest_mappings() post_processor.tidy_current_mapping() mapped_ids = post_processor.post_process_with_data() return mapped_ids def track_and_write_sequence(input_path, output_path, start_number = 1, number_meshes = None): """Reads a sequence and writes the tracked data into consecutive meshes Cells that are present in multiple frames will have the same global ids, and each other cell will have a distinct non-recurring global id. Parameters ---------- input_path : string filename of seedwater-segmented data frames, without the file-endings and numberings output_path : string filename where the output should be saved, without file ending this name will be extended with a number and .mesh for each segmented frame start_number : int mesh number to be started with (indexing starts at one) number_meshes : int index of the last mesh we want to track (indexing starts at one) """ mesh_sequence = mesh.read_sequence_from_data(input_path, start_number, number_meshes) previous_sequence = mesh.read_sequence_from_data(input_path, start_number, number_meshes) next_sequence = mesh.read_sequence_from_data(input_path, start_number, number_meshes) # track all consecutive time frames individually step_sequence = [] for counter, this_mesh in enumerate(mesh_sequence): if counter > 0: previous_mesh = previous_sequence[counter -1] corresponding_mesh = next_sequence[counter] try: track(previous_mesh, corresponding_mesh) except FirstIndexException: print "Could not find first index in tracking step " + str(counter) step_sequence.append([previous_mesh, corresponding_mesh]) # give global ids to the first mesh global_ids = [] for counter, element in enumerate(mesh_sequence[0].elements): element.global_id = counter global_ids.append(counter) element.is_in_reduced_mcs_previous = False mesh_sequence[0].index_global_ids() # trace global ids through all the meshes, making new ones if necessary for counter, this_mesh in enumerate(mesh_sequence): if counter == 0: corresponding_mesh_next_step = step_sequence[counter][0] for element_counter, element in enumerate(this_mesh.elements): element.is_in_reduced_mcs_next = corresponding_mesh_next_step.elements[element_counter].is_in_reduced_mcs_next if counter > 0: previous_mesh = step_sequence[counter - 1][0] corresponding_mesh = step_sequence[counter - 1][1] if counter < len(step_sequence): corresponding_mesh_next_step = step_sequence[counter][0] for element_counter, element in enumerate(this_mesh.elements): corresponding_element = corresponding_mesh.get_element_with_frame_id(element.id_in_frame) this_global_id = corresponding_element.global_id if this_global_id is None: new_global_id = max(global_ids) + 1 global_ids.append( max(global_ids) + 1 ) element.global_id = new_global_id element.is_new = True else: previous_frame_id = previous_mesh.get_element_with_global_id(this_global_id).id_in_frame previous_global_id = mesh_sequence[counter - 1].get_element_with_frame_id(previous_frame_id).global_id element.global_id = previous_global_id try: element.is_in_reduced_mcs_previous = corresponding_element.is_in_reduced_mcs_previous except: element.is_in_reduced_mcs_previous = False if counter < len(step_sequence): try: element.is_in_reduced_mcs_next = corresponding_mesh_next_step.elements[element_counter].is_in_reduced_mcs_next except(AttributeError): element.is_in_reduced_mcs_next = False else: element.is_in_reduced_mcs_next = False this_mesh.index_global_ids() #now, save the mesh sequence for counter, this_mesh in enumerate(mesh_sequence): this_file_name = output_path + str(start_number + counter - 1) + '.mesh' this_mesh.save(this_file_name) def analyse_tracked_sequence(input_path): """collect summary statistics on tracked data Parameters ---------- input_path : string Path to the sequence that should be analysed. Sequences are numbered already tracked meshes. Returns ------- data_collector : DataCollector instance This object has member variables for various summary statistics """ mesh_sequence = mesh.load_sequence(input_path) return DataCollector(mesh_sequence) def plot_tracked_sequence( sequence_path, image_path, segmented_path, out_path ): """Plot a tracked sequence of meshes. This creates three types of plots for the entire sequence. The first type of plot overlays the experimental data, the segmentation, and the tracking outcome. Each tracked cell is given an individual colour and an id that is included in the overlay. The second type of plots illustrates the maximum common subgraphs. The third type of plots shows the tracked tesselation of polygons Parameters ---------- sequence_path : string path to the tracked mesh sequence (contains a series of .mesh files) image_path : string path to the sequence of original images segmented_path : string path where the overlay should be saved. Will be created if required. """ mesh_sequence = mesh.load_sequence( sequence_path ) list_of_image_files = glob.glob( os.path.join( image_path , '*.tif') ) list_of_image_files.sort(key=_natural_keys) list_of_segmented_files = glob.glob( os.path.join( segmented_path , '*.tif') ) list_of_segmented_files.sort(key=_natural_keys) # get maximal global id max_global_id = 0 for mesh_instance in mesh_sequence: this_max_global_id = mesh_instance.get_max_global_id() if this_max_global_id > max_global_id: max_global_id = this_max_global_id if not os.path.isdir(out_path): os.mkdir( out_path ) overlay_path = os.path.join(out_path, 'overlay') if not os.path.isdir(overlay_path): os.mkdir( overlay_path ) polygon_path = os.path.join(out_path, 'polygons') if not os.path.isdir(polygon_path): os.mkdir( polygon_path ) mcs_path = os.path.join(out_path, 'mcs') if not os.path.isdir(mcs_path): os.mkdir( mcs_path ) for mesh_counter, mesh_instance in enumerate( mesh_sequence ): this_image_path = list_of_image_files[mesh_counter] this_segmented_path = list_of_segmented_files[mesh_counter] out_file_name = os.path.split( this_image_path.replace('.tif', '_overlay.png') )[1] overlay_file_path = os.path.join(overlay_path, out_file_name) mesh_instance.plot_tracked_data(overlay_file_path, this_image_path, this_segmented_path, max_global_id) polygon_file_name = os.path.join( polygon_path, out_file_name ) mesh_instance.plot( polygon_file_name, color_by_global_id = True, total_number_of_global_ids = max_global_id) mcs_file_name = os.path.join( mcs_path, out_file_name ) mesh_instance.plot( mcs_file_name, color_by_global_id = True, total_number_of_global_ids = max_global_id, reduced_mcs_only = True ) class DataCollector(): """A class for analysing tracked sequences.""" def __init__(self, mesh_sequence): """The constructor of the DataCollector Parameters ---------- mesh_sequence : list of Mesh instances The entries should have global ids in them. """ self.mesh_sequence = mesh_sequence self.collect_all_steps() self.calculate_average_cell_area() self.generate_death_statistics() self.generate_centroid_statistics() self.generate_edge_difference_statistics() self.generate_tracking_statistics() self.generate_rosette_statistics() self.output_directory = None def set_output_directory(self, output_dir): """Sets the output dir. Parameters ---------- output_dir : string """ if not os.path.exists(output_dir): os.mkdir(output_dir) self.output_directory = output_dir def write_area_statistics(self): """Write the area statistics""" area_statistics = [] for this_mesh in self.mesh_sequence: this_area = this_mesh.calculate_total_area() this_number_cells = this_mesh.get_num_elements() this_average = this_area/this_number_cells area_statistics.append( this_average ) area_statistics_np = np.array(area_statistics) np.savetxt(os.path.join(self.output_directory, 'area_statistics.csv' ), area_statistics_np) def write_rearrangement_statistics(self): """Write the area statistics""" number_of_rearrangements = [] for step in self.steps: number_of_rearrangements.append( step.number_of_cells_gaining_edges + step.number_of_cells_loosing_edges ) rearrangement_statistics_np = np.array(number_of_rearrangements) np.savetxt(os.path.join(self.output_directory, 'rearrangement_statistics.csv' ), rearrangement_statistics_np) def write_tracked_cell_statistics(self): """Write tracked_cells_statistics""" number_of_tracked_cells = [] number_of_total_cells = [] these_data = np.zeros( (len(self.steps), 2 ), dtype = 'int') for step_counter, step in enumerate( self.steps ): these_data[step_counter, 0] = step.mesh_one.get_num_elements() these_data[step_counter, 1] = step.number_of_tracked_cells np.savetxt(os.path.join(self.output_directory, 'tracking_statistics.csv' ), these_data) def write_dying_cells(self): """make a list of all global ids that are removed""" np.savetxt( os.path.join(self.output_directory, 'dying_cells.csv'), self.global_ids_of_dying_cells ) def write_cell_area_statistics(self): """write the area evolution for each global id""" maximal_global_id = 0 for this_mesh in self.mesh_sequence: this_max_global_id = this_mesh.get_max_global_id() if this_max_global_id > maximal_global_id: maximal_global_id = this_max_global_id cell_area_data = np.zeros( (maximal_global_id + 1, len(self.mesh_sequence)) ) for mesh_counter, this_mesh in enumerate(self.mesh_sequence): for global_id in range(maximal_global_id + 1): try: this_element = this_mesh.get_element_with_global_id( global_id ) this_area = this_element.calculate_area() except KeyError: this_area = np.nan cell_area_data[global_id, mesh_counter] = this_area np.savetxt(os.path.join(self.output_directory, 'cell_area_statistics.csv' ), cell_area_data) def collect_all_steps(self): """Generate StepDataCollectors for each time step""" self.steps = [] for counter, this_mesh in enumerate(self.mesh_sequence): if counter > 0: previous_mesh = self.mesh_sequence[counter - 1] self.steps.append(StepDataCollector(previous_mesh, this_mesh, counter)) def generate_rosette_statistics(self): "Get the total number of rosettes in all meshes" self.number_of_rosettes = 0 for this_mesh in self.mesh_sequence: self.number_of_rosettes += this_mesh.count_rosettes() def generate_death_statistics(self): """Get the total number of dying cells in the sequence""" self.number_dying_cells = 0 self.global_ids_of_dying_cells = [] for step in self.steps: self.number_dying_cells += step.number_dying_cells self.global_ids_of_dying_cells += step.global_ids_of_dying_cells def generate_centroid_statistics(self): """Get statistics on centroid displacement""" self.centroid_displacements = self.steps[0].centroid_displacements for step in self.steps[1:]: step.centroid_displacements = np.hstack((self.centroid_displacements, step.centroid_displacements)) self.centroid_displacements /= np.sqrt(self.average_cell_area) self.maximal_centroid_displacement = np.max(self.centroid_displacements) self.minimal_centroid_displacement = np.min(self.centroid_displacements) self.average_centroid_displacement = np.mean(self.centroid_displacements) def calculate_average_cell_area(self): "Calculate the average area of all cells of all meshes in the sequence" total_area = 0 total_number_of_cells = 0 for this_mesh in self.mesh_sequence: total_area += this_mesh.calculate_total_area() total_number_of_cells += this_mesh.get_num_elements() self.average_cell_area = total_area/total_number_of_cells def generate_edge_difference_statistics(self): """Collect statistics on how many cells gain vs loose edges in this step""" self.number_of_cells_gaining_edges = 0 self.number_of_cells_loosing_edges = 0 for step in self.steps: self.number_of_cells_gaining_edges += step.number_of_cells_gaining_edges self.number_of_cells_loosing_edges += step.number_of_cells_loosing_edges def generate_tracking_statistics(self): """Generate statistics about number of tracked cells""" shared_global_ids = set(self.mesh_sequence[0].global_id_dictionary.keys()) for this_mesh in self.mesh_sequence[1:]: shared_global_ids.intersection_update(set(this_mesh.global_id_dictionary.keys())) self.number_of_tracked_cells = len(shared_global_ids) self.global_ids_of_tracked_cells = list(shared_global_ids) class StepDataCollector(): """A class to analyse two consecutive tracked meshes""" def __init__(self, mesh_one, mesh_two, step_number = 0): """The constructor of the StepDataCollector Parameters ---------- mesh_one : Mesh instance first mesh mesh_two : Mesh instance second_mesh step_number : int number of this step in the sequence """ self.mesh_one = mesh_one self.mesh_two = mesh_two self.step_number = step_number self.generate_tracking_statistics() self.generate_death_statistics() self.generate_centroid_statistics() self.generate_edge_difference_statistics() def generate_tracking_statistics(self): """Generate statistics about number of tracked cells""" mesh_one_global_ids = self.mesh_one.global_id_dictionary.keys() mesh_two_global_ids = self.mesh_two.global_id_dictionary.keys() shared_global_ids = set.intersection(set(mesh_one_global_ids), set(mesh_two_global_ids)) self.number_of_tracked_cells = len(shared_global_ids) self.global_ids_of_tracked_cells = list(shared_global_ids) def generate_death_statistics(self): """Collect the number of dying cells in this step """ self.number_dying_cells = 0 self.global_ids_of_dying_cells = [] for element in self.mesh_one.elements: if element.global_id not in self.mesh_two.global_id_dictionary.keys(): element_dyed = True if element.check_if_on_boundary(): element_dyed = False else: adjacent_element_ids = element.get_ids_of_adjacent_elements() for frame_id in adjacent_element_ids: adjacent_global_id = self.mesh_one.get_element_with_frame_id(frame_id).global_id if adjacent_global_id not in self.mesh_two.global_id_dictionary.keys(): element_dyed = False break if element_dyed: self.number_dying_cells +=1 self.global_ids_of_dying_cells.append(element.global_id) def generate_centroid_statistics(self): """Collect statistics on how much centroids move""" centroid_displacements = [] for element in self.mesh_one.elements: if element.global_id in self.mesh_two.global_id_dictionary.keys(): second_element_centroid = self.mesh_two.get_element_with_global_id(element.global_id).calculate_centroid() centroid_displacements.append(np.linalg.norm(second_element_centroid - element.calculate_centroid())) centroid_displacements_np = np.array(centroid_displacements) self.centroid_displacements = centroid_displacements_np centroid_displacements_rescaled = centroid_displacements_np/np.sqrt(self.mesh_one.calculate_average_element_area()) self.maximal_centroid_displacement = np.max(centroid_displacements_rescaled) self.minimal_centroid_displacement = np.min(centroid_displacements_rescaled) self.average_centroid_displacement = np.mean(centroid_displacements_rescaled) def generate_edge_difference_statistics(self): """Collect statistics on how many cells gain vs loose edges in this step""" self.number_of_cells_gaining_edges = 0 self.number_of_cells_loosing_edges = 0 for element in self.mesh_one.elements: if element.global_id in self.mesh_two.global_id_dictionary.keys(): second_element = self.mesh_two.get_element_with_global_id(element.global_id) if element.get_num_nodes() > second_element.get_num_nodes(): self.number_of_cells_gaining_edges += 1 elif element.get_num_nodes() < second_element.get_num_nodes(): self.number_of_cells_loosing_edges += 1 class PostProcessor(): """An object to postprocess a maximum common subgraph and identify rearrangements""" def __init__(self, mesh_one, mesh_two, largest_mappings ): """The constructor of the post processor Parameters ---------- mesh_one : Mesh instance the first frame represented as mesh mesh_two : Mesh instance the second frame represented as mesh largest_mappings : list of dictionaries the list of equivalent largest mappings that the subgraph finder returned """ self.largest_mappings = largest_mappings self.mapped_ids = [] """All currently present global ids""" self.mesh_one = mesh_one self.network_one = mesh_one.generate_network() self.mesh_two = mesh_two self.network_two = mesh_two.generate_network() self.preliminary_mappings = {} """A dictionary of the same style as TrackingState.id_map. Keys are mesh_one frame ids and values are mesh_two frame_ids""" def get_multiple_images( self, list_of_arguments, preliminary_mapping = {} ): """Get a list of all images of the given arguments. Parameters ---------- list_of_arguments : list of ints list containing frame_ids in mesh_one preliminary_mapping : dict mapping of cells between the two frames for which the global ids have not yet been set Returns ------- list_of_images : list of ints list containing all frame_ids in mesh_two of elements that are images of frame_ids in list_of_arguments """ list_of_images = [] for frame_id in list_of_arguments: global_id = self.mesh_one.get_element_with_frame_id(frame_id).global_id if global_id is not None: list_of_images.append(self.mesh_two.get_element_with_global_id(global_id).id_in_frame ) else: list_of_images.append(preliminary_mapping[frame_id]) return list_of_images def post_process_with_data(self): """Post process the maximum common subgraph, 'fill in the gaps', and return the full list of global ids Identifies T1 Swaps and maps the involved cells Returns ------- global_ids : list if ints list of all global ids present after post-processing """ # self.index_global_ids_from_largest_mappings() network_one = self.mesh_one.generate_network_of_unidentified_elements() self.stable_fill_in_by_adjacency() self.resolve_division_events() self.index_global_ids() return self.mapped_ids def stable_fill_in_by_adjacency(self): """Fill in untracked elements. This method sets up a registry of untracked cells and how many tracked neighbours they have. This registry is saved under self.connectivity_vector, which safes for each element in the first mesh the number of tracked neighbours in the first mesh. Based on this registry it will attempt to map cells in a way that maximises the number of preserved neighbours upon tracking. This is achieved by combining self.connectivity_vector with a boolean vector self.actual_connectivy_tested that saves whether the number of preserved neighbours under the best possible mapping has been found. The method also uses a current_best_match that has the connectivity self.maximal actual connectivity. """ self.make_connectivity_vector() extension_found_with_relaxed_condition = True while extension_found_with_relaxed_condition: mapping_has_changed = True while mapping_has_changed: old_mapping = self.preliminary_mappings.copy() self.already_inspected_cells = np.zeros_like(self.connectivity_vector, dtype = 'bool') while self.check_mapping_is_extendible(): self.maximal_actual_connectivity = 0 self.current_best_match = None self.actual_connectivity_tested = np.zeros_like( self.connectivity_vector, dtype = 'bool' ) while ( self.get_maximal_connectivity() > self.maximal_actual_connectivity and self.get_maximal_connectivity() > 1 ): next_frame_id = self.pick_next_cell() mapping_candidate, actual_connectivity = self.alternative_find_safe_mapping_candidate_for_single_cell( next_frame_id ) element_index = self.mesh_one.frame_id_dictionary[next_frame_id] self.actual_connectivity_tested[element_index] = True if mapping_candidate is not None: if actual_connectivity > self.maximal_actual_connectivity: self.maximal_actual_connectivity = actual_connectivity self.current_best_match = ( next_frame_id, mapping_candidate ) else: self.already_inspected_cells[element_index] = True if self.current_best_match is not None: self.extend_preliminary_mapping( self.current_best_match[0], self.current_best_match[1] ) if self.preliminary_mappings == old_mapping: mapping_has_changed = False else: mapping_has_changed = True self.already_inspected_cells = np.zeros_like(self.connectivity_vector, dtype = 'bool') self.maximal_actual_connectivity = 0 self.current_best_match = None self.actual_connectivity_tested = np.zeros_like( self.connectivity_vector, dtype = 'bool' ) while ( self.get_maximal_connectivity() > self.maximal_actual_connectivity and self.get_maximal_connectivity() > 1 ): next_frame_id = self.pick_next_cell() mapping_candidate, actual_connectivity = self.alternative_find_safe_mapping_candidate_for_single_cell( next_frame_id, relaxed_condition = True ) element_index = self.mesh_one.frame_id_dictionary[next_frame_id] self.actual_connectivity_tested[element_index] = True if mapping_candidate is not None: if actual_connectivity >= 2: self.maximal_actual_connectivity = actual_connectivity self.current_best_match = ( next_frame_id, mapping_candidate ) else: self.already_inspected_cells[element_index] = True if self.current_best_match is not None: self.extend_preliminary_mapping( self.current_best_match[0], self.current_best_match[1] ) extension_not_yet_found = False extension_found_with_relaxed_condition = True else: extension_found_with_relaxed_condition = False def get_maximal_connectivity(self): """Helper method for stable_fill_in_by_adjacency. It it returns the maximal connectivity to the mcs among cells that have not yet been inspected for actual connectivity, i.e. the possible number of preserved neighbours under the best-possible mapping. Returns ------- maximal_connectivity : int maximal connectivity among not yet inspected cells. """ not_yet_visited_cells = np.logical_and( self.already_inspected_cells == False, self.actual_connectivity_tested == False ) maximal_connectivity = np.max( self.connectivity_vector[not_yet_visited_cells]) return maximal_connectivity def pick_next_cell(self): """Pick a next cell for inspection for actual connectivity Returns a cell that has not yet been inspected and for which the actual connectivity has not yet been tested. Returns ------- next_cell : int frame id of the cell that is to be inspected next. """ maximal_connectivity = self.get_maximal_connectivity() assert(maximal_connectivity > 1) not_yet_visited_cells = np.logical_and( self.already_inspected_cells == False, self.actual_connectivity_tested == False ) possible_indices = np.where( np.logical_and(self.connectivity_vector == maximal_connectivity, not_yet_visited_cells ) ) next_frame_id = self.mesh_one.elements[possible_indices[0][0]].id_in_frame return next_frame_id def check_mapping_is_extendible(self): """Returns whether the current mapping is extendible. Returns True if there are any cells that have not yet been inspected and for which the connectivity is larger than one Returns ------- mapping_is_extendible : bool True if the mapping is extendible. """ mapping_is_extendible = np.sum(np.logical_and( self.already_inspected_cells == False, self.connectivity_vector > 1 )) > 0 return mapping_is_extendible def make_connectivity_vector(self): """Make a connectivity vector. The connectivity vector is used throughout the method stable_fill_in_by_adjacency. For each cell in the first mesh it saves an integer number denoting how many tracked neighbours that cell has. The connectivity vector is stored as a member variable of the post processor. """ connectivity_vector = np.zeros(self.mesh_one.get_num_elements(), dtype = 'int') for counter, element in enumerate(self.mesh_one.elements): if element.global_id is None: full_set_of_currently_mapped_neighbours = self.mesh_one.get_already_mapped_adjacent_element_ids( element.id_in_frame ) connectivity_vector[counter] = len(full_set_of_currently_mapped_neighbours) else: connectivity_vector[counter] = 0 self.connectivity_vector = connectivity_vector def extend_preliminary_mapping(self, next_frame_id, mapping_candidate): """Extend the preliminary mapping. Once stable_fill_in_by_adjacency has found a new mapping this method is called to add the mapping to the preliminary mapping. It will update the connectivity vector for any cells around the cell corresponding to next_frame_id and reset their already_inspected vector. Parameters ---------- next_frame_id : int frame id of cell in first mesh that is to be mapped mapping_candidate : int frame id of cell in second mesh that is to be mapped """ centroid_position = self.mesh_two.get_element_with_frame_id(mapping_candidate).calculate_centroid() new_centroid_position = np.array(centroid_position) new_centroid_position[1] = 326 - centroid_position[1] assert(next_frame_id not in self.preliminary_mappings) self.preliminary_mappings[next_frame_id] = mapping_candidate new_neighbour_ids = self.mesh_one.get_not_yet_mapped_shared_neighbour_ids( [next_frame_id], self.preliminary_mappings.keys() ) element_index = self.mesh_one.frame_id_dictionary[next_frame_id] self.connectivity_vector[element_index] = 0 for neighbour_id in new_neighbour_ids: element_index = self.mesh_one.frame_id_dictionary[neighbour_id] self.connectivity_vector[element_index] += 1 self.already_inspected_cells[element_index] = False def alternative_find_safe_mapping_candidate_for_single_cell(self, frame_id, relaxed_condition = False ): """This method finds a possible mapping candidate for a single cell. It is a helper method of stable_fill_in_by_adjacency. It returns a mapping candidate if the number of gained tracked neighbours is less than the number of preserved neighbours - 1. If relaxed_condition is True, it returns a mapping candidate if the number of gained tracked neighbours is less than the number of preserved tracked neighbours. Parameters ---------- frame_id : int integer of the cell for which we try to find a mapping candidate relaxed_condition : bool If True, the number of gained tracked neighbours must be less than the number of preserved tracked neighbours. If False, the number of gained tracked neighbours must be less than the number of preserved tracked neighbours - 1. Returns ------- mapping_candidate : int frame id in second mesh that indicates the mapping candidate current_neighbour_number : int number of preserved neighbours """ mapping_candidate = None element_one = self.mesh_one.get_element_with_frame_id(frame_id) if ( frame_id not in self.preliminary_mappings ): full_set_of_currently_mapped_neighbours = self.mesh_one.get_already_mapped_adjacent_element_ids( frame_id, self.preliminary_mappings.keys() ) # get mapping candidates by all shared neighbours of currently mapped neighbours images_of_already_mapped_neighbours = self.get_multiple_images( full_set_of_currently_mapped_neighbours, self.preliminary_mappings ) mapping_candidates = self.mesh_two.get_not_yet_mapped_shared_neighbour_ids( images_of_already_mapped_neighbours, self.preliminary_mappings.values() ) full_neighbour_number = len( full_set_of_currently_mapped_neighbours ) current_neighbour_number = len( full_set_of_currently_mapped_neighbours ) if len(mapping_candidates) == 0: mapping_candidates = set() old_reduced_image_sets = [images_of_already_mapped_neighbours] while ( ( len(mapping_candidates) == 0 ) and ( current_neighbour_number > 2 ) ): # They don't have a shared neighbour, see whether we can get better mapping candidates if we take one of the # mapped neighbours out to allow for rearrangement new_reduced_image_sets = [] for image_set in old_reduced_image_sets: for image in image_set: reduced_images_of_already_mapped_neighbours = [item for item in image_set if item != image ] mapping_candidates.update( self.mesh_two.get_not_yet_mapped_shared_neighbour_ids( reduced_images_of_already_mapped_neighbours, self.preliminary_mappings.values() )) new_reduced_image_sets.append(list(reduced_images_of_already_mapped_neighbours)) current_neighbour_number = current_neighbour_number - 1 old_reduced_image_sets = list(new_reduced_image_sets) filtered_mapping_candidates = [] for candidate in mapping_candidates: additional_neighbour_count = self.get_additional_neighbour_count( candidate, images_of_already_mapped_neighbours, self.preliminary_mappings.values() ) element_two = self.mesh_two.get_element_with_frame_id(candidate) if relaxed_condition: if additional_neighbour_count < full_neighbour_number: filtered_mapping_candidates.append( candidate ) else: if additional_neighbour_count < full_neighbour_number - 1: filtered_mapping_candidates.append( candidate ) if len(filtered_mapping_candidates) == 1: mapping_candidate = filtered_mapping_candidates[0] return mapping_candidate, current_neighbour_number def find_safe_mapping_candidate_for_single_cell(self, frame_id, preliminary_mapping, min_neighbour_number = 3 ): """Finds a mapping candidate for the cell with frame_id Helper to altered_fill_in_by_adjacency which only gets calles upon division resolution. Parameters ---------- frame_id : int frame_id of cell in network one for which a mapping candidate is needed preliminary_mapping : dict existing mappings from network one to network 2 min_neighbour_number : int minimal number or connections to already mapped neighbours that the new mapping needs to preserve Returns ------- mapping_candidate : int frame_id in network two that has minimal_number_of_connections to already mapped neighbours of the element in mesh_one with frame_id. Returns None if no mapping candidate could be found. """ mapping_candidate = None # loop over the nodes in the connected component_one element_one = self.mesh_one.get_element_with_frame_id(frame_id) if ( frame_id not in preliminary_mapping ): full_set_of_currently_mapped_neighbours = self.mesh_one.get_already_mapped_adjacent_element_ids( frame_id, preliminary_mapping.keys() ) if len( full_set_of_currently_mapped_neighbours ) >= min_neighbour_number: # get mapping candidates by all shared neighbours of currently mapped neighbours images_of_already_mapped_neighbours = self.get_multiple_images( full_set_of_currently_mapped_neighbours, preliminary_mapping ) mapping_candidates = self.mesh_two.get_not_yet_mapped_shared_neighbour_ids( images_of_already_mapped_neighbours, preliminary_mapping.values() ) if len(mapping_candidates) == 0: mapping_candidates = set() current_neighbour_number = len( full_set_of_currently_mapped_neighbours ) old_reduced_image_sets = [images_of_already_mapped_neighbours] while ( len(mapping_candidates) == 0 and current_neighbour_number > min_neighbour_number ): # They don't have a shared neighbour, see whether we can get better mapping candidates if we take one of the # mapped neighbours out to allow for rearrangement new_reduced_image_sets = [] for image_set in old_reduced_image_sets: for image in image_set: reduced_images_of_already_mapped_neighbours = [item for item in image_set if item != image ] assert( len( reduced_images_of_already_mapped_neighbours ) >= min_neighbour_number ) mapping_candidates.update( self.mesh_two.get_not_yet_mapped_shared_neighbour_ids( reduced_images_of_already_mapped_neighbours, preliminary_mapping.values() )) new_reduced_image_sets.append(list(reduced_images_of_already_mapped_neighbours)) current_neighbour_number = current_neighbour_number - 1 old_reduced_image_sets = list(new_reduced_image_sets) filtered_mapping_candidates = [] for candidate in mapping_candidates: additional_neighbour_count = self.get_additional_neighbour_count( candidate, images_of_already_mapped_neighbours, preliminary_mapping.values() ) element_two = self.mesh_two.get_element_with_frame_id(candidate) polygon_numbers_add_up = element_two.get_num_nodes() < ( element_one.get_num_nodes() + additional_neighbour_count + 2 ) if additional_neighbour_count < 3 and additional_neighbour_count < min_neighbour_number and polygon_numbers_add_up: filtered_mapping_candidates.append( candidate ) if len(filtered_mapping_candidates) == 1: mapping_candidate = filtered_mapping_candidates[0] return mapping_candidate def get_additional_neighbour_count(self, candidate_id, expected_neighbours, mapped_cells): """See how many additional neighbours the cell with candidate_id in mesh_two has (within all already mapped cells). Parameters ---------- candidate_id : int id_in_frame of cell in mesh_two expected_neighbours : list of ints cells in mesh two that we expect to be neighbours of candidate mapped_cells : list of ints frame ids in mesh two that have been mapped but whose global ids have not been set Returns ------- additional_neighbour_count : int number of mapped neighbours of element with candidate_id that are not in expected_neighbours """ additional_neighbour_count = 0 candidates_mapped_neighbours = self.mesh_two.get_already_mapped_adjacent_element_ids( candidate_id, mapped_cells ) for neighbour in candidates_mapped_neighbours: if neighbour not in expected_neighbours: additional_neighbour_count += 1 return additional_neighbour_count def altered_fill_in_by_adjacency(self, network_one): """Fill in unmapped cells by adjacency to existing mapping. Takes a network of unmapped cells in the first mesh, and fills in the cell-to-cell mapping between them based on adjacency with already mapped cells. This method has been replaced by stable_fill_in_by_adjacency and is now only used in the division resolution step. Parameters ---------- network_one : networkx Graph instance subgraph of the network corresponding to mesh_one """ preliminary_mappings = self.altered_get_mappings_by_adjacency(network_one) for node in preliminary_mappings: self.preliminary_mappings[node] = preliminary_mappings[node] def altered_get_mappings_by_adjacency(self, connected_component_one): """Gets a preliminary mapping based on the adjacency to already mapped nodes. Helper method for fill_in_by_adjacency and identify_division_event. Same as altered_fill_in_by_adjacency this method is now only used in the division resolution step Parameters ---------- connected_component_one : networkx Graph instance subgraph of the network corresponding to mesh_one. network of ummapped cells Returns ------- preliminary_mapping : dict keys are frame ids in mesh_one, values are frame_ids in mesh_two """ preliminary_mapping = {} self.extend_current_preliminary_mapping(connected_component_one, preliminary_mapping, minimal_number_of_neighbours=4) self.extend_current_preliminary_mapping(connected_component_one, preliminary_mapping, minimal_number_of_neighbours=3) self.extend_current_preliminary_mapping(connected_component_one, preliminary_mapping, minimal_number_of_neighbours=2) # self.extend_current_preliminary_mapping(connected_component_one, preliminary_mapping, minimal_number_of_neighbours=1) return preliminary_mapping def extend_current_preliminary_mapping(self, network_one, preliminary_mapping, minimal_number_of_neighbours=3): """This fills in any unmapped nodes in network one into preliminary mapping, ensuring that any new mapping has at least minimal_number_of_neighbours tracked neighbours. As submethod to altered_fill_in_by_adjacency this method only gets called upon division resolution. Parameters ---------- network_one : networkx.Graph instance network of unmapped frame ids in mesh one preliminary_mapping : dict int->int already known mappings from network one minimal_number_of_neighbours : int the minimum number of connections to already mapped cells that the mapping needs to preserve. """ attempted_fill_in_counter = {} for node in network_one.nodes(): attempted_fill_in_counter[node] = 0 not_all_neighbours_mapped = True while not_all_neighbours_mapped: not_all_neighbours_mapped = False for node in network_one.nodes(): if node not in preliminary_mapping and node not in self.preliminary_mappings: mapping_candidate = self.find_safe_mapping_candidate_for_single_cell( node, preliminary_mapping, minimal_number_of_neighbours ) if mapping_candidate is not None and mapping_candidate not in preliminary_mapping.values(): preliminary_mapping[node] = mapping_candidate else: # this element is still not uniquely identifiable. If all its neighbours have been mapped, then # this means that it actually does not exist in mesh 2, so we stop looking for a match. # otherwise, try again. if len(self.mesh_one.get_element_with_frame_id(node).get_ids_of_adjacent_elements() ) > 2: not_yet_mapped_neighbours = self.mesh_one.get_not_yet_mapped_shared_neighbour_ids([ node ]) no_not_yet_mapped_neighbours = 0 for neighbour_id in not_yet_mapped_neighbours: if neighbour_id not in preliminary_mapping: no_not_yet_mapped_neighbours += 1 if no_not_yet_mapped_neighbours > 0 and attempted_fill_in_counter[node] < 5: not_all_neighbours_mapped = True attempted_fill_in_counter[node] += 1 def tidy_current_mapping(self): """This function resets all global id's that only have one connection to the current maximum common subgraph, or two isolated connections, or or members of a small extension to the mcs that contains maximally three cells and has only one connection to the mcs, or connected components of less than ten members. """ isolated_vector = np.zeros( len(self.mesh_one.elements), dtype = 'bool' ) for element_counter, element in enumerate( self.mesh_one.elements ): if element.global_id is not None: # if element.global_id == 166: # import pdb; pdb.set_trace() if self.is_isolated( element ): isolated_vector[ element_counter ] = True mapped_neighbours = self.mesh_one.get_already_mapped_adjacent_element_ids( element.id_in_frame ) if len(mapped_neighbours) == 2: first_neighbour_element = self.mesh_one.get_element_with_frame_id( mapped_neighbours[0] ) second_neighbour_element = self.mesh_one.get_element_with_frame_id( mapped_neighbours[1] ) if self.is_isolated(first_neighbour_element) or self.is_isolated(second_neighbour_element): isolated_vector[element_counter] = True self.remove_global_ids_by_boolean_mask(isolated_vector) isolated_vector[:] = False # Now, let's deal with connected components network_one = self.mesh_one.generate_network_of_identified_elements() connected_components_in_network_one = list( nx.connected_component_subgraphs(network_one) ) # import pdb; pdb.set_trace() for connected_component in connected_components_in_network_one: if len(connected_component) < 10: for frame_id in connected_component: index = self.mesh_one.frame_id_dictionary[frame_id] isolated_vector[index] = True self.remove_global_ids_by_boolean_mask(isolated_vector) self.reindex_global_ids() # # apply reduced_mcs flags: for element in self.mesh_one.elements: if element.global_id in self.mapped_ids: element.is_in_reduced_mcs_next = True else: element.is_in_reduced_mcs_next = False for element in self.mesh_two.elements: if element.global_id in self.mapped_ids: element.is_in_reduced_mcs_previous = True else: element.is_in_reduced_mcs_previous = False def reindex_global_ids(self): """Reindexes the global ids such that the maximal global id corresponds to the total number of tracked cells. This method ensures a contiuous count of global ids. """ # currently, the mapped ids are not a continuous count, let's change that new_mapped_ids = [] for counter, mapped_id in enumerate(self.mapped_ids): self.mesh_one.get_element_with_global_id(mapped_id).global_id = counter self.mesh_two.get_element_with_global_id(mapped_id).global_id = counter new_mapped_ids.append(counter) # index the change self.mesh_one.index_global_ids() self.mesh_two.index_global_ids() self.mapped_ids = new_mapped_ids def remove_global_ids_by_boolean_mask(self, boolean_mask): """Remove global ids from all elements for which boolean_map is True Parameters ---------- boolean_map : nd_array, dtype = 'bool' mask for elements in the mesh_one elements vector for which we plan to remove the global ids """ for element_counter, element in enumerate( self.mesh_one.elements ): if boolean_mask[ element_counter ]: this_global_id = element.global_id self.mesh_two.get_element_with_global_id(this_global_id).global_id = None element.global_id = None del self.largest_mappings[0][element.id_in_frame] self.mapped_ids.remove(this_global_id) # index the change self.mesh_one.index_global_ids() self.mesh_two.index_global_ids() def is_isolated(self, element): """This function determines whether the element is isolated in mesh_one or not. Parameters ---------- element : mesh.Element instance a element in a mesh, has to be an element in mesh_one Returns ------- is_isolated : bool True if the element is isolated """ adjacent_elements = element.get_ids_of_adjacent_elements() already_mapped_adjacent_elements = [] for element_id in adjacent_elements: if self.mesh_one.get_element_with_frame_id(element_id).global_id is not None: already_mapped_adjacent_elements.append(element_id) if len( already_mapped_adjacent_elements ) == 1 or len(already_mapped_adjacent_elements) == 0: is_isolated = True elif len( already_mapped_adjacent_elements ) == 2: if not self.network_one.has_edge( already_mapped_adjacent_elements[0], already_mapped_adjacent_elements[1]): is_isolated = True else: is_isolated = False elif len( already_mapped_adjacent_elements ) == 3: number_edges = 0 if self.network_one.has_edge( already_mapped_adjacent_elements[0], already_mapped_adjacent_elements[1]): number_edges+=1 if self.network_one.has_edge( already_mapped_adjacent_elements[1], already_mapped_adjacent_elements[2]): number_edges+=1 if self.network_one.has_edge( already_mapped_adjacent_elements[0], already_mapped_adjacent_elements[2]): number_edges+=1 if number_edges < 2: is_isolated = True else: is_isolated = False else: is_isolated = False return is_isolated def index_global_ids(self): """add the preliminary mapping to the meshes, i.e. fill in the global ids for all mapped cells""" # import pdb; pdb.set_trace() for element_one_id in self.preliminary_mappings: current_maximal_global_id = max( self.mapped_ids ) new_global_id = current_maximal_global_id + 1 element_one = self.mesh_one.get_element_with_frame_id(element_one_id) element_one.global_id = new_global_id element_two = self.mesh_two.get_element_with_frame_id(self.preliminary_mappings[element_one_id]) element_two.global_id = new_global_id self.mapped_ids.append(new_global_id) self.mesh_one.index_global_ids() self.mesh_two.index_global_ids() self.reindex_global_ids() def index_global_ids_from_largest_mappings(self): """Index global ids using all mappings that are contained in all largest mappings""" preserved_mappings = {} for key in self.largest_mappings[0]: pair_is_in_other_mappings = True value = self.largest_mappings[0][key] for mapping in self.largest_mappings: if key not in mapping: pair_is_in_other_mappings = False break elif mapping[key] != value: pair_is_in_other_mappings = False break if pair_is_in_other_mappings: preserved_mappings[key] = value for global_id, frame_one_id in enumerate(preserved_mappings): self.mesh_one.get_element_with_frame_id(frame_one_id).global_id = global_id self.mesh_two.get_element_with_frame_id(self.largest_mappings[0][frame_one_id]).global_id = global_id # if global_id == 166: # import pdb; pdb.set_trace(); self.mapped_ids.append(global_id) self.mesh_two.index_global_ids() self.mesh_one.index_global_ids() def identify_division(self, connected_component_one, connected_component_two): """Identifies the mother and daughter cells of a division event, and adds the remaining cells to the preliminary mapping. Parameters ---------- connected_component_one : networkx Graph instance subgraph of the network corresponding to mesh_one connected_component_two : networkx Graph instance subgraph of the network corresponding to mesh_two """ mappings_based_on_adjacency = self.altered_get_mappings_by_adjacency(connected_component_one) # mappings_based_on_adjacency = self.get_mappings_by_adjacency(connected_component_one, connected_component_two) bordering_cells_mapping = self.find_bordering_cells_of_division( mappings_based_on_adjacency ) potential_mother_cells = self.mesh_one.get_not_yet_mapped_shared_neighbour_ids( bordering_cells_mapping.keys() ) mother_cell = None daughter_cells = None if len(potential_mother_cells) == 0: # In this case one of the daughter cells is triangular. # In this case it is not possible to say by adjacency only which cell is the mother cell, # Need to make geometric argument new_potential_mother_cells = bordering_cells_mapping.keys() potential_daughter_cells = bordering_cells_mapping.values() # add the triangular cell # this `+' is a list concatenation potential_daughter_cells += self.mesh_two.get_not_yet_mapped_shared_neighbour_ids( bordering_cells_mapping.values() ) mother_cell, daughter_cells = self.identify_division_event(new_potential_mother_cells, potential_daughter_cells, mappings_based_on_adjacency) connected_component_one.remove_node( mother_cell ) connected_component_two.remove_nodes_from( daughter_cells ) self.altered_fill_in_by_adjacency( connected_component_one ) elif len(potential_mother_cells) == 1: potential_mother_cell = potential_mother_cells[0] if potential_mother_cell in mappings_based_on_adjacency: del mappings_based_on_adjacency[potential_mother_cell] for frame_id in mappings_based_on_adjacency: self.preliminary_mappings[frame_id] = mappings_based_on_adjacency[frame_id] else: potential_daughter_cells = self.mesh_two.get_not_yet_mapped_shared_neighbour_ids( bordering_cells_mapping.values() ) # assert ( len(potential_daughter_cells) > 1) if len( potential_daughter_cells ) <= 1 : raise Exception("could not resolve division event") elif len(potential_daughter_cells) == 3: mother_cell, daughter_cells = self.identify_division_event(potential_mother_cells, potential_daughter_cells, mappings_based_on_adjacency) # connected_component_one.remove_node( mother_cell ) connected_component_two.remove_nodes_from( daughter_cells ) # self.altered_fill_in_by_adjacency( connected_component_one ) elif len(potential_daughter_cells) == 4 : self.altered_fill_in_by_adjacency( connected_component_one ) else: raise Exception("could not resolve division event") # if mother_cell is not None and daughter_cells is not None and daughter_cells != 12: # division_resolved = True # else: # division_resolved = False def find_bordering_cells_of_division(self, preliminary_mapping): """Find the bordering cells of a division in a preliminary mapping. Looks for cells that gain an edge in the mapping. Parameters ---------- preliminary_mapping : dict keys are frame ids in mesh_one, values are frame_ids in mesh_two. This preliminary mapping must contain the cells adjacent to the dividing cell. Returns ------- bordering_cells : dict mapping of the cells adjacent to the division """ bordering_cells = {} for cell_one in preliminary_mapping: num_edges_one = self.mesh_one.get_element_with_frame_id(cell_one).get_num_nodes() num_edges_two = self.mesh_two.get_element_with_frame_id(preliminary_mapping[cell_one]).get_num_nodes() if num_edges_two == num_edges_one + 1: bordering_cells[cell_one] = preliminary_mapping[cell_one] return bordering_cells def identify_division_event(self, potential_mother_cells, potential_daughter_cells, preliminary_mapping ): """Identify which of the potential mother cells and potential daughter cells are the actual mother and daughter cells of the division Parameters ---------- potential_mother_cells : list list of frame ids in mesh_one of potential mother cells potential_daughter cells : list list of frame ids in mesh_two of potential daughter cells preliminary_mapping : dict preliminary mapping that contains at least the two mother cells Returns ------- mother_cell : int frame_id of the mother cell in mesh_one daughter_cells : list list containing the frame ids of the two daughter cells of the division """ definite_daughter_cell_set = self.mesh_two.get_inclusive_not_yet_mapped_shared_neighbour_ids(potential_daughter_cells) # following if statement is to cover case of triangular cells if len( definite_daughter_cell_set ) == 1: definite_daughter_cell = definite_daughter_cell_set.pop() elif len( definite_daughter_cell_set ) == 4: # Only one of the provided cells will be triangular # if you reached this position in the code for frame_id in definite_daughter_cell_set: if self.mesh_two.get_element_with_frame_id(frame_id).get_num_nodes() == 3: definite_daughter_cell = frame_id break else: raise Exception("could not resolve division event") if definite_daughter_cell is None or definite_daughter_cell == 0 : raise Exception("could not resolve division event") if len(potential_daughter_cells) <= 1 : raise Exception("could not resolve division event") potential_daughter_cells.remove( definite_daughter_cell ) inverse_preliminary_mapping = { value: key for key, value in preliminary_mapping.items() } closest_centroid_distance = sys.float_info.max for frame_id in potential_daughter_cells: merged_element = self.merge_elements( self.mesh_two.get_element_with_frame_id(definite_daughter_cell), self.mesh_two.get_element_with_frame_id(frame_id) ) merged_centroid = merged_element.calculate_centroid() this_mother_cell = self.mesh_one.get_element_with_frame_id(inverse_preliminary_mapping[frame_id]) this_distance = np.linalg.norm(merged_centroid - this_mother_cell.calculate_centroid()) if this_distance < closest_centroid_distance: definite_mother_cell = this_mother_cell.id_in_frame second_definite_daughter_cell = frame_id closest_centroid_distance = this_distance return definite_mother_cell, [definite_daughter_cell, second_definite_daughter_cell] def resolve_division_events(self): """Resolve division events. This method will find all connected components of untracked cells in the second mesh. If a connected component is not at the boundary the mothod resolve_division_event_for_connected_component is called to attempt to resolve the division. """ # for frame_one_id in self.largest_mappings[0]: # self.preliminary_mappings[frame_one_id] = self.largest_mappings[0][frame_one_id] # self.preliminary_mappings = copy.copy(self.largest_mappings[0]) # first, identify any cells that are in network two but are not mapped network_two = self.mesh_two.generate_network_of_unidentified_elements(self.preliminary_mappings.values()) connected_components_in_network_two = list( nx.connected_component_subgraphs(network_two) ) for connected_component in connected_components_in_network_two: #check whether component is at mesh boundary: component_is_on_boundary = False for node in connected_component: if self.mesh_two.get_element_with_frame_id(node).check_if_on_boundary(): component_is_on_boundary = True break if not component_is_on_boundary: self.resolve_division_event_for_connected_component(connected_component) # self.reindex_global_ids() # then, get all their neighbouring cells, and all inverse images of neighbouring cells # make a connected component out of both # remove both from preliminary mappings # identify division event on both connected components def resolve_division_event_for_connected_component(self, connected_component): """This method will extend the connected component in network two by all it's first-order adjacent elements. It will then find the corresponding tracked elements to these adjacent elements in the first mesh. It will then construct a connected component of the corresponding elements in the first mesh and subsequently add any of their shared neighbours. Finally, it will remove all tracked cells in the first connected component from the preliminary mapping and pass both connected components to the method identify_division. If identify_division fails a warning is given the preliminary mapping is returned to it's oroginal state. This means that the preliminar mapping remains unaltered if division resolution fails. Parameters ---------- connected_component : list of ints list of frame ids of elements in network two that form a connected component. """ # collect_cells_for_connected_component_two adjacent_elements = [] for node in connected_component: this_element = self.mesh_two.get_element_with_frame_id(node) if not this_element.check_if_on_boundary(): adjacent_elements += this_element.get_ids_of_adjacent_elements() else: print 'element to remove is on boundary' unique_adjacent_elements = np.unique(np.array(adjacent_elements)) preliminary_adjacent_elements = list(set(unique_adjacent_elements).intersection( self.preliminary_mappings.values() )) mcs_adjacent_elements = list(set(unique_adjacent_elements).intersection( self.largest_mappings[0].values() )) # collect cells for connected_component_one inverse_preliminary_mapping = { value : key for key, value in self.preliminary_mappings.items() } inverse_largest_mapping = { value : key for key, value in self.largest_mappings[0].items() } inverse_images_of_preliminary_adjacent_elements = [ inverse_preliminary_mapping[frame_id] for frame_id in preliminary_adjacent_elements] inverse_images_of_mcs_adjacent_elements = [ inverse_largest_mapping[frame_id] for frame_id in mcs_adjacent_elements] unmapped_elements_belonging_to_connected_component_in_network_one = [] for element_id in inverse_images_of_preliminary_adjacent_elements: unmapped_elements_belonging_to_connected_component_in_network_one += self.mesh_one.get_not_yet_mapped_shared_neighbour_ids([element_id]) for element_id in inverse_images_of_mcs_adjacent_elements: unmapped_elements_belonging_to_connected_component_in_network_one += self.mesh_one.get_not_yet_mapped_shared_neighbour_ids([element_id]) unmapped_elements_belonging_to_connected_component_in_network_one = list(np.unique(np.array(unmapped_elements_belonging_to_connected_component_in_network_one))) unmapped_elements_belonging_to_connected_component_in_network_one += inverse_images_of_preliminary_adjacent_elements unmapped_elements_belonging_to_connected_component_in_network_one += inverse_images_of_mcs_adjacent_elements unmapped_elements_belonging_to_connected_component_in_network_two = [node for node in connected_component] + preliminary_adjacent_elements + mcs_adjacent_elements # remove the collected cells from the mapping old_mappings = dict() for frame_id in unmapped_elements_belonging_to_connected_component_in_network_one: if frame_id in self.preliminary_mappings: old_mappings[frame_id] = self.preliminary_mappings[frame_id] elif frame_id in self.largest_mappings[0]: old_mappings[frame_id] = self.largest_mappings[0][frame_id] global_id = self.mesh_one.get_element_with_frame_id(frame_id).global_id try: self.mesh_two.get_element_with_global_id(global_id).global_id = None self.mapped_ids.remove(global_id) except KeyError: pass self.mesh_one.get_element_with_frame_id(frame_id).global_id = None try: del( self.preliminary_mappings[frame_id] ) except KeyError: pass self.mesh_one.index_global_ids() self.mesh_two.index_global_ids() # make the connected components connected_component_one = self.network_one.subgraph( unmapped_elements_belonging_to_connected_component_in_network_one ) connected_component_two = self.network_one.subgraph( unmapped_elements_belonging_to_connected_component_in_network_two ) # pass to our connected component function try: self.identify_division(connected_component_one, connected_component_two) except: warnings.warn("could not resolve division event") for frame_id in old_mappings: self.preliminary_mappings[frame_id] = old_mappings[frame_id] def merge_elements(self, element_one, element_two): """Merge two elements into a bigger element, taking out the shared nodes. This function will leave the nodes untouched, i.e. their information about elements will not be updated. The original elements will also not be affected. Parameters ---------- element_one : Element instance first element that we would like to merge element_two : Element instance second element that we would like to merge Returns ------- merged_element : Element instance A new element over the existing nodes. Is not part of the element vectors in the nodes. """ new_element_nodes = [] for local_index, node in enumerate(element_one.nodes): if ( element_one.id_in_frame in node.get_adjacent_element_ids() and element_two.id_in_frame in node.get_adjacent_element_ids() ): next_node = element_one.nodes[ (local_index + 1)%element_one.get_num_nodes() ] if ( element_one.id_in_frame in next_node.get_adjacent_element_ids() and element_two.id_in_frame in next_node.get_adjacent_element_ids() ): new_element_nodes.append(node) one_edge_id = node.id break else: previous_node = element_one.nodes[ element_one.get_num_nodes() - 1 ] new_element_nodes.append(previous_node) one_edge_id = previous_node.id else: new_element_nodes.append(node) # we find the local index of the found node in the other cell for local_index, node in enumerate(element_two.nodes): if node.id == one_edge_id: second_element_local_index = local_index break # loop through the second element nodes reached_other_side = False while reached_other_side == False: second_element_local_index = ( second_element_local_index + 1 )%element_two.get_num_nodes() next_node = element_two.nodes[second_element_local_index] if ( element_one.id_in_frame in next_node.get_adjacent_element_ids() and element_two.id_in_frame in next_node.get_adjacent_element_ids() ): new_element_nodes.append(next_node) second_edge_id = next_node.id reached_other_side = True else: new_element_nodes.append(next_node) # we find the local index of the found node in the other cell for local_index, node in enumerate(element_one.nodes): if node.id == second_edge_id: first_element_local_index = local_index break for local_index in range( first_element_local_index + 1, element_one.get_num_nodes() ): new_element_nodes.append(element_one.nodes[local_index]) # We add the nodes to the element after instantiation, so that the element is not added to the node merged_element = mesh.Element([]) merged_element.nodes = new_element_nodes assert( merged_element.calculate_area() > 0 ) return merged_element def evaluate_tracking(first_mesh, second_mesh, ground_truth): """Evaluate the tracking. Parameters ---------- first_mesh : Mesh instance this is a mesh that has global ids in it second_mesh : Mesh instance another mesh with global ids in it ground truth : dictionary, keys and values are integers Keys are frame ids in first_mesh, values are frame ids in second_mesh Returns ------- success_boolean : bool True if less than four cells in ground_truth are not tracked, and if all tracked cells correspond to pairings in ground_truth number_tracked_cells : int Number of correctly tracked cells between first_mesh and second_mesh Warning ------- This function is not tested! """ correctly_tracked_cells = [] incorrectly_tracked_cells = [] missing_cells = [] for first_element in first_mesh.elements: # and that the mapping coincides with the ground truth for all tracked ids first_frame_id = first_element.id_in_frame if first_frame_id in ground_truth: if first_element.global_id is None: missing_cells.append(first_frame_id) else: this_global_id = first_element.global_id second_element = second_mesh.get_element_with_global_id(this_global_id) second_frame_id = second_element.id_in_frame if second_frame_id == ground_truth[first_frame_id]: correctly_tracked_cells.append(first_frame_id) else: incorrectly_tracked_cells.append(first_frame_id) success_boolean = ( len(missing_cells) < 4 and len(incorrectly_tracked_cells) == 0 ) number_tracked_cells = len(correctly_tracked_cells) number_incorrectly_tracked_cells = len(incorrectly_tracked_cells) return success_boolean, number_tracked_cells, number_incorrectly_tracked_cells def find_maximum_common_subgraph(mesh_one, mesh_two): """Find a mapping between the cell ids in both frames and assigns the global ids according to their maximum common subgraph. Writes global_id entries for all identified elements in both meshes. Parameters ---------- mesh_one : Mesh type First mesh mesh_two : Mesh type Second mesh Returns ------- mapped_ids : dict (int->int) the ids of elements that were identified in both meshes """ subgraph_finder = LocalisedSubgraphFinder(mesh_one, mesh_two) subgraph_finder.find_maximum_common_subgraph() post_processor = PostProcessor(mesh_one, mesh_two, subgraph_finder.largest_mappings) post_processor.tidy_current_mapping() post_processor.index_global_ids_from_largest_mappings() mesh_two.index_global_ids() mesh_one.index_global_ids() return post_processor.mapped_ids
Because these hereby forums have turned into theconspiracy forums, I thought I'd also do a little bit of conspiracying. Credit goes to the Deviantart community and whoever drew these pictures. I'm going to go more into depth with the next image (I hope) but here it shows Wall-E finding Groot, because we all know that Wall-E finds a plant in the movie and then Eve is extremely eager to keep it and take it back to space, but what if that plant he found was really Groot, as suggested by the image, and Eve was so eager to take it back to space because she knew this. What if Wall-E thought she wanted it to help rebuild humanity, because plants are a necessity for life, but Eve actually had other plans, this being the reason the captain actually sent her down there but just had a coverup for everyone else. See, here it shows that Groot started as the plant Wall-E found in the movie Wall-E, that explains why Eve was so eager to save it, maybe it wasn't just because it was the last plant on Earth and that plant was going to help humanity survive. We shall never know. He then grows up into a bigger plant and then becomes Groot. This next part leads me to my next theory, maybe Gepetto is actually a bad guy because he used Groot himself to make Pinocchio thus the real reason Pinocchio came alive and it really had nothing to do with the fairy. But Pinocchio had full vocab. Groot can't say anything except "I IS GROOT!" What if that was what the fairy truly did. Wall-e could only say his name. Groot could only say his name. Neither of them had a magical fairy visit them, it makes sense. In Echo Ridge.. Why you need to know mate?! You wanna stalk me?! I made this conspiracy up all by myself with the photos as inspiration. In my house. It's nice. Lemme just set the mood. You are already dead. This is hell. Bump. Come see how smart I am. Any other Groot Lovers out there?
import re import logging import subprocess import sys import json from urllib import quote_plus from myvdlclass.plugins.base import Extention from myvdlclass.lib.curl import CUrl, HTTPErrorEx logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) class MailRU(Extention): enabled=True ident="mailru" ##re_ident = re.compile("""\<meta name="twitter:player" content="(.*?)"\/\>""") re_ident = re.compile("""\<meta name=".*?" content="https:\/\/rutube\.ru\/play\/embed\/(\d+)"\/\>""") cookies_jar_file = "/tmp/myvdl-mailru-cookies.jar" default_headers = { #'Host': 'mail.ru', 'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:49.0) Gecko/20100101 Firefox/49.0', 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'Accept-Language': 'en-US,en;q=0.5', 'Connection': 'keep-alive', 'Upgrade-Insecure-Requests': '1', 'Cache-Control': 'max-age=0', } @classmethod def get_url_re(cls): return re.compile('^http(s|):\/\/(www\.|my\.|)mail\.ru') def __init__(self, url, engine, *args, **kwargs): self.url = url self.engine = engine def find_ident(self): """ ## Get by http://zasasa.com/ru/skachat_video_s_mail.ru.php ##http://my.mail.ru/video/inbox/www.kristina/29/31.html url = "http://videoapi.my.mail.ru/videos/inbox/www.kristina/29/31.json" ##https://my.mail.ru/v/thisishorosho_tv/video/_groupvideo/769.html url = "http://videoapi.my.mail.ru/videos/v/thisishorosho_tv/_groupvideo/769.json" ##https://my.mail.ru/list/xakepx/video/199/283.html url = "http://videoapi.my.mail.ru/videos/list/xakepx/199/283.json" ##https://my.mail.ru/mail/gromow1981/video/_myvideo/1395.html url = "http://videoapi.my.mail.ru/videos/mail/gromow1981/_myvideo/1395.json" ##https://my.mail.ru/corp/afisha/video/trailers/15375.html url = "http://videoapi.my.mail.ru/videos/corp/afisha/trailers/15375.json" """ url = "http://videoapi.my.mail.ru/videos/" dt = re.findall("http(?:s|)://my.mail.ru/video/(.*)\.html$", self.url) if len(dt) > 0: return url+dt[0]+".json" dt = re.findall("http(?:s|)://my.mail.ru/(.*)\.html$", self.url) if len(dt) > 0: return url+dt[0]+".json" return None def start(self): api_url = self.find_ident() if api_url is None: print "MAIL.RU: Unsupported url!" return None params = self.curl_get_default_params() try: answ = CUrl.download(api_url, 'compressed', **params) data = json.loads(answ) #print "DATA", json.dumps(data, indent=4) except Exception as err: print "MAIL.RU: Can't load video data, may be wrong url?" return None flname = "%s" % re.sub("""[\"\,\.\'\s\t\&\;\$\*]+""", "_", data["meta"]["title"]) hq = 0 url = None for v in data["videos"]: hq_ = int(v["key"].replace("p", "")) if hq_ > hq: hq = hq_ url = v["url"] if url is None: print "MAIL.RU: No video found!" flext = re.findall("""\/\d+\.(.*?)\?""", url)[0] flname += ".%s" % flext print "MAIL.RU: DOWNLOADING:", url CUrl.download(url, 'globoff', 'compressed', print_status=True, output=flname, **params) print print "Saved as: %s" % flname def curl_get_default_params(self, **kwargs): params = { 'headers': self.default_headers, 'cookie-jar': self.cookies_jar_file, 'cookie': self.cookies_jar_file, } params.update(kwargs) return params
The is a custom shield with some key variances from the traditional look. First, 4 pearls are gold. Next, the wreath is brown instead of black paint, blood drops on swords and multicolor lamp/flame. It comes with mounting hooks and free personal engraving on the back. This is a Camo Edition, mixed with purple and gold.
# Copyright 2014 Cisco Systems, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # @author: Padmanabhan Krishnan, Cisco Systems, Inc. # # Service Constants import dfa.server.services.constants as services_const AUTO_NWK_CREATE = True DEVICE = '' SCHED_POLICY = 'max_sched' VLAN_ID_MIN = services_const.VLAN_ID_MIN VLAN_ID_MAX = services_const.VLAN_ID_MAX MOB_DOMAIN_NAME = 'md0' HOST_PROF = 'serviceNetworkUniversalDynamicRoutingESProfile' HOST_FWD_MODE = 'proxy-gateway' PART_PROF = 'vrf-common-universal-external-dynamic-ES' EXT_PROF = 'externalNetworkUniversalDynamicRoutingESProfile' EXT_FWD_MODE = 'anycast-gateway' IN_IP_START = '100.100.2.0/24' IN_IP_END = '100.100.20.0/24' OUT_IP_START = '200.200.2.0/24' OUT_IP_END = '200.200.20.0/24' DUMMY_IP_SUBNET = '9.9.9.0/24' IN_SERVICE_SUBNET = 'FwServiceInSub' IN_SERVICE_NWK = 'FwServiceInNwk' SERV_PART_NAME = 'CTX-ext' OUT_SERVICE_SUBNET = 'FwServiceOutSub' OUT_SERVICE_NWK = 'FwServiceOutNwk' DUMMY_SERVICE_RTR = 'DUMMY_SRVC_RTR' DUMMY_SERVICE_NWK = 'DUMMY_SRVC_NWK' TENANT_EDGE_RTR = 'Cisco_TenantEdge' FW_TENANT_EDGE = 'TE' FW_CR_OP = 'CREATE' FW_DEL_OP = 'DELETE' RESULT_FW_CREATE_INIT = 'FAB_CREATE_PEND' RESULT_FW_CREATE_DONE = 'FAB_CREATE_DONE' RESULT_FW_DELETE_INIT = 'FAB_DELETE_PEND' RESULT_FW_DELETE_DONE = 'FAB_DELETE_DONE' FW_CONST = 'Firewall' INIT_STATE_STR = 'INIT' OS_IN_NETWORK_CREATE_FAIL = 'OS_IN_NETWORK_CREATE_FAIL' OS_INIT_STATE = OS_IN_NETWORK_CREATE_FAIL OS_IN_NETWORK_CREATE_SUCCESS = 'OS_IN_NETWORK_CREATE_SUCCESS' OS_OUT_NETWORK_CREATE_FAIL = 'OS_OUT_NETWORK_CREATE_FAIL' OS_OUT_NETWORK_CREATE_SUCCESS = 'OS_OUT_NETWORK_CREATE_SUCCESS' OS_DUMMY_RTR_CREATE_FAIL = 'OS_DUMMY_RTR_CREATE_FAIL' OS_DUMMY_RTR_CREATE_SUCCESS = 'OS_DUMMY_RTR_CREATE_SUCCESS' OS_CREATE_SUCCESS = OS_DUMMY_RTR_CREATE_SUCCESS DCNM_IN_NETWORK_CREATE_FAIL = 'DCNM_IN_NETWORK_CREATE_FAIL' DCNM_INIT_STATE = DCNM_IN_NETWORK_CREATE_FAIL DCNM_IN_NETWORK_CREATE_SUCCESS = 'DCNM_IN_NETWORK_CREATE_SUCCESS' DCNM_IN_PART_UPDATE_FAIL = 'DCNM_IN_PART_UPDATE_FAIL' DCNM_IN_PART_UPDATE_SUCCESS = 'DCNM_IN_PART_UPDATE_SUCCESS' DCNM_OUT_PART_CREATE_FAIL = 'DCNM_OUT_PART_CREATE_FAIL' DCNM_OUT_PART_CREATE_SUCCESS = 'DCNM_OUT_PART_CREATE_SUCCESS' DCNM_OUT_NETWORK_CREATE_FAIL = 'DCNM_OUT_NETWORK_CREATE_FAIL' DCNM_OUT_NETWORK_CREATE_SUCCESS = 'DCNM_OUT_NETWORK_CREATE_SUCCESS' DCNM_OUT_PART_UPDATE_FAIL = 'DCNM_OUT_PART_UPDATE_FAIL' DCNM_OUT_PART_UPDATE_SUCCESS = 'DCNM_OUT_PART_UPDATE_SUCCESS' DCNM_CREATE_SUCCESS = DCNM_OUT_PART_UPDATE_SUCCESS # FABRIC_PREPARE_SUCCESS = DCNM_OUT_PART_UPDATE_SUCCESS FABRIC_PREPARE_SUCCESS = 'FABRIC_PREPARE_SUCCESS' OS_IN_NETWORK_DEL_FAIL = 'OS_IN_NETWORK_DEL_FAIL' OS_IN_NETWORK_DEL_SUCCESS = 'OS_IN_NETWORK_DEL_SUCCESS' OS_OUT_NETWORK_DEL_FAIL = 'OS_OUT_NETWORK_DEL_FAIL' OS_OUT_NETWORK_DEL_SUCCESS = 'OS_OUT_NETWORK_DEL_SUCCESS' OS_DUMMY_RTR_DEL_FAIL = 'OS_DUMMY_RTR_DEL_FAIL' OS_DUMMY_RTR_DEL_SUCCESS = 'OS_DUMMY_RTR_DEL_SUCCESS' OS_DEL_SUCCESS = 'OS_DUMMY_RTR_DEL_SUCCESS' DCNM_IN_NETWORK_DEL_FAIL = 'DCNM_IN_NETWORK_DEL_FAIL' DCNM_IN_NETWORK_DEL_SUCCESS = 'DCNM_IN_NETWORK_DEL_SUCCESS' DCNM_IN_PART_UPDDEL_FAIL = 'DCNM_IN_PART_UPDDEL_FAIL' DCNM_IN_PART_UPDDEL_SUCCESS = 'DCNM_IN_PART_UPDDEL_SUCCESS' DCNM_OUT_PART_DEL_FAIL = 'DCNM_OUT_PART_DEL_FAIL' DCNM_OUT_PART_DEL_SUCCESS = 'DCNM_OUT_PART_DEL_SUCCESS' DCNM_OUT_NETWORK_DEL_FAIL = 'DCNM_OUT_NETWORK_DEL_FAIL' DCNM_OUT_NETWORK_DEL_SUCCESS = 'DCNM_OUT_NETWORK_DEL_SUCCESS' DCNM_OUT_PART_UPDDEL_FAIL = 'DCNM_OUT_PART_UPDDEL_FAIL' DCNM_OUT_PART_UPDDEL_SUCCESS = 'DCNM_OUT_PART_UPDDEL_SUCCESS' DCNM_DELETE_SUCCESS = DCNM_IN_NETWORK_DEL_SUCCESS INIT = 0 MAX_STATE = FABRIC_PREPARE_SUCCESS # 17 INIT_STATE = 100 OS_IN_NETWORK_STATE = INIT_STATE + 1 OS_OUT_NETWORK_STATE = OS_IN_NETWORK_STATE + 1 OS_DUMMY_RTR_STATE = OS_OUT_NETWORK_STATE + 1 OS_COMPL_STATE = OS_DUMMY_RTR_STATE DCNM_IN_NETWORK_STATE = OS_DUMMY_RTR_STATE + 1 DCNM_IN_PART_UPDATE_STATE = DCNM_IN_NETWORK_STATE + 1 DCNM_OUT_PART_STATE = DCNM_IN_PART_UPDATE_STATE + 1 DCNM_OUT_NETWORK_STATE = DCNM_OUT_PART_STATE + 1 DCNM_OUT_PART_UPDATE_STATE = DCNM_OUT_NETWORK_STATE + 1 FABRIC_PREPARE_DONE_STATE = DCNM_OUT_PART_UPDATE_STATE + 1 # The below is for debug display fw_state_fn_dict = {} fw_state_fn_dict[INIT_STATE] = 'INIT_STATE' fw_state_fn_dict[OS_IN_NETWORK_STATE] = 'OS_IN_NETWORK_CREATE_STATE' fw_state_fn_dict[OS_OUT_NETWORK_STATE] = 'OS_OUT_NETWORK_CREATE_STATE' fw_state_fn_dict[OS_DUMMY_RTR_STATE] = 'OS_DUMMY_RTR_CREATE_STATE' fw_state_fn_dict[DCNM_IN_NETWORK_STATE] = 'DCNM_IN_NETWORK_CREATE_STATE' fw_state_fn_dict[DCNM_IN_PART_UPDATE_STATE] = 'DCNM_IN_PART_UPDATE_STATE' fw_state_fn_dict[DCNM_OUT_PART_STATE] = 'DCNM_OUT_PART_CREATE_STATE' fw_state_fn_dict[DCNM_OUT_NETWORK_STATE] = 'DCNM_OUT_NETWORK_CREATE_STATE' fw_state_fn_dict[DCNM_OUT_PART_UPDATE_STATE] = 'DCNM_OUT_PART_UPDATE_STATE' fw_state_fn_dict[FABRIC_PREPARE_DONE_STATE] = 'FABRIC_PREPARE_DONE_STATE' fw_state_fn_del_dict = {} fw_state_fn_del_dict[INIT_STATE] = 'INIT_STATE' fw_state_fn_del_dict[OS_IN_NETWORK_STATE] = 'OS_IN_NETWORK_DELETE_STATE' fw_state_fn_del_dict[OS_OUT_NETWORK_STATE] = 'OS_OUT_NETWORK_DELETE_STATE' fw_state_fn_del_dict[OS_DUMMY_RTR_STATE] = 'OS_DUMMY_RTR_DELETE_STATE' fw_state_fn_del_dict[DCNM_IN_NETWORK_STATE] = 'DCNM_IN_NETWORK_DELETE_STATE' fw_state_fn_del_dict[DCNM_IN_PART_UPDATE_STATE] = 'DCNM_IN_PART_UPDDEL_STATE' fw_state_fn_del_dict[DCNM_OUT_PART_STATE] = 'DCNM_OUT_PART_DELETE_STATE' fw_state_fn_del_dict[DCNM_OUT_NETWORK_STATE] = 'DCNM_OUT_NETWORK_DELETE_STATE' fw_state_fn_del_dict[DCNM_OUT_PART_UPDATE_STATE] = 'DCNM_OUT_PART_UPDDEL_STATE' fw_state_fn_del_dict[FABRIC_PREPARE_DONE_STATE] = 'FABRIC_PREPARE_DONE_STATE'
Plant geraniums close to tomatoes. Many bugs hate the chemicals in geraniums and will keep away from them, so for those who plant geraniums around your tomato patch and sparsely in your garden, the caterpillars and worms can have a troublesome time stepping into the tomato crops. This is especially true when you have your backyard enclosed. Keep away from chemical compounds in your backyard. Hold the toxins out of the meals and the water provide. Among the finest components about organic gardening is eliminating chemical compounds out of your meals provide. There are a lot of options to chemical fertilizers and pesticides. Nearly any drawback could be cured with the right management. An awesome tip for getting probably the most out of your organic backyard is to use soaker hoses. These hoses may be left on for a number of hours on low strain, which will save you time from having to stand with a regular hose or filling up a watering can. This offers you time to work in another space of your garden. So, whether you are a new or skilled gardener, you’ve now bought some ideas you could implement in your garden. Few things in life are extra satisfying than working the soil; and it’s even more satisfying when you are able to do it nature’s means. Rotate your crops to prevent permanent populations of pests in your garden. As with any ecosystem, pests need a sure period of time to nest and construct up a proper population inside a garden. These pests are specially fitted to one environment and one meals source. By switching their meals source you’ll be able to basically hold your pest population down just because they are unable to adapt to the new kind of plant. Apply equal parts of dried plant materials and inexperienced into your compost pile. Examples of green plant materials are spent flowers, fruit and vegetable waste, grass clippings, weeds, and leaves. Sawdust, straw, cardboard, paper and wood pulp are all examples of dried plant material. Your compost pile ought to by no means comprise meat, ashes or charcoal. As you possibly can see from the above listing of ideas, natural gardening can be very helpful in getting the perfect and freshest produce doable out of your plants. After following the following tips, you will now not be new to the world of organic gardening, however you will turn out to be an organic gardening skilled. In your organic garden, attempt utilizing floating row covers to prevent moths from laying eggs on your vegetation. Floating row covers, that are constituted of lightweight material that has been specially designed to allow light and water to penetrate it, can be used as an effective cowl for your crops to stop moths from laying eggs. This helps to protect your crops from caterpillar harm later within the growing season. To keep rodents, deer, and different pesky animals away out of your natural garden, use natural repellents. A spray made out of hot peppers can be effective. In any other case strive a spray containing urine or rotten eggs. Regardless of what you employ, apply the sprays liberally and reapply commonly especially after a rain bathe. Making rich, natural compost in your backyard doesn’t take special tools. All you actually need to do is dump your kitchen scraps, garden trimmings and leaves in a pile, and let nature take its course. It might take a bit longer, however in the end you will have a wealthy, healthy compost. Keep in mind to disinfect any tools that you’ve got utilized in contaminated soil. If you wish to grow greens, but shouldn’t have the room, think about planting vegetables that grow on a vine.
#mini-AngeCryption #Ange Albertini 2014, BSD Licence - with the help of Jean-Philippe Aumasson import struct import sys import binascii source_file, target_file, result_file, key = "logo11w.png", "duckduckgo.png", "angecrypted.png", "AngeCryptionKey!" from Crypto.Cipher import AES BS = 16 pad = lambda s: s if (len(s) % 16) == 0 else s + (16 - len(s) % 16) * "\0" with open(source_file, "rb") as f: s = pad(f.read()) with open(target_file, "rb") as f: t = pad(f.read()) p = s[:BS] # our first plaintext block ecb_dec = AES.new(key, AES.MODE_ECB) assert BS >= 16 size = len(s) - BS # our dummy chunk type # 4 letters, first letter should be lowercase to be ignored chunktype = 'rmll' # PNG signature, chunk size, our dummy chunk type c = PNGSIG = '\x89PNG\r\n\x1a\n' + struct.pack(">I",size) + chunktype c = ecb_dec.decrypt(c) IV = "".join([chr(ord(c[i]) ^ ord(p[i])) for i in range(BS)]) cbc_enc = AES.new(key, AES.MODE_CBC, IV) result = cbc_enc.encrypt(s) #write the CRC of the remaining of s at the end of our dummy block result = result + struct.pack(">I", binascii.crc32(result[12:]) % 0x100000000) #and append the actual data of t, skipping the sig result = result + t[8:] #we have our result, key and IV #generate the result file cbc_dec = AES.new(key, AES.MODE_CBC, IV) with open(result_file, "wb") as f: f.write(cbc_dec.decrypt(pad(result))) print " ".join("%02X" % ord(i) for i in IV) #generate the script with open("crypt.py", "wb") as f: f.write("""from Crypto.Cipher import %(AES)s AES = %(AES)s.new(%(key)s, %(AES)s.MODE_CBC, %(IV)s) with open(%(source)s, "rb") as f: d = f.read() d = AES.encrypt(d) with open("encrypted.png", "wb") as f: f.write(d)""" % { 'AES': AES.__name__.split(".")[-1], 'key':`key`, 'IV':`IV`, 'source':`result_file`, 'target':`target_file`} )
Thank you for joining us in a shopping event that will benefit Chicago City Day School and women entrepreneurs. personal accessories, jewelry, home decor and kitchen. Highlighted here are the items we thought you'd like best, but there's more! Enjoy browsing the entire Tuttle View site. All purchases made will support Chicago City Day School. FREE SHIPPING on all purchases made through this event. Passionate about coloring the world - even outside the lines. These giant posters are the ultimate fun project for anyone who likes to color. Spread them on the floor or hang them on the wall and invite everyone to join in. These posters also makes a mega-awesome party project. They are a great way to introduce art and encourage collaboration. They are also just a good reason to shut off the TV, get creative and go crazy with color. Grace was inspired by her daughter who would repeatedly waste paper by creating stories in her notebook, then crumbling up the pages, only to start a new story all over again. Grace invented the original, reusable Chalk-A-Doodle™ Book. 4 ButterStix included with each book. Fast easy decorating - peel 'n stick vinyl decals! Mark Edge began crafting jewelry in the seventh grade. Today, his distinguished jewelry is handcrafted with a bevy of materials that are deftly transformed into timeless, yet distinctive one-of-a-kind pieces. The roots of the BEATRIZ BALL Collection began in 1991, when I began working hand-in-hand with skilled metalware artisans in the outskirts of Mexico City. Made of eco-friendly, 100% recycled aluminum. distinction. Beautiful products you can truly feel good about using and giving. Celebrate unique and individual style with our trend-right decorative accents. Concrete lighting and planters ideal for outdoor use. High-quality, reversible bags and totes.
#!/usr/bin/env python3.5 # -*- coding: utf-8 -*- """ This is the note module, taking care of all note related functions. Note data is found in data/notes.json. """ from telegram.ext import CommandHandler from modules import helper, strings def save_note(bot, update, args): notes = helper.loadjson(loc_notesjson) chat_id = str(update.message.chat_id) try: notes[chat_id] except KeyError: notes[chat_id] = {} if len(args) >= 2: # add note to note repo notename = args[0] del args[0] note_data = " ".join(args) notes[chat_id][notename] = note_data print("Added new note \"" + notename + "\" with content \"" + note_data + "\".") else: update.message.reply_text(strings.errBadFormat) helper.dumpjson(loc_notesjson, notes) def get_note(bot, update, args): notes = helper.loadjson(loc_notesjson) chat_id = str(update.message.chat_id) try: notes[chat_id] except KeyError: notes[chat_id] = {} if len(args) == 1: msg = "" try: msg = notes[chat_id][args[0]] except KeyError: msg = errNoNoteFound + args[0] update.message.reply_text(msg) else: update.message.reply_text(strings.errBadFormat) def all_notes(bot, update, args): notes = helper.loadjson(loc_notesjson) chat_id = str(update.message.chat_id) try: notes[chat_id] except KeyError: notes[chat_id] = {} msg = "No notes in this chat." if len(notes[chat_id]) > 0: msg = msgNotesForChat for note in notes[chat_id]: msg += "\n" + note update.message.reply_text(msg) save_handler = CommandHandler("save", save_note, pass_args=True) get_handler = CommandHandler("get", get_note, pass_args=True) note_handler = CommandHandler("note", all_notes, pass_args=True) loc_notesjson = "./data/notes.json" msgNotesForChat = "These are the notes i have saved for this chat: \n" errNoNoteFound = "No note found by the name of "
Accessories are positioned at a particular place on a player model, since the anime girl is a custom model it is likely bigger therefore the cat ears are inside the model and likely not visible when equipped. Unfortunately we cannot fix this as if we moved the position to work with the anime model then on the default player models it will be messed up. I am curious to know how our members browse the forums. Do you find new posts by using the Discover Page? Do you browse individual forum areas like Introductions, General Discussion, Share? Some members may do all of these but what do you find yourself doing more frequently? Yes, this is a known bug I was debugging it yesterday it seems like this unequip bug is only occurring for Gold VIPs. I have added Ban as a punishment to the Trolling/Harassing rules as it does make sense that if after following the normal mutes/gags if it continues it definitely should lead to a ban. Repeating maps bug/changelevel bug should now be fixed on all servers 🤞 Voting for the next map will display once 5 minutes is left or 10 rounds have passed. Use timeleft command. jb_newyork_defy was removed from the pool due to many players experiencing texture errors. I will close this post now since your issue was resolved on discord. When was the last time prior to today did you connect to the server? I believe the issue should now be resolved on TTT so please join the server and click server website again. If the csgo blog page opened then try joining a different server such as mini games http://steam:/connect/mg.DEFYclan.com Once on mini games try opening the servers website if it leads you to our rules page then you should be good to go. Damage is now enabled on freeday. Rebels can no longer request a heal.
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, TYPE_CHECKING from azure.core.configuration import Configuration from azure.core.pipeline import policies from azure.mgmt.core.policies import ARMHttpLoggingPolicy from .._version import VERSION if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential class ResourceManagementClientConfiguration(Configuration): """Configuration for ResourceManagementClient. Note that all parameters used to create this instance are saved as instance attributes. :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: The ID of the target subscription. :type subscription_id: str """ def __init__( self, credential: "AsyncTokenCredential", subscription_id: str, **kwargs: Any ) -> None: if credential is None: raise ValueError("Parameter 'credential' must not be None.") if subscription_id is None: raise ValueError("Parameter 'subscription_id' must not be None.") super(ResourceManagementClientConfiguration, self).__init__(**kwargs) self.credential = credential self.subscription_id = subscription_id self.api_version = "2019-07-01" self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) kwargs.setdefault('sdk_moniker', 'mgmt-resource/{}'.format(VERSION)) self._configure(**kwargs) def _configure( self, **kwargs: Any ) -> None: self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) self.authentication_policy = kwargs.get('authentication_policy') if self.credential and not self.authentication_policy: self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
Why not treat your Valentine to a yummy coffee beverage? Below are a few recipes from Susan Zimmers I love coffee! book. If you love coffee, check out our discussion forum topic I love coffee more than" http://www.bunnathome.com/community/coffee-coffee-coffee/i-love-my-coffee-more and tell us what youd give up to keep enjoying your coffee. Mix the syrup, coffee and milk together in a tempered glass coffee mug. Top with whipped cream and a cherry. Shake the coffee, syrups, and ice in a martini shaker. Strain into a chilled stylish martini glass. Top with dollops of sweetened whip cream. Garnish with cocoa powder and a few fresh raspberries. Serve with a spoon! Combine the coffee and the syrups in a 12-ounce latte mug or tall tempered glass. Fill the rest of the way with the steamed milk. Stir, and serve immediately. Enjoy and sip slowly ( like a turtle). You can learn more about I love coffee! by Susan Zimmer at http://www.ilovecoffeebook.com/.
# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals from google.appengine.ext import ndb from distutils import log from gaecookie.decorator import no_csrf from gaepermission.decorator import login_not_required from tekton.gae.middleware.json_middleware import JsonUnsecureResponse, JsonResponse from tema.tema_model import TemaForm, Tema __author__ = 'Bea' @login_not_required @no_csrf def index(): query = Tema.query_ordenada_por_titulo() temas = query.fetch() for tema in temas: key = tema.key key_id = key.id() form = TemaForm() temas = [form.fill_with_model(t) for t in temas] return JsonResponse(temas) @login_not_required @no_csrf def salvar(_resp, **propriedades): form = TemaForm(**propriedades) erros = form.validate() if erros: _resp.set_status(400) return JsonUnsecureResponse(erros) tema = form.fill_model() tema.put() dct = form.fill_with_model(tema) log.info(dct) return JsonUnsecureResponse(dct) @login_not_required @no_csrf def editar(_resp, **propriedades): form = TemaForm(**propriedades) erros = form.validate() if erros: _resp.set_status(400) return JsonUnsecureResponse(erros) tema = ndb.Key(Tema, int(propriedades['tema_id'])).get() tema.titulo = propriedades['titulo'] tema.descricao = propriedades['descricao'] tema.put() dct = form.fill_with_model(tema) log.info(dct) return JsonUnsecureResponse(dct) @login_not_required @no_csrf def deletar(tema_id): key = ndb.Key(Tema, int(tema_id)) key.delete()
​As is the case in just about every industry, competition among top tire manufacturers is constant and spirited. Each manufacturer vies for recognition and top placements across the broad spectrum of tire categories. Historically, this ongoing competition has meant that the subjective and objective rankings in each tire category are pretty volatile. Just as one tire establishes itself as the go-to, another will edge its performance and (sometimes also) undercut its price. And while there have been perennial top tires in certain categories, shifts in rankings are far more the norm. What is definitely uncommon is for one tire to achieve a firm and almost inarguable position as best in class for years at a time, and over multiple generations of tires. But that's exactly what Michelin has done in the street performance tire category. Two recent performance tire tests have reemphasized Michelin's hold on the top of the street performance tire market, and more specifically, Michelin Pilot Super Sport supremacy. As we suggested some time ago in our own Michelin Pilot Super Sport review, the "PSS" is a generational sort of performance tire. At the time of the review, we predicted that it would take a monumental effort from the competition to surpass its breadth of capabilities. Tire Rack recently pitted the Pilot Super Sport against three new competitors in a comparison test -- the Kumho Ecsta PS91, Pirelli P Zero (current "PZ4" generation), and Goodyear Eagle F1 Asymmetric 3. Each of these performance tires came to market long after the Pilot Super Sport, and therefore had the advantage of the PSS as a developmental reference point. Nevertheless, the Michelin emerged the clear favorite overall. See the full Tire Rack comparison test here, and check out the video below. Across the pond, UK publication Auto Express tested the Michelin Pilot Sport 4 against eight worthy competitors. (The Pilot Sport 4 is said to be the successor to the PSS.) Here again, the Michelin performance tire achieved the top placement with the P Zero just behind. Pirelli has made serious performance and qualitative gains with their latest P Zero generation. Prior to the introduction of this latest "PZ4" generation, the P Zero was starting to be comprehensively overshadowed by more modern options. The latest P Zero iteration is the real deal, and as an original equipment tire for a number of performance vehicles, definitely value-added to those cars. The Goodyear Eagle F1 Asymmetric 3 displayed commendable performance and qualities in both tests. This tire should give Goodyear a new foothold and some credibility in the performance tire category moving forward. The Continental ContiSportContact 5 was featured in the Auto Express test, and it is the Continental ExtremeContact DW that continues to be overlooked in our view. The ExtremeContact DW remains our preferred street performance tire from Continental despite its maximum performance limitations. For more information see our Continental ExtremeContact DW review. Overall, tire matters have never been better for performance driving enthusiasts!
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion import django.utils.timezone def forward(apps, schema_editor): Surety_Scheme = apps.get_model('geneaprove', 'Surety_Scheme') SSPart = apps.get_model('geneaprove', 'Surety_Scheme_Part') PPart = apps.get_model('geneaprove', 'Place_Part_Type') RType = apps.get_model('geneaprove', 'Repository_Type') EType = apps.get_model('geneaprove', 'Event_Type') ETRole = apps.get_model('geneaprove', 'Event_Type_Role') CPT = apps.get_model('geneaprove', 'Characteristic_Part_Type') CIT = apps.get_model('geneaprove', 'Citation_Part_Type') GT = apps.get_model('geneaprove', 'Group_Type') db_alias = schema_editor.connection.alias s = Surety_Scheme( description="This is the default surety scheme, based on five levels of certainty", name="Default scheme") s.save() SSPart.objects.using(db_alias).bulk_create([ SSPart(name="very high", scheme=s, sequence_number=5), SSPart(name="high", scheme=s, sequence_number=4), SSPart(name="normal", scheme=s, sequence_number=3), SSPart(name="low", scheme=s, sequence_number=2), SSPart(name="very low", scheme=s, sequence_number=1)]) PPart.objects.using(db_alias).bulk_create([ PPart(gedcom="ADR1", name="address"), PPart(gedcom="ADR2", name="address2"), PPart(gedcom="CITY", name="city"), PPart(gedcom="CTRY", name="country"), PPart(gedcom="", name="county"), PPart(gedcom="MAP", name="GPS coordinates"), PPart(gedcom="", name="monument"), PPart(gedcom="", name="province"), PPart(gedcom="STAE", name="state"), PPart(gedcom="POST", name="zipcode"), PPart(gedcom="WWW", name="website"), PPart(gedcom="EMAIL", name="email"), PPart(gedcom="FAX", name="fax"), PPart(gedcom="PHON", name="phone"), PPart(gedcom="WEB", name="website"), PPart(gedcom="NOTE", name="note"), PPart(gedcom="FORM", name="place hierarchy"), ]) CIT.objects.using(db_alias).bulk_create([ CIT(gedcom='TITL', name='title'), CIT(gedcom='CHAN', name='last change'), CIT(gedcom='DATE', name='date'), CIT(gedcom='PAGE', name='page'), CIT(gedcom='QUAY', name='quality'), CIT(gedcom='TEXT', name='text'), CIT(gedcom='AUTH', name='author'), CIT(gedcom='PUBL', name='publisher')]) RType.objects.using(db_alias).bulk_create([ RType(description="", name="album"), RType(description="", name="archive"), RType(description="", name="bookstore"), RType(description="", name="cemetery"), RType(description="", name="church"), RType(description="", name="collection"), RType(description="", name="library"), RType(description="", name="web site")]) EType.objects.using(db_alias).bulk_create([ EType(gedcom="", name="acquittal"), EType(gedcom="ADOP", name="adoption"), EType(gedcom="CHRA", name="adult christening"), EType(gedcom="ANUL", name="annulment"), EType(gedcom="", name="arrest"), EType(gedcom="BAPM", name="baptism"), EType(gedcom="BARM", name="bar mitzvah"), EType(gedcom="BASM", name="bas mitzvah"), EType(gedcom="BIRT", name="birth"), EType(gedcom="BLES", name="blessing"), EType(gedcom="BURI", name="burial"), EType(gedcom="CENS", name="census"), EType(gedcom="CHR", name="christening"), EType(gedcom="", name="civil union"), EType(gedcom="CONF", name="confirmation"), EType(gedcom="", name="conviction"), EType(gedcom="CREM", name="cremation"), EType(gedcom="DEAT", name="death"), EType(gedcom="DIV", name="divorce"), EType(gedcom="DIVF", name="divorce filed"), EType(gedcom="EMIG", name="emigration"), EType(gedcom="ENGA", name="engagement"), EType(gedcom="FCOM", name="first communion"), EType(gedcom="GRAD", name="graduation"), EType(gedcom="IMMI", name="immigration"), EType(gedcom="", name="indictement"), EType(gedcom="MARB", name="marriage bans"), EType(gedcom="MARR", name="marriage"), EType(gedcom="MARC", name="marriage contract"), EType(gedcom="MARL", name="marriage license"), EType(gedcom="MARS", name="marriage settlement"), EType(gedcom="_MIL", name="military service"), EType(gedcom="EDUC", name="education"), EType(gedcom="_DEG", name="diploma"), EType(gedcom="NATU", name="naturalization"), EType(gedcom="ORDN", name="ordination"), EType(gedcom="EVEN", name="other event"), EType(gedcom="PROB", name="probate"), EType(gedcom="", name="religious conversion"), EType(gedcom="RESI", name="residence"), EType(gedcom="RETI", name="retirement"), EType(gedcom="", name="voyage"), EType(gedcom="WILL", name="will")]) birth = EType.objects.get(gedcom="BIRT") adoption = EType.objects.get(gedcom="ADOP") ETRole.objects.using(db_alias).bulk_create([ ETRole(name="principal", type=None), ETRole(name="father", type=birth), ETRole(name="mother", type=birth), ETRole(name="adopting", type=adoption), ETRole(name="not adopting", type=adoption), ]) CPT.objects.using(db_alias).bulk_create([ CPT(gedcom="", is_name_part=False, name="address"), CPT(gedcom="NOTE", is_name_part=False, name="note"), CPT(gedcom="FACT", is_name_part=False, name="other"), CPT(gedcom="_IMG", is_name_part=False, name="image"), CPT(gedcom="OCCU", is_name_part=False, name="occupation"), CPT(gedcom="", is_name_part=False, name="AFN"), CPT(gedcom="", is_name_part=False, name="cause of death"), CPT(gedcom="CAST", is_name_part=False, name="cast name"), CPT(gedcom="PROP", is_name_part=False, name="property (real-estate,...)"), CPT(gedcom="", is_name_part=False, name="email"), CPT(gedcom="", is_name_part=False, name="ethnicity"), CPT(gedcom="", is_name_part=False, name="language"), CPT(gedcom="", is_name_part=False, name="literacy"), CPT(gedcom="", is_name_part=False, name="living"), CPT(gedcom="", is_name_part=False, name="marital status"), CPT(gedcom="", is_name_part=False, name="medical condition"), CPT(gedcom="", is_name_part=False, name="nationality"), CPT(gedcom="NCHI", is_name_part=False, name="number of children"), CPT(gedcom="NMR", is_name_part=False, name="number of marriages"), CPT(gedcom="", is_name_part=False, name="patronymic"), CPT(gedcom="", is_name_part=False, name="personality"), CPT(gedcom="DSCR", is_name_part=False, name="physical description"), CPT(gedcom="RELI", is_name_part=False, name="religion"), CPT(gedcom="IDNO", is_name_part=False, name="national identification number"), CPT(gedcom="NATI", is_name_part=False, name="national or tribe origin"), CPT(gedcom="RFN", is_name_part=False, name="record file number"), CPT(gedcom="AFN", is_name_part=False, name="ancestral file number"), CPT(gedcom="RIN", is_name_part=False, name="RIN"), CPT(gedcom="SEX", is_name_part=False, name="sex"), CPT(gedcom="TYPE", is_name_part=False, name="type"), CPT(gedcom="SSN", is_name_part=False, name="social security number"), CPT(gedcom="", is_name_part=False, name="telephone"), CPT(gedcom="TITL", is_name_part=False, name="title"), CPT(gedcom="REFN", is_name_part=False, name="reference number"), CPT(gedcom="", is_name_part=True, name="dit name"), CPT(gedcom="", is_name_part=True, name="farm name"), CPT(gedcom="", is_name_part=True, name="matronymic name"), CPT(gedcom="", is_name_part=True, name="mononame"), CPT(gedcom="SURN", is_name_part=True, name="surname"), CPT(gedcom="GIVN", is_name_part=True, name="given name"), CPT(gedcom="_MIDL", is_name_part=True, name="middle name"), CPT(gedcom="NPFX", is_name_part=True, name="name prefix"), CPT(gedcom="NICK", is_name_part=True, name="nickname"), CPT(gedcom="SPFX", is_name_part=True, name="surname prefix"), CPT(gedcom="NSFX", is_name_part=True, name="name suffix"), CPT(gedcom="", is_name_part=True, name="religious name")]) GT.objects.using(db_alias).bulk_create([ GT(gedcom="", name="association"), GT(gedcom="", name="caste"), GT(gedcom="", name="children of union"), GT(gedcom="", name="friends"), GT(gedcom="", name="neighbors"), GT(gedcom="", name="passenger list"), GT(gedcom="", name="passengers"), GT(gedcom="", name="same person")]) class Migration(migrations.Migration): dependencies = [ ('geneaprove', '0002_auto_20180314_0957') ] operations = [ migrations.RunPython(forward) ]
The Targus 15.6” Classic Slim Briefcase is a thin, lightweight case equipped to store all of your essentials on the go. Ideal for the casual business executive, this functional case features a classic look with red accents. The spacious front pocket includes smartly organized compartments to store your smartphone, pens, business cards, adapters, cords, and other accessories. Constructed of durable polyester material, the case includes metal, contoured zipper pulls, and comfortable padded handles and shoulder strap. The large back slip pocket is great for storing files and documents while the included trolley strap makes it easy to stack this case onto rolling luggage. The Targus 16” Classic Topload is a thin, lightweight case equipped to store all of your essentials on-the-go. The padded laptop compartment is designed to accommodate laptops with up to 16” screens. Ideal for the casual business executive, this lightweight and functional case features a classic look with red accents. The spacious front pocket includes smartly-organized compartments to store your mobile phone, pens, business cards, adapters, cords and other midsized accessory items. The large back slip pocket is good for storing files and documents while the included trolley strap makes it easy to stack this case onto rolling luggage. Constructed of durable polyester material, the case includes metal, contoured zipper pulls and comfortable, padded handles and shoulder strap. This case also includes a convenient trolley strap to stack case on rolling luggage for easy travel. Slim design for the style-conscious minimalist who likes to travel light with only the essentials. Size: 16 3/4W" x 12 3/4H" x 3 1/4D" Carton/Dimensions: 18L" x 12W" x 14H"
########################################################### # # Copyright (c) 2005, Southpaw Technology # All Rights Reserved # # PROPRIETARY INFORMATION. This software is proprietary to # Southpaw Technology, and is not to be reproduced, transmitted, # or disclosed in any way without written permission. # # # __all__ = ["FileException", "File", "FileAccess", "IconCreator", "FileGroup", "FileRange"] from pyasm.common import Common, Xml, TacticException, Environment, System, Config from pyasm.search import * from project import Project from subprocess import Popen, PIPE import sys, os, string, re, stat, glob try: #import Image from PIL import Image # Test to see if imaging actually works import _imaging HAS_PIL = True except: HAS_PIL = False try: import Image # Test to see if imaging actually works import _imaging HAS_PIL = True except: HAS_PIL = False # check if imagemagick is installed, and find exe if possible convert_exe = '' HAS_IMAGE_MAGICK = False if os.name == "nt": # prefer direct exe to not confuse with other convert.exe present on nt systems convert_exe_list = glob.glob('C:\\Program Files\\ImageMagick*') for exe in convert_exe_list: try: convert_process = Popen(['%s\\convert.exe'%exe,'-version'], stdout=PIPE, stderr=PIPE) convert_return,convert_err = convert_process.communicate() if 'ImageMagick' in convert_return: convert_exe = '%s\\convert.exe'%exe HAS_IMAGE_MAGICK = True except: print "Running %s failed" %exe if not convert_exe_list: # IM might not be in Program Files but may still be in PATH try: convert_process = Popen(['convert','-version'], stdout=PIPE, stderr=PIPE) convert_return,convert_err = convert_process.communicate() if 'ImageMagick' in convert_return: convert_exe = 'convert' HAS_IMAGE_MAGICK = True except: pass else: # in other systems (e.g. unix) 'convert' is expected to be in PATH try: convert_process = Popen(['convert','-version'], stdout=PIPE, stderr=PIPE) convert_return,convert_err = convert_process.communicate() if 'ImageMagick' in convert_return: convert_exe = 'convert' HAS_IMAGE_MAGICK = True except: pass if Common.which("ffprobe"): HAS_FFMPEG = True else: HAS_FFMPEG = False import subprocess class FileException(TacticException): pass class File(SObject): NORMAL_EXT = ['max','ma','xls' ,'xlsx', 'doc', 'docx','txt', 'rtf', 'odt','fla','psd', 'xsi', 'scn', 'hip', 'xml','eani','pdf', 'fbx', 'gz', 'zip', 'rar', 'ini', 'db', 'py', 'pyd', 'spt' ] VIDEO_EXT = ['mov','wmv','mpg','mpeg','m1v','m2v','mp2','mp4','mpa','mpe','mp4','wma','asf','asx','avi','wax', 'wm','wvx','ogg','webm','mkv','m4v','mxf','f4v','rmvb'] IMAGE_EXT = ['jpg','png','tif','tiff','gif','dds','dcm'] SEARCH_TYPE = "sthpw/file" BASE_TYPE_SEQ = "sequence" BASE_TYPE_DIR = "directory" BASE_TYPE_FILE = "file" def get_code(my): return my.get_value("code") def get_file_name(my): return my.get_value("file_name") def get_file_range(my): return my.get_value("file_range") def get_type(my): return my.get_value("type") def get_media_type_by_path(cls, path): tmp, ext = os.path.splitext(path) ext = ext.lstrip(".") ext = ext.lower() if ext in File.VIDEO_EXT: return "video" elif ext in File.NORMAL_EXT: return "document" else: return "image" get_media_type_by_path = classmethod(get_media_type_by_path) def get_sobject(my): '''get the sobject associated with this file''' search = Search(my.get_value("search_type")) search.add_id_filter(my.get_value("search_id")) sobject = search.get_sobject() return sobject def get_full_file_name(my): '''Gets the full file name. This is the same as get_file_name''' return my.get_file_name() def get_lib_dir(my,snapshot=None): '''go through the stored snapshot_code to get the actual path''' code = my.get_value("snapshot_code") from snapshot import Snapshot snapshot = Snapshot.get_by_code(code) return snapshot.get_lib_dir() def get_env_dir(my,snapshot=None): '''go through the stored snapshot_code to get the actual path''' code = my.get_value("snapshot_code") from snapshot import Snapshot snapshot = Snapshot.get_by_code(code) return snapshot.get_env_dir() def get_web_dir(my,snapshot=None): '''go through the stored snapshot_code to get the actual path''' code = my.get_value("snapshot_code") from snapshot import Snapshot snapshot = Snapshot.get_by_code(code) return snapshot.get_web_dir() def get_lib_path(my): filename = my.get_full_file_name() return "%s/%s" % (my.get_lib_dir(), filename) def get_env_path(my): '''path beginning with $TACTIC_ASSET_DIR''' filename = my.get_full_file_name() return "%s/%s" % (my.get_env_dir(), filename) def get_web_path(my): filename = my.get_full_file_name() return "%s/%s" % (my.get_web_dir(), filename) ################## # Static Methods ################## """ # DEPRERECATED PADDING = 10 # DEPRERECATED def add_file_code(file_path, file_code): ext = ".".join( File.get_extensions(file_path) ) padded_id = str(file_code).zfill(File.PADDING) file_path = file_path.replace(".%s" % ext, "_%s.%s" % (padded_id, ext) ) return file_path add_file_code = staticmethod(add_file_code) # DEPRERECATED def remove_file_code(file_path): new_path = re.compile(r'_(\w{%s})\.' % File.PADDING).sub(".", file_path) return new_path remove_file_code = staticmethod(remove_file_code) # DEPRERECATED def extract_file_code(file_path): p = re.compile(r'_(\w{%s})\.' % File.PADDING) m = p.search(file_path) if not m: return 0 groups = m.groups() if not groups: return 0 else: file_code = groups[0] # make sure there are only alpha/numberic characters if file_code.find("_") != -1: return 0 # make sure the first 3 are numeric if not re.match('^\d{3}\w+$', file_code): return 0 # strip out the leading zeros return file_code.lstrip("0") extract_file_code = staticmethod(extract_file_code) # DEPRERECATED def extract_file_path(file_path): '''return file path without the unique id''' p = re.compile(r'_(\w{%s})\.' % File.PADDING) m = p.search(file_path) if not m: return file_path groups = m.groups() if not groups: return file_path else: new_path = file_path.replace("_%s" % groups[0], "") return new_path extract_file_path = staticmethod(extract_file_path) # DEPRERECATED def has_file_code(file_path): file_code = File.extract_file_code(file_path) if file_code == 0: return False else: return True has_file_code = staticmethod(has_file_code) """ def get_extension(file_path): '''get only the final extension''' parts = os.path.basename(file_path).split(".") ext = parts[len(parts)-1] return ext get_extension = staticmethod(get_extension) def get_extensions(file_path): '''get all of the extensions after the first .''' parts = os.path.basename(file_path).split(".") ext = parts[1:len(parts)] return ext get_extensions = staticmethod(get_extensions) def get_by_snapshot(cls, snapshot, file_type=None): xml = snapshot.get_xml_value("snapshot") file_codes = xml.get_values("snapshot/file/@file_code") search = Search( cls.SEARCH_TYPE) search.add_filters("code", file_codes) if file_type: search.add_filter("type", file_type) return search.get_sobjects() get_by_snapshot = classmethod(get_by_snapshot) def get_by_filename(cls, filename, skip_id=None, padding=0): search = Search(cls.SEARCH_TYPE) # if this is a file range then convert file name to padding # FIXME: need some way to know what and where the padding is if padding: filename = re.sub("(.*\.)(\d+)", r"\1####", filename) search.add_filter("file_name", filename) project_code = Project.get_project_code() search.add_filter("project_code", project_code) if skip_id: search.add_where('id != %s'%skip_id) return search.get_sobject() get_by_filename = classmethod(get_by_filename) def get_by_snapshots(cls, snapshots, file_type=None): all_file_codes = [] for snapshot in snapshots: xml = snapshot.get_xml_value("snapshot") file_codes = xml.get_values("snapshot/file/@file_code") all_file_codes.extend(file_codes) search = Search( cls.SEARCH_TYPE) search.add_filters("code", all_file_codes) if file_type: search.add_filter("type", file_type) files = search.get_sobjects() # cache these for file in files: key = "%s|%s" % (file.get_search_type(),file.get_code()) SObject.cache_sobject(key, file) return files get_by_snapshots = classmethod(get_by_snapshots) # DEPRECATED """ def get_by_path(path): file_code = File.extract_file_code(path) if file_code == 0: return None search = Search(File.SEARCH_TYPE) search.add_id_filter(file_code) file = search.get_sobject() return file get_by_path = staticmethod(get_by_path) """ def get_by_path(path): asset_dir = Environment.get_asset_dir() path = path.replace("%s/" % asset_dir, "") relative_dir = os.path.dirname(path) file_name = os.path.basename(path) # NOTE: this does not work with base_dir_alias search = Search("sthpw/file") search.add_filter("relative_dir", relative_dir) search.add_filter("file_name", file_name) sobject = search.get_sobject() return sobject get_by_path = staticmethod(get_by_path) def create( file_path, search_type, search_id, file_type=None, requires_file=True, st_size=None, repo_type=None, search_code = None): exists = os.path.exists(file_path) isdir = os.path.isdir(file_path) if requires_file and not os.path.exists(file_path): raise FileException("File '%s' does not exist" % file_path) file_name = os.path.basename(file_path) file = File(File.SEARCH_TYPE) file.set_value("file_name", file_name) file.set_value("search_type", search_type) if search_code: file.set_value("search_code", search_code) # MongoDb if search_id and isinstance(search_id, int): file.set_value("search_id", search_id) if file_type: file.set_value("type", file_type) if isdir: file.set_value("base_type", File.BASE_TYPE_DIR) else: file.set_value("base_type", File.BASE_TYPE_FILE) project = Project.get() file.set_value("project_code", project.get_code()) if exists: if isdir: dir_info = Common.get_dir_info(file_path) size = dir_info.get("size") file.set_value("st_size", size) else: from stat import ST_SIZE size = os.stat(file_path)[ST_SIZE] file.set_value("st_size", size) elif st_size != None: file.set_value("st_size", st_size) if repo_type: file.set_value("repo_type", repo_type) file.commit() return file create = staticmethod(create) def makedirs(dir, mode=None): '''wrapper to mkdirs in case it ever needs to be overridden''' print "DEPRECATED: use System().makedirs()" return System().makedirs(dir,mode) makedirs = staticmethod(makedirs) def get_filesystem_name(name, strict=True): '''takes a name and converts it to a name that can be saved in the filesystem.''' filename = name filename = filename.replace("/", "__") filename = filename.replace("|", "__") filename = filename.replace(":", "__") filename = filename.replace("?", "__") filename = filename.replace("=", "__") if strict: filename = filename.replace(" ", "_") filename_base, ext = os.path.splitext(filename) ext = string.lower(ext) filename = "%s%s" % (filename_base, ext) return filename get_filesystem_name = staticmethod(get_filesystem_name) def process_file_path(file_path): '''makes a file path completely kosher with the file system. Only do it on basename or it would remove the : from C:/''' return Common.get_filesystem_name(file_path) process_file_path = staticmethod(process_file_path) def get_md5(path): '''get md5 checksum''' py_exec = Config.get_value("services", "python") if not py_exec: py_exec = "python" if isinstance(path, unicode): path = path.encode('utf-8') popen = subprocess.Popen([py_exec, '%s/src/bin/get_md5.py'%Environment.get_install_dir(), path], shell=False, stdout=subprocess.PIPE) popen.wait() output = '' value = popen.communicate() if value: output = value[0].strip() if not output: err = value[1] print err return output get_md5 = staticmethod(get_md5) def is_file_group(file_path): '''returns True if it is a file group''' return not (file_path.find('#') == -1 and file_path.find('%') == -1) is_file_group = staticmethod(is_file_group) class FileAccess(SObject): SEARCH_TYPE = "sthpw/file_access" def create(file): file_code = file.get_code() file_access = FileAccess(FileAccess.SEARCH_TYPE) file_access.set_value("file_code", file_code) security = WebContainer.get_security() user = security.get_user_name() file_access.set_value("login", user) file_access.commit() return file_access create = staticmethod(create) class IconCreator(object): '''Utility class that creates icons of an image or document in the same directory as the image''' def __init__(my, file_path): my.file_path = file_path # check if it exists if not os.path.exists( file_path ): raise FileException( \ "Error: file [%s] does not exist" % my.file_path ) my.tmp_dir = os.path.dirname(file_path) my.icon_path = None my.web_path = None my.texture_mode = False my.icon_mode = False def set_texture_mode(my): '''texture mode down res is 1/4 size''' my.texture_mode = True def set_icon_mode(my): '''icon mode down res is 1/4 size''' my.icon_mode = True def get_icon_path(my): return my.icon_path def get_web_path(my): return my.web_path def create_icons(my): my.execute() def execute(my): # check file name file_name = os.path.basename(my.file_path) ext = File.get_extension(file_name) type = string.lower(ext) if type == "pdf": my._process_pdf( file_name ) elif type in File.NORMAL_EXT: # skip icon generation for normal or video files pass elif type in File.VIDEO_EXT: try: my._process_video( file_name ) except IOError, e: '''This is an unknown file type. Do nothing and except as a file''' print "WARNING: ", e.__str__() Environment.add_warning("Unknown file type", e.__str__()) else: # assume it is an image try: my._process_image( file_name ) except IOError, e: '''This is an unknown file type. Do nothing and except as a file''' print "WARNING: ", e.__str__() Environment.add_warning("Unknown file type", e.__str__()) def _process_pdf(my, file_name): base, ext = os.path.splitext(file_name) icon_file_name = base + "_icon.png" tmp_icon_path = "%s/%s" % (my.tmp_dir, icon_file_name) if sys.platform == 'darwin': return else: if not Common.which("convert"): return try: my.file_path = my.file_path.encode('utf-8') import shlex, subprocess subprocess.call(['convert', '-geometry','80','-raise','2x2','%s[0]'%my.file_path,\ "%s"%tmp_icon_path]) except Exception, e: print "Error extracting from pdf [%s]" % e return # check that it actually got created if os.path.exists(tmp_icon_path): my.icon_path = tmp_icon_path else: print "Warning: [%s] did not get created from pdf" % tmp_icon_path def get_web_file_size(my): from pyasm.prod.biz import ProdSetting web_file_size = ProdSetting.get_value_by_key('web_file_size') thumb_size = (640, 480) if web_file_size: parts = re.split('[\Wx]+', web_file_size) thumb_size = (640, 480) if len(parts) == 2: try: thumb_size = (int(parts[0]), int(parts[1])) except ValueError: thumb_size = (640, 480) return thumb_size def _process_video(my, file_name): ffmpeg = Common.which("ffmpeg") if not ffmpeg: return thumb_web_size = my.get_web_file_size() thumb_icon_size = (120, 100) exts = File.get_extensions(file_name) base, ext = os.path.splitext(file_name) icon_file_name = "%s_icon.png" % base web_file_name = "%s_web.jpg" % base tmp_icon_path = "%s/%s" % (my.tmp_dir, icon_file_name) tmp_web_path = "%s/%s" % (my.tmp_dir, web_file_name) #cmd = '''"%s" -i "%s" -r 1 -ss 00:00:01 -t 1 -s %sx%s -vframes 1 "%s"''' % (ffmpeg, my.file_path, thumb_web_size[0], thumb_web_size[1], tmp_web_path) #os.system(cmd) import subprocess try: subprocess.call([ffmpeg, '-i', my.file_path, "-y", "-ss", "00:00:01","-t","1",\ "-s","%sx%s"%(thumb_web_size[0], thumb_web_size[1]),"-vframes","1","-f","image2", tmp_web_path]) if os.path.exists(tmp_web_path): my.web_path = tmp_web_path else: my.web_path = None except Exception, e: Environment.add_warning("Could not process file", \ "%s - %s" % (my.file_path, e.__str__())) pass try: subprocess.call([ffmpeg, '-i', my.file_path, "-y", "-ss", "00:00:01","-t","1",\ "-s","%sx%s"%(thumb_icon_size[0], thumb_icon_size[1]),"-vframes","1","-f","image2", tmp_icon_path]) if os.path.exists(tmp_icon_path): my.icon_path = tmp_icon_path else: my.icon_path = None except Exception, e: Environment.add_warning("Could not process file", \ "%s - %s" % (my.file_path, e.__str__())) pass def _process_image(my, file_name): base, ext = os.path.splitext(file_name) # get all of the extensions exts = File.get_extensions(file_name) frame = 0 if len(exts) == 2: try: frame = int(exts[0]) base = base.replace(".%s" % exts[0], '' ) except ValueError: frame = 0 if frame: icon_file_name = "%s_icon.%s.png" % (base, exts[0]) web_file_name = "%s_web.%s.jpg" % (base, exts[0]) else: icon_file_name = "%s_icon.png" % base web_file_name = "%s_web.jpg" % base tmp_icon_path = "%s/%s" % (my.tmp_dir, icon_file_name) tmp_web_path = "%s/%s" % (my.tmp_dir, web_file_name) # create the web image try: if my.texture_mode: my._resize_texture(my.file_path, tmp_web_path, 0.5) my.web_path = tmp_web_path # create the icon thumb_size = (120,100) try: my._resize_image(tmp_web_path, tmp_icon_path, thumb_size) except TacticException: my.icon_path = None else: my.icon_path = tmp_icon_path elif my.icon_mode: # just icon, no web # create the icon only thumb_size = (120,100) try: my._resize_image(my.file_path, tmp_icon_path, thumb_size) except TacticException: my.icon_path = None else: my.icon_path = tmp_icon_path else: thumb_size = my.get_web_file_size() try: my._resize_image(my.file_path, tmp_web_path, thumb_size) except TacticException: my.web_path = None else: my.web_path = tmp_web_path # create the icon thumb_size = (120,100) try: my._resize_image(tmp_web_path, tmp_icon_path, thumb_size) except TacticException: my.icon_path = None else: my.icon_path = tmp_icon_path # check icon file size, reset to none if it is empty # TODO: use finally in Python 2.5 if my.web_path: web_path_size = os.stat(my.web_path)[stat.ST_SIZE] if not web_path_size: my.web_path = None if my.icon_path: icon_path_size = os.stat(my.icon_path)[stat.ST_SIZE] if not icon_path_size: my.icon_path = None except IOError, e: Environment.add_warning("Could not process file", \ "%s - %s" % (my.file_path, e.__str__())) my.web_path = None my.icon_path = None def _extract_frame(my, large_path, small_path, thumb_size): pass def _resize_image(my, large_path, small_path, thumb_size): try: large_path = large_path.encode('utf-8') small_path = small_path.encode('utf-8') if HAS_IMAGE_MAGICK: # generate imagemagick command convert_cmd = [] convert_cmd.append(convert_exe) # png's and psd's can have multiple layers which need to be flattened to make an accurate thumbnail if large_path.lower().endswith('png'): convert_cmd.append('-flatten') if large_path.lower().endswith('psd'): large_path += "[0]" convert_cmd.extend(['-resize','%sx%s'%(thumb_size[0], thumb_size[1])]) # FIXME: needs PIL for this ... should use ImageMagick to find image size if HAS_PIL: try: im = Image.open(large_path) x,y = im.size except Exception, e: print "WARNING: ", e x = 0 y = 0 if x < y: # icons become awkward if height is bigger than width # add white background for more reasonable icons convert_cmd.extend(['-background','white']) convert_cmd.extend(['-gravity','center']) convert_cmd.extend(['-extent','%sx%s'%(thumb_size[0], thumb_size[1])]) convert_cmd.append('%s'%(large_path)) convert_cmd.append('%s'%(small_path)) subprocess.call(convert_cmd) # if we don't have ImageMagick, use PIL, if installed (in non-mac os systems) elif HAS_PIL: # use PIL # create the thumbnail im = Image.open(large_path) try: im.seek(1) except EOFError: is_animated = False else: is_animated = True im.seek(0) im = im.convert('RGB') x,y = im.size to_ext = "PNG" if small_path.lower().endswith('jpg') or small_path.lower().endswith('jpeg'): to_ext = "JPEG" if x >= y: im.thumbnail( (thumb_size[0],10000), Image.ANTIALIAS ) im.save(small_path, to_ext) else: #im.thumbnail( (10000,thumb_size[1]), Image.ANTIALIAS ) x,y = im.size # first resize to match this thumb_size base_height = thumb_size[1] h_percent = (base_height/float(y)) base_width = int((float(x) * float(h_percent))) im = im.resize((base_width, base_height), Image.ANTIALIAS ) # then paste to white image im2 = Image.new( "RGB", thumb_size, (255,255,255) ) offset = (thumb_size[0]/2) - (im.size[0]/2) im2.paste(im, (offset,0) ) im2.save(small_path, to_ext) # if neither IM nor PIL is installed, check if this is a mac system and use sips if so elif sys.platform == 'darwin': convert_cmd = ['sips', '--resampleWidth', '%s'%thumb_size[0], '--out', small_path, large_path] subprocess.call(convert_cmd) else: raise TacticException('No image manipulation tool installed') except Exception, e: print "Error: ", e # after these operations, confirm that the icon has been generated if not os.path.exists(small_path): raise TacticException('Icon generation failed') def _resize_texture(my, large_path, small_path, scale): # create the thumbnail try: im = Image.open(large_path) x,y = im.size resize = int( float(x) * scale ) im.thumbnail( (resize,10000), Image.ANTIALIAS ) im.save(small_path, "PNG") except: if sys.platform == 'darwin': cmd = "sips --resampleWidth 25%% --out %s %s" \ % (large_path, small_path) else: cmd = "convert -resize 25%% %s %s" \ % (large_path, small_path) os.system(cmd) if not os.path.exists(small_path): raise def add_icons(file_paths): new_file_paths=[] new_file_types=[] for file_path in file_paths: # create icons and add to the list creator = IconCreator(file_path) creator.create_icons() icon_path = creator.get_icon_path() new_file_paths.append(icon_path) new_file_types.append("icon") web_path = creator.get_web_path() new_file_paths.append(web_path) new_file_types.append("web") return new_file_paths, new_file_types add_icons = staticmethod(add_icons) class FileGroup(File): '''Handles groups of files. The file paths have the following syntax <file>.#### Where the number signs indicate padding to be replaced by the file_range The file_range parameter has the following syntax: 1-12 Means from files 1-12 ''' def check_paths(file_path, file_range): ''' check existence of files. this expects a FileRange object''' expanded = FileGroup.expand_paths(file_path, file_range) for expand in expanded: if not System().exists(expand): raise FileException("File '%s' does not exist!" % expand) return expanded check_paths = staticmethod(check_paths) def create( file_path, file_range, search_type, search_id, file_type=None ): expanded = FileGroup.check_paths(file_path, file_range) file_name = os.path.basename(file_path) file = File(File.SEARCH_TYPE) file.set_value("file_name", file_name) file.set_value("search_type", search_type) file.set_value("search_id", search_id) from stat import ST_SIZE total = 0 for expanded in expanded: size = os.stat(expanded)[ST_SIZE] total += size project = Project.get() file.set_value("project_code", project.get_code()) file.set_value("st_size", total) file.set_value("file_range", file_range.get_key()) if file_type: file.set_value("type", file_type) file.set_value("base_type", File.BASE_TYPE_SEQ) file.commit() return file create = staticmethod(create) def expand_paths( file_path, file_range ): '''expands the file paths, replacing # as specified in the file_range object''' file_paths = [] # frame_by is not really used here yet frame_start, frame_end, frame_by = file_range.get_values() # support %0.4d notation if file_path.find('#') == -1: for i in range(frame_start, frame_end+1, frame_by): expanded = file_path % i file_paths.append( expanded ) else: # find out the number of #'s in the path padding = len( file_path[file_path.index('#'):file_path.rindex('#')] )+1 for i in range(frame_start, frame_end+1, frame_by): expanded = file_path.replace( '#'*padding, str(i).zfill(padding) ) file_paths.append(expanded) return file_paths expand_paths = staticmethod(expand_paths) def extract_template_and_range(cls, paths): frame = None # do we extract a range? padding = 0 for i in range(12,0,-1): p = re.compile("(\d{%d,})" % i) path = paths[0].replace("\\", "/") basename = os.path.basename(path) dirname = os.path.dirname(path) m = p.search(basename) if m: frame = m.groups()[0] padding = len(frame) break if not frame: padding = 4 frame = 'x'*padding template = basename.replace(frame, '#'*padding) frange = [] last_frame = None p = re.compile("(\d{%s})" % padding) for path in paths: path = path.replace("\\", "/") basename = os.path.basename(path) m = p.search(basename) if m: frame = int(m.groups()[0]) else: frame = 0 # the first one is always added if last_frame == None: frange.append(frame) frange.append('-') frange.append(frame) last_frame = frame continue # the next ones are not diff = frame - last_frame if diff == 1: frange[-1] = frame else: frange.append(frame) frange.append('-') frange.append(frame) last_frame = frame template = "%s/%s" % (dirname,template) frange = "".join([str(x) for x in frange]) return template, frange extract_template_and_range = classmethod(extract_template_and_range) class FileRange(object): def __init__(my, frame_start=1, frame_end=1, frame_by=1): my.frame_start = frame_start my.frame_end = frame_end my.frame_by = frame_by assert(isinstance(frame_start, (int))) assert(isinstance(frame_end, (int))) assert(isinstance(frame_by, (int))) def get_frame_by(my): return my.frame_by def get_frame_start(my): return my.frame_start def get_frame_end(my): return my.frame_end def set_frame_by(my, frame_by): assert(isinstance(frame_by, (int))) my.frame_by = frame_by def set_duration(my, duration): my.frame_start = 1 my.frame_end = duration def get_num_frames(my): return (my.frame_end - my.frame_start + 1) / my.frame_by def get_key(my): return "%s-%s/%s" % (my.frame_start, my.frame_end, my.frame_by) def get_display(my): if my.frame_by == 1: return "%s-%s" % (my.frame_start, my.frame_end) else: return my.get_key() def get_values(my): return (my.frame_start, my.frame_end, my.frame_by) # static method def get(file_range): ''' build a FileRange obj from a string''' frame_by = 1 if file_range.find("/") != -1: file_range, frame_by = file_range.split("/") tmps = file_range.split("-") if len(tmps) > 2: raise FileException("Unable to determine file_range [%s]" %file_range) frame_start, frame_end = tmps[0], tmps[1] frame_start = int(frame_start) frame_end = int(frame_end) frame_by = int(frame_by) return FileRange(frame_start, frame_end, frame_by) get = staticmethod(get)
View similar properties for sale in Lawrenceville, GA. Active listings for sale in Lawrenceville, GA are shown below. Listing courtesy of North Georgia Land, Inc. You are searching properties for sale in Lawrenceville, GA. There are thousands of listings including homes, condos, townhomes, foreclosures, and new homes/new construction. View information about public schools and private schools serving in Lawrenceville, GA, plus reference community statistics and demographics.
# -*- coding: utf-8 -*- """ Copyright (C) 2013 Dariusz Suchojad <dsuch at zato.io> Licensed under LGPLv3, see LICENSE.txt for terms and conditions. """ from __future__ import absolute_import, division, print_function, unicode_literals # stdlib import logging # Zato from zato.admin.web.forms import ChangePasswordForm from zato.admin.web.forms.definition.cassandra import CreateForm, EditForm from zato.admin.web.views import change_password as _change_password, CreateEdit, Delete as _Delete, Index as _Index, method_allowed from zato.common import CASSANDRA from zato.common.odb.model import CassandraConn logger = logging.getLogger(__name__) class Index(_Index): method_allowed = 'GET' url_name = 'def-cassandra' template = 'zato/definition/cassandra.html' service_name = 'zato.definition.cassandra.get-list' output_class = CassandraConn paginate = True class SimpleIO(_Index.SimpleIO): input_required = ('cluster_id',) output_required = ('id', 'name', 'is_active', 'contact_points', 'port', 'exec_size', 'proto_version', 'default_keyspace') output_optional = ('username', 'cql_version') output_repeated = True def handle(self): return { 'default_port': CASSANDRA.DEFAULT.PORT.value, 'default_exec_size': CASSANDRA.DEFAULT.EXEC_SIZE.value, 'default_proto_version': CASSANDRA.DEFAULT.PROTOCOL_VERSION.value, 'create_form': CreateForm(), 'edit_form': EditForm(prefix='edit'), 'change_password_form': ChangePasswordForm() } class _CreateEdit(CreateEdit): method_allowed = 'POST' class SimpleIO(CreateEdit.SimpleIO): input_required = ('cluster_id', 'name', 'is_active', 'contact_points', 'port', 'exec_size', 'proto_version', 'default_keyspace') input_optional = ('username', 'cql_version', 'tls_ca_certs', 'tls_client_cert', 'tls_client_priv_key') output_required = ('id', 'name') def success_message(self, item): return 'Successfully {0} the connection [{1}]'.format(self.verb, item.name) class Create(_CreateEdit): url_name = 'definition-cassandra-create' service_name = 'zato.definition.cassandra.create' class Edit(_CreateEdit): url_name = 'definition-cassandra-edit' form_prefix = 'edit-' service_name = 'zato.definition.cassandra.edit' class Delete(_Delete): url_name = 'definition-cassandra-delete' error_message = 'Could not delete the connection' service_name = 'zato.definition.cassandra.delete' @method_allowed('POST') def change_password(req): return _change_password(req, 'zato.definition.cassandra.change-password')
The programme will end in December, but the ECB's policy stance will remain extremely accommodative in 2019. Despite a slight downward revision to its inflation forecast, the bank is preparing to end monthly asset purchases. The legislation seeks to expand the ambit of regulation for financial products such as bonds, stocks and derivatives. An announcement on tapering will be made in October, but the ECB will remain wary of reducing QE too fast and too soon. Currency markets ignored the ECB's cautious tone, interpreting plans for a discussion on QE as a step towards tapering.
# Copyright 2016, Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. from distutils import cygwinccompiler from distutils import extension from distutils import util import errno import os import os.path import pkg_resources import platform import re import shlex import shutil import sys import sysconfig import setuptools from setuptools.command import build_ext # TODO(atash) add flag to disable Cython use os.chdir(os.path.dirname(os.path.abspath(__file__))) sys.path.insert(0, os.path.abspath('.')) import protoc_lib_deps import grpc_version PY3 = sys.version_info.major == 3 # Environment variable to determine whether or not the Cython extension should # *use* Cython or use the generated C files. Note that this requires the C files # to have been generated by building first *with* Cython support. BUILD_WITH_CYTHON = os.environ.get('GRPC_PYTHON_BUILD_WITH_CYTHON', False) # There are some situations (like on Windows) where CC, CFLAGS, and LDFLAGS are # entirely ignored/dropped/forgotten by distutils and its Cygwin/MinGW support. # We use these environment variables to thus get around that without locking # ourselves in w.r.t. the multitude of operating systems this ought to build on. # We can also use these variables as a way to inject environment-specific # compiler/linker flags. We assume GCC-like compilers and/or MinGW as a # reasonable default. EXTRA_ENV_COMPILE_ARGS = os.environ.get('GRPC_PYTHON_CFLAGS', None) EXTRA_ENV_LINK_ARGS = os.environ.get('GRPC_PYTHON_LDFLAGS', None) if EXTRA_ENV_COMPILE_ARGS is None: EXTRA_ENV_COMPILE_ARGS = '-std=c++11' if 'win32' in sys.platform: if sys.version_info < (3, 5): # We use define flags here and don't directly add to DEFINE_MACROS below to # ensure that the expert user/builder has a way of turning it off (via the # envvars) without adding yet more GRPC-specific envvars. # See https://sourceforge.net/p/mingw-w64/bugs/363/ if '32' in platform.architecture()[0]: EXTRA_ENV_COMPILE_ARGS += ' -D_ftime=_ftime32 -D_timeb=__timeb32 -D_ftime_s=_ftime32_s' else: EXTRA_ENV_COMPILE_ARGS += ' -D_ftime=_ftime64 -D_timeb=__timeb64' else: # We need to statically link the C++ Runtime, only the C runtime is # available dynamically EXTRA_ENV_COMPILE_ARGS += ' /MT' elif "linux" in sys.platform or "darwin" in sys.platform: EXTRA_ENV_COMPILE_ARGS += ' -fno-wrapv -frtti' if EXTRA_ENV_LINK_ARGS is None: EXTRA_ENV_LINK_ARGS = '' if "linux" in sys.platform or "darwin" in sys.platform: EXTRA_ENV_LINK_ARGS += ' -lpthread' elif "win32" in sys.platform and sys.version_info < (3, 5): msvcr = cygwinccompiler.get_msvcr()[0] # TODO(atash) sift through the GCC specs to see if libstdc++ can have any # influence on the linkage outcome on MinGW for non-C++ programs. EXTRA_ENV_LINK_ARGS += ( ' -static-libgcc -static-libstdc++ -mcrtdll={msvcr} ' '-static'.format(msvcr=msvcr)) EXTRA_COMPILE_ARGS = shlex.split(EXTRA_ENV_COMPILE_ARGS) EXTRA_LINK_ARGS = shlex.split(EXTRA_ENV_LINK_ARGS) CC_FILES = [ os.path.normpath(cc_file) for cc_file in protoc_lib_deps.CC_FILES] PROTO_FILES = [ os.path.normpath(proto_file) for proto_file in protoc_lib_deps.PROTO_FILES] CC_INCLUDE = os.path.normpath(protoc_lib_deps.CC_INCLUDE) PROTO_INCLUDE = os.path.normpath(protoc_lib_deps.PROTO_INCLUDE) GRPC_PYTHON_TOOLS_PACKAGE = 'grpc_tools' GRPC_PYTHON_PROTO_RESOURCES_NAME = '_proto' DEFINE_MACROS = () if "win32" in sys.platform: DEFINE_MACROS += (('WIN32_LEAN_AND_MEAN', 1),) if '64bit' in platform.architecture()[0]: DEFINE_MACROS += (('MS_WIN64', 1),) elif "linux" in sys.platform or "darwin" in sys.platform: DEFINE_MACROS += (('HAVE_PTHREAD', 1),) # By default, Python3 distutils enforces compatibility of # c plugins (.so files) with the OSX version Python3 was built with. # For Python3.4, this is OSX 10.6, but we need Thread Local Support (__thread) if 'darwin' in sys.platform and PY3: mac_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') if mac_target and (pkg_resources.parse_version(mac_target) < pkg_resources.parse_version('10.9.0')): os.environ['MACOSX_DEPLOYMENT_TARGET'] = '10.9' os.environ['_PYTHON_HOST_PLATFORM'] = re.sub( r'macosx-[0-9]+\.[0-9]+-(.+)', r'macosx-10.9-\1', util.get_platform()) def package_data(): tools_path = GRPC_PYTHON_TOOLS_PACKAGE.replace('.', os.path.sep) proto_resources_path = os.path.join(tools_path, GRPC_PYTHON_PROTO_RESOURCES_NAME) proto_files = [] for proto_file in PROTO_FILES: source = os.path.join(PROTO_INCLUDE, proto_file) target = os.path.join(proto_resources_path, proto_file) relative_target = os.path.join(GRPC_PYTHON_PROTO_RESOURCES_NAME, proto_file) try: os.makedirs(os.path.dirname(target)) except OSError as error: if error.errno == errno.EEXIST: pass else: raise shutil.copy(source, target) proto_files.append(relative_target) return {GRPC_PYTHON_TOOLS_PACKAGE: proto_files} def extension_modules(): if BUILD_WITH_CYTHON: plugin_sources = [os.path.join('grpc_tools', '_protoc_compiler.pyx')] else: plugin_sources = [os.path.join('grpc_tools', '_protoc_compiler.cpp')] plugin_sources += [ os.path.join('grpc_tools', 'main.cc'), os.path.join('grpc_root', 'src', 'compiler', 'python_generator.cc')] + [ os.path.join(CC_INCLUDE, cc_file) for cc_file in CC_FILES] plugin_ext = extension.Extension( name='grpc_tools._protoc_compiler', sources=plugin_sources, include_dirs=[ '.', 'grpc_root', os.path.join('grpc_root', 'include'), CC_INCLUDE, ], language='c++', define_macros=list(DEFINE_MACROS), extra_compile_args=list(EXTRA_COMPILE_ARGS), extra_link_args=list(EXTRA_LINK_ARGS), ) extensions = [plugin_ext] if BUILD_WITH_CYTHON: from Cython import Build return Build.cythonize(extensions) else: return extensions setuptools.setup( name='grpcio-tools', version=grpc_version.VERSION, license='3-clause BSD', ext_modules=extension_modules(), packages=setuptools.find_packages('.'), install_requires=[ 'protobuf>=3.0.0', 'grpcio>={version}'.format(version=grpc_version.VERSION), ], package_data=package_data(), )
SAN FRANCISCO, December 18, 2008 – The Linux Foundation (LF), the nonprofit organization dedicated to accelerating the growth of Linux, today announced that Linux kernel developer Theodore Ts’o has been named to the position of Chief Technology Officer at the Foundation. Ts’o is currently a Linux Foundation fellow, a position he has been in since December 2007. He is one of the most highly regarded members of the Linux and open source community and is known as the first North American kernel developer. Other current and past LF fellows include Steve Hemminger, Andrew Morton, Linus Torvalds and Andrew Tridgell. As CTO, Ts’o will lead all technical initiatives for the Linux Foundation, including oversight of the Linux Standard Base (LSB) and other workgroups such as Open Printing. He will also be the primary technical interface to LF members and the LF’s Technical Advisory Board, which represents the kernel community. Ts’o has been recognized throughout the Linux and open source communities for his contributions to free software, including being awarded the 2006 Award for the Advancement of Free Software by the Free Software Foundation (FSF). maintainer of e2fsprogs, the userspace utilities for the ext2, ext3, and ext4 filesystems. He is the founder and chair of the annual Linux Kernel Developers’ Summit and regularly teaches tutorials on Linux and other open source software. Ts’o was project leader for Kerberos, a network authentication system used by Red Hat Enteprise Linux, SUSE Enterprise Linux and Microsoft Windows. He was also a member of Security Area Directorate for the Internet Engineering Task Force where he chaired the IP Security (ipsec) Working Group and was a founding board member of the Free Standards Group (FSG). Ts’o studied computer science at MIT, where he received his degree in 1990.
# -*- coding: utf-8 -*- import os import re import bottle import string import inspect from bottle import static_file, template, url, request import dispatch static_path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) + '/static' bottle.TEMPLATE_PATH.insert(0, os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) +'/views') app = application = bottle.Bottle() bottle.default_app.push(app) bottle.BaseTemplate.defaults['url'] = url @app.route('/', name='main') def main(): return template('main', devices=dispatch.devices, activities=dispatch.activities, request=request) @app.route('/static/<filename:path>', name='static') def static(filename): return static_file(filename, root=static_path) @app.route('/activity/<activity>', name='activity_view', method='GET') def activity_view(activity): return template('activity', activity=dispatch.activities[activity], devices=dispatch.devices, activities=dispatch.activities, request=request) @app.route('/activity/<activity:path>', name='activity', method='POST') def activity(activity): activity, command = activity.split('/') try: dispatch.activity(activity, command) except NameError, e: print "Input error:", e @app.route('/device/<device>', name='device_view', method='GET') def device_view(device): return template('device', device=dispatch.devices[device], devices=dispatch.devices, activities=dispatch.activities, request=request) @app.route('/device/<device:path>', name='device', method='POST') def device(device): device, command = device.split('/') try: result = dispatch.device(device, command) return result if result else "" except NameError, e: print "Input error:", e
This small walnut finished Radiobar was a popular entry-level set and is frequently found today. In 1937, it was equipped with the Philco 37-60 radio (5 tubes), or the 37-84B (4 tubes - see first photo below). The set in the third photo below appears to have either the Philco 37-33 or the Philco 38-93 chassis, again with 5 tubes. The cabinet was 44” high, 37” wide and 13” deep closed. In open position the cabinet was 54” high and 45” wide. Bar equipment consisted of six each of three sizes of glasses, four decanters, funnel, bitters bottle, shaker, stainless steel knife, bottle opener, corkscrew and a bin for ice.
""" Tests suite for the views of the announcements app. """ from datetime import timedelta from django.test import TestCase, Client from django.core.urlresolvers import reverse from django.contrib.auth import get_user_model from django.utils import timezone from ..models import (Announcement, AnnouncementTag) class AnnouncementViewsTestCase(TestCase): """ Tests case for the views. """ def setUp(self): """ Create some fixtures for the tests. """ now = timezone.now() past_now = now - timedelta(seconds=1) future_now = now + timedelta(seconds=100) author = get_user_model().objects.create_user(username='johndoe', password='illpassword', email='[email protected]') self.announcement_unpublished = Announcement.objects.create(title='Test 1', slug='test-1', author=author, content='Hello World!') self.announcement_published = Announcement.objects.create(title='Test 2', slug='test-2', author=author, content='Hello World!', pub_date=past_now) self.announcement_published = Announcement.objects.create(title='Test 3', slug='test-3', author=author, content='Hello World!', pub_date=now) self.announcement_published_in_future = Announcement.objects.create(title='Test 4', slug='test-4', author=author, content='Hello World!', pub_date=future_now) self.tag = AnnouncementTag.objects.create(name='Test tag', slug='test-tag') self.tag2 = AnnouncementTag.objects.create(name='Test tag 2', slug='test-tag-2') self.announcement_unpublished.tags.add(self.tag) self.announcement_published.tags.add(self.tag) self.announcement_published_in_future.tags.add(self.tag) def test_announcement_list_view_available(self): """ Test the availability of the "announcement list" view. """ client = Client() response = client.get(reverse('announcements:index')) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'announcements/announcement_list.html') self.assertIn('announcements', response.context) self.assertQuerysetEqual(response.context['announcements'], ['<Announcement: Test 3>', '<Announcement: Test 2>']) def test_announcement_detail_view_available_with_published_announcement(self): """ Test the availability of the "announcement detail" view for a published announcement. """ client = Client() response = client.get(self.announcement_published.get_absolute_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'announcements/announcement_detail.html') self.assertIn('announcement', response.context) self.assertEqual(response.context['announcement'], self.announcement_published) def test_announcement_detail_view_unavailable_with_unpublished_announcement(self): """ Test the unavailability of the "announcement detail" view for an unpublished announcement. """ client = Client() response = client.get(self.announcement_unpublished.get_absolute_url()) self.assertEqual(response.status_code, 404) def test_announcement_preview_available_with_unpublished_announcement_if_authorized(self): """ Test the availability of the "announcement preview" view for an unpublished announcement if the current user is authorized to see the preview. """ client = Client() client.login(username='johndoe', password='illpassword') response = client.get(self.announcement_unpublished.get_absolute_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'announcements/announcement_detail.html') self.assertIn('announcement', response.context) self.assertEqual(response.context['announcement'], self.announcement_unpublished) def test_announcement_detail_view_unavailable_with_published_in_future_announcement(self): """ Test the availability of the "announcement detail" view for a published in future announcement. """ client = Client() response = client.get(self.announcement_published_in_future.get_absolute_url()) self.assertEqual(response.status_code, 404) def test_announcement_preview_available_with_published_in_future_announcement_if_authorized(self): """ Test the availability of the "announcement preview" view for a published in future announcement if the current user is authorized to see the preview. """ client = Client() client.login(username='johndoe', password='illpassword') response = client.get(self.announcement_published_in_future.get_absolute_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'announcements/announcement_detail.html') self.assertIn('announcement', response.context) self.assertEqual(response.context['announcement'], self.announcement_published_in_future) def test_latest_announcements_rss_feed_available(self): """ Test the availability of the "latest announcements" rss feed view. """ client = Client() response = client.get(reverse('announcements:latest_announcements_rss')) self.assertEqual(response.status_code, 200) def test_latest_announcements_atom_feed_available(self): """ Test the availability of the "latest announcements" atom feed" view. """ client = Client() response = client.get(reverse('announcements:latest_announcements_atom')) self.assertEqual(response.status_code, 200) def test_announcement_tag_list_view_available(self): """ Test the availability of the "announcement tag list" view. """ client = Client() response = client.get(reverse('announcements:tag_list')) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'announcements/tag_list.html') self.assertIn('tags', response.context) self.assertEqual(str(response.context['tags']), str([self.tag, self.tag2])) def test_announcement_tag_detail_view_available(self): """ Test the availability of the "announcement tag detail" view. """ client = Client() response = client.get(reverse('announcements:tag_detail', kwargs={'slug': self.tag.slug})) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'announcements/tag_detail.html') self.assertIn('tag', response.context) self.assertEqual(response.context['tag'], self.tag) self.assertIn('related_announcements', response.context) self.assertQuerysetEqual(response.context['related_announcements'], ['<Announcement: Test 3>']) def test_latest_tag_announcements_rss_feed_available(self): """ Test the availability of the "latest announcements for tag" rss feed view. """ client = Client() response = client.get(reverse('announcements:latest_tag_announcements_rss', kwargs={'slug': self.tag.slug})) self.assertEqual(response.status_code, 200) def test_latest_tag_announcements_atom_feed_available(self): """ Test the availability of the "latest announcements for tag" atom feed" view. """ client = Client() response = client.get(reverse('announcements:latest_tag_announcements_atom', kwargs={'slug': self.tag.slug})) self.assertEqual(response.status_code, 200)
This funny letter from Santa Claus, for adults or older children only, is a checklist of reasons why Santa won't be bringing any presents this year. I regret to inform you that I am unable to leave a present for you this year due to the reason noted. Address all appeals to the North Pole office, where they will be reviewed before the next holiday cycle. Fees may apply. __ Neither cookies nor milk were left, and/or were spoiled, stale, or otherwise substandard. __ Stocking contained foreign material including but not limited to: toe jam, lint, coal remnants, decade-old candy cane, pet hair. __ Advanced SantaScan™ technology detected "Nice List" forgery. __ At time of arrival on rooftop, subject was found to be awake. Per long-time St. Nick protocol, all parties must be in full slumber before Santa can deliver packages as scheduled. __ Extensive elf research revealed that you no longer believe in Santa Claus, or that you professed to younger believers that Santa is "not real."
# -*- coding: utf-8 -*- """ Local settings - Run in Debug mode - Use console backend for emails - Add Django Debug Toolbar - Add django-extensions as app """ import socket import os from .common import * # noqa # DEBUG # ------------------------------------------------------------------------------ DEBUG = env.bool('DJANGO_DEBUG', default=True) TEMPLATES[0]['OPTIONS']['debug'] = DEBUG # SECRET CONFIGURATION # ------------------------------------------------------------------------------ # See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key # Note: This key only used for development and testing. SECRET_KEY = env('DJANGO_SECRET_KEY', default='edf!yibz*vbd#%jrx^h!4xj*!axs107^sr9-8q%_8po@hwixa@') # Mail settings # ------------------------------------------------------------------------------ EMAIL_PORT = 1025 EMAIL_HOST = 'localhost' EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND', default='django.core.mail.backends.console.EmailBackend') # CACHING # ------------------------------------------------------------------------------ CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION': '' } } # django-debug-toolbar # ------------------------------------------------------------------------------ MIDDLEWARE += ('debug_toolbar.middleware.DebugToolbarMiddleware',) INSTALLED_APPS += ('debug_toolbar', ) INTERNAL_IPS = ['127.0.0.1', '10.0.2.2', '0.0.0.0', 'localhost' ] # tricks to have debug toolbar when developing with docker if os.environ.get('USE_DOCKER') == 'yes': ip = socket.gethostbyname(socket.gethostname()) INTERNAL_IPS += [ip[:-1] + "1"] DEBUG_TOOLBAR_CONFIG = { 'DISABLE_PANELS': [ 'debug_toolbar.panels.redirects.RedirectsPanel', ], 'SHOW_TEMPLATE_CONTEXT': True, } # django-extensions # ------------------------------------------------------------------------------ INSTALLED_APPS += ('django_extensions', ) # TESTING # ------------------------------------------------------------------------------ TEST_RUNNER = 'django.test.runner.DiscoverRunner' # Your local stuff: Below this line define 3rd party library settings # ------------------------------------------------------------------------------
The Strawbery Banke Museum archaeology department seeks advanced undergraduate or graduate level applicants in the fields of anthropology, archaeology, or history, to assist with the museum’s annual archaeological field school. This summer’s session will return to the c. 1695 Sherburne House. Before the field school, the archaeology intern will help prepare equipment and course materials, carry out historical research, and ready the site. During the field school, the archaeology intern will assist students with excavation methodology, field records, public interpretation, artifact identification, and lab processing. Maintenance of our existing archaeological collection of over 1 million artifacts, along with photos, maps, and paperwork; contributions to the archaeology blog; and development of the department’s digital database will also be part of the intern’s responsibilities. The successful applicant must be computer savvy, very organized, an excellent writer, and a great leader. Prior archaeological field work (e.g. a field school) is required and lab work is a plus. Please contact Alexandra Martin, the museum's archaeologist, with any questions at [email protected]. About Strawbery Banke Museum Strawbery Banke is an outdoor history museum located in the heart of Portsmouth, NH, a vibrant coastal city. The museum’s 10-acre site tells compelling stories of life during 350 years of change in a New England waterfront neighborhood. Furnished restored houses and interiors depict change over time from 1695-1955. The site is also home to exhibits on craftsmanship, archaeology, architecture, historic landscapes, and preservation. Museum programs feature engaging learning experiences for all audiences. Each year, Strawbery Banke welcomes more than 90,000 visitors; delivers learning experiences to more than 12,000 school-age students; conducts original archaeological and collections research; and offers a wide-ranging array of public programs. The daily program has featured interactive opportunities, family discovery learning, roleplaying, interpretation, hearth cooking, themed tours, and much more. · A personal statement of up to one page in length (this can be included in your cover letter), detailing your interest in the internship program. Clearly state your professional goals and areas of interest and what skills and abilities you can contribute to the museum’s work. Finally, describe the outcomes you hope the experience will produce. · Two letters of recommendation from professors or supervisors in a related field. Next topic: "Project Archaeologist, Dovetail Cultural Resource Group - Fredericksburg, VA" Previous topic: " Senior Archaeologist/ Principal Investigator - AECOM - Ohio "
from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import logging import numpy as np from builtins import object from numpy.core.records import ndarray from typing import Any from typing import List from typing import Optional from typing import Text from rasa_core.domain import Domain from rasa_core.featurizers import Featurizer from rasa_core.trackers import DialogueStateTracker logger = logging.getLogger(__name__) class Policy(object): SUPPORTS_ONLINE_TRAINING = False MAX_HISTORY_DEFAULT = 3 def __init__(self, featurizer=None, max_history=None): # type: (Optional[Featurizer]) -> None self.featurizer = featurizer self.max_history = max_history def featurize(self, tracker, domain): # type: (DialogueStateTracker, Domain) -> ndarray """Transform tracker into a vector representation. The tracker, consisting of multiple turns, will be transformed into a float vector which can be used by a ML model.""" x = domain.feature_vector_for_tracker(self.featurizer, tracker, self.max_history) return np.array(x) def predict_action_probabilities(self, tracker, domain): # type: (DialogueStateTracker, Domain) -> List[float] return [] def prepare(self, featurizer, max_history): self.featurizer = featurizer self.max_history = max_history def train(self, X, y, domain, **kwargs): # type: (ndarray, List[int], Domain, **Any) -> None """Trains the policy on given training data.""" raise NotImplementedError def continue_training(self, X, y, domain, **kwargs): """Continues training an already trained policy. This doesn't need to be supported by every policy. If it is supported, the policy can be used for online training and the implementation for the continued training should be put into this function.""" pass def persist(self, path): # type: (Text) -> None """Persists the policy to storage.""" pass @classmethod def load(cls, path, featurizer, max_history): raise NotImplementedError
Wonder Woman. The Original Badass Chick. June 2, 2017. I need to watch all the new trailers coming out of Comic Con!!! I don't know if I plan on watching Wonder Woman or not yet. It's set in the 20's or earlier which is an era that I really don't like. So I'm still on the fence about this one. Wonder Woman looks great along with Justice League. Looks like you got some other books. lol we had the same books! happy reading! I hope you'll enjoy your new books, Braine. Have a wonderful week and happy reading. I snagged City of Wolves, too. Oh yes, I can't wait for Wonder Woman. City of Wolves looks great. I've been watching all the new trailers from Warner Bros. and Marvel over the weekend. Can't wait for WW to hit theaters next year.
from PyQt5.QtCore import pyqtSlot, pyqtSignal from PyQt5.QtWidgets import QApplication, QMainWindow, QFileDialog from PyQt5.QtWidgets import QTableWidgetItem, QHeaderView from PyQt5 import QtGui from unstyle.gui.unstyle_auto import Ui_Unstyle import unstyle.controller class Unstyle(QMainWindow): def __init__(self, parent=None): # Initialized the generated interface code. super(Unstyle, self).__init__(parent) self.ui = Ui_Unstyle() self.ui.setupUi(self) self.featureRows = {} self.setWindowTitle("Unstyle") # Signal connections self.ui.stackedNext.clicked.connect(self.stackNext_clicked) self.ui.browseYourDoc.clicked.connect(self.browseYourDoc_clicked) self.ui.browseYourDocs.clicked.connect(self.browseYourDocs_clicked) self.ui.deleteYourDocs.clicked.connect(self.deleteYourDocs_clicked) self.ui.textEdit.textChanged.connect(self.refreshAnonymity) self.ui.rankTable.selectionModel().selectionChanged.connect( self.row_highlighted) self.ui.saveDoc.clicked.connect(self.saveDoc_clicked) def getFeatureDesc(self, functionName): """Translate feature extractor names into something that the end user can understand. :param functionName: A feature extracting function. :returns: A typle containing ("Feature Name", "Description of feature"). """ names = { "letterSpace": ( "Letter Space", ("The total number of letters appearing in your " "document.")), "gunningFog": ( "Gunning-Fog readability", ("A function related to " "the ratio of words/sentences and complex word/total words.")), "avgSyllablesPerWord": ( "Average syllables per word", ("The total " "number of syllables/the total number of words.")), "unique_words": ( "Unique words", ("The number of words that appear " "only once in your document.")), "sentenceCount": ( "Sentence count", ("The number of sentences in your document.")), "characterSpace": ( "Character space", ("The total number of " "characters (letters and numbers) appearing in your document.")), "avgSentenceLength": ( "Average sentence length", ("The average " "length of sentences in your document.")), "complexity": ( "Complexity", ("The ratio of unique words to total" "words in your document.")), "fleschReadingEase": ( "Flesch readability", ("A function related to" " the ratio of words/sentences and syllables/words."))} return names[functionName] # stackedWidget buttons def stackNext_clicked(self): # Go to the next screen. self.ui.stackedWidget.setCurrentIndex(1) # Tell the controller to train its classifier. unstyle.controller.readyToClassify() def browseYourDoc_clicked(self): filename = QFileDialog.getOpenFileName() unstyle.controller.document_to_anonymize_path = filename[0] self.ui.yourdoc.setText(filename[0]) unstyle.controller.document_to_anonymize = unstyle.controller.load_document( filename[0]) # Show the text of the document in the text editor and enable it. self.ui.textEdit.setText(unstyle.controller.document_to_anonymize) self.ui.textEdit.setEnabled(True) def browseYourDocs_clicked(self): filenames = QFileDialog.getOpenFileNames() if filenames is not '': for path in filenames[0]: unstyle.controller.other_user_documents_paths.append(path) self.ui.otherdocslist.addItem(path) def deleteYourDocs_clicked(self): selected = self.ui.otherdocslist.currentItem() # Make sure the user selected a document before trying to delete # anything if selected is not None: row = self.ui.otherdocslist.currentRow() unstyle.controller.other_user_documents_paths.remove( selected.text()) self.ui.otherdocslist.takeItem(row) else: pass def saveDoc_clicked(self): """Save the current state of the text editor to a file defined by the user. """ # Open a save dialog filename = QFileDialog.getSaveFileName() if filename is not None: with open(filename, 'w+') as file: file.write(str(textEdit.toPlainText())) # TODO: Rather than check anonymity every time the user changes the text, # have a separate thread check every 5 or 10 seconds. Otherwise, we're going # to be constantly locking up the interface when we use large featuresets. def refreshAnonymity(self): """Called whenever the user changes the text editor. """ # Make sure we've trained the classifier before trying to do any # predictions. if unstyle.controller.trained_classifier is None: return 0 anonymity = unstyle.controller.checkAnonymity( self.ui.textEdit.toPlainText()) if anonymity is 0: self.ui.anonIcon.setPixmap(QtGui.QPixmap(":/icons/img/x.png")) self.ui.anonStatus.setText( ("It is still possible to identify you as the " "author. Continue changing your document.")) if anonymity is 1: self.ui.anonIcon.setPixmap(QtGui.QPixmap(":/icons/img/w.png")) self.ui.anonStatus.setText( ("Although you are not the most likely author," " there is a statistically significant chance" " that you wrote the document. Continue" " changing your document.")) if anonymity is 2: self.ui.anonIcon.setPixmap(QtGui.QPixmap(":/icons/img/check.png")) self.ui.anonStatus.setText( ("Congratulations! It appears that your" " document is no longer associated with your" " identity.")) def row_highlighted(self, _, __): """Every time someone selects a row from the table, we update our description box with the description of the feature. """ selected = self.ui.rankTable.selectionModel().selectedRows()[0].row() featureHighlighted = self.featureRows[selected] # Display the description of the highlighted feature self.ui.featureDescription.setText( self.getFeatureDesc(featureHighlighted)[1]) # Controller messages def update_stats(self): self.refreshAnonymity() # Set up rank table dimensions self.ui.rankTable.setRowCount(len(unstyle.controller.feature_ranks)) # Name the headers of the table headers = "Text Features", "Target", "Initial" self.ui.rankTable.setHorizontalHeaderLabels(headers) headerObj = self.ui.rankTable.horizontalHeader() headerObj.setSectionResizeMode(0, QHeaderView.ResizeToContents) tableHeight = (len(unstyle.controller.feature_ranks)) # XXX: Sorting should be handled in the table, not in the # rank_features methods. This will allow us to fix this embarrassingly # overcomplicated code. # Fill in the feature column for idx, pair in enumerate(unstyle.controller.feature_ranks): currItem = self.ui.rankTable.item(idx, 0) # If we are setting up the table for the first time, currItem will # not exist. if currItem is None: currItem = QTableWidgetItem(1) currItem.setText(self.getFeatureDesc(pair[0])[0]) self.ui.rankTable.setItem(idx, 0, currItem) else: currItem.setText( self.getFeatureDesc(feature_ranks[pair[0]])[0]) # Initialize target and initial columns for idx, target in enumerate(unstyle.controller.targets): currItem = self.ui.rankTable.item(idx, 1) if currItem is None: currItem = QTableWidgetItem(1) currItem.setText(str(target)) self.ui.rankTable.setItem(idx, 1, currItem) currItem2 = QTableWidgetItem(1) self.ui.rankTable.setItem(idx, 2, currItem2) # Populate target and current val columns # Track feature table locations labelsBeforeSorting = unstyle.controller.featlabels for idx, label in enumerate(labelsBeforeSorting): for idx2, item in enumerate(range(tableHeight)): currItem = self.ui.rankTable.item(item, 0) if self.getFeatureDesc(label)[0] == currItem.text(): self.featureRows[idx2] = label print(label, " ", currItem.text(), " ", item) currItem = self.ui.rankTable.item(item, 1) currItem.setText(str(unstyle.controller.targets[idx])) currItem = self.ui.rankTable.item(item, 2) currItem.setText( str(unstyle.controller.to_anonymize_features[0][idx]))
Find out what the related areas are that Estimating Software Costs connects with, associates with, correlates with or affects, and which require thought, deliberation, analysis, review and discussion. This unique checklist stands out in a sense that it is not per-se designed to give answers, but to engage the reader and lay out a Estimating Software Costs thinking-frame. How far is your company on its Estimating Software Costs journey? Take this short survey to gauge your organization’s progress toward Estimating Software Costs leadership. Learn your strongest and weakest areas, and what you can do now to create a strategy that delivers results. Below you will find a quick checklist designed to help you think about which Estimating Software Costs related domains to cover and 131 essential critical questions to check off in that domain. Cut a stake in Estimating Software Costs goals and describe the risks of Estimating Software Costs sustainability. – Will Estimating Software Costs deliverables need to be tested and, if so, by whom? – How do we Lead with Estimating Software Costs in Mind? – How to deal with Estimating Software Costs Changes? Study Construction estimating software outcomes and describe the risks of Construction estimating software sustainability. – Marketing budgets are tighter, consumers are more skeptical, and social media has changed forever the way we talk about Estimating Software Costs. How do we gain traction? – In what ways are Estimating Software Costs vendors and us interacting to ensure safe and effective use? – Who are the people involved in developing and implementing Estimating Software Costs? Ventilate your thoughts about Accounting software failures and interpret which customers can’t participate in Accounting software because they lack skills. – Who is responsible for ensuring appropriate resources (time, people and money) are allocated to Estimating Software Costs? – How do senior leaders actions reflect a commitment to the organizations Estimating Software Costs values? – How will you know that the Estimating Software Costs project has been successful? Audit Application software decisions and budget the knowledge transfer for any interested in Application software. – Which customers cant participate in our Estimating Software Costs domain because they lack skills, wealth, or convenient access to existing solutions? – Do we monitor the Estimating Software Costs decisions made and fine tune them as they evolve? – How to Secure Estimating Software Costs? Confer over Building adoptions and drive action. – What are your current levels and trends in key measures or indicators of Estimating Software Costs product and process performance that are important to and directly serve your customers? how do these results compare with the performance of your competitors and other organizations with similar offerings? – How much effort (in terms of building new or modifying existing tools) is required to move the application? – Creating a repository for Building Blocks (BBIB)? – What are the components of team building? Track Building estimator outcomes and develop and take control of the Building estimator initiative. – What are all of our Estimating Software Costs domains and what do they do? – What are the barriers to increased Estimating Software Costs production? – How do we manage Estimating Software Costs Knowledge Management (KM)? Generalize Civil engineering planning and don’t overlook the obvious. – What are our best practices for minimizing Estimating Software Costs project risk, while demonstrating incremental value and quick wins throughout the Estimating Software Costs project lifecycle? – What is our Estimating Software Costs Strategy? Troubleshoot Construction bidding tasks and create a map for yourself. – Do we cover the five essential competencies-Communication, Collaboration,Innovation, Adaptability, and Leadership that improve an organizations ability to leverage the new Estimating Software Costs in a volatile global economy? – What are your key performance measures or indicators and in-process measures for the control and improvement of your Estimating Software Costs processes? – Risk factors: what are the characteristics of Estimating Software Costs that make it risky? Pay attention to Cost database adoptions and don’t overlook the obvious. – Will Estimating Software Costs have an impact on current business continuity, disaster recovery processes and/or infrastructure? – Meeting the challenge: are missed Estimating Software Costs opportunities costing us money? – What are the short and long-term Estimating Software Costs goals? Participate in Cost estimate decisions and assess what counts with Cost estimate that we are not counting. – Who will be responsible for deciding whether Estimating Software Costs goes ahead or not after the initial investigations? – In a project to restructure Estimating Software Costs outcomes, which stakeholders would you involve? Adapt Cut and fill management and document what potential Cut and fill megatrends could make our business model obsolete. – What are the long-term Estimating Software Costs goals? Talk about Database engagements and correct better engagement with Database results. – Are reusable policy objects separate, referenced databases, files, or subroutines so that they can be reused in multiple policies, but centrally updated? – What is the structure of the organizations existing user account directories and security databases? – Does the database contain what you think it contains? Jump start Duration governance and work towards be a leading Duration expert. – Are we making progress? and are we making progress as Estimating Software Costs leaders? – Why is Estimating Software Costs important for you now? Meet over Electrical wiring visions and revise understanding of Electrical wiring architectures. – Do Estimating Software Costs rules make a reasonable demand on a users capabilities? Consider Employee benefit leadership and report on the economics of relationships managing Employee benefit and constraints. – How will we insure seamless interoperability of Estimating Software Costs moving forward? – How do we go about Securing Estimating Software Costs? Win new insights about Indirect costs tasks and sort Indirect costs activities. – A compounding model resolution with available relevant data can often provide insight towards a solution methodology; which Estimating Software Costs models, tools and techniques are necessary? – What will be the consequences to the business (financial, reputation etc) if Estimating Software Costs does not go ahead or fails to deliver the objectives? – How do we Improve Estimating Software Costs service perception, and satisfaction? – How are the subunit s indirect costs allocated to products? Accelerate International Standard Book Number governance and find the essential reading for International Standard Book Number researchers. – Among the Estimating Software Costs product and service cost to be estimated, which is considered hardest to estimate? – Who will provide the final approval of Estimating Software Costs deliverables? Wrangle Lotus 1-2-3 goals and be persistent. – What are our needs in relation to Estimating Software Costs skills, labor, equipment, and markets? – Why should we adopt a Estimating Software Costs framework? Canvass Microsoft Excel outcomes and cater for concise Microsoft Excel education. – Think about the people you identified for your Estimating Software Costs project and the project responsibilities you would assign to them. what kind of training do you think they would need to perform these responsibilities effectively? – How do we ensure that implementations of Estimating Software Costs products are done in a way that ensures safety? – What are current Estimating Software Costs Paradigms? Accelerate Operating expenses issues and perfect Operating expenses conflict management. – What are your most important goals for the strategic Estimating Software Costs objectives? – What are the Essentials of Internal Estimating Software Costs Management? – How do we maintain Estimating Software Costss Integrity? Accelerate Overhead tactics and remodel and develop an effective Overhead strategy. – How can we incorporate support to ensure safe and effective use of Estimating Software Costs into the services that we provide? – Are dollars able to be distributed in gross without assessment of ACO overhead? – What are the business goals Estimating Software Costs is aiming to achieve? – Which Estimating Software Costs goals are the most important? Use past Performance bond governance and look at the big picture. – Is the Estimating Software Costs organization completing tasks effectively and efficiently? – How do we measure improved Estimating Software Costs service perception, and satisfaction? – What sources do you use to gather information for a Estimating Software Costs study? Reorganize Project management software tactics and assess what counts with Project management software that we are not counting. – What are the success criteria that will indicate that Estimating Software Costs objectives have been met and the benefits delivered? Group Proposal management and integrate design thinking in Proposal innovation. – Where do ideas that reach policy makers and planners as proposals for Estimating Software Costs strengthening and reform actually originate? – Do the Estimating Software Costs decisions we make today help people and the planet tomorrow? – What do I do when asked to submit a fixed-fee proposal when I m not sure of all the details? – What vendors make products that address the Estimating Software Costs needs? Graph Quantity surveyor risks and balance specific methods for improving Quantity surveyor results. – Have all basic functions of Estimating Software Costs been defined? – Is Estimating Software Costs Required? Focus on Spreadsheet decisions and assess what counts with Spreadsheet that we are not counting. – Does Estimating Software Costs create potential expectations in other areas that need to be recognized and considered? – How can the value of Estimating Software Costs be defined? Chart Trench management and inform on and uncover unspoken needs and breakthrough Trench results. – What tools do you use once you have decided on a Estimating Software Costs strategy and more importantly how do you choose? – What is Effective Estimating Software Costs? Weigh in on VisiCalc risks and acquire concise VisiCalc education. – Record-keeping requirements flow from the records needed as inputs, outputs, controls and for transformation of a Estimating Software Costs process. ask yourself: are the records needed as inputs to the Estimating Software Costs process available? – What are the disruptive Estimating Software Costs technologies that enable our organization to radically change our business processes? – Think of your Estimating Software Costs project. what are the main functions? Face Workers comp visions and describe the risks of Workers comp sustainability. – Does Estimating Software Costs analysis isolate the fundamental causes of problems? – What are internal and external Estimating Software Costs relations? – Do we all define Estimating Software Costs in the same way? – Are we paying our workers competitively? http://The Time Duration Calculator will calculate the time that has elapsed/difference between two dates with time. SafeRacks – OVERHEAD CEILING STORAGE RACKS! Share this Rating. Title: The Trench (1999) 6.1 /10. Want to share IMDb’s rating on your own site? Use the HTML below.
import numpy as np from layer_example import Layer class Activation(Layer): def __init__(self, type): if type=='sigmod': self.fun = self.sigmoid self.fun_d = self.sigmoid_d elif type == 'relu': self.fun = self.relu self.fun_d = self.relu_d elif type == 'tanh': self.fun = self.tanh self.fun_d = self.tanh_d else: raise ValueError('Invalid activation function.') def sigmoid(self, x): return 1.0/(1.0+np.exp(-x)) def sigmoid_d(self, x): s = self.sigmoid(x) return s*(1.0-s) def tanh(self, x): return np.tanh(x) def tanh_d(self, x): e = np.exp(2*x) return (e-1)/(e+1) def relu(self, x): return np.maximum(0.0, x) def relu_d(self, x): dx = np.zeros(x.shape) dx[x >= 0] = 1 return dx def fprop(self, input_data): self.last_input_data = input_data return self.fun(input_data) def bprop(self, output_grad): return output_grad * self.fun_d(self.last_input_data) def get_output_shape(self, input_shape): return input_shape
Then she ladled the coffee out and placed it in a bowl. Turning to her daughter, she asked, "Tell me what you see." "Carrots, eggs, and coffee," she replied. May we all be COFFEE!!!!!! !
from __future__ import absolute_import from __future__ import division from __future__ import print_function from builtins import zip from builtins import range from builtins import object from ..ternary import normal from ..misc import * import itertools from mpl_toolkits.mplot3d.art3d import Poly3DCollection from mpl_toolkits.mplot3d.art3d import Line3DCollection import matplotlib.pyplot as plt from mpl_toolkits.mplot3d import proj3d import numpy as np from scipy.spatial import Voronoi, ConvexHull import warnings def to_cartesian(vecmat,points): """Given lattice vectors and points in fractional coordinates, convert the points to Cartesian :vecmat: np array (lattice vectors as columns) :points: np array (vertically stacked coordinates) :returns: np array """ return np.dot(vecmat,points.T).T def to_fractional(vecmat,points): """Given lattice vectors and points in Cartesian coordinates, convert the points to fractional :vecmat: np array (lattice vectors as columns) :points: np array (vertically stacked coordinates) :returns: np array """ return np.dot(np.linalg.inv(vecmat),points.T).T def simplex_bin(hull): """Given a convex hull, check the equations of the hyperplanes and bin them, returning a set of simplex groups with a common equation (i.e. coplanar simplices) :hull: convex hull :returns: array of array of simplex """ equations=np.vstack({tuple(q) for q in hull.equations}) binned=[[] for q in equations] for q,s in zip(hull.equations,hull.simplices): #whichever row is zero has the same equation as the current simplex single_zero_row=equations-q index=np.where((single_zero_row==0).all(axis=1))[0] assert(index.shape==(1,)) index=index[0] binned[index].append(s) return [np.unique(a) for a in binned] def signed_angle_3d(v0,v1,vn): """Get the signed angle for two vectors in 3d space. :v0: np vector :v1: np vector :vn: np vector (normal vector) :returns: float (rad) """ v0n=v0/np.linalg.norm(v0) v1n=v1/np.linalg.norm(v1) #Avoid float point pain with 8 decimal places. Close enough angle=np.arccos(round(np.dot(v0n,v1n),8)) cross=np.cross(v0,v1) if np.dot(vn, cross) < 0: angle=-angle return angle def polygonal_sort(points): """Given a set of points that define a polygon, sort them so that they all go around the center in order. :points: np array :returns: np array """ n=normal(points[0:3]) c=np.sum(points,axis=0)/len(points) ref=points[0]-c angles=np.array([signed_angle_3d(c-p,ref,n) for p in points]) sortinds=np.argsort(angles) return points[sortinds] def polygon_facet_center(points): """Given a set of points that define a polygon, find the center of the polygon :points: np array :returns: np array """ center=np.average(points,axis=0) return center def polygon_edge_centers(points): """Given a set of points that define a polygon, find the centers of the edges. :points: np array :returns: np array """ rolled=np.roll(points,1,axis=0) centers=(rolled+points)/2 return centers def reciprocal_lattice(latmat): """Cross vectors and multiply by 2 pi to get the reciprocal of the given lattice :latmat: np 3x3 (vectors in columns) :returns: np 3x3 """ a,b,c=latmat.T vol=np.dot(a,np.cross(b,c)) astar=2*np.pi*np.cross(b,c)/vol bstar=2*np.pi*np.cross(c,a)/vol cstar=2*np.pi*np.cross(a,b)/vol return np.array([astar,bstar,cstar]).T def wigner_seitz_points(latmat): """Determine the edges of the Wigner Seitz cell, given the lattice. Generates just enough lattice points to generate a single WS cell, then selects points from the only full region. If the reciprocal lattice is given, then the points define the first Brillouin zone. :latmat: 3x3 vectors as columns :returns: np list of points (as rows) """ a,b,c=latmat.T #Range of lattice points that will be enough to enclose the Weigner Seitz cell radpoints=list(range(-1,2)) counterpoints=[(x,y,z) for x in radpoints for y in radpoints for z in radpoints] gridpoints=np.array([x*a+y*b+z*c for x,y,z in counterpoints]) #Construct Voronoi cell vor=Voronoi(gridpoints,furthest_site=False) vorpoints=vor.vertices vorregions=vor.regions #Only one full Voronoi cell should have been constructed goodregions=[x for x in vorregions if len(x) > 0 and x[0] is not -1] if len(goodregions)!=1: warnings.warn("Could not isolate a single Voronoi cell! Results may be wonky.") return vorpoints[goodregions[-1]] def wigner_seitz_facets(latmat): """Returns a list of polygons corresponding to the Weigner Seitz cell :returns: Poly3DCollection """ vorpoints=wigner_seitz_points(latmat) ch=ConvexHull(vorpoints) binned=simplex_bin(ch) polygons=[polygonal_sort(ch.points[b]) for b in binned] return polygons def draw_voronoi_cell(vectormat,ax,alpha): """Plot the Voronoi cell using the given lattice :vectormat: Either the real or reciprocal lattice :ax: matplotlib subplot :returns: ax """ norms=np.linalg.norm(vectormat,axis=0) maxrange=np.amax(norms) polygons=wigner_seitz_facets(vectormat) ax.add_collection(Poly3DCollection(polygons,facecolors='w',linewidth=2,alpha=alpha,zorder=0)) ax.add_collection(Line3DCollection(polygons,colors='k',linewidth=0.8, linestyles=':')) ax.set_xlim([-maxrange,maxrange]) ax.set_ylim([-maxrange,maxrange]) ax.set_zlim([-maxrange,maxrange]) return ax def voronoi_facet_centers(vectormat, fractional=True): """Calculate the centers of facets of either the brillouin zone, or Wigner Seitz cell, depending on the given vectormat :vectormat: Either the real or reciprocal lattice :fractional: bool :returns: np array """ polygons=wigner_seitz_facets(vectormat) centers=np.stack([polygon_facet_center(p) for p in polygons]) if fractional: centers=to_fractional(vectormat,centers) return centers def voronoi_edge_centers(vectormat, fractional=True): """Calculate the centers of the edges of either the brillouin zone, or Wigner Seitz cell, depending on the given vectormat :vectormat: Either the real or reciprocal lattice :fractional: bool :returns: np array """ polygons=wigner_seitz_facets(vectormat) for p in polygons: print(polygon_edge_centers(p)) centers=np.concatenate([polygon_edge_centers(p) for p in polygons],axis=0) if fractional: centers=to_fractional(vectormat,centers) return np.vstack({tuple(row) for row in centers}) def voronoi_vertexes(vectormat, fractional=True): """Get the coordinates of the corners/vertexes of the brillouin zone :vectormat: Either the real or reciprocal lattice :fractional: bool :returns: np array """ polygons=wigner_seitz_facets(vectormat) points=np.concatenate(polygons,axis=0) return np.vstack({tuple(row) for row in points}) class Lattice(object): """Simple class to hold the lattice vectors of a lattice, with a few routines to do things in reciprocal space""" def __init__(self, a, b, c): """Define the lattice with three lattice vectors, stored vertically in a matrix :a: 3x1 :b: 3x1 :c: 3x1 """ self._latmat=np.array([a,b,c]).T self._recipmat=reciprocal_lattice(self._latmat) def real_to_cartesian(self, points): """Convert a list of fractional coordinates into Cartesian for the real lattice :points: np array (vertically stacked coordinates) :returns: np array """ return to_cartesian(self._latmat,points) def real_to_fractional(self, points): """Convert a list of Cartesian coordinates into fractional for the real lattice :points: np array (vertically stacked coordinates) :returns: np array """ return to_fractional(self._latmat,points) def reciprocal_to_cartesian(self, points): """Convert a list of fractional coordinates into Cartesian for the reciprocal lattice :points: np array (vertically stacked coordinates) :returns: np array """ return to_cartesian(self._recipmat,points) def reciprocal_to_fractional(self, points): """Convert a list of Cartesian coordinates into fractional for the reciprocal lattice :points: np array (vertically stacked coordinates) :returns: np array """ return to_fractional(self._recipmat,points) def draw_wigner_seitz_cell(self,ax,alpha=1): """Plot the Wigner Seitz cell of the lattice (Voronoi of real lattice) :ax: matplotlib subplot :returns: ax """ return self._draw_voronoi_cell(self._latmat,ax,alpha) def draw_brillouin_zone(self,ax,alpha=1): """Plot the first Brillouin zone in reciprocal space (Voronoi of reciprocal lattice) :ax: matplotlib subplot :returns: ax """ return draw_voronoi_cell(self._recipmat,ax,alpha) def brillouin_facet_centers(self,fractional=True): """Calculate the center of all facets of the brillouin zone :returns: np array """ return voronoi_facet_centers(self._recipmat,fractional) def brillouin_edge_centers(self,fractional=True): """Calculate the center of all facets of the brillouin zone :returns: np array """ return voronoi_edge_centers(self._recipmat,fractional) def brillouin_vertexes(self,fractional=True): """Get the coordinates of the vertexes of the brillouin zone :returns: np array """ return voronoi_vertexes(self._recipmat,fractional) def draw_real_vectors(self, ax): """Draw the real lattice vectors :ax: matplotlib subplot :returns: ax """ for v,color in zip(self._latmat.T,['r','g','b']): arr=Arrow3D([0,v[0]],[0,v[1]],[0,v[2]],lw=3,arrowstyle="-|>",mutation_scale=20,color=color,linestyle="-") ax.add_artist(arr) return ax def draw_reciprocal_vectors(self, ax): """Draw the reciprocal lattice vectors :ax: matplotlib subplot :returns: ax """ for v,color in zip(self._recipmat.T,['r','g','b']): arr=Arrow3D([0,v[0]],[0,v[1]],[0,v[2]],lw=3,arrowstyle="-|>",mutation_scale=20,color=color,linestyle="--") ax.add_artist(arr) return ax def angles(self, rad=True, reciprocal=False): """Return the value of alpha, beta and gamma, i.e. the angles between the lattice vectors. :returns: (float,float,float) """ if not reciprocal: a,b,c=self._latmat.T else: a,b,c=self._recipmat.T alpha=angle_between(b,c) beta=angle_between(c,a) gamma=angle_between(a,b) if not rad: alpha=alpha*180/np.pi beta=beta*180/np.pi gamma=gamma*180/np.pi return alpha,beta,gamma def lengths(self,reciprocal=False): """Return the length of each lattice vector :returns: TODO """ if not reciprocal: a,b,c=self._latmat.T else: a,b,c=self._recipmat.T al=np.linalg.norm(a) bl=np.linalg.norm(b) cl=np.linalg.norm(c) return al,bl,cl def column_lattice(self): """Return the lattice as column vectors in a matrix Returns ------- np.array 3x3 """ return self._latmat def row_lattice(self): """Return the lattice as row vectors in a matrix Returns ------- np.array 3x3 """ return self._latmat.T if __name__ == "__main__": main()
Definition: From an accounting perspective, total costs refer to the sum of fixed costs and variable costs. If there are any semi-variable costs, these would have to be added as well to arrive at the figure of total costs. When calculating total investment costs, you have to add the cost of the investment itself and include any costs associated with the investment. All fees, commission, and other transaction costs would also have to be taken into consideration. What does Total Cost mean? Variable costs rise with an increase in the level of production. If a greater volume of goods is to be manufactured, the requirement for raw materials will go up. It will also be necessary to purchase more production supplies. Variable costs could also take the form of packaging materials. The company may also have to increase the number of workers who are employed on a piece rate basis. A company’s fixed costs include the depreciation charged on machinery, insurance expenses, the interest that the company pays for the loans that it has taken, and property taxes. These remain constant regardless of the level of production. Goodearth sells the boxes that it manufactures at a markup of 25%. Calculating its total costs allows the company to arrive at its selling price of $12.50 per box. ($10 + 25% of $10). Total costs include all the costs associated with producing a good.
# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from itertools import chain # TODO(paul): I can't believe Python doesn't have one of these def map_concat(func, items): # flatten a list-of-lists return list(chain.from_iterable(map(func, items))) class BaseMetric(object): def __init__(self, name, labels=[]): self.name = name self.labels = labels # OK not to clone as we never write it def dimension(self): return len(self.labels) def is_scalar(self): return not len(self.labels) def _render_labelvalue(self, value): # TODO: some kind of value escape return '"%s"' % (value) def _render_key(self, values): if self.is_scalar(): return "" return "{%s}" % ( ",".join(["%s=%s" % (k, self._render_labelvalue(v)) for k, v in zip(self.labels, values)]) ) class CounterMetric(BaseMetric): """The simplest kind of metric; one that stores a monotonically-increasing integer that counts events.""" def __init__(self, *args, **kwargs): super(CounterMetric, self).__init__(*args, **kwargs) self.counts = {} # Scalar metrics are never empty if self.is_scalar(): self.counts[()] = 0 def inc_by(self, incr, *values): if len(values) != self.dimension(): raise ValueError( "Expected as many values to inc() as labels (%d)" % (self.dimension()) ) # TODO: should assert that the tag values are all strings if values not in self.counts: self.counts[values] = incr else: self.counts[values] += incr def inc(self, *values): self.inc_by(1, *values) def render_item(self, k): return ["%s%s %d" % (self.name, self._render_key(k), self.counts[k])] def render(self): return map_concat(self.render_item, sorted(self.counts.keys())) class CallbackMetric(BaseMetric): """A metric that returns the numeric value returned by a callback whenever it is rendered. Typically this is used to implement gauges that yield the size or other state of some in-memory object by actively querying it.""" def __init__(self, name, callback, labels=[]): super(CallbackMetric, self).__init__(name, labels=labels) self.callback = callback def render(self): value = self.callback() if self.is_scalar(): return ["%s %.12g" % (self.name, value)] return ["%s%s %.12g" % (self.name, self._render_key(k), value[k]) for k in sorted(value.keys())] class DistributionMetric(object): """A combination of an event counter and an accumulator, which counts both the number of events and accumulates the total value. Typically this could be used to keep track of method-running times, or other distributions of values that occur in discrete occurances. TODO(paul): Try to export some heatmap-style stats? """ def __init__(self, name, *args, **kwargs): self.counts = CounterMetric(name + ":count", **kwargs) self.totals = CounterMetric(name + ":total", **kwargs) def inc_by(self, inc, *values): self.counts.inc(*values) self.totals.inc_by(inc, *values) def render(self): return self.counts.render() + self.totals.render() class CacheMetric(object): __slots__ = ("name", "cache_name", "hits", "misses", "size_callback") def __init__(self, name, size_callback, cache_name): self.name = name self.cache_name = cache_name self.hits = 0 self.misses = 0 self.size_callback = size_callback def inc_hits(self): self.hits += 1 def inc_misses(self): self.misses += 1 def render(self): size = self.size_callback() hits = self.hits total = self.misses + self.hits return [ """%s:hits{name="%s"} %d""" % (self.name, self.cache_name, hits), """%s:total{name="%s"} %d""" % (self.name, self.cache_name, total), """%s:size{name="%s"} %d""" % (self.name, self.cache_name, size), ] class MemoryUsageMetric(object): """Keeps track of the current memory usage, using psutil. The class will keep the current min/max/sum/counts of rss over the last WINDOW_SIZE_SEC, by polling UPDATE_HZ times per second """ UPDATE_HZ = 2 # number of times to get memory per second WINDOW_SIZE_SEC = 30 # the size of the window in seconds def __init__(self, hs, psutil): clock = hs.get_clock() self.memory_snapshots = [] self.process = psutil.Process() clock.looping_call(self._update_curr_values, 1000 / self.UPDATE_HZ) def _update_curr_values(self): max_size = self.UPDATE_HZ * self.WINDOW_SIZE_SEC self.memory_snapshots.append(self.process.memory_info().rss) self.memory_snapshots[:] = self.memory_snapshots[-max_size:] def render(self): if not self.memory_snapshots: return [] max_rss = max(self.memory_snapshots) min_rss = min(self.memory_snapshots) sum_rss = sum(self.memory_snapshots) len_rss = len(self.memory_snapshots) return [ "process_psutil_rss:max %d" % max_rss, "process_psutil_rss:min %d" % min_rss, "process_psutil_rss:total %d" % sum_rss, "process_psutil_rss:count %d" % len_rss, ]
Esquire Group is a leading strata services group with offices across Australia. As a group, we’re proud to operate under our banner of ‘supporting your world’. This means we’re dedicated to helping you manage all of the social, political, financial and functional needs of your living arrangement. We’ll develop a strong and respectful partnership with your executive committee and also help you to build a real sense of community spirit and belonging from the moment you set foot inside your property. As strata professionals we love our job, we’re passionate about making strata work and we’re personally committed to making your lifestyle experience of strata living as rewarding and enjoyable as it can possibly be.
# AUTO GENERATED. DO NOT CHANGE! from ctypes import * import numpy as np class MJCONTACT(Structure): _fields_ = [ ("dist", c_double), ("pos", c_double * 3), ("frame", c_double * 9), ("includemargin", c_double), ("friction", c_double * 5), ("solref", c_double * 2), ("solimp", c_double * 3), ("mu", c_double), ("coef", c_double * 5), ("zone", c_int), ("dim", c_int), ("geom1", c_int), ("geom2", c_int), ("exclude", c_int), ("efc_address", c_int), ] class MJRRECT(Structure): _fields_ = [ ("left", c_int), ("bottom", c_int), ("width", c_int), ("height", c_int), ] class MJVGEOM(Structure): _fields_ = [ ("type", c_int), ("dataid", c_int), ("objtype", c_int), ("objid", c_int), ("category", c_int), ("texid", c_int), ("texuniform", c_int), ("texrepeat", c_float * 2), ("size", c_float * 3), ("pos", c_float * 3), ("mat", c_float * 9), ("rgba", c_float * 4), ("emission", c_float), ("specular", c_float), ("shininess", c_float), ("reflectance", c_float), ("label", c_char * 100), ("camdist", c_float), ("rbound", c_float), ("transparent", c_ubyte), ] class MJVSCENE(Structure): _fields_ = [ ("maxgeom", c_int), ("ngeom", c_int), ("geoms", POINTER(MJVGEOM)), ("geomorder", POINTER(c_int)), ("nlight", c_int), ("lights", MJVLIGHT * 8), ("camera", MJVGLCAMERA * 2), ("enabletransform", c_ubyte), ("translate", c_float * 3), ("rotate", c_float * 4), ("scale", c_float), ("stereo", c_int), ("flags", c_ubyte * 5), ] class MJVPERTURB(Structure): _fields_ = [ ("select", c_int), ("active", c_int), ("refpos", c_double * 3), ("refquat", c_double * 4), ("localpos", c_double * 3), ("scale", c_double), ] class MJRCONTEXT(Structure): _fields_ = [ ("lineWidth", c_float), ("shadowClip", c_float), ("shadowScale", c_float), ("shadowSize", c_int), ("offWidth", c_int), ("offHeight", c_int), ("offSamples", c_int), ("offFBO", c_uint), ("offFBO_r", c_uint), ("offColor", c_uint), ("offColor_r", c_uint), ("offDepthStencil", c_uint), ("offDepthStencil_r", c_uint), ("shadowFBO", c_uint), ("shadowTex", c_uint), ("ntexture", c_int), ("textureType", c_int * 100), ("texture", c_int * 100), ("basePlane", c_uint), ("baseMesh", c_uint), ("baseHField", c_uint), ("baseBuiltin", c_uint), ("baseFontNormal", c_uint), ("baseFontShadow", c_uint), ("baseFontBig", c_uint), ("rangePlane", c_int), ("rangeMesh", c_int), ("rangeHField", c_int), ("rangeBuiltin", c_int), ("rangeFont", c_int), ("charWidth", c_int * 127), ("charWidthBig", c_int * 127), ("charHeight", c_int), ("charHeightBig", c_int), ("glewInitialized", c_int), ("windowAvailable", c_int), ("windowSamples", c_int), ("windowStereo", c_int), ("windowDoublebuffer", c_int), ("currentBuffer", c_int), ] class MJVCAMERA(Structure): _fields_ = [ ("type", c_int), ("fixedcamid", c_int), ("trackbodyid", c_int), ("lookat", c_double * 3), ("distance", c_double), ("azimuth", c_double), ("elevation", c_double), ] class MJVOPTION(Structure): _fields_ = [ ("label", c_int), ("frame", c_int), ("geomgroup", c_ubyte * 5), ("sitegroup", c_ubyte * 5), ("flags", c_ubyte * 18), ] class MJVGEOM(Structure): _fields_ = [ ("type", c_int), ("dataid", c_int), ("objtype", c_int), ("objid", c_int), ("category", c_int), ("texid", c_int), ("texuniform", c_int), ("texrepeat", c_float * 2), ("size", c_float * 3), ("pos", c_float * 3), ("mat", c_float * 9), ("rgba", c_float * 4), ("emission", c_float), ("specular", c_float), ("shininess", c_float), ("reflectance", c_float), ("label", c_char * 100), ("camdist", c_float), ("rbound", c_float), ("transparent", c_ubyte), ] class MJVLIGHT(Structure): _fields_ = [ ("pos", c_float * 3), ("dir", c_float * 3), ("attenuation", c_float * 3), ("cutoff", c_float), ("exponent", c_float), ("ambient", c_float * 3), ("diffuse", c_float * 3), ("specular", c_float * 3), ("headlight", c_ubyte), ("directional", c_ubyte), ("castshadow", c_ubyte), ] class MJOPTION(Structure): _fields_ = [ ("timestep", c_double), ("apirate", c_double), ("tolerance", c_double), ("impratio", c_double), ("gravity", c_double * 3), ("wind", c_double * 3), ("magnetic", c_double * 3), ("density", c_double), ("viscosity", c_double), ("o_margin", c_double), ("o_solref", c_double * 2), ("o_solimp", c_double * 3), ("mpr_tolerance", c_double), ("mpr_iterations", c_int), ("integrator", c_int), ("collision", c_int), ("impedance", c_int), ("reference", c_int), ("solver", c_int), ("iterations", c_int), ("disableflags", c_int), ("enableflags", c_int), ] class MJVISUAL(Structure): class ANON_GLOBAL(Structure): _fields_ = [ ("fovy", c_float), ("ipd", c_float), ("linewidth", c_float), ("glow", c_float), ("offwidth", c_int), ("offheight", c_int), ] class ANON_QUALITY(Structure): _fields_ = [ ("shadowsize", c_int), ("offsamples", c_int), ("numslices", c_int), ("numstacks", c_int), ("numarrows", c_int), ("numquads", c_int), ] class ANON_HEADLIGHT(Structure): _fields_ = [ ("ambient", c_float * 3), ("diffuse", c_float * 3), ("specular", c_float * 3), ("active", c_int), ] class ANON_MAP(Structure): _fields_ = [ ("stiffness", c_float), ("stiffnessrot", c_float), ("force", c_float), ("torque", c_float), ("alpha", c_float), ("fogstart", c_float), ("fogend", c_float), ("znear", c_float), ("zfar", c_float), ("shadowclip", c_float), ("shadowscale", c_float), ] class ANON_SCALE(Structure): _fields_ = [ ("forcewidth", c_float), ("contactwidth", c_float), ("contactheight", c_float), ("connect", c_float), ("com", c_float), ("camera", c_float), ("light", c_float), ("selectpoint", c_float), ("jointlength", c_float), ("jointwidth", c_float), ("actuatorlength", c_float), ("actuatorwidth", c_float), ("framelength", c_float), ("framewidth", c_float), ("constraint", c_float), ("slidercrank", c_float), ] class ANON_RGBA(Structure): _fields_ = [ ("fog", c_float * 4), ("force", c_float * 4), ("inertia", c_float * 4), ("joint", c_float * 4), ("actuator", c_float * 4), ("com", c_float * 4), ("camera", c_float * 4), ("light", c_float * 4), ("selectpoint", c_float * 4), ("connect", c_float * 4), ("contactpoint", c_float * 4), ("contactforce", c_float * 4), ("contactfriction", c_float * 4), ("contacttorque", c_float * 4), ("constraint", c_float * 4), ("slidercrank", c_float * 4), ("crankbroken", c_float * 4), ] _fields_ = [ ("global_", ANON_GLOBAL), ("quality", ANON_QUALITY), ("headlight", ANON_HEADLIGHT), ("map_", ANON_MAP), ("scale", ANON_SCALE), ("rgba", ANON_RGBA), ] class MJSTATISTIC(Structure): _fields_ = [ ("meanmass", c_double), ("meansize", c_double), ("extent", c_double), ("center", c_double * 3), ] class MJDATA(Structure): _fields_ = [ ("nstack", c_int), ("nbuffer", c_int), ("pstack", c_int), ("maxuse_stack", c_int), ("maxuse_con", c_int), ("maxuse_efc", c_int), ("nwarning", c_int * 8), ("warning_info", c_int * 8), ("timer_ncall", c_int * 13), ("timer_duration", c_double * 13), ("solver_iter", c_int), ("solver_trace", c_double * 200), ("solver_fwdinv", c_double * 2), ("ne", c_int), ("nf", c_int), ("nefc", c_int), ("ncon", c_int), ("time", c_double), ("energy", c_double * 2), ("buffer", POINTER(c_ubyte)), ("stack", POINTER(c_double)), ("qpos", POINTER(c_double)), ("qvel", POINTER(c_double)), ("act", POINTER(c_double)), ("ctrl", POINTER(c_double)), ("qfrc_applied", POINTER(c_double)), ("xfrc_applied", POINTER(c_double)), ("qacc", POINTER(c_double)), ("act_dot", POINTER(c_double)), ("mocap_pos", POINTER(c_double)), ("mocap_quat", POINTER(c_double)), ("userdata", POINTER(c_double)), ("sensordata", POINTER(c_double)), ("xpos", POINTER(c_double)), ("xquat", POINTER(c_double)), ("xmat", POINTER(c_double)), ("xipos", POINTER(c_double)), ("ximat", POINTER(c_double)), ("xanchor", POINTER(c_double)), ("xaxis", POINTER(c_double)), ("geom_xpos", POINTER(c_double)), ("geom_xmat", POINTER(c_double)), ("site_xpos", POINTER(c_double)), ("site_xmat", POINTER(c_double)), ("cam_xpos", POINTER(c_double)), ("cam_xmat", POINTER(c_double)), ("light_xpos", POINTER(c_double)), ("light_xdir", POINTER(c_double)), ("subtree_com", POINTER(c_double)), ("cdof", POINTER(c_double)), ("cinert", POINTER(c_double)), ("ten_wrapadr", POINTER(c_int)), ("ten_wrapnum", POINTER(c_int)), ("ten_length", POINTER(c_double)), ("ten_moment", POINTER(c_double)), ("wrap_obj", POINTER(c_int)), ("wrap_xpos", POINTER(c_double)), ("actuator_length", POINTER(c_double)), ("actuator_moment", POINTER(c_double)), ("crb", POINTER(c_double)), ("qM", POINTER(c_double)), ("qLD", POINTER(c_double)), ("qLDiagInv", POINTER(c_double)), ("qLDiagSqrtInv", POINTER(c_double)), ("contact", POINTER(MJCONTACT)), ("efc_type", POINTER(c_int)), ("efc_id", POINTER(c_int)), ("efc_rownnz", POINTER(c_int)), ("efc_rowadr", POINTER(c_int)), ("efc_colind", POINTER(c_int)), ("efc_rownnz_T", POINTER(c_int)), ("efc_rowadr_T", POINTER(c_int)), ("efc_colind_T", POINTER(c_int)), ("efc_solref", POINTER(c_double)), ("efc_solimp", POINTER(c_double)), ("efc_margin", POINTER(c_double)), ("efc_frictionloss", POINTER(c_double)), ("efc_pos", POINTER(c_double)), ("efc_J", POINTER(c_double)), ("efc_J_T", POINTER(c_double)), ("efc_diagApprox", POINTER(c_double)), ("efc_D", POINTER(c_double)), ("efc_R", POINTER(c_double)), ("efc_AR", POINTER(c_double)), ("e_ARchol", POINTER(c_double)), ("fc_e_rect", POINTER(c_double)), ("fc_AR", POINTER(c_double)), ("ten_velocity", POINTER(c_double)), ("actuator_velocity", POINTER(c_double)), ("cvel", POINTER(c_double)), ("cdof_dot", POINTER(c_double)), ("qfrc_bias", POINTER(c_double)), ("qfrc_passive", POINTER(c_double)), ("efc_vel", POINTER(c_double)), ("efc_aref", POINTER(c_double)), ("subtree_linvel", POINTER(c_double)), ("subtree_angmom", POINTER(c_double)), ("actuator_force", POINTER(c_double)), ("qfrc_actuator", POINTER(c_double)), ("qfrc_unc", POINTER(c_double)), ("qacc_unc", POINTER(c_double)), ("efc_b", POINTER(c_double)), ("fc_b", POINTER(c_double)), ("efc_force", POINTER(c_double)), ("qfrc_constraint", POINTER(c_double)), ("qfrc_inverse", POINTER(c_double)), ("cacc", POINTER(c_double)), ("cfrc_int", POINTER(c_double)), ("cfrc_ext", POINTER(c_double)), ] class MJMODEL(Structure): _fields_ = [ ("nq", c_int), ("nv", c_int), ("nu", c_int), ("na", c_int), ("nbody", c_int), ("njnt", c_int), ("ngeom", c_int), ("nsite", c_int), ("ncam", c_int), ("nlight", c_int), ("nmesh", c_int), ("nmeshvert", c_int), ("nmeshface", c_int), ("nmeshgraph", c_int), ("nhfield", c_int), ("nhfielddata", c_int), ("ntex", c_int), ("ntexdata", c_int), ("nmat", c_int), ("npair", c_int), ("nexclude", c_int), ("neq", c_int), ("ntendon", c_int), ("nwrap", c_int), ("nsensor", c_int), ("nnumeric", c_int), ("nnumericdata", c_int), ("ntext", c_int), ("ntextdata", c_int), ("ntuple", c_int), ("ntupledata", c_int), ("nkey", c_int), ("nuser_body", c_int), ("nuser_jnt", c_int), ("nuser_geom", c_int), ("nuser_site", c_int), ("nuser_tendon", c_int), ("nuser_actuator", c_int), ("nuser_sensor", c_int), ("nnames", c_int), ("nM", c_int), ("nemax", c_int), ("njmax", c_int), ("nconmax", c_int), ("nstack", c_int), ("nuserdata", c_int), ("nmocap", c_int), ("nsensordata", c_int), ("nbuffer", c_int), ("opt", MJOPTION), ("vis", MJVISUAL), ("stat", MJSTATISTIC), ("buffer", POINTER(c_ubyte)), ("qpos0", POINTER(c_double)), ("qpos_spring", POINTER(c_double)), ("body_parentid", POINTER(c_int)), ("body_rootid", POINTER(c_int)), ("body_weldid", POINTER(c_int)), ("body_mocapid", POINTER(c_int)), ("body_jntnum", POINTER(c_int)), ("body_jntadr", POINTER(c_int)), ("body_dofnum", POINTER(c_int)), ("body_dofadr", POINTER(c_int)), ("body_geomnum", POINTER(c_int)), ("body_geomadr", POINTER(c_int)), ("body_pos", POINTER(c_double)), ("body_quat", POINTER(c_double)), ("body_ipos", POINTER(c_double)), ("body_iquat", POINTER(c_double)), ("body_mass", POINTER(c_double)), ("body_subtreemass", POINTER(c_double)), ("body_inertia", POINTER(c_double)), ("body_invweight0", POINTER(c_double)), ("body_user", POINTER(c_double)), ("jnt_type", POINTER(c_int)), ("jnt_qposadr", POINTER(c_int)), ("jnt_dofadr", POINTER(c_int)), ("jnt_bodyid", POINTER(c_int)), ("jnt_limited", POINTER(c_ubyte)), ("jnt_solref", POINTER(c_double)), ("jnt_solimp", POINTER(c_double)), ("jnt_pos", POINTER(c_double)), ("jnt_axis", POINTER(c_double)), ("jnt_stiffness", POINTER(c_double)), ("jnt_range", POINTER(c_double)), ("jnt_margin", POINTER(c_double)), ("jnt_user", POINTER(c_double)), ("dof_bodyid", POINTER(c_int)), ("dof_jntid", POINTER(c_int)), ("dof_parentid", POINTER(c_int)), ("dof_Madr", POINTER(c_int)), ("dof_frictional", POINTER(c_ubyte)), ("dof_solref", POINTER(c_double)), ("dof_solimp", POINTER(c_double)), ("dof_frictionloss", POINTER(c_double)), ("dof_armature", POINTER(c_double)), ("dof_damping", POINTER(c_double)), ("dof_invweight0", POINTER(c_double)), ("geom_type", POINTER(c_int)), ("geom_contype", POINTER(c_int)), ("geom_conaffinity", POINTER(c_int)), ("geom_condim", POINTER(c_int)), ("geom_bodyid", POINTER(c_int)), ("geom_dataid", POINTER(c_int)), ("geom_matid", POINTER(c_int)), ("geom_group", POINTER(c_int)), ("geom_solmix", POINTER(c_double)), ("geom_solref", POINTER(c_double)), ("geom_solimp", POINTER(c_double)), ("geom_size", POINTER(c_double)), ("geom_rbound", POINTER(c_double)), ("geom_pos", POINTER(c_double)), ("geom_quat", POINTER(c_double)), ("geom_friction", POINTER(c_double)), ("geom_margin", POINTER(c_double)), ("geom_gap", POINTER(c_double)), ("geom_user", POINTER(c_double)), ("geom_rgba", POINTER(c_float)), ("site_type", POINTER(c_int)), ("site_bodyid", POINTER(c_int)), ("site_matid", POINTER(c_int)), ("site_group", POINTER(c_int)), ("site_size", POINTER(c_double)), ("site_pos", POINTER(c_double)), ("site_quat", POINTER(c_double)), ("site_user", POINTER(c_double)), ("site_rgba", POINTER(c_float)), ("cam_mode", POINTER(c_int)), ("cam_bodyid", POINTER(c_int)), ("cam_targetbodyid", POINTER(c_int)), ("cam_pos", POINTER(c_double)), ("cam_quat", POINTER(c_double)), ("cam_poscom0", POINTER(c_double)), ("cam_pos0", POINTER(c_double)), ("cam_mat0", POINTER(c_double)), ("cam_fovy", POINTER(c_double)), ("cam_ipd", POINTER(c_double)), ("light_mode", POINTER(c_int)), ("light_bodyid", POINTER(c_int)), ("light_targetbodyid", POINTER(c_int)), ("light_directional", POINTER(c_ubyte)), ("light_castshadow", POINTER(c_ubyte)), ("light_active", POINTER(c_ubyte)), ("light_pos", POINTER(c_double)), ("light_dir", POINTER(c_double)), ("light_poscom0", POINTER(c_double)), ("light_pos0", POINTER(c_double)), ("light_dir0", POINTER(c_double)), ("light_attenuation", POINTER(c_float)), ("light_cutoff", POINTER(c_float)), ("light_exponent", POINTER(c_float)), ("light_ambient", POINTER(c_float)), ("light_diffuse", POINTER(c_float)), ("light_specular", POINTER(c_float)), ("mesh_faceadr", POINTER(c_int)), ("mesh_facenum", POINTER(c_int)), ("mesh_vertadr", POINTER(c_int)), ("mesh_vertnum", POINTER(c_int)), ("mesh_graphadr", POINTER(c_int)), ("mesh_vert", POINTER(c_float)), ("mesh_normal", POINTER(c_float)), ("mesh_face", POINTER(c_int)), ("mesh_graph", POINTER(c_int)), ("hfield_size", POINTER(c_double)), ("hfield_nrow", POINTER(c_int)), ("hfield_ncol", POINTER(c_int)), ("hfield_adr", POINTER(c_int)), ("hfield_data", POINTER(c_float)), ("tex_type", POINTER(c_int)), ("tex_height", POINTER(c_int)), ("tex_width", POINTER(c_int)), ("tex_adr", POINTER(c_int)), ("tex_rgb", POINTER(c_ubyte)), ("mat_texid", POINTER(c_int)), ("mat_texuniform", POINTER(c_ubyte)), ("mat_texrepeat", POINTER(c_float)), ("mat_emission", POINTER(c_float)), ("mat_specular", POINTER(c_float)), ("mat_shininess", POINTER(c_float)), ("mat_reflectance", POINTER(c_float)), ("mat_rgba", POINTER(c_float)), ("pair_dim", POINTER(c_int)), ("pair_geom1", POINTER(c_int)), ("pair_geom2", POINTER(c_int)), ("pair_signature", POINTER(c_int)), ("pair_solref", POINTER(c_double)), ("pair_solimp", POINTER(c_double)), ("pair_margin", POINTER(c_double)), ("pair_gap", POINTER(c_double)), ("pair_friction", POINTER(c_double)), ("exclude_signature", POINTER(c_int)), ("eq_type", POINTER(c_int)), ("eq_obj1id", POINTER(c_int)), ("eq_obj2id", POINTER(c_int)), ("eq_active", POINTER(c_ubyte)), ("eq_solref", POINTER(c_double)), ("eq_solimp", POINTER(c_double)), ("eq_data", POINTER(c_double)), ("tendon_adr", POINTER(c_int)), ("tendon_num", POINTER(c_int)), ("tendon_matid", POINTER(c_int)), ("tendon_limited", POINTER(c_ubyte)), ("tendon_frictional", POINTER(c_ubyte)), ("tendon_width", POINTER(c_double)), ("tendon_solref_lim", POINTER(c_double)), ("tendon_solimp_lim", POINTER(c_double)), ("tendon_solref_fri", POINTER(c_double)), ("tendon_solimp_fri", POINTER(c_double)), ("tendon_range", POINTER(c_double)), ("tendon_margin", POINTER(c_double)), ("tendon_stiffness", POINTER(c_double)), ("tendon_damping", POINTER(c_double)), ("tendon_frictionloss", POINTER(c_double)), ("tendon_lengthspring", POINTER(c_double)), ("tendon_length0", POINTER(c_double)), ("tendon_invweight0", POINTER(c_double)), ("tendon_user", POINTER(c_double)), ("tendon_rgba", POINTER(c_float)), ("wrap_type", POINTER(c_int)), ("wrap_objid", POINTER(c_int)), ("wrap_prm", POINTER(c_double)), ("actuator_trntype", POINTER(c_int)), ("actuator_dyntype", POINTER(c_int)), ("actuator_gaintype", POINTER(c_int)), ("actuator_biastype", POINTER(c_int)), ("actuator_trnid", POINTER(c_int)), ("actuator_ctrllimited", POINTER(c_ubyte)), ("actuator_forcelimited", POINTER(c_ubyte)), ("actuator_dynprm", POINTER(c_double)), ("actuator_gainprm", POINTER(c_double)), ("actuator_biasprm", POINTER(c_double)), ("actuator_ctrlrange", POINTER(c_double)), ("actuator_forcerange", POINTER(c_double)), ("actuator_gear", POINTER(c_double)), ("actuator_cranklength", POINTER(c_double)), ("actuator_invweight0", POINTER(c_double)), ("actuator_length0", POINTER(c_double)), ("actuator_lengthrange", POINTER(c_double)), ("actuator_user", POINTER(c_double)), ("sensor_type", POINTER(c_int)), ("sensor_datatype", POINTER(c_int)), ("sensor_needstage", POINTER(c_int)), ("sensor_objtype", POINTER(c_int)), ("sensor_objid", POINTER(c_int)), ("sensor_dim", POINTER(c_int)), ("sensor_adr", POINTER(c_int)), ("sensor_noise", POINTER(c_double)), ("sensor_user", POINTER(c_double)), ("numeric_adr", POINTER(c_int)), ("numeric_size", POINTER(c_int)), ("numeric_data", POINTER(c_double)), ("text_adr", POINTER(c_int)), ("text_size", POINTER(c_int)), ("text_data", POINTER(c_char)), ("tuple_adr", POINTER(c_int)), ("tuple_size", POINTER(c_int)), ("tuple_objtype", POINTER(c_int)), ("tuple_objid", POINTER(c_int)), ("tuple_objprm", POINTER(c_double)), ("key_time", POINTER(c_double)), ("key_qpos", POINTER(c_double)), ("key_qvel", POINTER(c_double)), ("key_act", POINTER(c_double)), ("name_bodyadr", POINTER(c_int)), ("name_jntadr", POINTER(c_int)), ("name_geomadr", POINTER(c_int)), ("name_siteadr", POINTER(c_int)), ("name_camadr", POINTER(c_int)), ("name_lightadr", POINTER(c_int)), ("name_meshadr", POINTER(c_int)), ("name_hfieldadr", POINTER(c_int)), ("name_texadr", POINTER(c_int)), ("name_matadr", POINTER(c_int)), ("name_eqadr", POINTER(c_int)), ("name_tendonadr", POINTER(c_int)), ("name_actuatoradr", POINTER(c_int)), ("name_sensoradr", POINTER(c_int)), ("name_numericadr", POINTER(c_int)), ("name_textadr", POINTER(c_int)), ("name_tupleadr", POINTER(c_int)), ("names", POINTER(c_char)), ] class MjContactWrapper(object): def __init__(self, wrapped, size_src=None): self._wrapped = wrapped self._size_src = size_src @property def ptr(self): return self._wrapped @property def obj(self): return self._wrapped.contents @property def dist(self): return self._wrapped.contents.dist @dist.setter def dist(self, value): self._wrapped.contents.dist = value @property def pos(self): arr = np.reshape(np.fromiter(self._wrapped.contents.pos, dtype=np.double, count=(3)), (3, )) arr.setflags(write=False) return arr @pos.setter def pos(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.pos, val_ptr, 3 * sizeof(c_double)) @property def frame(self): arr = np.reshape(np.fromiter(self._wrapped.contents.frame, dtype=np.double, count=(9)), (9, )) arr.setflags(write=False) return arr @frame.setter def frame(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.frame, val_ptr, 9 * sizeof(c_double)) @property def includemargin(self): return self._wrapped.contents.includemargin @includemargin.setter def includemargin(self, value): self._wrapped.contents.includemargin = value @property def friction(self): arr = np.reshape(np.fromiter(self._wrapped.contents.friction, dtype=np.double, count=(5)), (5, )) arr.setflags(write=False) return arr @friction.setter def friction(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.friction, val_ptr, 5 * sizeof(c_double)) @property def solref(self): arr = np.reshape(np.fromiter(self._wrapped.contents.solref, dtype=np.double, count=(2)), (2, )) arr.setflags(write=False) return arr @solref.setter def solref(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.solref, val_ptr, 2 * sizeof(c_double)) @property def solimp(self): arr = np.reshape(np.fromiter(self._wrapped.contents.solimp, dtype=np.double, count=(3)), (3, )) arr.setflags(write=False) return arr @solimp.setter def solimp(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.solimp, val_ptr, 3 * sizeof(c_double)) @property def mu(self): return self._wrapped.contents.mu @mu.setter def mu(self, value): self._wrapped.contents.mu = value @property def coef(self): arr = np.reshape(np.fromiter(self._wrapped.contents.coef, dtype=np.double, count=(5)), (5, )) arr.setflags(write=False) return arr @coef.setter def coef(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.coef, val_ptr, 5 * sizeof(c_double)) @property def zone(self): return self._wrapped.contents.zone @zone.setter def zone(self, value): self._wrapped.contents.zone = value @property def dim(self): return self._wrapped.contents.dim @dim.setter def dim(self, value): self._wrapped.contents.dim = value @property def geom1(self): return self._wrapped.contents.geom1 @geom1.setter def geom1(self, value): self._wrapped.contents.geom1 = value @property def geom2(self): return self._wrapped.contents.geom2 @geom2.setter def geom2(self, value): self._wrapped.contents.geom2 = value @property def exclude(self): return self._wrapped.contents.exclude @exclude.setter def exclude(self, value): self._wrapped.contents.exclude = value @property def efc_address(self): return self._wrapped.contents.efc_address @efc_address.setter def efc_address(self, value): self._wrapped.contents.efc_address = value class MjrRectWrapper(object): def __init__(self, wrapped, size_src=None): self._wrapped = wrapped self._size_src = size_src @property def ptr(self): return self._wrapped @property def obj(self): return self._wrapped.contents @property def left(self): return self._wrapped.contents.left @left.setter def left(self, value): self._wrapped.contents.left = value @property def bottom(self): return self._wrapped.contents.bottom @bottom.setter def bottom(self, value): self._wrapped.contents.bottom = value @property def width(self): return self._wrapped.contents.width @width.setter def width(self, value): self._wrapped.contents.width = value @property def height(self): return self._wrapped.contents.height @height.setter def height(self, value): self._wrapped.contents.height = value class MjvGeomWrapper(object): def __init__(self, wrapped, size_src=None): self._wrapped = wrapped self._size_src = size_src @property def ptr(self): return self._wrapped @property def obj(self): return self._wrapped.contents @property def type(self): return self._wrapped.contents.type @type.setter def type(self, value): self._wrapped.contents.type = value @property def dataid(self): return self._wrapped.contents.dataid @dataid.setter def dataid(self, value): self._wrapped.contents.dataid = value @property def objtype(self): return self._wrapped.contents.objtype @objtype.setter def objtype(self, value): self._wrapped.contents.objtype = value @property def objid(self): return self._wrapped.contents.objid @objid.setter def objid(self, value): self._wrapped.contents.objid = value @property def category(self): return self._wrapped.contents.category @category.setter def category(self, value): self._wrapped.contents.category = value @property def texid(self): return self._wrapped.contents.texid @texid.setter def texid(self, value): self._wrapped.contents.texid = value @property def texuniform(self): return self._wrapped.contents.texuniform @texuniform.setter def texuniform(self, value): self._wrapped.contents.texuniform = value @property def texrepeat(self): arr = np.reshape(np.fromiter(self._wrapped.contents.texrepeat, dtype=np.float, count=(2)), (2, )) arr.setflags(write=False) return arr @texrepeat.setter def texrepeat(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.texrepeat, val_ptr, 2 * sizeof(c_float)) @property def size(self): arr = np.reshape(np.fromiter(self._wrapped.contents.size, dtype=np.float, count=(3)), (3, )) arr.setflags(write=False) return arr @size.setter def size(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.size, val_ptr, 3 * sizeof(c_float)) @property def pos(self): arr = np.reshape(np.fromiter(self._wrapped.contents.pos, dtype=np.float, count=(3)), (3, )) arr.setflags(write=False) return arr @pos.setter def pos(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.pos, val_ptr, 3 * sizeof(c_float)) @property def mat(self): arr = np.reshape(np.fromiter(self._wrapped.contents.mat, dtype=np.float, count=(9)), (9, )) arr.setflags(write=False) return arr @mat.setter def mat(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.mat, val_ptr, 9 * sizeof(c_float)) @property def rgba(self): arr = np.reshape(np.fromiter(self._wrapped.contents.rgba, dtype=np.float, count=(4)), (4, )) arr.setflags(write=False) return arr @rgba.setter def rgba(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.rgba, val_ptr, 4 * sizeof(c_float)) @property def emission(self): return self._wrapped.contents.emission @emission.setter def emission(self, value): self._wrapped.contents.emission = value @property def specular(self): return self._wrapped.contents.specular @specular.setter def specular(self, value): self._wrapped.contents.specular = value @property def shininess(self): return self._wrapped.contents.shininess @shininess.setter def shininess(self, value): self._wrapped.contents.shininess = value @property def reflectance(self): return self._wrapped.contents.reflectance @reflectance.setter def reflectance(self, value): self._wrapped.contents.reflectance = value @property def label(self): return self._wrapped.contents.label @label.setter def label(self, value): self._wrapped.contents.label = value @property def camdist(self): return self._wrapped.contents.camdist @camdist.setter def camdist(self, value): self._wrapped.contents.camdist = value @property def rbound(self): return self._wrapped.contents.rbound @rbound.setter def rbound(self, value): self._wrapped.contents.rbound = value @property def transparent(self): return self._wrapped.contents.transparent @transparent.setter def transparent(self, value): self._wrapped.contents.transparent = value class MjvSceneWrapper(object): def __init__(self, wrapped, size_src=None): self._wrapped = wrapped self._size_src = size_src @property def ptr(self): return self._wrapped @property def obj(self): return self._wrapped.contents @property def maxgeom(self): return self._wrapped.contents.maxgeom @maxgeom.setter def maxgeom(self, value): self._wrapped.contents.maxgeom = value @property def ngeom(self): return self._wrapped.contents.ngeom @ngeom.setter def ngeom(self, value): self._wrapped.contents.ngeom = value @property def nlight(self): return self._wrapped.contents.nlight @nlight.setter def nlight(self, value): self._wrapped.contents.nlight = value @property def lights(self): return self._wrapped.contents.lights @lights.setter def lights(self, value): self._wrapped.contents.lights = value @property def camera(self): return self._wrapped.contents.camera @camera.setter def camera(self, value): self._wrapped.contents.camera = value @property def enabletransform(self): return self._wrapped.contents.enabletransform @enabletransform.setter def enabletransform(self, value): self._wrapped.contents.enabletransform = value @property def translate(self): arr = np.reshape(np.fromiter(self._wrapped.contents.translate, dtype=np.float, count=(3)), (3, )) arr.setflags(write=False) return arr @translate.setter def translate(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.translate, val_ptr, 3 * sizeof(c_float)) @property def rotate(self): arr = np.reshape(np.fromiter(self._wrapped.contents.rotate, dtype=np.float, count=(4)), (4, )) arr.setflags(write=False) return arr @rotate.setter def rotate(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.rotate, val_ptr, 4 * sizeof(c_float)) @property def scale(self): return self._wrapped.contents.scale @scale.setter def scale(self, value): self._wrapped.contents.scale = value @property def stereo(self): return self._wrapped.contents.stereo @stereo.setter def stereo(self, value): self._wrapped.contents.stereo = value @property def flags(self): arr = np.reshape(np.fromiter(self._wrapped.contents.flags, dtype=np.uint8, count=(5)), (5, )) arr.setflags(write=False) return arr @flags.setter def flags(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_ubyte)) memmove(self._wrapped.contents.flags, val_ptr, 5 * sizeof(c_ubyte)) class MjvPerturbWrapper(object): def __init__(self, wrapped, size_src=None): self._wrapped = wrapped self._size_src = size_src @property def ptr(self): return self._wrapped @property def obj(self): return self._wrapped.contents @property def select(self): return self._wrapped.contents.select @select.setter def select(self, value): self._wrapped.contents.select = value @property def active(self): return self._wrapped.contents.active @active.setter def active(self, value): self._wrapped.contents.active = value @property def refpos(self): arr = np.reshape(np.fromiter(self._wrapped.contents.refpos, dtype=np.double, count=(3)), (3, )) arr.setflags(write=False) return arr @refpos.setter def refpos(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.refpos, val_ptr, 3 * sizeof(c_double)) @property def refquat(self): arr = np.reshape(np.fromiter(self._wrapped.contents.refquat, dtype=np.double, count=(4)), (4, )) arr.setflags(write=False) return arr @refquat.setter def refquat(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.refquat, val_ptr, 4 * sizeof(c_double)) @property def localpos(self): arr = np.reshape(np.fromiter(self._wrapped.contents.localpos, dtype=np.double, count=(3)), (3, )) arr.setflags(write=False) return arr @localpos.setter def localpos(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.localpos, val_ptr, 3 * sizeof(c_double)) @property def scale(self): return self._wrapped.contents.scale @scale.setter def scale(self, value): self._wrapped.contents.scale = value class MjrContextWrapper(object): def __init__(self, wrapped, size_src=None): self._wrapped = wrapped self._size_src = size_src @property def ptr(self): return self._wrapped @property def obj(self): return self._wrapped.contents @property def lineWidth(self): return self._wrapped.contents.lineWidth @lineWidth.setter def lineWidth(self, value): self._wrapped.contents.lineWidth = value @property def shadowClip(self): return self._wrapped.contents.shadowClip @shadowClip.setter def shadowClip(self, value): self._wrapped.contents.shadowClip = value @property def shadowScale(self): return self._wrapped.contents.shadowScale @shadowScale.setter def shadowScale(self, value): self._wrapped.contents.shadowScale = value @property def shadowSize(self): return self._wrapped.contents.shadowSize @shadowSize.setter def shadowSize(self, value): self._wrapped.contents.shadowSize = value @property def offWidth(self): return self._wrapped.contents.offWidth @offWidth.setter def offWidth(self, value): self._wrapped.contents.offWidth = value @property def offHeight(self): return self._wrapped.contents.offHeight @offHeight.setter def offHeight(self, value): self._wrapped.contents.offHeight = value @property def offSamples(self): return self._wrapped.contents.offSamples @offSamples.setter def offSamples(self, value): self._wrapped.contents.offSamples = value @property def offFBO(self): return self._wrapped.contents.offFBO @offFBO.setter def offFBO(self, value): self._wrapped.contents.offFBO = value @property def offFBO_r(self): return self._wrapped.contents.offFBO_r @offFBO_r.setter def offFBO_r(self, value): self._wrapped.contents.offFBO_r = value @property def offColor(self): return self._wrapped.contents.offColor @offColor.setter def offColor(self, value): self._wrapped.contents.offColor = value @property def offColor_r(self): return self._wrapped.contents.offColor_r @offColor_r.setter def offColor_r(self, value): self._wrapped.contents.offColor_r = value @property def offDepthStencil(self): return self._wrapped.contents.offDepthStencil @offDepthStencil.setter def offDepthStencil(self, value): self._wrapped.contents.offDepthStencil = value @property def offDepthStencil_r(self): return self._wrapped.contents.offDepthStencil_r @offDepthStencil_r.setter def offDepthStencil_r(self, value): self._wrapped.contents.offDepthStencil_r = value @property def shadowFBO(self): return self._wrapped.contents.shadowFBO @shadowFBO.setter def shadowFBO(self, value): self._wrapped.contents.shadowFBO = value @property def shadowTex(self): return self._wrapped.contents.shadowTex @shadowTex.setter def shadowTex(self, value): self._wrapped.contents.shadowTex = value @property def ntexture(self): return self._wrapped.contents.ntexture @ntexture.setter def ntexture(self, value): self._wrapped.contents.ntexture = value @property def textureType(self): arr = np.reshape(np.fromiter(self._wrapped.contents.textureType, dtype=np.int, count=(100)), (100, )) arr.setflags(write=False) return arr @textureType.setter def textureType(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.textureType, val_ptr, 100 * sizeof(c_int)) @property def texture(self): arr = np.reshape(np.fromiter(self._wrapped.contents.texture, dtype=np.int, count=(100)), (100, )) arr.setflags(write=False) return arr @texture.setter def texture(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.texture, val_ptr, 100 * sizeof(c_int)) @property def basePlane(self): return self._wrapped.contents.basePlane @basePlane.setter def basePlane(self, value): self._wrapped.contents.basePlane = value @property def baseMesh(self): return self._wrapped.contents.baseMesh @baseMesh.setter def baseMesh(self, value): self._wrapped.contents.baseMesh = value @property def baseHField(self): return self._wrapped.contents.baseHField @baseHField.setter def baseHField(self, value): self._wrapped.contents.baseHField = value @property def baseBuiltin(self): return self._wrapped.contents.baseBuiltin @baseBuiltin.setter def baseBuiltin(self, value): self._wrapped.contents.baseBuiltin = value @property def baseFontNormal(self): return self._wrapped.contents.baseFontNormal @baseFontNormal.setter def baseFontNormal(self, value): self._wrapped.contents.baseFontNormal = value @property def baseFontShadow(self): return self._wrapped.contents.baseFontShadow @baseFontShadow.setter def baseFontShadow(self, value): self._wrapped.contents.baseFontShadow = value @property def baseFontBig(self): return self._wrapped.contents.baseFontBig @baseFontBig.setter def baseFontBig(self, value): self._wrapped.contents.baseFontBig = value @property def rangePlane(self): return self._wrapped.contents.rangePlane @rangePlane.setter def rangePlane(self, value): self._wrapped.contents.rangePlane = value @property def rangeMesh(self): return self._wrapped.contents.rangeMesh @rangeMesh.setter def rangeMesh(self, value): self._wrapped.contents.rangeMesh = value @property def rangeHField(self): return self._wrapped.contents.rangeHField @rangeHField.setter def rangeHField(self, value): self._wrapped.contents.rangeHField = value @property def rangeBuiltin(self): return self._wrapped.contents.rangeBuiltin @rangeBuiltin.setter def rangeBuiltin(self, value): self._wrapped.contents.rangeBuiltin = value @property def rangeFont(self): return self._wrapped.contents.rangeFont @rangeFont.setter def rangeFont(self, value): self._wrapped.contents.rangeFont = value @property def charWidth(self): arr = np.reshape(np.fromiter(self._wrapped.contents.charWidth, dtype=np.int, count=(127)), (127, )) arr.setflags(write=False) return arr @charWidth.setter def charWidth(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.charWidth, val_ptr, 127 * sizeof(c_int)) @property def charWidthBig(self): arr = np.reshape(np.fromiter(self._wrapped.contents.charWidthBig, dtype=np.int, count=(127)), (127, )) arr.setflags(write=False) return arr @charWidthBig.setter def charWidthBig(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.charWidthBig, val_ptr, 127 * sizeof(c_int)) @property def charHeight(self): return self._wrapped.contents.charHeight @charHeight.setter def charHeight(self, value): self._wrapped.contents.charHeight = value @property def charHeightBig(self): return self._wrapped.contents.charHeightBig @charHeightBig.setter def charHeightBig(self, value): self._wrapped.contents.charHeightBig = value @property def glewInitialized(self): return self._wrapped.contents.glewInitialized @glewInitialized.setter def glewInitialized(self, value): self._wrapped.contents.glewInitialized = value @property def windowAvailable(self): return self._wrapped.contents.windowAvailable @windowAvailable.setter def windowAvailable(self, value): self._wrapped.contents.windowAvailable = value @property def windowSamples(self): return self._wrapped.contents.windowSamples @windowSamples.setter def windowSamples(self, value): self._wrapped.contents.windowSamples = value @property def windowStereo(self): return self._wrapped.contents.windowStereo @windowStereo.setter def windowStereo(self, value): self._wrapped.contents.windowStereo = value @property def windowDoublebuffer(self): return self._wrapped.contents.windowDoublebuffer @windowDoublebuffer.setter def windowDoublebuffer(self, value): self._wrapped.contents.windowDoublebuffer = value @property def currentBuffer(self): return self._wrapped.contents.currentBuffer @currentBuffer.setter def currentBuffer(self, value): self._wrapped.contents.currentBuffer = value class MjvCameraWrapper(object): def __init__(self, wrapped, size_src=None): self._wrapped = wrapped self._size_src = size_src @property def ptr(self): return self._wrapped @property def obj(self): return self._wrapped.contents @property def type(self): return self._wrapped.contents.type @type.setter def type(self, value): self._wrapped.contents.type = value @property def fixedcamid(self): return self._wrapped.contents.fixedcamid @fixedcamid.setter def fixedcamid(self, value): self._wrapped.contents.fixedcamid = value @property def trackbodyid(self): return self._wrapped.contents.trackbodyid @trackbodyid.setter def trackbodyid(self, value): self._wrapped.contents.trackbodyid = value @property def lookat(self): arr = np.reshape(np.fromiter(self._wrapped.contents.lookat, dtype=np.double, count=(3)), (3, )) arr.setflags(write=False) return arr @lookat.setter def lookat(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.lookat, val_ptr, 3 * sizeof(c_double)) @property def distance(self): return self._wrapped.contents.distance @distance.setter def distance(self, value): self._wrapped.contents.distance = value @property def azimuth(self): return self._wrapped.contents.azimuth @azimuth.setter def azimuth(self, value): self._wrapped.contents.azimuth = value @property def elevation(self): return self._wrapped.contents.elevation @elevation.setter def elevation(self, value): self._wrapped.contents.elevation = value class MjvOptionWrapper(object): def __init__(self, wrapped, size_src=None): self._wrapped = wrapped self._size_src = size_src @property def ptr(self): return self._wrapped @property def obj(self): return self._wrapped.contents @property def label(self): return self._wrapped.contents.label @label.setter def label(self, value): self._wrapped.contents.label = value @property def frame(self): return self._wrapped.contents.frame @frame.setter def frame(self, value): self._wrapped.contents.frame = value @property def geomgroup(self): arr = np.reshape(np.fromiter(self._wrapped.contents.geomgroup, dtype=np.uint8, count=(5)), (5, )) arr.setflags(write=False) return arr @geomgroup.setter def geomgroup(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_ubyte)) memmove(self._wrapped.contents.geomgroup, val_ptr, 5 * sizeof(c_ubyte)) @property def sitegroup(self): arr = np.reshape(np.fromiter(self._wrapped.contents.sitegroup, dtype=np.uint8, count=(5)), (5, )) arr.setflags(write=False) return arr @sitegroup.setter def sitegroup(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_ubyte)) memmove(self._wrapped.contents.sitegroup, val_ptr, 5 * sizeof(c_ubyte)) @property def flags(self): arr = np.reshape(np.fromiter(self._wrapped.contents.flags, dtype=np.uint8, count=(18)), (18, )) arr.setflags(write=False) return arr @flags.setter def flags(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_ubyte)) memmove(self._wrapped.contents.flags, val_ptr, 18 * sizeof(c_ubyte)) class MjvGeomWrapper(object): def __init__(self, wrapped, size_src=None): self._wrapped = wrapped self._size_src = size_src @property def ptr(self): return self._wrapped @property def obj(self): return self._wrapped.contents @property def type(self): return self._wrapped.contents.type @type.setter def type(self, value): self._wrapped.contents.type = value @property def dataid(self): return self._wrapped.contents.dataid @dataid.setter def dataid(self, value): self._wrapped.contents.dataid = value @property def objtype(self): return self._wrapped.contents.objtype @objtype.setter def objtype(self, value): self._wrapped.contents.objtype = value @property def objid(self): return self._wrapped.contents.objid @objid.setter def objid(self, value): self._wrapped.contents.objid = value @property def category(self): return self._wrapped.contents.category @category.setter def category(self, value): self._wrapped.contents.category = value @property def texid(self): return self._wrapped.contents.texid @texid.setter def texid(self, value): self._wrapped.contents.texid = value @property def texuniform(self): return self._wrapped.contents.texuniform @texuniform.setter def texuniform(self, value): self._wrapped.contents.texuniform = value @property def texrepeat(self): arr = np.reshape(np.fromiter(self._wrapped.contents.texrepeat, dtype=np.float, count=(2)), (2, )) arr.setflags(write=False) return arr @texrepeat.setter def texrepeat(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.texrepeat, val_ptr, 2 * sizeof(c_float)) @property def size(self): arr = np.reshape(np.fromiter(self._wrapped.contents.size, dtype=np.float, count=(3)), (3, )) arr.setflags(write=False) return arr @size.setter def size(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.size, val_ptr, 3 * sizeof(c_float)) @property def pos(self): arr = np.reshape(np.fromiter(self._wrapped.contents.pos, dtype=np.float, count=(3)), (3, )) arr.setflags(write=False) return arr @pos.setter def pos(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.pos, val_ptr, 3 * sizeof(c_float)) @property def mat(self): arr = np.reshape(np.fromiter(self._wrapped.contents.mat, dtype=np.float, count=(9)), (9, )) arr.setflags(write=False) return arr @mat.setter def mat(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.mat, val_ptr, 9 * sizeof(c_float)) @property def rgba(self): arr = np.reshape(np.fromiter(self._wrapped.contents.rgba, dtype=np.float, count=(4)), (4, )) arr.setflags(write=False) return arr @rgba.setter def rgba(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.rgba, val_ptr, 4 * sizeof(c_float)) @property def emission(self): return self._wrapped.contents.emission @emission.setter def emission(self, value): self._wrapped.contents.emission = value @property def specular(self): return self._wrapped.contents.specular @specular.setter def specular(self, value): self._wrapped.contents.specular = value @property def shininess(self): return self._wrapped.contents.shininess @shininess.setter def shininess(self, value): self._wrapped.contents.shininess = value @property def reflectance(self): return self._wrapped.contents.reflectance @reflectance.setter def reflectance(self, value): self._wrapped.contents.reflectance = value @property def label(self): return self._wrapped.contents.label @label.setter def label(self, value): self._wrapped.contents.label = value @property def camdist(self): return self._wrapped.contents.camdist @camdist.setter def camdist(self, value): self._wrapped.contents.camdist = value @property def rbound(self): return self._wrapped.contents.rbound @rbound.setter def rbound(self, value): self._wrapped.contents.rbound = value @property def transparent(self): return self._wrapped.contents.transparent @transparent.setter def transparent(self, value): self._wrapped.contents.transparent = value class MjvLightWrapper(object): def __init__(self, wrapped, size_src=None): self._wrapped = wrapped self._size_src = size_src @property def ptr(self): return self._wrapped @property def obj(self): return self._wrapped.contents @property def pos(self): arr = np.reshape(np.fromiter(self._wrapped.contents.pos, dtype=np.float, count=(3)), (3, )) arr.setflags(write=False) return arr @pos.setter def pos(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.pos, val_ptr, 3 * sizeof(c_float)) @property def dir(self): arr = np.reshape(np.fromiter(self._wrapped.contents.dir, dtype=np.float, count=(3)), (3, )) arr.setflags(write=False) return arr @dir.setter def dir(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.dir, val_ptr, 3 * sizeof(c_float)) @property def attenuation(self): arr = np.reshape(np.fromiter(self._wrapped.contents.attenuation, dtype=np.float, count=(3)), (3, )) arr.setflags(write=False) return arr @attenuation.setter def attenuation(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.attenuation, val_ptr, 3 * sizeof(c_float)) @property def cutoff(self): return self._wrapped.contents.cutoff @cutoff.setter def cutoff(self, value): self._wrapped.contents.cutoff = value @property def exponent(self): return self._wrapped.contents.exponent @exponent.setter def exponent(self, value): self._wrapped.contents.exponent = value @property def ambient(self): arr = np.reshape(np.fromiter(self._wrapped.contents.ambient, dtype=np.float, count=(3)), (3, )) arr.setflags(write=False) return arr @ambient.setter def ambient(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.ambient, val_ptr, 3 * sizeof(c_float)) @property def diffuse(self): arr = np.reshape(np.fromiter(self._wrapped.contents.diffuse, dtype=np.float, count=(3)), (3, )) arr.setflags(write=False) return arr @diffuse.setter def diffuse(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.diffuse, val_ptr, 3 * sizeof(c_float)) @property def specular(self): arr = np.reshape(np.fromiter(self._wrapped.contents.specular, dtype=np.float, count=(3)), (3, )) arr.setflags(write=False) return arr @specular.setter def specular(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.specular, val_ptr, 3 * sizeof(c_float)) @property def headlight(self): return self._wrapped.contents.headlight @headlight.setter def headlight(self, value): self._wrapped.contents.headlight = value @property def directional(self): return self._wrapped.contents.directional @directional.setter def directional(self, value): self._wrapped.contents.directional = value @property def castshadow(self): return self._wrapped.contents.castshadow @castshadow.setter def castshadow(self, value): self._wrapped.contents.castshadow = value class MjOptionWrapper(object): def __init__(self, wrapped, size_src=None): self._wrapped = wrapped self._size_src = size_src @property def ptr(self): return self._wrapped @property def obj(self): return self._wrapped.contents @property def timestep(self): return self._wrapped.contents.timestep @timestep.setter def timestep(self, value): self._wrapped.contents.timestep = value @property def apirate(self): return self._wrapped.contents.apirate @apirate.setter def apirate(self, value): self._wrapped.contents.apirate = value @property def tolerance(self): return self._wrapped.contents.tolerance @tolerance.setter def tolerance(self, value): self._wrapped.contents.tolerance = value @property def impratio(self): return self._wrapped.contents.impratio @impratio.setter def impratio(self, value): self._wrapped.contents.impratio = value @property def gravity(self): arr = np.reshape(np.fromiter(self._wrapped.contents.gravity, dtype=np.double, count=(3)), (3, )) arr.setflags(write=False) return arr @gravity.setter def gravity(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.gravity, val_ptr, 3 * sizeof(c_double)) @property def wind(self): arr = np.reshape(np.fromiter(self._wrapped.contents.wind, dtype=np.double, count=(3)), (3, )) arr.setflags(write=False) return arr @wind.setter def wind(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.wind, val_ptr, 3 * sizeof(c_double)) @property def magnetic(self): arr = np.reshape(np.fromiter(self._wrapped.contents.magnetic, dtype=np.double, count=(3)), (3, )) arr.setflags(write=False) return arr @magnetic.setter def magnetic(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.magnetic, val_ptr, 3 * sizeof(c_double)) @property def density(self): return self._wrapped.contents.density @density.setter def density(self, value): self._wrapped.contents.density = value @property def viscosity(self): return self._wrapped.contents.viscosity @viscosity.setter def viscosity(self, value): self._wrapped.contents.viscosity = value @property def o_margin(self): return self._wrapped.contents.o_margin @o_margin.setter def o_margin(self, value): self._wrapped.contents.o_margin = value @property def o_solref(self): arr = np.reshape(np.fromiter(self._wrapped.contents.o_solref, dtype=np.double, count=(2)), (2, )) arr.setflags(write=False) return arr @o_solref.setter def o_solref(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.o_solref, val_ptr, 2 * sizeof(c_double)) @property def o_solimp(self): arr = np.reshape(np.fromiter(self._wrapped.contents.o_solimp, dtype=np.double, count=(3)), (3, )) arr.setflags(write=False) return arr @o_solimp.setter def o_solimp(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.o_solimp, val_ptr, 3 * sizeof(c_double)) @property def mpr_tolerance(self): return self._wrapped.contents.mpr_tolerance @mpr_tolerance.setter def mpr_tolerance(self, value): self._wrapped.contents.mpr_tolerance = value @property def mpr_iterations(self): return self._wrapped.contents.mpr_iterations @mpr_iterations.setter def mpr_iterations(self, value): self._wrapped.contents.mpr_iterations = value @property def integrator(self): return self._wrapped.contents.integrator @integrator.setter def integrator(self, value): self._wrapped.contents.integrator = value @property def collision(self): return self._wrapped.contents.collision @collision.setter def collision(self, value): self._wrapped.contents.collision = value @property def impedance(self): return self._wrapped.contents.impedance @impedance.setter def impedance(self, value): self._wrapped.contents.impedance = value @property def reference(self): return self._wrapped.contents.reference @reference.setter def reference(self, value): self._wrapped.contents.reference = value @property def solver(self): return self._wrapped.contents.solver @solver.setter def solver(self, value): self._wrapped.contents.solver = value @property def iterations(self): return self._wrapped.contents.iterations @iterations.setter def iterations(self, value): self._wrapped.contents.iterations = value @property def disableflags(self): return self._wrapped.contents.disableflags @disableflags.setter def disableflags(self, value): self._wrapped.contents.disableflags = value @property def enableflags(self): return self._wrapped.contents.enableflags @enableflags.setter def enableflags(self, value): self._wrapped.contents.enableflags = value class MjVisualWrapper(object): def __init__(self, wrapped, size_src=None): self._wrapped = wrapped self._size_src = size_src @property def ptr(self): return self._wrapped @property def obj(self): return self._wrapped.contents @property def global_(self): return self._wrapped.contents.global_ @global_.setter def global_(self, value): self._wrapped.contents.global_ = value @property def quality(self): return self._wrapped.contents.quality @quality.setter def quality(self, value): self._wrapped.contents.quality = value @property def headlight(self): return self._wrapped.contents.headlight @headlight.setter def headlight(self, value): self._wrapped.contents.headlight = value @property def map_(self): return self._wrapped.contents.map_ @map_.setter def map_(self, value): self._wrapped.contents.map_ = value @property def scale(self): return self._wrapped.contents.scale @scale.setter def scale(self, value): self._wrapped.contents.scale = value @property def rgba(self): return self._wrapped.contents.rgba @rgba.setter def rgba(self, value): self._wrapped.contents.rgba = value class MjStatisticWrapper(object): def __init__(self, wrapped, size_src=None): self._wrapped = wrapped self._size_src = size_src @property def ptr(self): return self._wrapped @property def obj(self): return self._wrapped.contents @property def meanmass(self): return self._wrapped.contents.meanmass @meanmass.setter def meanmass(self, value): self._wrapped.contents.meanmass = value @property def meansize(self): return self._wrapped.contents.meansize @meansize.setter def meansize(self, value): self._wrapped.contents.meansize = value @property def extent(self): return self._wrapped.contents.extent @extent.setter def extent(self, value): self._wrapped.contents.extent = value @property def center(self): arr = np.reshape(np.fromiter(self._wrapped.contents.center, dtype=np.double, count=(3)), (3, )) arr.setflags(write=False) return arr @center.setter def center(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.center, val_ptr, 3 * sizeof(c_double)) class MjDataWrapper(object): def __init__(self, wrapped, size_src=None): self._wrapped = wrapped self._size_src = size_src @property def ptr(self): return self._wrapped @property def obj(self): return self._wrapped.contents @property def nstack(self): return self._wrapped.contents.nstack @nstack.setter def nstack(self, value): self._wrapped.contents.nstack = value @property def nbuffer(self): return self._wrapped.contents.nbuffer @nbuffer.setter def nbuffer(self, value): self._wrapped.contents.nbuffer = value @property def pstack(self): return self._wrapped.contents.pstack @pstack.setter def pstack(self, value): self._wrapped.contents.pstack = value @property def maxuse_stack(self): return self._wrapped.contents.maxuse_stack @maxuse_stack.setter def maxuse_stack(self, value): self._wrapped.contents.maxuse_stack = value @property def maxuse_con(self): return self._wrapped.contents.maxuse_con @maxuse_con.setter def maxuse_con(self, value): self._wrapped.contents.maxuse_con = value @property def maxuse_efc(self): return self._wrapped.contents.maxuse_efc @maxuse_efc.setter def maxuse_efc(self, value): self._wrapped.contents.maxuse_efc = value @property def nwarning(self): arr = np.reshape(np.fromiter(self._wrapped.contents.nwarning, dtype=np.int, count=(8)), (8, )) arr.setflags(write=False) return arr @nwarning.setter def nwarning(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.nwarning, val_ptr, 8 * sizeof(c_int)) @property def warning_info(self): arr = np.reshape(np.fromiter(self._wrapped.contents.warning_info, dtype=np.int, count=(8)), (8, )) arr.setflags(write=False) return arr @warning_info.setter def warning_info(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.warning_info, val_ptr, 8 * sizeof(c_int)) @property def timer_ncall(self): arr = np.reshape(np.fromiter(self._wrapped.contents.timer_ncall, dtype=np.int, count=(13)), (13, )) arr.setflags(write=False) return arr @timer_ncall.setter def timer_ncall(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.timer_ncall, val_ptr, 13 * sizeof(c_int)) @property def timer_duration(self): arr = np.reshape(np.fromiter(self._wrapped.contents.timer_duration, dtype=np.double, count=(13)), (13, )) arr.setflags(write=False) return arr @timer_duration.setter def timer_duration(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.timer_duration, val_ptr, 13 * sizeof(c_double)) @property def solver_iter(self): return self._wrapped.contents.solver_iter @solver_iter.setter def solver_iter(self, value): self._wrapped.contents.solver_iter = value @property def solver_trace(self): arr = np.reshape(np.fromiter(self._wrapped.contents.solver_trace, dtype=np.double, count=(200)), (200, )) arr.setflags(write=False) return arr @solver_trace.setter def solver_trace(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.solver_trace, val_ptr, 200 * sizeof(c_double)) @property def solver_fwdinv(self): arr = np.reshape(np.fromiter(self._wrapped.contents.solver_fwdinv, dtype=np.double, count=(2)), (2, )) arr.setflags(write=False) return arr @solver_fwdinv.setter def solver_fwdinv(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.solver_fwdinv, val_ptr, 2 * sizeof(c_double)) @property def ne(self): return self._wrapped.contents.ne @ne.setter def ne(self, value): self._wrapped.contents.ne = value @property def nf(self): return self._wrapped.contents.nf @nf.setter def nf(self, value): self._wrapped.contents.nf = value @property def nefc(self): return self._wrapped.contents.nefc @nefc.setter def nefc(self, value): self._wrapped.contents.nefc = value @property def ncon(self): return self._wrapped.contents.ncon @ncon.setter def ncon(self, value): self._wrapped.contents.ncon = value @property def time(self): return self._wrapped.contents.time @time.setter def time(self, value): self._wrapped.contents.time = value @property def energy(self): arr = np.reshape(np.fromiter(self._wrapped.contents.energy, dtype=np.double, count=(2)), (2, )) arr.setflags(write=False) return arr @energy.setter def energy(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.energy, val_ptr, 2 * sizeof(c_double)) @property def buffer(self): arr = np.reshape(np.fromiter(self._wrapped.contents.buffer, dtype=np.uint8, count=(self.nbuffer)), (self.nbuffer, )) arr.setflags(write=False) return arr @buffer.setter def buffer(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_ubyte)) memmove(self._wrapped.contents.buffer, val_ptr, self.nbuffer * sizeof(c_ubyte)) @property def stack(self): arr = np.reshape(np.fromiter(self._wrapped.contents.stack, dtype=np.double, count=(self.nstack)), (self.nstack, )) arr.setflags(write=False) return arr @stack.setter def stack(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.stack, val_ptr, self.nstack * sizeof(c_double)) @property def qpos(self): arr = np.reshape(np.fromiter(self._wrapped.contents.qpos, dtype=np.double, count=(self._size_src.nq*1)), (self._size_src.nq, 1, )) arr.setflags(write=False) return arr @qpos.setter def qpos(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.qpos, val_ptr, self._size_src.nq*1 * sizeof(c_double)) @property def qvel(self): arr = np.reshape(np.fromiter(self._wrapped.contents.qvel, dtype=np.double, count=(self._size_src.nv*1)), (self._size_src.nv, 1, )) arr.setflags(write=False) return arr @qvel.setter def qvel(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.qvel, val_ptr, self._size_src.nv*1 * sizeof(c_double)) @property def act(self): arr = np.reshape(np.fromiter(self._wrapped.contents.act, dtype=np.double, count=(self._size_src.na*1)), (self._size_src.na, 1, )) arr.setflags(write=False) return arr @act.setter def act(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.act, val_ptr, self._size_src.na*1 * sizeof(c_double)) @property def ctrl(self): arr = np.reshape(np.fromiter(self._wrapped.contents.ctrl, dtype=np.double, count=(self._size_src.nu*1)), (self._size_src.nu, 1, )) arr.setflags(write=False) return arr @ctrl.setter def ctrl(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.ctrl, val_ptr, self._size_src.nu*1 * sizeof(c_double)) @property def qfrc_applied(self): arr = np.reshape(np.fromiter(self._wrapped.contents.qfrc_applied, dtype=np.double, count=(self._size_src.nv*1)), (self._size_src.nv, 1, )) arr.setflags(write=False) return arr @qfrc_applied.setter def qfrc_applied(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.qfrc_applied, val_ptr, self._size_src.nv*1 * sizeof(c_double)) @property def xfrc_applied(self): arr = np.reshape(np.fromiter(self._wrapped.contents.xfrc_applied, dtype=np.double, count=(self._size_src.nbody*6)), (self._size_src.nbody, 6, )) arr.setflags(write=False) return arr @xfrc_applied.setter def xfrc_applied(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.xfrc_applied, val_ptr, self._size_src.nbody*6 * sizeof(c_double)) @property def qacc(self): arr = np.reshape(np.fromiter(self._wrapped.contents.qacc, dtype=np.double, count=(self._size_src.nv*1)), (self._size_src.nv, 1, )) arr.setflags(write=False) return arr @qacc.setter def qacc(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.qacc, val_ptr, self._size_src.nv*1 * sizeof(c_double)) @property def act_dot(self): arr = np.reshape(np.fromiter(self._wrapped.contents.act_dot, dtype=np.double, count=(self._size_src.na*1)), (self._size_src.na, 1, )) arr.setflags(write=False) return arr @act_dot.setter def act_dot(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.act_dot, val_ptr, self._size_src.na*1 * sizeof(c_double)) @property def mocap_pos(self): arr = np.reshape(np.fromiter(self._wrapped.contents.mocap_pos, dtype=np.double, count=(self._size_src.nmocap*3)), (self._size_src.nmocap, 3, )) arr.setflags(write=False) return arr @mocap_pos.setter def mocap_pos(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.mocap_pos, val_ptr, self._size_src.nmocap*3 * sizeof(c_double)) @property def mocap_quat(self): arr = np.reshape(np.fromiter(self._wrapped.contents.mocap_quat, dtype=np.double, count=(self._size_src.nmocap*4)), (self._size_src.nmocap, 4, )) arr.setflags(write=False) return arr @mocap_quat.setter def mocap_quat(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.mocap_quat, val_ptr, self._size_src.nmocap*4 * sizeof(c_double)) @property def userdata(self): arr = np.reshape(np.fromiter(self._wrapped.contents.userdata, dtype=np.double, count=(self._size_src.nuserdata*1)), (self._size_src.nuserdata, 1, )) arr.setflags(write=False) return arr @userdata.setter def userdata(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.userdata, val_ptr, self._size_src.nuserdata*1 * sizeof(c_double)) @property def sensordata(self): arr = np.reshape(np.fromiter(self._wrapped.contents.sensordata, dtype=np.double, count=(self._size_src.nsensordata*1)), (self._size_src.nsensordata, 1, )) arr.setflags(write=False) return arr @sensordata.setter def sensordata(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.sensordata, val_ptr, self._size_src.nsensordata*1 * sizeof(c_double)) @property def xpos(self): arr = np.reshape(np.fromiter(self._wrapped.contents.xpos, dtype=np.double, count=(self._size_src.nbody*3)), (self._size_src.nbody, 3, )) arr.setflags(write=False) return arr @xpos.setter def xpos(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.xpos, val_ptr, self._size_src.nbody*3 * sizeof(c_double)) @property def xquat(self): arr = np.reshape(np.fromiter(self._wrapped.contents.xquat, dtype=np.double, count=(self._size_src.nbody*4)), (self._size_src.nbody, 4, )) arr.setflags(write=False) return arr @xquat.setter def xquat(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.xquat, val_ptr, self._size_src.nbody*4 * sizeof(c_double)) @property def xmat(self): arr = np.reshape(np.fromiter(self._wrapped.contents.xmat, dtype=np.double, count=(self._size_src.nbody*9)), (self._size_src.nbody, 9, )) arr.setflags(write=False) return arr @xmat.setter def xmat(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.xmat, val_ptr, self._size_src.nbody*9 * sizeof(c_double)) @property def xipos(self): arr = np.reshape(np.fromiter(self._wrapped.contents.xipos, dtype=np.double, count=(self._size_src.nbody*3)), (self._size_src.nbody, 3, )) arr.setflags(write=False) return arr @xipos.setter def xipos(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.xipos, val_ptr, self._size_src.nbody*3 * sizeof(c_double)) @property def ximat(self): arr = np.reshape(np.fromiter(self._wrapped.contents.ximat, dtype=np.double, count=(self._size_src.nbody*9)), (self._size_src.nbody, 9, )) arr.setflags(write=False) return arr @ximat.setter def ximat(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.ximat, val_ptr, self._size_src.nbody*9 * sizeof(c_double)) @property def xanchor(self): arr = np.reshape(np.fromiter(self._wrapped.contents.xanchor, dtype=np.double, count=(self._size_src.njnt*3)), (self._size_src.njnt, 3, )) arr.setflags(write=False) return arr @xanchor.setter def xanchor(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.xanchor, val_ptr, self._size_src.njnt*3 * sizeof(c_double)) @property def xaxis(self): arr = np.reshape(np.fromiter(self._wrapped.contents.xaxis, dtype=np.double, count=(self._size_src.njnt*3)), (self._size_src.njnt, 3, )) arr.setflags(write=False) return arr @xaxis.setter def xaxis(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.xaxis, val_ptr, self._size_src.njnt*3 * sizeof(c_double)) @property def geom_xpos(self): arr = np.reshape(np.fromiter(self._wrapped.contents.geom_xpos, dtype=np.double, count=(self._size_src.ngeom*3)), (self._size_src.ngeom, 3, )) arr.setflags(write=False) return arr @geom_xpos.setter def geom_xpos(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.geom_xpos, val_ptr, self._size_src.ngeom*3 * sizeof(c_double)) @property def geom_xmat(self): arr = np.reshape(np.fromiter(self._wrapped.contents.geom_xmat, dtype=np.double, count=(self._size_src.ngeom*9)), (self._size_src.ngeom, 9, )) arr.setflags(write=False) return arr @geom_xmat.setter def geom_xmat(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.geom_xmat, val_ptr, self._size_src.ngeom*9 * sizeof(c_double)) @property def site_xpos(self): arr = np.reshape(np.fromiter(self._wrapped.contents.site_xpos, dtype=np.double, count=(self._size_src.nsite*3)), (self._size_src.nsite, 3, )) arr.setflags(write=False) return arr @site_xpos.setter def site_xpos(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.site_xpos, val_ptr, self._size_src.nsite*3 * sizeof(c_double)) @property def site_xmat(self): arr = np.reshape(np.fromiter(self._wrapped.contents.site_xmat, dtype=np.double, count=(self._size_src.nsite*9)), (self._size_src.nsite, 9, )) arr.setflags(write=False) return arr @site_xmat.setter def site_xmat(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.site_xmat, val_ptr, self._size_src.nsite*9 * sizeof(c_double)) @property def cam_xpos(self): arr = np.reshape(np.fromiter(self._wrapped.contents.cam_xpos, dtype=np.double, count=(self._size_src.ncam*3)), (self._size_src.ncam, 3, )) arr.setflags(write=False) return arr @cam_xpos.setter def cam_xpos(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.cam_xpos, val_ptr, self._size_src.ncam*3 * sizeof(c_double)) @property def cam_xmat(self): arr = np.reshape(np.fromiter(self._wrapped.contents.cam_xmat, dtype=np.double, count=(self._size_src.ncam*9)), (self._size_src.ncam, 9, )) arr.setflags(write=False) return arr @cam_xmat.setter def cam_xmat(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.cam_xmat, val_ptr, self._size_src.ncam*9 * sizeof(c_double)) @property def light_xpos(self): arr = np.reshape(np.fromiter(self._wrapped.contents.light_xpos, dtype=np.double, count=(self._size_src.nlight*3)), (self._size_src.nlight, 3, )) arr.setflags(write=False) return arr @light_xpos.setter def light_xpos(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.light_xpos, val_ptr, self._size_src.nlight*3 * sizeof(c_double)) @property def light_xdir(self): arr = np.reshape(np.fromiter(self._wrapped.contents.light_xdir, dtype=np.double, count=(self._size_src.nlight*3)), (self._size_src.nlight, 3, )) arr.setflags(write=False) return arr @light_xdir.setter def light_xdir(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.light_xdir, val_ptr, self._size_src.nlight*3 * sizeof(c_double)) @property def subtree_com(self): arr = np.reshape(np.fromiter(self._wrapped.contents.subtree_com, dtype=np.double, count=(self._size_src.nbody*3)), (self._size_src.nbody, 3, )) arr.setflags(write=False) return arr @subtree_com.setter def subtree_com(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.subtree_com, val_ptr, self._size_src.nbody*3 * sizeof(c_double)) @property def cdof(self): arr = np.reshape(np.fromiter(self._wrapped.contents.cdof, dtype=np.double, count=(self._size_src.nv*6)), (self._size_src.nv, 6, )) arr.setflags(write=False) return arr @cdof.setter def cdof(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.cdof, val_ptr, self._size_src.nv*6 * sizeof(c_double)) @property def cinert(self): arr = np.reshape(np.fromiter(self._wrapped.contents.cinert, dtype=np.double, count=(self._size_src.nbody*10)), (self._size_src.nbody, 10, )) arr.setflags(write=False) return arr @cinert.setter def cinert(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.cinert, val_ptr, self._size_src.nbody*10 * sizeof(c_double)) @property def ten_wrapadr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.ten_wrapadr, dtype=np.int, count=(self._size_src.ntendon*1)), (self._size_src.ntendon, 1, )) arr.setflags(write=False) return arr @ten_wrapadr.setter def ten_wrapadr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.ten_wrapadr, val_ptr, self._size_src.ntendon*1 * sizeof(c_int)) @property def ten_wrapnum(self): arr = np.reshape(np.fromiter(self._wrapped.contents.ten_wrapnum, dtype=np.int, count=(self._size_src.ntendon*1)), (self._size_src.ntendon, 1, )) arr.setflags(write=False) return arr @ten_wrapnum.setter def ten_wrapnum(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.ten_wrapnum, val_ptr, self._size_src.ntendon*1 * sizeof(c_int)) @property def ten_length(self): arr = np.reshape(np.fromiter(self._wrapped.contents.ten_length, dtype=np.double, count=(self._size_src.ntendon*1)), (self._size_src.ntendon, 1, )) arr.setflags(write=False) return arr @ten_length.setter def ten_length(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.ten_length, val_ptr, self._size_src.ntendon*1 * sizeof(c_double)) @property def ten_moment(self): arr = np.reshape(np.fromiter(self._wrapped.contents.ten_moment, dtype=np.double, count=(self._size_src.ntendon*self._size_src.nv)), (self._size_src.ntendon, self._size_src.nv, )) arr.setflags(write=False) return arr @ten_moment.setter def ten_moment(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.ten_moment, val_ptr, self._size_src.ntendon*self._size_src.nv * sizeof(c_double)) @property def wrap_obj(self): arr = np.reshape(np.fromiter(self._wrapped.contents.wrap_obj, dtype=np.int, count=(self._size_src.nwrap*2)), (self._size_src.nwrap, 2, )) arr.setflags(write=False) return arr @wrap_obj.setter def wrap_obj(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.wrap_obj, val_ptr, self._size_src.nwrap*2 * sizeof(c_int)) @property def wrap_xpos(self): arr = np.reshape(np.fromiter(self._wrapped.contents.wrap_xpos, dtype=np.double, count=(self._size_src.nwrap*6)), (self._size_src.nwrap, 6, )) arr.setflags(write=False) return arr @wrap_xpos.setter def wrap_xpos(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.wrap_xpos, val_ptr, self._size_src.nwrap*6 * sizeof(c_double)) @property def actuator_length(self): arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_length, dtype=np.double, count=(self._size_src.nu*1)), (self._size_src.nu, 1, )) arr.setflags(write=False) return arr @actuator_length.setter def actuator_length(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.actuator_length, val_ptr, self._size_src.nu*1 * sizeof(c_double)) @property def actuator_moment(self): arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_moment, dtype=np.double, count=(self._size_src.nu*self._size_src.nv)), (self._size_src.nu, self._size_src.nv, )) arr.setflags(write=False) return arr @actuator_moment.setter def actuator_moment(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.actuator_moment, val_ptr, self._size_src.nu*self._size_src.nv * sizeof(c_double)) @property def crb(self): arr = np.reshape(np.fromiter(self._wrapped.contents.crb, dtype=np.double, count=(self._size_src.nbody*10)), (self._size_src.nbody, 10, )) arr.setflags(write=False) return arr @crb.setter def crb(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.crb, val_ptr, self._size_src.nbody*10 * sizeof(c_double)) @property def qM(self): arr = np.reshape(np.fromiter(self._wrapped.contents.qM, dtype=np.double, count=(self._size_src.nM*1)), (self._size_src.nM, 1, )) arr.setflags(write=False) return arr @qM.setter def qM(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.qM, val_ptr, self._size_src.nM*1 * sizeof(c_double)) @property def qLD(self): arr = np.reshape(np.fromiter(self._wrapped.contents.qLD, dtype=np.double, count=(self._size_src.nM*1)), (self._size_src.nM, 1, )) arr.setflags(write=False) return arr @qLD.setter def qLD(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.qLD, val_ptr, self._size_src.nM*1 * sizeof(c_double)) @property def qLDiagInv(self): arr = np.reshape(np.fromiter(self._wrapped.contents.qLDiagInv, dtype=np.double, count=(self._size_src.nv*1)), (self._size_src.nv, 1, )) arr.setflags(write=False) return arr @qLDiagInv.setter def qLDiagInv(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.qLDiagInv, val_ptr, self._size_src.nv*1 * sizeof(c_double)) @property def qLDiagSqrtInv(self): arr = np.reshape(np.fromiter(self._wrapped.contents.qLDiagSqrtInv, dtype=np.double, count=(self._size_src.nv*1)), (self._size_src.nv, 1, )) arr.setflags(write=False) return arr @qLDiagSqrtInv.setter def qLDiagSqrtInv(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.qLDiagSqrtInv, val_ptr, self._size_src.nv*1 * sizeof(c_double)) @property def efc_type(self): arr = np.reshape(np.fromiter(self._wrapped.contents.efc_type, dtype=np.int, count=(self._size_src.njmax*1)), (self._size_src.njmax, 1, )) arr.setflags(write=False) return arr @efc_type.setter def efc_type(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.efc_type, val_ptr, self._size_src.njmax*1 * sizeof(c_int)) @property def efc_id(self): arr = np.reshape(np.fromiter(self._wrapped.contents.efc_id, dtype=np.int, count=(self._size_src.njmax*1)), (self._size_src.njmax, 1, )) arr.setflags(write=False) return arr @efc_id.setter def efc_id(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.efc_id, val_ptr, self._size_src.njmax*1 * sizeof(c_int)) @property def efc_rownnz(self): arr = np.reshape(np.fromiter(self._wrapped.contents.efc_rownnz, dtype=np.int, count=(self._size_src.njmax*1)), (self._size_src.njmax, 1, )) arr.setflags(write=False) return arr @efc_rownnz.setter def efc_rownnz(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.efc_rownnz, val_ptr, self._size_src.njmax*1 * sizeof(c_int)) @property def efc_rowadr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.efc_rowadr, dtype=np.int, count=(self._size_src.njmax*1)), (self._size_src.njmax, 1, )) arr.setflags(write=False) return arr @efc_rowadr.setter def efc_rowadr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.efc_rowadr, val_ptr, self._size_src.njmax*1 * sizeof(c_int)) @property def efc_colind(self): arr = np.reshape(np.fromiter(self._wrapped.contents.efc_colind, dtype=np.int, count=(self._size_src.njmax*self._size_src.nv)), (self._size_src.njmax, self._size_src.nv, )) arr.setflags(write=False) return arr @efc_colind.setter def efc_colind(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.efc_colind, val_ptr, self._size_src.njmax*self._size_src.nv * sizeof(c_int)) @property def efc_rownnz_T(self): arr = np.reshape(np.fromiter(self._wrapped.contents.efc_rownnz_T, dtype=np.int, count=(self._size_src.nv*1)), (self._size_src.nv, 1, )) arr.setflags(write=False) return arr @efc_rownnz_T.setter def efc_rownnz_T(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.efc_rownnz_T, val_ptr, self._size_src.nv*1 * sizeof(c_int)) @property def efc_rowadr_T(self): arr = np.reshape(np.fromiter(self._wrapped.contents.efc_rowadr_T, dtype=np.int, count=(self._size_src.nv*1)), (self._size_src.nv, 1, )) arr.setflags(write=False) return arr @efc_rowadr_T.setter def efc_rowadr_T(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.efc_rowadr_T, val_ptr, self._size_src.nv*1 * sizeof(c_int)) @property def efc_colind_T(self): arr = np.reshape(np.fromiter(self._wrapped.contents.efc_colind_T, dtype=np.int, count=(self._size_src.nv*self._size_src.njmax)), (self._size_src.nv, self._size_src.njmax, )) arr.setflags(write=False) return arr @efc_colind_T.setter def efc_colind_T(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.efc_colind_T, val_ptr, self._size_src.nv*self._size_src.njmax * sizeof(c_int)) @property def efc_solref(self): arr = np.reshape(np.fromiter(self._wrapped.contents.efc_solref, dtype=np.double, count=(self._size_src.njmax*2)), (self._size_src.njmax, 2, )) arr.setflags(write=False) return arr @efc_solref.setter def efc_solref(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.efc_solref, val_ptr, self._size_src.njmax*2 * sizeof(c_double)) @property def efc_solimp(self): arr = np.reshape(np.fromiter(self._wrapped.contents.efc_solimp, dtype=np.double, count=(self._size_src.njmax*3)), (self._size_src.njmax, 3, )) arr.setflags(write=False) return arr @efc_solimp.setter def efc_solimp(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.efc_solimp, val_ptr, self._size_src.njmax*3 * sizeof(c_double)) @property def efc_margin(self): arr = np.reshape(np.fromiter(self._wrapped.contents.efc_margin, dtype=np.double, count=(self._size_src.njmax*1)), (self._size_src.njmax, 1, )) arr.setflags(write=False) return arr @efc_margin.setter def efc_margin(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.efc_margin, val_ptr, self._size_src.njmax*1 * sizeof(c_double)) @property def efc_frictionloss(self): arr = np.reshape(np.fromiter(self._wrapped.contents.efc_frictionloss, dtype=np.double, count=(self._size_src.njmax*1)), (self._size_src.njmax, 1, )) arr.setflags(write=False) return arr @efc_frictionloss.setter def efc_frictionloss(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.efc_frictionloss, val_ptr, self._size_src.njmax*1 * sizeof(c_double)) @property def efc_pos(self): arr = np.reshape(np.fromiter(self._wrapped.contents.efc_pos, dtype=np.double, count=(self._size_src.njmax*1)), (self._size_src.njmax, 1, )) arr.setflags(write=False) return arr @efc_pos.setter def efc_pos(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.efc_pos, val_ptr, self._size_src.njmax*1 * sizeof(c_double)) @property def efc_J(self): arr = np.reshape(np.fromiter(self._wrapped.contents.efc_J, dtype=np.double, count=(self._size_src.njmax*self._size_src.nv)), (self._size_src.njmax, self._size_src.nv, )) arr.setflags(write=False) return arr @efc_J.setter def efc_J(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.efc_J, val_ptr, self._size_src.njmax*self._size_src.nv * sizeof(c_double)) @property def efc_J_T(self): arr = np.reshape(np.fromiter(self._wrapped.contents.efc_J_T, dtype=np.double, count=(self._size_src.nv*self._size_src.njmax)), (self._size_src.nv, self._size_src.njmax, )) arr.setflags(write=False) return arr @efc_J_T.setter def efc_J_T(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.efc_J_T, val_ptr, self._size_src.nv*self._size_src.njmax * sizeof(c_double)) @property def efc_diagApprox(self): arr = np.reshape(np.fromiter(self._wrapped.contents.efc_diagApprox, dtype=np.double, count=(self._size_src.njmax*1)), (self._size_src.njmax, 1, )) arr.setflags(write=False) return arr @efc_diagApprox.setter def efc_diagApprox(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.efc_diagApprox, val_ptr, self._size_src.njmax*1 * sizeof(c_double)) @property def efc_D(self): arr = np.reshape(np.fromiter(self._wrapped.contents.efc_D, dtype=np.double, count=(self._size_src.njmax*1)), (self._size_src.njmax, 1, )) arr.setflags(write=False) return arr @efc_D.setter def efc_D(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.efc_D, val_ptr, self._size_src.njmax*1 * sizeof(c_double)) @property def efc_R(self): arr = np.reshape(np.fromiter(self._wrapped.contents.efc_R, dtype=np.double, count=(self._size_src.njmax*1)), (self._size_src.njmax, 1, )) arr.setflags(write=False) return arr @efc_R.setter def efc_R(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.efc_R, val_ptr, self._size_src.njmax*1 * sizeof(c_double)) @property def efc_AR(self): arr = np.reshape(np.fromiter(self._wrapped.contents.efc_AR, dtype=np.double, count=(self._size_src.njmax*self._size_src.njmax)), (self._size_src.njmax, self._size_src.njmax, )) arr.setflags(write=False) return arr @efc_AR.setter def efc_AR(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.efc_AR, val_ptr, self._size_src.njmax*self._size_src.njmax * sizeof(c_double)) @property def e_ARchol(self): arr = np.reshape(np.fromiter(self._wrapped.contents.e_ARchol, dtype=np.double, count=(self._size_src.nemax*self._size_src.nemax)), (self._size_src.nemax, self._size_src.nemax, )) arr.setflags(write=False) return arr @e_ARchol.setter def e_ARchol(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.e_ARchol, val_ptr, self._size_src.nemax*self._size_src.nemax * sizeof(c_double)) @property def fc_e_rect(self): arr = np.reshape(np.fromiter(self._wrapped.contents.fc_e_rect, dtype=np.double, count=(self._size_src.njmax*self._size_src.nemax)), (self._size_src.njmax, self._size_src.nemax, )) arr.setflags(write=False) return arr @fc_e_rect.setter def fc_e_rect(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.fc_e_rect, val_ptr, self._size_src.njmax*self._size_src.nemax * sizeof(c_double)) @property def fc_AR(self): arr = np.reshape(np.fromiter(self._wrapped.contents.fc_AR, dtype=np.double, count=(self._size_src.njmax*self._size_src.njmax)), (self._size_src.njmax, self._size_src.njmax, )) arr.setflags(write=False) return arr @fc_AR.setter def fc_AR(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.fc_AR, val_ptr, self._size_src.njmax*self._size_src.njmax * sizeof(c_double)) @property def ten_velocity(self): arr = np.reshape(np.fromiter(self._wrapped.contents.ten_velocity, dtype=np.double, count=(self._size_src.ntendon*1)), (self._size_src.ntendon, 1, )) arr.setflags(write=False) return arr @ten_velocity.setter def ten_velocity(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.ten_velocity, val_ptr, self._size_src.ntendon*1 * sizeof(c_double)) @property def actuator_velocity(self): arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_velocity, dtype=np.double, count=(self._size_src.nu*1)), (self._size_src.nu, 1, )) arr.setflags(write=False) return arr @actuator_velocity.setter def actuator_velocity(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.actuator_velocity, val_ptr, self._size_src.nu*1 * sizeof(c_double)) @property def cvel(self): arr = np.reshape(np.fromiter(self._wrapped.contents.cvel, dtype=np.double, count=(self._size_src.nbody*6)), (self._size_src.nbody, 6, )) arr.setflags(write=False) return arr @cvel.setter def cvel(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.cvel, val_ptr, self._size_src.nbody*6 * sizeof(c_double)) @property def cdof_dot(self): arr = np.reshape(np.fromiter(self._wrapped.contents.cdof_dot, dtype=np.double, count=(self._size_src.nv*6)), (self._size_src.nv, 6, )) arr.setflags(write=False) return arr @cdof_dot.setter def cdof_dot(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.cdof_dot, val_ptr, self._size_src.nv*6 * sizeof(c_double)) @property def qfrc_bias(self): arr = np.reshape(np.fromiter(self._wrapped.contents.qfrc_bias, dtype=np.double, count=(self._size_src.nv*1)), (self._size_src.nv, 1, )) arr.setflags(write=False) return arr @qfrc_bias.setter def qfrc_bias(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.qfrc_bias, val_ptr, self._size_src.nv*1 * sizeof(c_double)) @property def qfrc_passive(self): arr = np.reshape(np.fromiter(self._wrapped.contents.qfrc_passive, dtype=np.double, count=(self._size_src.nv*1)), (self._size_src.nv, 1, )) arr.setflags(write=False) return arr @qfrc_passive.setter def qfrc_passive(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.qfrc_passive, val_ptr, self._size_src.nv*1 * sizeof(c_double)) @property def efc_vel(self): arr = np.reshape(np.fromiter(self._wrapped.contents.efc_vel, dtype=np.double, count=(self._size_src.njmax*1)), (self._size_src.njmax, 1, )) arr.setflags(write=False) return arr @efc_vel.setter def efc_vel(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.efc_vel, val_ptr, self._size_src.njmax*1 * sizeof(c_double)) @property def efc_aref(self): arr = np.reshape(np.fromiter(self._wrapped.contents.efc_aref, dtype=np.double, count=(self._size_src.njmax*1)), (self._size_src.njmax, 1, )) arr.setflags(write=False) return arr @efc_aref.setter def efc_aref(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.efc_aref, val_ptr, self._size_src.njmax*1 * sizeof(c_double)) @property def subtree_linvel(self): arr = np.reshape(np.fromiter(self._wrapped.contents.subtree_linvel, dtype=np.double, count=(self._size_src.nbody*3)), (self._size_src.nbody, 3, )) arr.setflags(write=False) return arr @subtree_linvel.setter def subtree_linvel(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.subtree_linvel, val_ptr, self._size_src.nbody*3 * sizeof(c_double)) @property def subtree_angmom(self): arr = np.reshape(np.fromiter(self._wrapped.contents.subtree_angmom, dtype=np.double, count=(self._size_src.nbody*3)), (self._size_src.nbody, 3, )) arr.setflags(write=False) return arr @subtree_angmom.setter def subtree_angmom(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.subtree_angmom, val_ptr, self._size_src.nbody*3 * sizeof(c_double)) @property def actuator_force(self): arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_force, dtype=np.double, count=(self._size_src.nu*1)), (self._size_src.nu, 1, )) arr.setflags(write=False) return arr @actuator_force.setter def actuator_force(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.actuator_force, val_ptr, self._size_src.nu*1 * sizeof(c_double)) @property def qfrc_actuator(self): arr = np.reshape(np.fromiter(self._wrapped.contents.qfrc_actuator, dtype=np.double, count=(self._size_src.nv*1)), (self._size_src.nv, 1, )) arr.setflags(write=False) return arr @qfrc_actuator.setter def qfrc_actuator(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.qfrc_actuator, val_ptr, self._size_src.nv*1 * sizeof(c_double)) @property def qfrc_unc(self): arr = np.reshape(np.fromiter(self._wrapped.contents.qfrc_unc, dtype=np.double, count=(self._size_src.nv*1)), (self._size_src.nv, 1, )) arr.setflags(write=False) return arr @qfrc_unc.setter def qfrc_unc(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.qfrc_unc, val_ptr, self._size_src.nv*1 * sizeof(c_double)) @property def qacc_unc(self): arr = np.reshape(np.fromiter(self._wrapped.contents.qacc_unc, dtype=np.double, count=(self._size_src.nv*1)), (self._size_src.nv, 1, )) arr.setflags(write=False) return arr @qacc_unc.setter def qacc_unc(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.qacc_unc, val_ptr, self._size_src.nv*1 * sizeof(c_double)) @property def efc_b(self): arr = np.reshape(np.fromiter(self._wrapped.contents.efc_b, dtype=np.double, count=(self._size_src.njmax*1)), (self._size_src.njmax, 1, )) arr.setflags(write=False) return arr @efc_b.setter def efc_b(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.efc_b, val_ptr, self._size_src.njmax*1 * sizeof(c_double)) @property def fc_b(self): arr = np.reshape(np.fromiter(self._wrapped.contents.fc_b, dtype=np.double, count=(self._size_src.njmax*1)), (self._size_src.njmax, 1, )) arr.setflags(write=False) return arr @fc_b.setter def fc_b(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.fc_b, val_ptr, self._size_src.njmax*1 * sizeof(c_double)) @property def efc_force(self): arr = np.reshape(np.fromiter(self._wrapped.contents.efc_force, dtype=np.double, count=(self._size_src.njmax*1)), (self._size_src.njmax, 1, )) arr.setflags(write=False) return arr @efc_force.setter def efc_force(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.efc_force, val_ptr, self._size_src.njmax*1 * sizeof(c_double)) @property def qfrc_constraint(self): arr = np.reshape(np.fromiter(self._wrapped.contents.qfrc_constraint, dtype=np.double, count=(self._size_src.nv*1)), (self._size_src.nv, 1, )) arr.setflags(write=False) return arr @qfrc_constraint.setter def qfrc_constraint(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.qfrc_constraint, val_ptr, self._size_src.nv*1 * sizeof(c_double)) @property def qfrc_inverse(self): arr = np.reshape(np.fromiter(self._wrapped.contents.qfrc_inverse, dtype=np.double, count=(self._size_src.nv*1)), (self._size_src.nv, 1, )) arr.setflags(write=False) return arr @qfrc_inverse.setter def qfrc_inverse(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.qfrc_inverse, val_ptr, self._size_src.nv*1 * sizeof(c_double)) @property def cacc(self): arr = np.reshape(np.fromiter(self._wrapped.contents.cacc, dtype=np.double, count=(self._size_src.nbody*6)), (self._size_src.nbody, 6, )) arr.setflags(write=False) return arr @cacc.setter def cacc(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.cacc, val_ptr, self._size_src.nbody*6 * sizeof(c_double)) @property def cfrc_int(self): arr = np.reshape(np.fromiter(self._wrapped.contents.cfrc_int, dtype=np.double, count=(self._size_src.nbody*6)), (self._size_src.nbody, 6, )) arr.setflags(write=False) return arr @cfrc_int.setter def cfrc_int(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.cfrc_int, val_ptr, self._size_src.nbody*6 * sizeof(c_double)) @property def cfrc_ext(self): arr = np.reshape(np.fromiter(self._wrapped.contents.cfrc_ext, dtype=np.double, count=(self._size_src.nbody*6)), (self._size_src.nbody, 6, )) arr.setflags(write=False) return arr @cfrc_ext.setter def cfrc_ext(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.cfrc_ext, val_ptr, self._size_src.nbody*6 * sizeof(c_double)) class MjModelWrapper(object): def __init__(self, wrapped, size_src=None): self._wrapped = wrapped self._size_src = size_src @property def ptr(self): return self._wrapped @property def obj(self): return self._wrapped.contents @property def nq(self): return self._wrapped.contents.nq @nq.setter def nq(self, value): self._wrapped.contents.nq = value @property def nv(self): return self._wrapped.contents.nv @nv.setter def nv(self, value): self._wrapped.contents.nv = value @property def nu(self): return self._wrapped.contents.nu @nu.setter def nu(self, value): self._wrapped.contents.nu = value @property def na(self): return self._wrapped.contents.na @na.setter def na(self, value): self._wrapped.contents.na = value @property def nbody(self): return self._wrapped.contents.nbody @nbody.setter def nbody(self, value): self._wrapped.contents.nbody = value @property def njnt(self): return self._wrapped.contents.njnt @njnt.setter def njnt(self, value): self._wrapped.contents.njnt = value @property def ngeom(self): return self._wrapped.contents.ngeom @ngeom.setter def ngeom(self, value): self._wrapped.contents.ngeom = value @property def nsite(self): return self._wrapped.contents.nsite @nsite.setter def nsite(self, value): self._wrapped.contents.nsite = value @property def ncam(self): return self._wrapped.contents.ncam @ncam.setter def ncam(self, value): self._wrapped.contents.ncam = value @property def nlight(self): return self._wrapped.contents.nlight @nlight.setter def nlight(self, value): self._wrapped.contents.nlight = value @property def nmesh(self): return self._wrapped.contents.nmesh @nmesh.setter def nmesh(self, value): self._wrapped.contents.nmesh = value @property def nmeshvert(self): return self._wrapped.contents.nmeshvert @nmeshvert.setter def nmeshvert(self, value): self._wrapped.contents.nmeshvert = value @property def nmeshface(self): return self._wrapped.contents.nmeshface @nmeshface.setter def nmeshface(self, value): self._wrapped.contents.nmeshface = value @property def nmeshgraph(self): return self._wrapped.contents.nmeshgraph @nmeshgraph.setter def nmeshgraph(self, value): self._wrapped.contents.nmeshgraph = value @property def nhfield(self): return self._wrapped.contents.nhfield @nhfield.setter def nhfield(self, value): self._wrapped.contents.nhfield = value @property def nhfielddata(self): return self._wrapped.contents.nhfielddata @nhfielddata.setter def nhfielddata(self, value): self._wrapped.contents.nhfielddata = value @property def ntex(self): return self._wrapped.contents.ntex @ntex.setter def ntex(self, value): self._wrapped.contents.ntex = value @property def ntexdata(self): return self._wrapped.contents.ntexdata @ntexdata.setter def ntexdata(self, value): self._wrapped.contents.ntexdata = value @property def nmat(self): return self._wrapped.contents.nmat @nmat.setter def nmat(self, value): self._wrapped.contents.nmat = value @property def npair(self): return self._wrapped.contents.npair @npair.setter def npair(self, value): self._wrapped.contents.npair = value @property def nexclude(self): return self._wrapped.contents.nexclude @nexclude.setter def nexclude(self, value): self._wrapped.contents.nexclude = value @property def neq(self): return self._wrapped.contents.neq @neq.setter def neq(self, value): self._wrapped.contents.neq = value @property def ntendon(self): return self._wrapped.contents.ntendon @ntendon.setter def ntendon(self, value): self._wrapped.contents.ntendon = value @property def nwrap(self): return self._wrapped.contents.nwrap @nwrap.setter def nwrap(self, value): self._wrapped.contents.nwrap = value @property def nsensor(self): return self._wrapped.contents.nsensor @nsensor.setter def nsensor(self, value): self._wrapped.contents.nsensor = value @property def nnumeric(self): return self._wrapped.contents.nnumeric @nnumeric.setter def nnumeric(self, value): self._wrapped.contents.nnumeric = value @property def nnumericdata(self): return self._wrapped.contents.nnumericdata @nnumericdata.setter def nnumericdata(self, value): self._wrapped.contents.nnumericdata = value @property def ntext(self): return self._wrapped.contents.ntext @ntext.setter def ntext(self, value): self._wrapped.contents.ntext = value @property def ntextdata(self): return self._wrapped.contents.ntextdata @ntextdata.setter def ntextdata(self, value): self._wrapped.contents.ntextdata = value @property def ntuple(self): return self._wrapped.contents.ntuple @ntuple.setter def ntuple(self, value): self._wrapped.contents.ntuple = value @property def ntupledata(self): return self._wrapped.contents.ntupledata @ntupledata.setter def ntupledata(self, value): self._wrapped.contents.ntupledata = value @property def nkey(self): return self._wrapped.contents.nkey @nkey.setter def nkey(self, value): self._wrapped.contents.nkey = value @property def nuser_body(self): return self._wrapped.contents.nuser_body @nuser_body.setter def nuser_body(self, value): self._wrapped.contents.nuser_body = value @property def nuser_jnt(self): return self._wrapped.contents.nuser_jnt @nuser_jnt.setter def nuser_jnt(self, value): self._wrapped.contents.nuser_jnt = value @property def nuser_geom(self): return self._wrapped.contents.nuser_geom @nuser_geom.setter def nuser_geom(self, value): self._wrapped.contents.nuser_geom = value @property def nuser_site(self): return self._wrapped.contents.nuser_site @nuser_site.setter def nuser_site(self, value): self._wrapped.contents.nuser_site = value @property def nuser_tendon(self): return self._wrapped.contents.nuser_tendon @nuser_tendon.setter def nuser_tendon(self, value): self._wrapped.contents.nuser_tendon = value @property def nuser_actuator(self): return self._wrapped.contents.nuser_actuator @nuser_actuator.setter def nuser_actuator(self, value): self._wrapped.contents.nuser_actuator = value @property def nuser_sensor(self): return self._wrapped.contents.nuser_sensor @nuser_sensor.setter def nuser_sensor(self, value): self._wrapped.contents.nuser_sensor = value @property def nnames(self): return self._wrapped.contents.nnames @nnames.setter def nnames(self, value): self._wrapped.contents.nnames = value @property def nM(self): return self._wrapped.contents.nM @nM.setter def nM(self, value): self._wrapped.contents.nM = value @property def nemax(self): return self._wrapped.contents.nemax @nemax.setter def nemax(self, value): self._wrapped.contents.nemax = value @property def njmax(self): return self._wrapped.contents.njmax @njmax.setter def njmax(self, value): self._wrapped.contents.njmax = value @property def nconmax(self): return self._wrapped.contents.nconmax @nconmax.setter def nconmax(self, value): self._wrapped.contents.nconmax = value @property def nstack(self): return self._wrapped.contents.nstack @nstack.setter def nstack(self, value): self._wrapped.contents.nstack = value @property def nuserdata(self): return self._wrapped.contents.nuserdata @nuserdata.setter def nuserdata(self, value): self._wrapped.contents.nuserdata = value @property def nmocap(self): return self._wrapped.contents.nmocap @nmocap.setter def nmocap(self, value): self._wrapped.contents.nmocap = value @property def nsensordata(self): return self._wrapped.contents.nsensordata @nsensordata.setter def nsensordata(self, value): self._wrapped.contents.nsensordata = value @property def nbuffer(self): return self._wrapped.contents.nbuffer @nbuffer.setter def nbuffer(self, value): self._wrapped.contents.nbuffer = value @property def opt(self): return self._wrapped.contents.opt @opt.setter def opt(self, value): self._wrapped.contents.opt = value @property def vis(self): return self._wrapped.contents.vis @vis.setter def vis(self, value): self._wrapped.contents.vis = value @property def stat(self): return self._wrapped.contents.stat @stat.setter def stat(self, value): self._wrapped.contents.stat = value @property def buffer(self): arr = np.reshape(np.fromiter(self._wrapped.contents.buffer, dtype=np.uint8, count=(self.nbuffer)), (self.nbuffer, )) arr.setflags(write=False) return arr @buffer.setter def buffer(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_ubyte)) memmove(self._wrapped.contents.buffer, val_ptr, self.nbuffer * sizeof(c_ubyte)) @property def qpos0(self): arr = np.reshape(np.fromiter(self._wrapped.contents.qpos0, dtype=np.double, count=(self.nq*1)), (self.nq, 1, )) arr.setflags(write=False) return arr @qpos0.setter def qpos0(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.qpos0, val_ptr, self.nq*1 * sizeof(c_double)) @property def qpos_spring(self): arr = np.reshape(np.fromiter(self._wrapped.contents.qpos_spring, dtype=np.double, count=(self.nq*1)), (self.nq, 1, )) arr.setflags(write=False) return arr @qpos_spring.setter def qpos_spring(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.qpos_spring, val_ptr, self.nq*1 * sizeof(c_double)) @property def body_parentid(self): arr = np.reshape(np.fromiter(self._wrapped.contents.body_parentid, dtype=np.int, count=(self.nbody*1)), (self.nbody, 1, )) arr.setflags(write=False) return arr @body_parentid.setter def body_parentid(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.body_parentid, val_ptr, self.nbody*1 * sizeof(c_int)) @property def body_rootid(self): arr = np.reshape(np.fromiter(self._wrapped.contents.body_rootid, dtype=np.int, count=(self.nbody*1)), (self.nbody, 1, )) arr.setflags(write=False) return arr @body_rootid.setter def body_rootid(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.body_rootid, val_ptr, self.nbody*1 * sizeof(c_int)) @property def body_weldid(self): arr = np.reshape(np.fromiter(self._wrapped.contents.body_weldid, dtype=np.int, count=(self.nbody*1)), (self.nbody, 1, )) arr.setflags(write=False) return arr @body_weldid.setter def body_weldid(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.body_weldid, val_ptr, self.nbody*1 * sizeof(c_int)) @property def body_mocapid(self): arr = np.reshape(np.fromiter(self._wrapped.contents.body_mocapid, dtype=np.int, count=(self.nbody*1)), (self.nbody, 1, )) arr.setflags(write=False) return arr @body_mocapid.setter def body_mocapid(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.body_mocapid, val_ptr, self.nbody*1 * sizeof(c_int)) @property def body_jntnum(self): arr = np.reshape(np.fromiter(self._wrapped.contents.body_jntnum, dtype=np.int, count=(self.nbody*1)), (self.nbody, 1, )) arr.setflags(write=False) return arr @body_jntnum.setter def body_jntnum(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.body_jntnum, val_ptr, self.nbody*1 * sizeof(c_int)) @property def body_jntadr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.body_jntadr, dtype=np.int, count=(self.nbody*1)), (self.nbody, 1, )) arr.setflags(write=False) return arr @body_jntadr.setter def body_jntadr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.body_jntadr, val_ptr, self.nbody*1 * sizeof(c_int)) @property def body_dofnum(self): arr = np.reshape(np.fromiter(self._wrapped.contents.body_dofnum, dtype=np.int, count=(self.nbody*1)), (self.nbody, 1, )) arr.setflags(write=False) return arr @body_dofnum.setter def body_dofnum(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.body_dofnum, val_ptr, self.nbody*1 * sizeof(c_int)) @property def body_dofadr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.body_dofadr, dtype=np.int, count=(self.nbody*1)), (self.nbody, 1, )) arr.setflags(write=False) return arr @body_dofadr.setter def body_dofadr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.body_dofadr, val_ptr, self.nbody*1 * sizeof(c_int)) @property def body_geomnum(self): arr = np.reshape(np.fromiter(self._wrapped.contents.body_geomnum, dtype=np.int, count=(self.nbody*1)), (self.nbody, 1, )) arr.setflags(write=False) return arr @body_geomnum.setter def body_geomnum(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.body_geomnum, val_ptr, self.nbody*1 * sizeof(c_int)) @property def body_geomadr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.body_geomadr, dtype=np.int, count=(self.nbody*1)), (self.nbody, 1, )) arr.setflags(write=False) return arr @body_geomadr.setter def body_geomadr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.body_geomadr, val_ptr, self.nbody*1 * sizeof(c_int)) @property def body_pos(self): arr = np.reshape(np.fromiter(self._wrapped.contents.body_pos, dtype=np.double, count=(self.nbody*3)), (self.nbody, 3, )) arr.setflags(write=False) return arr @body_pos.setter def body_pos(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.body_pos, val_ptr, self.nbody*3 * sizeof(c_double)) @property def body_quat(self): arr = np.reshape(np.fromiter(self._wrapped.contents.body_quat, dtype=np.double, count=(self.nbody*4)), (self.nbody, 4, )) arr.setflags(write=False) return arr @body_quat.setter def body_quat(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.body_quat, val_ptr, self.nbody*4 * sizeof(c_double)) @property def body_ipos(self): arr = np.reshape(np.fromiter(self._wrapped.contents.body_ipos, dtype=np.double, count=(self.nbody*3)), (self.nbody, 3, )) arr.setflags(write=False) return arr @body_ipos.setter def body_ipos(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.body_ipos, val_ptr, self.nbody*3 * sizeof(c_double)) @property def body_iquat(self): arr = np.reshape(np.fromiter(self._wrapped.contents.body_iquat, dtype=np.double, count=(self.nbody*4)), (self.nbody, 4, )) arr.setflags(write=False) return arr @body_iquat.setter def body_iquat(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.body_iquat, val_ptr, self.nbody*4 * sizeof(c_double)) @property def body_mass(self): arr = np.reshape(np.fromiter(self._wrapped.contents.body_mass, dtype=np.double, count=(self.nbody*1)), (self.nbody, 1, )) arr.setflags(write=False) return arr @body_mass.setter def body_mass(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.body_mass, val_ptr, self.nbody*1 * sizeof(c_double)) @property def body_subtreemass(self): arr = np.reshape(np.fromiter(self._wrapped.contents.body_subtreemass, dtype=np.double, count=(self.nbody*1)), (self.nbody, 1, )) arr.setflags(write=False) return arr @body_subtreemass.setter def body_subtreemass(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.body_subtreemass, val_ptr, self.nbody*1 * sizeof(c_double)) @property def body_inertia(self): arr = np.reshape(np.fromiter(self._wrapped.contents.body_inertia, dtype=np.double, count=(self.nbody*3)), (self.nbody, 3, )) arr.setflags(write=False) return arr @body_inertia.setter def body_inertia(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.body_inertia, val_ptr, self.nbody*3 * sizeof(c_double)) @property def body_invweight0(self): arr = np.reshape(np.fromiter(self._wrapped.contents.body_invweight0, dtype=np.double, count=(self.nbody*2)), (self.nbody, 2, )) arr.setflags(write=False) return arr @body_invweight0.setter def body_invweight0(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.body_invweight0, val_ptr, self.nbody*2 * sizeof(c_double)) @property def body_user(self): arr = np.reshape(np.fromiter(self._wrapped.contents.body_user, dtype=np.double, count=(self.nbody*self.nuser_body)), (self.nbody, self.nuser_body, )) arr.setflags(write=False) return arr @body_user.setter def body_user(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.body_user, val_ptr, self.nbody*self.nuser_body * sizeof(c_double)) @property def jnt_type(self): arr = np.reshape(np.fromiter(self._wrapped.contents.jnt_type, dtype=np.int, count=(self.njnt*1)), (self.njnt, 1, )) arr.setflags(write=False) return arr @jnt_type.setter def jnt_type(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.jnt_type, val_ptr, self.njnt*1 * sizeof(c_int)) @property def jnt_qposadr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.jnt_qposadr, dtype=np.int, count=(self.njnt*1)), (self.njnt, 1, )) arr.setflags(write=False) return arr @jnt_qposadr.setter def jnt_qposadr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.jnt_qposadr, val_ptr, self.njnt*1 * sizeof(c_int)) @property def jnt_dofadr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.jnt_dofadr, dtype=np.int, count=(self.njnt*1)), (self.njnt, 1, )) arr.setflags(write=False) return arr @jnt_dofadr.setter def jnt_dofadr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.jnt_dofadr, val_ptr, self.njnt*1 * sizeof(c_int)) @property def jnt_bodyid(self): arr = np.reshape(np.fromiter(self._wrapped.contents.jnt_bodyid, dtype=np.int, count=(self.njnt*1)), (self.njnt, 1, )) arr.setflags(write=False) return arr @jnt_bodyid.setter def jnt_bodyid(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.jnt_bodyid, val_ptr, self.njnt*1 * sizeof(c_int)) @property def jnt_limited(self): arr = np.reshape(np.fromiter(self._wrapped.contents.jnt_limited, dtype=np.uint8, count=(self.njnt*1)), (self.njnt, 1, )) arr.setflags(write=False) return arr @jnt_limited.setter def jnt_limited(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_ubyte)) memmove(self._wrapped.contents.jnt_limited, val_ptr, self.njnt*1 * sizeof(c_ubyte)) @property def jnt_solref(self): arr = np.reshape(np.fromiter(self._wrapped.contents.jnt_solref, dtype=np.double, count=(self.njnt*2)), (self.njnt, 2, )) arr.setflags(write=False) return arr @jnt_solref.setter def jnt_solref(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.jnt_solref, val_ptr, self.njnt*2 * sizeof(c_double)) @property def jnt_solimp(self): arr = np.reshape(np.fromiter(self._wrapped.contents.jnt_solimp, dtype=np.double, count=(self.njnt*3)), (self.njnt, 3, )) arr.setflags(write=False) return arr @jnt_solimp.setter def jnt_solimp(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.jnt_solimp, val_ptr, self.njnt*3 * sizeof(c_double)) @property def jnt_pos(self): arr = np.reshape(np.fromiter(self._wrapped.contents.jnt_pos, dtype=np.double, count=(self.njnt*3)), (self.njnt, 3, )) arr.setflags(write=False) return arr @jnt_pos.setter def jnt_pos(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.jnt_pos, val_ptr, self.njnt*3 * sizeof(c_double)) @property def jnt_axis(self): arr = np.reshape(np.fromiter(self._wrapped.contents.jnt_axis, dtype=np.double, count=(self.njnt*3)), (self.njnt, 3, )) arr.setflags(write=False) return arr @jnt_axis.setter def jnt_axis(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.jnt_axis, val_ptr, self.njnt*3 * sizeof(c_double)) @property def jnt_stiffness(self): arr = np.reshape(np.fromiter(self._wrapped.contents.jnt_stiffness, dtype=np.double, count=(self.njnt*1)), (self.njnt, 1, )) arr.setflags(write=False) return arr @jnt_stiffness.setter def jnt_stiffness(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.jnt_stiffness, val_ptr, self.njnt*1 * sizeof(c_double)) @property def jnt_range(self): arr = np.reshape(np.fromiter(self._wrapped.contents.jnt_range, dtype=np.double, count=(self.njnt*2)), (self.njnt, 2, )) arr.setflags(write=False) return arr @jnt_range.setter def jnt_range(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.jnt_range, val_ptr, self.njnt*2 * sizeof(c_double)) @property def jnt_margin(self): arr = np.reshape(np.fromiter(self._wrapped.contents.jnt_margin, dtype=np.double, count=(self.njnt*1)), (self.njnt, 1, )) arr.setflags(write=False) return arr @jnt_margin.setter def jnt_margin(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.jnt_margin, val_ptr, self.njnt*1 * sizeof(c_double)) @property def jnt_user(self): arr = np.reshape(np.fromiter(self._wrapped.contents.jnt_user, dtype=np.double, count=(self.njnt*self.nuser_jnt)), (self.njnt, self.nuser_jnt, )) arr.setflags(write=False) return arr @jnt_user.setter def jnt_user(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.jnt_user, val_ptr, self.njnt*self.nuser_jnt * sizeof(c_double)) @property def dof_bodyid(self): arr = np.reshape(np.fromiter(self._wrapped.contents.dof_bodyid, dtype=np.int, count=(self.nv*1)), (self.nv, 1, )) arr.setflags(write=False) return arr @dof_bodyid.setter def dof_bodyid(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.dof_bodyid, val_ptr, self.nv*1 * sizeof(c_int)) @property def dof_jntid(self): arr = np.reshape(np.fromiter(self._wrapped.contents.dof_jntid, dtype=np.int, count=(self.nv*1)), (self.nv, 1, )) arr.setflags(write=False) return arr @dof_jntid.setter def dof_jntid(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.dof_jntid, val_ptr, self.nv*1 * sizeof(c_int)) @property def dof_parentid(self): arr = np.reshape(np.fromiter(self._wrapped.contents.dof_parentid, dtype=np.int, count=(self.nv*1)), (self.nv, 1, )) arr.setflags(write=False) return arr @dof_parentid.setter def dof_parentid(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.dof_parentid, val_ptr, self.nv*1 * sizeof(c_int)) @property def dof_Madr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.dof_Madr, dtype=np.int, count=(self.nv*1)), (self.nv, 1, )) arr.setflags(write=False) return arr @dof_Madr.setter def dof_Madr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.dof_Madr, val_ptr, self.nv*1 * sizeof(c_int)) @property def dof_frictional(self): arr = np.reshape(np.fromiter(self._wrapped.contents.dof_frictional, dtype=np.uint8, count=(self.nv*1)), (self.nv, 1, )) arr.setflags(write=False) return arr @dof_frictional.setter def dof_frictional(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_ubyte)) memmove(self._wrapped.contents.dof_frictional, val_ptr, self.nv*1 * sizeof(c_ubyte)) @property def dof_solref(self): arr = np.reshape(np.fromiter(self._wrapped.contents.dof_solref, dtype=np.double, count=(self.nv*2)), (self.nv, 2, )) arr.setflags(write=False) return arr @dof_solref.setter def dof_solref(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.dof_solref, val_ptr, self.nv*2 * sizeof(c_double)) @property def dof_solimp(self): arr = np.reshape(np.fromiter(self._wrapped.contents.dof_solimp, dtype=np.double, count=(self.nv*3)), (self.nv, 3, )) arr.setflags(write=False) return arr @dof_solimp.setter def dof_solimp(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.dof_solimp, val_ptr, self.nv*3 * sizeof(c_double)) @property def dof_frictionloss(self): arr = np.reshape(np.fromiter(self._wrapped.contents.dof_frictionloss, dtype=np.double, count=(self.nv*1)), (self.nv, 1, )) arr.setflags(write=False) return arr @dof_frictionloss.setter def dof_frictionloss(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.dof_frictionloss, val_ptr, self.nv*1 * sizeof(c_double)) @property def dof_armature(self): arr = np.reshape(np.fromiter(self._wrapped.contents.dof_armature, dtype=np.double, count=(self.nv*1)), (self.nv, 1, )) arr.setflags(write=False) return arr @dof_armature.setter def dof_armature(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.dof_armature, val_ptr, self.nv*1 * sizeof(c_double)) @property def dof_damping(self): arr = np.reshape(np.fromiter(self._wrapped.contents.dof_damping, dtype=np.double, count=(self.nv*1)), (self.nv, 1, )) arr.setflags(write=False) return arr @dof_damping.setter def dof_damping(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.dof_damping, val_ptr, self.nv*1 * sizeof(c_double)) @property def dof_invweight0(self): arr = np.reshape(np.fromiter(self._wrapped.contents.dof_invweight0, dtype=np.double, count=(self.nv*1)), (self.nv, 1, )) arr.setflags(write=False) return arr @dof_invweight0.setter def dof_invweight0(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.dof_invweight0, val_ptr, self.nv*1 * sizeof(c_double)) @property def geom_type(self): arr = np.reshape(np.fromiter(self._wrapped.contents.geom_type, dtype=np.int, count=(self.ngeom*1)), (self.ngeom, 1, )) arr.setflags(write=False) return arr @geom_type.setter def geom_type(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.geom_type, val_ptr, self.ngeom*1 * sizeof(c_int)) @property def geom_contype(self): arr = np.reshape(np.fromiter(self._wrapped.contents.geom_contype, dtype=np.int, count=(self.ngeom*1)), (self.ngeom, 1, )) arr.setflags(write=False) return arr @geom_contype.setter def geom_contype(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.geom_contype, val_ptr, self.ngeom*1 * sizeof(c_int)) @property def geom_conaffinity(self): arr = np.reshape(np.fromiter(self._wrapped.contents.geom_conaffinity, dtype=np.int, count=(self.ngeom*1)), (self.ngeom, 1, )) arr.setflags(write=False) return arr @geom_conaffinity.setter def geom_conaffinity(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.geom_conaffinity, val_ptr, self.ngeom*1 * sizeof(c_int)) @property def geom_condim(self): arr = np.reshape(np.fromiter(self._wrapped.contents.geom_condim, dtype=np.int, count=(self.ngeom*1)), (self.ngeom, 1, )) arr.setflags(write=False) return arr @geom_condim.setter def geom_condim(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.geom_condim, val_ptr, self.ngeom*1 * sizeof(c_int)) @property def geom_bodyid(self): arr = np.reshape(np.fromiter(self._wrapped.contents.geom_bodyid, dtype=np.int, count=(self.ngeom*1)), (self.ngeom, 1, )) arr.setflags(write=False) return arr @geom_bodyid.setter def geom_bodyid(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.geom_bodyid, val_ptr, self.ngeom*1 * sizeof(c_int)) @property def geom_dataid(self): arr = np.reshape(np.fromiter(self._wrapped.contents.geom_dataid, dtype=np.int, count=(self.ngeom*1)), (self.ngeom, 1, )) arr.setflags(write=False) return arr @geom_dataid.setter def geom_dataid(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.geom_dataid, val_ptr, self.ngeom*1 * sizeof(c_int)) @property def geom_matid(self): arr = np.reshape(np.fromiter(self._wrapped.contents.geom_matid, dtype=np.int, count=(self.ngeom*1)), (self.ngeom, 1, )) arr.setflags(write=False) return arr @geom_matid.setter def geom_matid(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.geom_matid, val_ptr, self.ngeom*1 * sizeof(c_int)) @property def geom_group(self): arr = np.reshape(np.fromiter(self._wrapped.contents.geom_group, dtype=np.int, count=(self.ngeom*1)), (self.ngeom, 1, )) arr.setflags(write=False) return arr @geom_group.setter def geom_group(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.geom_group, val_ptr, self.ngeom*1 * sizeof(c_int)) @property def geom_solmix(self): arr = np.reshape(np.fromiter(self._wrapped.contents.geom_solmix, dtype=np.double, count=(self.ngeom*1)), (self.ngeom, 1, )) arr.setflags(write=False) return arr @geom_solmix.setter def geom_solmix(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.geom_solmix, val_ptr, self.ngeom*1 * sizeof(c_double)) @property def geom_solref(self): arr = np.reshape(np.fromiter(self._wrapped.contents.geom_solref, dtype=np.double, count=(self.ngeom*2)), (self.ngeom, 2, )) arr.setflags(write=False) return arr @geom_solref.setter def geom_solref(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.geom_solref, val_ptr, self.ngeom*2 * sizeof(c_double)) @property def geom_solimp(self): arr = np.reshape(np.fromiter(self._wrapped.contents.geom_solimp, dtype=np.double, count=(self.ngeom*3)), (self.ngeom, 3, )) arr.setflags(write=False) return arr @geom_solimp.setter def geom_solimp(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.geom_solimp, val_ptr, self.ngeom*3 * sizeof(c_double)) @property def geom_size(self): arr = np.reshape(np.fromiter(self._wrapped.contents.geom_size, dtype=np.double, count=(self.ngeom*3)), (self.ngeom, 3, )) arr.setflags(write=False) return arr @geom_size.setter def geom_size(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.geom_size, val_ptr, self.ngeom*3 * sizeof(c_double)) @property def geom_rbound(self): arr = np.reshape(np.fromiter(self._wrapped.contents.geom_rbound, dtype=np.double, count=(self.ngeom*1)), (self.ngeom, 1, )) arr.setflags(write=False) return arr @geom_rbound.setter def geom_rbound(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.geom_rbound, val_ptr, self.ngeom*1 * sizeof(c_double)) @property def geom_pos(self): arr = np.reshape(np.fromiter(self._wrapped.contents.geom_pos, dtype=np.double, count=(self.ngeom*3)), (self.ngeom, 3, )) arr.setflags(write=False) return arr @geom_pos.setter def geom_pos(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.geom_pos, val_ptr, self.ngeom*3 * sizeof(c_double)) @property def geom_quat(self): arr = np.reshape(np.fromiter(self._wrapped.contents.geom_quat, dtype=np.double, count=(self.ngeom*4)), (self.ngeom, 4, )) arr.setflags(write=False) return arr @geom_quat.setter def geom_quat(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.geom_quat, val_ptr, self.ngeom*4 * sizeof(c_double)) @property def geom_friction(self): arr = np.reshape(np.fromiter(self._wrapped.contents.geom_friction, dtype=np.double, count=(self.ngeom*3)), (self.ngeom, 3, )) arr.setflags(write=False) return arr @geom_friction.setter def geom_friction(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.geom_friction, val_ptr, self.ngeom*3 * sizeof(c_double)) @property def geom_margin(self): arr = np.reshape(np.fromiter(self._wrapped.contents.geom_margin, dtype=np.double, count=(self.ngeom*1)), (self.ngeom, 1, )) arr.setflags(write=False) return arr @geom_margin.setter def geom_margin(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.geom_margin, val_ptr, self.ngeom*1 * sizeof(c_double)) @property def geom_gap(self): arr = np.reshape(np.fromiter(self._wrapped.contents.geom_gap, dtype=np.double, count=(self.ngeom*1)), (self.ngeom, 1, )) arr.setflags(write=False) return arr @geom_gap.setter def geom_gap(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.geom_gap, val_ptr, self.ngeom*1 * sizeof(c_double)) @property def geom_user(self): arr = np.reshape(np.fromiter(self._wrapped.contents.geom_user, dtype=np.double, count=(self.ngeom*self.nuser_geom)), (self.ngeom, self.nuser_geom, )) arr.setflags(write=False) return arr @geom_user.setter def geom_user(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.geom_user, val_ptr, self.ngeom*self.nuser_geom * sizeof(c_double)) @property def geom_rgba(self): arr = np.reshape(np.fromiter(self._wrapped.contents.geom_rgba, dtype=np.float, count=(self.ngeom*4)), (self.ngeom, 4, )) arr.setflags(write=False) return arr @geom_rgba.setter def geom_rgba(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.geom_rgba, val_ptr, self.ngeom*4 * sizeof(c_float)) @property def site_type(self): arr = np.reshape(np.fromiter(self._wrapped.contents.site_type, dtype=np.int, count=(self.nsite*1)), (self.nsite, 1, )) arr.setflags(write=False) return arr @site_type.setter def site_type(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.site_type, val_ptr, self.nsite*1 * sizeof(c_int)) @property def site_bodyid(self): arr = np.reshape(np.fromiter(self._wrapped.contents.site_bodyid, dtype=np.int, count=(self.nsite*1)), (self.nsite, 1, )) arr.setflags(write=False) return arr @site_bodyid.setter def site_bodyid(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.site_bodyid, val_ptr, self.nsite*1 * sizeof(c_int)) @property def site_matid(self): arr = np.reshape(np.fromiter(self._wrapped.contents.site_matid, dtype=np.int, count=(self.nsite*1)), (self.nsite, 1, )) arr.setflags(write=False) return arr @site_matid.setter def site_matid(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.site_matid, val_ptr, self.nsite*1 * sizeof(c_int)) @property def site_group(self): arr = np.reshape(np.fromiter(self._wrapped.contents.site_group, dtype=np.int, count=(self.nsite*1)), (self.nsite, 1, )) arr.setflags(write=False) return arr @site_group.setter def site_group(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.site_group, val_ptr, self.nsite*1 * sizeof(c_int)) @property def site_size(self): arr = np.reshape(np.fromiter(self._wrapped.contents.site_size, dtype=np.double, count=(self.nsite*3)), (self.nsite, 3, )) arr.setflags(write=False) return arr @site_size.setter def site_size(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.site_size, val_ptr, self.nsite*3 * sizeof(c_double)) @property def site_pos(self): arr = np.reshape(np.fromiter(self._wrapped.contents.site_pos, dtype=np.double, count=(self.nsite*3)), (self.nsite, 3, )) arr.setflags(write=False) return arr @site_pos.setter def site_pos(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.site_pos, val_ptr, self.nsite*3 * sizeof(c_double)) @property def site_quat(self): arr = np.reshape(np.fromiter(self._wrapped.contents.site_quat, dtype=np.double, count=(self.nsite*4)), (self.nsite, 4, )) arr.setflags(write=False) return arr @site_quat.setter def site_quat(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.site_quat, val_ptr, self.nsite*4 * sizeof(c_double)) @property def site_user(self): arr = np.reshape(np.fromiter(self._wrapped.contents.site_user, dtype=np.double, count=(self.nsite*self.nuser_site)), (self.nsite, self.nuser_site, )) arr.setflags(write=False) return arr @site_user.setter def site_user(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.site_user, val_ptr, self.nsite*self.nuser_site * sizeof(c_double)) @property def site_rgba(self): arr = np.reshape(np.fromiter(self._wrapped.contents.site_rgba, dtype=np.float, count=(self.nsite*4)), (self.nsite, 4, )) arr.setflags(write=False) return arr @site_rgba.setter def site_rgba(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.site_rgba, val_ptr, self.nsite*4 * sizeof(c_float)) @property def cam_mode(self): arr = np.reshape(np.fromiter(self._wrapped.contents.cam_mode, dtype=np.int, count=(self.ncam*1)), (self.ncam, 1, )) arr.setflags(write=False) return arr @cam_mode.setter def cam_mode(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.cam_mode, val_ptr, self.ncam*1 * sizeof(c_int)) @property def cam_bodyid(self): arr = np.reshape(np.fromiter(self._wrapped.contents.cam_bodyid, dtype=np.int, count=(self.ncam*1)), (self.ncam, 1, )) arr.setflags(write=False) return arr @cam_bodyid.setter def cam_bodyid(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.cam_bodyid, val_ptr, self.ncam*1 * sizeof(c_int)) @property def cam_targetbodyid(self): arr = np.reshape(np.fromiter(self._wrapped.contents.cam_targetbodyid, dtype=np.int, count=(self.ncam*1)), (self.ncam, 1, )) arr.setflags(write=False) return arr @cam_targetbodyid.setter def cam_targetbodyid(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.cam_targetbodyid, val_ptr, self.ncam*1 * sizeof(c_int)) @property def cam_pos(self): arr = np.reshape(np.fromiter(self._wrapped.contents.cam_pos, dtype=np.double, count=(self.ncam*3)), (self.ncam, 3, )) arr.setflags(write=False) return arr @cam_pos.setter def cam_pos(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.cam_pos, val_ptr, self.ncam*3 * sizeof(c_double)) @property def cam_quat(self): arr = np.reshape(np.fromiter(self._wrapped.contents.cam_quat, dtype=np.double, count=(self.ncam*4)), (self.ncam, 4, )) arr.setflags(write=False) return arr @cam_quat.setter def cam_quat(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.cam_quat, val_ptr, self.ncam*4 * sizeof(c_double)) @property def cam_poscom0(self): arr = np.reshape(np.fromiter(self._wrapped.contents.cam_poscom0, dtype=np.double, count=(self.ncam*3)), (self.ncam, 3, )) arr.setflags(write=False) return arr @cam_poscom0.setter def cam_poscom0(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.cam_poscom0, val_ptr, self.ncam*3 * sizeof(c_double)) @property def cam_pos0(self): arr = np.reshape(np.fromiter(self._wrapped.contents.cam_pos0, dtype=np.double, count=(self.ncam*3)), (self.ncam, 3, )) arr.setflags(write=False) return arr @cam_pos0.setter def cam_pos0(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.cam_pos0, val_ptr, self.ncam*3 * sizeof(c_double)) @property def cam_mat0(self): arr = np.reshape(np.fromiter(self._wrapped.contents.cam_mat0, dtype=np.double, count=(self.ncam*9)), (self.ncam, 9, )) arr.setflags(write=False) return arr @cam_mat0.setter def cam_mat0(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.cam_mat0, val_ptr, self.ncam*9 * sizeof(c_double)) @property def cam_fovy(self): arr = np.reshape(np.fromiter(self._wrapped.contents.cam_fovy, dtype=np.double, count=(self.ncam*1)), (self.ncam, 1, )) arr.setflags(write=False) return arr @cam_fovy.setter def cam_fovy(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.cam_fovy, val_ptr, self.ncam*1 * sizeof(c_double)) @property def cam_ipd(self): arr = np.reshape(np.fromiter(self._wrapped.contents.cam_ipd, dtype=np.double, count=(self.ncam*1)), (self.ncam, 1, )) arr.setflags(write=False) return arr @cam_ipd.setter def cam_ipd(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.cam_ipd, val_ptr, self.ncam*1 * sizeof(c_double)) @property def light_mode(self): arr = np.reshape(np.fromiter(self._wrapped.contents.light_mode, dtype=np.int, count=(self.nlight*1)), (self.nlight, 1, )) arr.setflags(write=False) return arr @light_mode.setter def light_mode(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.light_mode, val_ptr, self.nlight*1 * sizeof(c_int)) @property def light_bodyid(self): arr = np.reshape(np.fromiter(self._wrapped.contents.light_bodyid, dtype=np.int, count=(self.nlight*1)), (self.nlight, 1, )) arr.setflags(write=False) return arr @light_bodyid.setter def light_bodyid(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.light_bodyid, val_ptr, self.nlight*1 * sizeof(c_int)) @property def light_targetbodyid(self): arr = np.reshape(np.fromiter(self._wrapped.contents.light_targetbodyid, dtype=np.int, count=(self.nlight*1)), (self.nlight, 1, )) arr.setflags(write=False) return arr @light_targetbodyid.setter def light_targetbodyid(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.light_targetbodyid, val_ptr, self.nlight*1 * sizeof(c_int)) @property def light_directional(self): arr = np.reshape(np.fromiter(self._wrapped.contents.light_directional, dtype=np.uint8, count=(self.nlight*1)), (self.nlight, 1, )) arr.setflags(write=False) return arr @light_directional.setter def light_directional(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_ubyte)) memmove(self._wrapped.contents.light_directional, val_ptr, self.nlight*1 * sizeof(c_ubyte)) @property def light_castshadow(self): arr = np.reshape(np.fromiter(self._wrapped.contents.light_castshadow, dtype=np.uint8, count=(self.nlight*1)), (self.nlight, 1, )) arr.setflags(write=False) return arr @light_castshadow.setter def light_castshadow(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_ubyte)) memmove(self._wrapped.contents.light_castshadow, val_ptr, self.nlight*1 * sizeof(c_ubyte)) @property def light_active(self): arr = np.reshape(np.fromiter(self._wrapped.contents.light_active, dtype=np.uint8, count=(self.nlight*1)), (self.nlight, 1, )) arr.setflags(write=False) return arr @light_active.setter def light_active(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_ubyte)) memmove(self._wrapped.contents.light_active, val_ptr, self.nlight*1 * sizeof(c_ubyte)) @property def light_pos(self): arr = np.reshape(np.fromiter(self._wrapped.contents.light_pos, dtype=np.double, count=(self.nlight*3)), (self.nlight, 3, )) arr.setflags(write=False) return arr @light_pos.setter def light_pos(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.light_pos, val_ptr, self.nlight*3 * sizeof(c_double)) @property def light_dir(self): arr = np.reshape(np.fromiter(self._wrapped.contents.light_dir, dtype=np.double, count=(self.nlight*3)), (self.nlight, 3, )) arr.setflags(write=False) return arr @light_dir.setter def light_dir(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.light_dir, val_ptr, self.nlight*3 * sizeof(c_double)) @property def light_poscom0(self): arr = np.reshape(np.fromiter(self._wrapped.contents.light_poscom0, dtype=np.double, count=(self.nlight*3)), (self.nlight, 3, )) arr.setflags(write=False) return arr @light_poscom0.setter def light_poscom0(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.light_poscom0, val_ptr, self.nlight*3 * sizeof(c_double)) @property def light_pos0(self): arr = np.reshape(np.fromiter(self._wrapped.contents.light_pos0, dtype=np.double, count=(self.nlight*3)), (self.nlight, 3, )) arr.setflags(write=False) return arr @light_pos0.setter def light_pos0(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.light_pos0, val_ptr, self.nlight*3 * sizeof(c_double)) @property def light_dir0(self): arr = np.reshape(np.fromiter(self._wrapped.contents.light_dir0, dtype=np.double, count=(self.nlight*3)), (self.nlight, 3, )) arr.setflags(write=False) return arr @light_dir0.setter def light_dir0(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.light_dir0, val_ptr, self.nlight*3 * sizeof(c_double)) @property def light_attenuation(self): arr = np.reshape(np.fromiter(self._wrapped.contents.light_attenuation, dtype=np.float, count=(self.nlight*3)), (self.nlight, 3, )) arr.setflags(write=False) return arr @light_attenuation.setter def light_attenuation(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.light_attenuation, val_ptr, self.nlight*3 * sizeof(c_float)) @property def light_cutoff(self): arr = np.reshape(np.fromiter(self._wrapped.contents.light_cutoff, dtype=np.float, count=(self.nlight*1)), (self.nlight, 1, )) arr.setflags(write=False) return arr @light_cutoff.setter def light_cutoff(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.light_cutoff, val_ptr, self.nlight*1 * sizeof(c_float)) @property def light_exponent(self): arr = np.reshape(np.fromiter(self._wrapped.contents.light_exponent, dtype=np.float, count=(self.nlight*1)), (self.nlight, 1, )) arr.setflags(write=False) return arr @light_exponent.setter def light_exponent(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.light_exponent, val_ptr, self.nlight*1 * sizeof(c_float)) @property def light_ambient(self): arr = np.reshape(np.fromiter(self._wrapped.contents.light_ambient, dtype=np.float, count=(self.nlight*3)), (self.nlight, 3, )) arr.setflags(write=False) return arr @light_ambient.setter def light_ambient(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.light_ambient, val_ptr, self.nlight*3 * sizeof(c_float)) @property def light_diffuse(self): arr = np.reshape(np.fromiter(self._wrapped.contents.light_diffuse, dtype=np.float, count=(self.nlight*3)), (self.nlight, 3, )) arr.setflags(write=False) return arr @light_diffuse.setter def light_diffuse(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.light_diffuse, val_ptr, self.nlight*3 * sizeof(c_float)) @property def light_specular(self): arr = np.reshape(np.fromiter(self._wrapped.contents.light_specular, dtype=np.float, count=(self.nlight*3)), (self.nlight, 3, )) arr.setflags(write=False) return arr @light_specular.setter def light_specular(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.light_specular, val_ptr, self.nlight*3 * sizeof(c_float)) @property def mesh_faceadr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.mesh_faceadr, dtype=np.int, count=(self.nmesh*1)), (self.nmesh, 1, )) arr.setflags(write=False) return arr @mesh_faceadr.setter def mesh_faceadr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.mesh_faceadr, val_ptr, self.nmesh*1 * sizeof(c_int)) @property def mesh_facenum(self): arr = np.reshape(np.fromiter(self._wrapped.contents.mesh_facenum, dtype=np.int, count=(self.nmesh*1)), (self.nmesh, 1, )) arr.setflags(write=False) return arr @mesh_facenum.setter def mesh_facenum(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.mesh_facenum, val_ptr, self.nmesh*1 * sizeof(c_int)) @property def mesh_vertadr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.mesh_vertadr, dtype=np.int, count=(self.nmesh*1)), (self.nmesh, 1, )) arr.setflags(write=False) return arr @mesh_vertadr.setter def mesh_vertadr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.mesh_vertadr, val_ptr, self.nmesh*1 * sizeof(c_int)) @property def mesh_vertnum(self): arr = np.reshape(np.fromiter(self._wrapped.contents.mesh_vertnum, dtype=np.int, count=(self.nmesh*1)), (self.nmesh, 1, )) arr.setflags(write=False) return arr @mesh_vertnum.setter def mesh_vertnum(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.mesh_vertnum, val_ptr, self.nmesh*1 * sizeof(c_int)) @property def mesh_graphadr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.mesh_graphadr, dtype=np.int, count=(self.nmesh*1)), (self.nmesh, 1, )) arr.setflags(write=False) return arr @mesh_graphadr.setter def mesh_graphadr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.mesh_graphadr, val_ptr, self.nmesh*1 * sizeof(c_int)) @property def mesh_vert(self): arr = np.reshape(np.fromiter(self._wrapped.contents.mesh_vert, dtype=np.float, count=(self.nmeshvert*3)), (self.nmeshvert, 3, )) arr.setflags(write=False) return arr @mesh_vert.setter def mesh_vert(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.mesh_vert, val_ptr, self.nmeshvert*3 * sizeof(c_float)) @property def mesh_normal(self): arr = np.reshape(np.fromiter(self._wrapped.contents.mesh_normal, dtype=np.float, count=(self.nmeshvert*3)), (self.nmeshvert, 3, )) arr.setflags(write=False) return arr @mesh_normal.setter def mesh_normal(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.mesh_normal, val_ptr, self.nmeshvert*3 * sizeof(c_float)) @property def mesh_face(self): arr = np.reshape(np.fromiter(self._wrapped.contents.mesh_face, dtype=np.int, count=(self.nmeshface*3)), (self.nmeshface, 3, )) arr.setflags(write=False) return arr @mesh_face.setter def mesh_face(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.mesh_face, val_ptr, self.nmeshface*3 * sizeof(c_int)) @property def mesh_graph(self): arr = np.reshape(np.fromiter(self._wrapped.contents.mesh_graph, dtype=np.int, count=(self.nmeshgraph*1)), (self.nmeshgraph, 1, )) arr.setflags(write=False) return arr @mesh_graph.setter def mesh_graph(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.mesh_graph, val_ptr, self.nmeshgraph*1 * sizeof(c_int)) @property def hfield_size(self): arr = np.reshape(np.fromiter(self._wrapped.contents.hfield_size, dtype=np.double, count=(self.nhfield*4)), (self.nhfield, 4, )) arr.setflags(write=False) return arr @hfield_size.setter def hfield_size(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.hfield_size, val_ptr, self.nhfield*4 * sizeof(c_double)) @property def hfield_nrow(self): arr = np.reshape(np.fromiter(self._wrapped.contents.hfield_nrow, dtype=np.int, count=(self.nhfield*1)), (self.nhfield, 1, )) arr.setflags(write=False) return arr @hfield_nrow.setter def hfield_nrow(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.hfield_nrow, val_ptr, self.nhfield*1 * sizeof(c_int)) @property def hfield_ncol(self): arr = np.reshape(np.fromiter(self._wrapped.contents.hfield_ncol, dtype=np.int, count=(self.nhfield*1)), (self.nhfield, 1, )) arr.setflags(write=False) return arr @hfield_ncol.setter def hfield_ncol(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.hfield_ncol, val_ptr, self.nhfield*1 * sizeof(c_int)) @property def hfield_adr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.hfield_adr, dtype=np.int, count=(self.nhfield*1)), (self.nhfield, 1, )) arr.setflags(write=False) return arr @hfield_adr.setter def hfield_adr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.hfield_adr, val_ptr, self.nhfield*1 * sizeof(c_int)) @property def hfield_data(self): arr = np.reshape(np.fromiter(self._wrapped.contents.hfield_data, dtype=np.float, count=(self.nhfielddata*1)), (self.nhfielddata, 1, )) arr.setflags(write=False) return arr @hfield_data.setter def hfield_data(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.hfield_data, val_ptr, self.nhfielddata*1 * sizeof(c_float)) @property def tex_type(self): arr = np.reshape(np.fromiter(self._wrapped.contents.tex_type, dtype=np.int, count=(self.ntex*1)), (self.ntex, 1, )) arr.setflags(write=False) return arr @tex_type.setter def tex_type(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.tex_type, val_ptr, self.ntex*1 * sizeof(c_int)) @property def tex_height(self): arr = np.reshape(np.fromiter(self._wrapped.contents.tex_height, dtype=np.int, count=(self.ntex*1)), (self.ntex, 1, )) arr.setflags(write=False) return arr @tex_height.setter def tex_height(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.tex_height, val_ptr, self.ntex*1 * sizeof(c_int)) @property def tex_width(self): arr = np.reshape(np.fromiter(self._wrapped.contents.tex_width, dtype=np.int, count=(self.ntex*1)), (self.ntex, 1, )) arr.setflags(write=False) return arr @tex_width.setter def tex_width(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.tex_width, val_ptr, self.ntex*1 * sizeof(c_int)) @property def tex_adr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.tex_adr, dtype=np.int, count=(self.ntex*1)), (self.ntex, 1, )) arr.setflags(write=False) return arr @tex_adr.setter def tex_adr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.tex_adr, val_ptr, self.ntex*1 * sizeof(c_int)) @property def tex_rgb(self): arr = np.reshape(np.fromiter(self._wrapped.contents.tex_rgb, dtype=np.uint8, count=(self.ntexdata*1)), (self.ntexdata, 1, )) arr.setflags(write=False) return arr @tex_rgb.setter def tex_rgb(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_ubyte)) memmove(self._wrapped.contents.tex_rgb, val_ptr, self.ntexdata*1 * sizeof(c_ubyte)) @property def mat_texid(self): arr = np.reshape(np.fromiter(self._wrapped.contents.mat_texid, dtype=np.int, count=(self.nmat*1)), (self.nmat, 1, )) arr.setflags(write=False) return arr @mat_texid.setter def mat_texid(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.mat_texid, val_ptr, self.nmat*1 * sizeof(c_int)) @property def mat_texuniform(self): arr = np.reshape(np.fromiter(self._wrapped.contents.mat_texuniform, dtype=np.uint8, count=(self.nmat*1)), (self.nmat, 1, )) arr.setflags(write=False) return arr @mat_texuniform.setter def mat_texuniform(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_ubyte)) memmove(self._wrapped.contents.mat_texuniform, val_ptr, self.nmat*1 * sizeof(c_ubyte)) @property def mat_texrepeat(self): arr = np.reshape(np.fromiter(self._wrapped.contents.mat_texrepeat, dtype=np.float, count=(self.nmat*2)), (self.nmat, 2, )) arr.setflags(write=False) return arr @mat_texrepeat.setter def mat_texrepeat(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.mat_texrepeat, val_ptr, self.nmat*2 * sizeof(c_float)) @property def mat_emission(self): arr = np.reshape(np.fromiter(self._wrapped.contents.mat_emission, dtype=np.float, count=(self.nmat*1)), (self.nmat, 1, )) arr.setflags(write=False) return arr @mat_emission.setter def mat_emission(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.mat_emission, val_ptr, self.nmat*1 * sizeof(c_float)) @property def mat_specular(self): arr = np.reshape(np.fromiter(self._wrapped.contents.mat_specular, dtype=np.float, count=(self.nmat*1)), (self.nmat, 1, )) arr.setflags(write=False) return arr @mat_specular.setter def mat_specular(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.mat_specular, val_ptr, self.nmat*1 * sizeof(c_float)) @property def mat_shininess(self): arr = np.reshape(np.fromiter(self._wrapped.contents.mat_shininess, dtype=np.float, count=(self.nmat*1)), (self.nmat, 1, )) arr.setflags(write=False) return arr @mat_shininess.setter def mat_shininess(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.mat_shininess, val_ptr, self.nmat*1 * sizeof(c_float)) @property def mat_reflectance(self): arr = np.reshape(np.fromiter(self._wrapped.contents.mat_reflectance, dtype=np.float, count=(self.nmat*1)), (self.nmat, 1, )) arr.setflags(write=False) return arr @mat_reflectance.setter def mat_reflectance(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.mat_reflectance, val_ptr, self.nmat*1 * sizeof(c_float)) @property def mat_rgba(self): arr = np.reshape(np.fromiter(self._wrapped.contents.mat_rgba, dtype=np.float, count=(self.nmat*4)), (self.nmat, 4, )) arr.setflags(write=False) return arr @mat_rgba.setter def mat_rgba(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.mat_rgba, val_ptr, self.nmat*4 * sizeof(c_float)) @property def pair_dim(self): arr = np.reshape(np.fromiter(self._wrapped.contents.pair_dim, dtype=np.int, count=(self.npair*1)), (self.npair, 1, )) arr.setflags(write=False) return arr @pair_dim.setter def pair_dim(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.pair_dim, val_ptr, self.npair*1 * sizeof(c_int)) @property def pair_geom1(self): arr = np.reshape(np.fromiter(self._wrapped.contents.pair_geom1, dtype=np.int, count=(self.npair*1)), (self.npair, 1, )) arr.setflags(write=False) return arr @pair_geom1.setter def pair_geom1(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.pair_geom1, val_ptr, self.npair*1 * sizeof(c_int)) @property def pair_geom2(self): arr = np.reshape(np.fromiter(self._wrapped.contents.pair_geom2, dtype=np.int, count=(self.npair*1)), (self.npair, 1, )) arr.setflags(write=False) return arr @pair_geom2.setter def pair_geom2(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.pair_geom2, val_ptr, self.npair*1 * sizeof(c_int)) @property def pair_signature(self): arr = np.reshape(np.fromiter(self._wrapped.contents.pair_signature, dtype=np.int, count=(self.npair*1)), (self.npair, 1, )) arr.setflags(write=False) return arr @pair_signature.setter def pair_signature(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.pair_signature, val_ptr, self.npair*1 * sizeof(c_int)) @property def pair_solref(self): arr = np.reshape(np.fromiter(self._wrapped.contents.pair_solref, dtype=np.double, count=(self.npair*2)), (self.npair, 2, )) arr.setflags(write=False) return arr @pair_solref.setter def pair_solref(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.pair_solref, val_ptr, self.npair*2 * sizeof(c_double)) @property def pair_solimp(self): arr = np.reshape(np.fromiter(self._wrapped.contents.pair_solimp, dtype=np.double, count=(self.npair*3)), (self.npair, 3, )) arr.setflags(write=False) return arr @pair_solimp.setter def pair_solimp(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.pair_solimp, val_ptr, self.npair*3 * sizeof(c_double)) @property def pair_margin(self): arr = np.reshape(np.fromiter(self._wrapped.contents.pair_margin, dtype=np.double, count=(self.npair*1)), (self.npair, 1, )) arr.setflags(write=False) return arr @pair_margin.setter def pair_margin(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.pair_margin, val_ptr, self.npair*1 * sizeof(c_double)) @property def pair_gap(self): arr = np.reshape(np.fromiter(self._wrapped.contents.pair_gap, dtype=np.double, count=(self.npair*1)), (self.npair, 1, )) arr.setflags(write=False) return arr @pair_gap.setter def pair_gap(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.pair_gap, val_ptr, self.npair*1 * sizeof(c_double)) @property def pair_friction(self): arr = np.reshape(np.fromiter(self._wrapped.contents.pair_friction, dtype=np.double, count=(self.npair*5)), (self.npair, 5, )) arr.setflags(write=False) return arr @pair_friction.setter def pair_friction(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.pair_friction, val_ptr, self.npair*5 * sizeof(c_double)) @property def exclude_signature(self): arr = np.reshape(np.fromiter(self._wrapped.contents.exclude_signature, dtype=np.int, count=(self.nexclude*1)), (self.nexclude, 1, )) arr.setflags(write=False) return arr @exclude_signature.setter def exclude_signature(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.exclude_signature, val_ptr, self.nexclude*1 * sizeof(c_int)) @property def eq_type(self): arr = np.reshape(np.fromiter(self._wrapped.contents.eq_type, dtype=np.int, count=(self.neq*1)), (self.neq, 1, )) arr.setflags(write=False) return arr @eq_type.setter def eq_type(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.eq_type, val_ptr, self.neq*1 * sizeof(c_int)) @property def eq_obj1id(self): arr = np.reshape(np.fromiter(self._wrapped.contents.eq_obj1id, dtype=np.int, count=(self.neq*1)), (self.neq, 1, )) arr.setflags(write=False) return arr @eq_obj1id.setter def eq_obj1id(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.eq_obj1id, val_ptr, self.neq*1 * sizeof(c_int)) @property def eq_obj2id(self): arr = np.reshape(np.fromiter(self._wrapped.contents.eq_obj2id, dtype=np.int, count=(self.neq*1)), (self.neq, 1, )) arr.setflags(write=False) return arr @eq_obj2id.setter def eq_obj2id(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.eq_obj2id, val_ptr, self.neq*1 * sizeof(c_int)) @property def eq_active(self): arr = np.reshape(np.fromiter(self._wrapped.contents.eq_active, dtype=np.uint8, count=(self.neq*1)), (self.neq, 1, )) arr.setflags(write=False) return arr @eq_active.setter def eq_active(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_ubyte)) memmove(self._wrapped.contents.eq_active, val_ptr, self.neq*1 * sizeof(c_ubyte)) @property def eq_solref(self): arr = np.reshape(np.fromiter(self._wrapped.contents.eq_solref, dtype=np.double, count=(self.neq*2)), (self.neq, 2, )) arr.setflags(write=False) return arr @eq_solref.setter def eq_solref(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.eq_solref, val_ptr, self.neq*2 * sizeof(c_double)) @property def eq_solimp(self): arr = np.reshape(np.fromiter(self._wrapped.contents.eq_solimp, dtype=np.double, count=(self.neq*3)), (self.neq, 3, )) arr.setflags(write=False) return arr @eq_solimp.setter def eq_solimp(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.eq_solimp, val_ptr, self.neq*3 * sizeof(c_double)) @property def eq_data(self): arr = np.reshape(np.fromiter(self._wrapped.contents.eq_data, dtype=np.double, count=(self.neq*7)), (self.neq, 7, )) arr.setflags(write=False) return arr @eq_data.setter def eq_data(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.eq_data, val_ptr, self.neq*7 * sizeof(c_double)) @property def tendon_adr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_adr, dtype=np.int, count=(self.ntendon*1)), (self.ntendon, 1, )) arr.setflags(write=False) return arr @tendon_adr.setter def tendon_adr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.tendon_adr, val_ptr, self.ntendon*1 * sizeof(c_int)) @property def tendon_num(self): arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_num, dtype=np.int, count=(self.ntendon*1)), (self.ntendon, 1, )) arr.setflags(write=False) return arr @tendon_num.setter def tendon_num(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.tendon_num, val_ptr, self.ntendon*1 * sizeof(c_int)) @property def tendon_matid(self): arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_matid, dtype=np.int, count=(self.ntendon*1)), (self.ntendon, 1, )) arr.setflags(write=False) return arr @tendon_matid.setter def tendon_matid(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.tendon_matid, val_ptr, self.ntendon*1 * sizeof(c_int)) @property def tendon_limited(self): arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_limited, dtype=np.uint8, count=(self.ntendon*1)), (self.ntendon, 1, )) arr.setflags(write=False) return arr @tendon_limited.setter def tendon_limited(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_ubyte)) memmove(self._wrapped.contents.tendon_limited, val_ptr, self.ntendon*1 * sizeof(c_ubyte)) @property def tendon_frictional(self): arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_frictional, dtype=np.uint8, count=(self.ntendon*1)), (self.ntendon, 1, )) arr.setflags(write=False) return arr @tendon_frictional.setter def tendon_frictional(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_ubyte)) memmove(self._wrapped.contents.tendon_frictional, val_ptr, self.ntendon*1 * sizeof(c_ubyte)) @property def tendon_width(self): arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_width, dtype=np.double, count=(self.ntendon*1)), (self.ntendon, 1, )) arr.setflags(write=False) return arr @tendon_width.setter def tendon_width(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.tendon_width, val_ptr, self.ntendon*1 * sizeof(c_double)) @property def tendon_solref_lim(self): arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_solref_lim, dtype=np.double, count=(self.ntendon*2)), (self.ntendon, 2, )) arr.setflags(write=False) return arr @tendon_solref_lim.setter def tendon_solref_lim(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.tendon_solref_lim, val_ptr, self.ntendon*2 * sizeof(c_double)) @property def tendon_solimp_lim(self): arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_solimp_lim, dtype=np.double, count=(self.ntendon*3)), (self.ntendon, 3, )) arr.setflags(write=False) return arr @tendon_solimp_lim.setter def tendon_solimp_lim(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.tendon_solimp_lim, val_ptr, self.ntendon*3 * sizeof(c_double)) @property def tendon_solref_fri(self): arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_solref_fri, dtype=np.double, count=(self.ntendon*2)), (self.ntendon, 2, )) arr.setflags(write=False) return arr @tendon_solref_fri.setter def tendon_solref_fri(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.tendon_solref_fri, val_ptr, self.ntendon*2 * sizeof(c_double)) @property def tendon_solimp_fri(self): arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_solimp_fri, dtype=np.double, count=(self.ntendon*3)), (self.ntendon, 3, )) arr.setflags(write=False) return arr @tendon_solimp_fri.setter def tendon_solimp_fri(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.tendon_solimp_fri, val_ptr, self.ntendon*3 * sizeof(c_double)) @property def tendon_range(self): arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_range, dtype=np.double, count=(self.ntendon*2)), (self.ntendon, 2, )) arr.setflags(write=False) return arr @tendon_range.setter def tendon_range(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.tendon_range, val_ptr, self.ntendon*2 * sizeof(c_double)) @property def tendon_margin(self): arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_margin, dtype=np.double, count=(self.ntendon*1)), (self.ntendon, 1, )) arr.setflags(write=False) return arr @tendon_margin.setter def tendon_margin(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.tendon_margin, val_ptr, self.ntendon*1 * sizeof(c_double)) @property def tendon_stiffness(self): arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_stiffness, dtype=np.double, count=(self.ntendon*1)), (self.ntendon, 1, )) arr.setflags(write=False) return arr @tendon_stiffness.setter def tendon_stiffness(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.tendon_stiffness, val_ptr, self.ntendon*1 * sizeof(c_double)) @property def tendon_damping(self): arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_damping, dtype=np.double, count=(self.ntendon*1)), (self.ntendon, 1, )) arr.setflags(write=False) return arr @tendon_damping.setter def tendon_damping(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.tendon_damping, val_ptr, self.ntendon*1 * sizeof(c_double)) @property def tendon_frictionloss(self): arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_frictionloss, dtype=np.double, count=(self.ntendon*1)), (self.ntendon, 1, )) arr.setflags(write=False) return arr @tendon_frictionloss.setter def tendon_frictionloss(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.tendon_frictionloss, val_ptr, self.ntendon*1 * sizeof(c_double)) @property def tendon_lengthspring(self): arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_lengthspring, dtype=np.double, count=(self.ntendon*1)), (self.ntendon, 1, )) arr.setflags(write=False) return arr @tendon_lengthspring.setter def tendon_lengthspring(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.tendon_lengthspring, val_ptr, self.ntendon*1 * sizeof(c_double)) @property def tendon_length0(self): arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_length0, dtype=np.double, count=(self.ntendon*1)), (self.ntendon, 1, )) arr.setflags(write=False) return arr @tendon_length0.setter def tendon_length0(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.tendon_length0, val_ptr, self.ntendon*1 * sizeof(c_double)) @property def tendon_invweight0(self): arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_invweight0, dtype=np.double, count=(self.ntendon*1)), (self.ntendon, 1, )) arr.setflags(write=False) return arr @tendon_invweight0.setter def tendon_invweight0(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.tendon_invweight0, val_ptr, self.ntendon*1 * sizeof(c_double)) @property def tendon_user(self): arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_user, dtype=np.double, count=(self.ntendon*self.nuser_tendon)), (self.ntendon, self.nuser_tendon, )) arr.setflags(write=False) return arr @tendon_user.setter def tendon_user(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.tendon_user, val_ptr, self.ntendon*self.nuser_tendon * sizeof(c_double)) @property def tendon_rgba(self): arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_rgba, dtype=np.float, count=(self.ntendon*4)), (self.ntendon, 4, )) arr.setflags(write=False) return arr @tendon_rgba.setter def tendon_rgba(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float)) memmove(self._wrapped.contents.tendon_rgba, val_ptr, self.ntendon*4 * sizeof(c_float)) @property def wrap_type(self): arr = np.reshape(np.fromiter(self._wrapped.contents.wrap_type, dtype=np.int, count=(self.nwrap*1)), (self.nwrap, 1, )) arr.setflags(write=False) return arr @wrap_type.setter def wrap_type(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.wrap_type, val_ptr, self.nwrap*1 * sizeof(c_int)) @property def wrap_objid(self): arr = np.reshape(np.fromiter(self._wrapped.contents.wrap_objid, dtype=np.int, count=(self.nwrap*1)), (self.nwrap, 1, )) arr.setflags(write=False) return arr @wrap_objid.setter def wrap_objid(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.wrap_objid, val_ptr, self.nwrap*1 * sizeof(c_int)) @property def wrap_prm(self): arr = np.reshape(np.fromiter(self._wrapped.contents.wrap_prm, dtype=np.double, count=(self.nwrap*1)), (self.nwrap, 1, )) arr.setflags(write=False) return arr @wrap_prm.setter def wrap_prm(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.wrap_prm, val_ptr, self.nwrap*1 * sizeof(c_double)) @property def actuator_trntype(self): arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_trntype, dtype=np.int, count=(self.nu*1)), (self.nu, 1, )) arr.setflags(write=False) return arr @actuator_trntype.setter def actuator_trntype(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.actuator_trntype, val_ptr, self.nu*1 * sizeof(c_int)) @property def actuator_dyntype(self): arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_dyntype, dtype=np.int, count=(self.nu*1)), (self.nu, 1, )) arr.setflags(write=False) return arr @actuator_dyntype.setter def actuator_dyntype(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.actuator_dyntype, val_ptr, self.nu*1 * sizeof(c_int)) @property def actuator_gaintype(self): arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_gaintype, dtype=np.int, count=(self.nu*1)), (self.nu, 1, )) arr.setflags(write=False) return arr @actuator_gaintype.setter def actuator_gaintype(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.actuator_gaintype, val_ptr, self.nu*1 * sizeof(c_int)) @property def actuator_biastype(self): arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_biastype, dtype=np.int, count=(self.nu*1)), (self.nu, 1, )) arr.setflags(write=False) return arr @actuator_biastype.setter def actuator_biastype(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.actuator_biastype, val_ptr, self.nu*1 * sizeof(c_int)) @property def actuator_trnid(self): arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_trnid, dtype=np.int, count=(self.nu*2)), (self.nu, 2, )) arr.setflags(write=False) return arr @actuator_trnid.setter def actuator_trnid(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.actuator_trnid, val_ptr, self.nu*2 * sizeof(c_int)) @property def actuator_ctrllimited(self): arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_ctrllimited, dtype=np.uint8, count=(self.nu*1)), (self.nu, 1, )) arr.setflags(write=False) return arr @actuator_ctrllimited.setter def actuator_ctrllimited(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_ubyte)) memmove(self._wrapped.contents.actuator_ctrllimited, val_ptr, self.nu*1 * sizeof(c_ubyte)) @property def actuator_forcelimited(self): arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_forcelimited, dtype=np.uint8, count=(self.nu*1)), (self.nu, 1, )) arr.setflags(write=False) return arr @actuator_forcelimited.setter def actuator_forcelimited(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_ubyte)) memmove(self._wrapped.contents.actuator_forcelimited, val_ptr, self.nu*1 * sizeof(c_ubyte)) @property def actuator_dynprm(self): arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_dynprm, dtype=np.double, count=(self.nu*3)), (self.nu, 3, )) arr.setflags(write=False) return arr @actuator_dynprm.setter def actuator_dynprm(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.actuator_dynprm, val_ptr, self.nu*3 * sizeof(c_double)) @property def actuator_gainprm(self): arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_gainprm, dtype=np.double, count=(self.nu*3)), (self.nu, 3, )) arr.setflags(write=False) return arr @actuator_gainprm.setter def actuator_gainprm(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.actuator_gainprm, val_ptr, self.nu*3 * sizeof(c_double)) @property def actuator_biasprm(self): arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_biasprm, dtype=np.double, count=(self.nu*3)), (self.nu, 3, )) arr.setflags(write=False) return arr @actuator_biasprm.setter def actuator_biasprm(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.actuator_biasprm, val_ptr, self.nu*3 * sizeof(c_double)) @property def actuator_ctrlrange(self): arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_ctrlrange, dtype=np.double, count=(self.nu*2)), (self.nu, 2, )) arr.setflags(write=False) return arr @actuator_ctrlrange.setter def actuator_ctrlrange(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.actuator_ctrlrange, val_ptr, self.nu*2 * sizeof(c_double)) @property def actuator_forcerange(self): arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_forcerange, dtype=np.double, count=(self.nu*2)), (self.nu, 2, )) arr.setflags(write=False) return arr @actuator_forcerange.setter def actuator_forcerange(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.actuator_forcerange, val_ptr, self.nu*2 * sizeof(c_double)) @property def actuator_gear(self): arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_gear, dtype=np.double, count=(self.nu*6)), (self.nu, 6, )) arr.setflags(write=False) return arr @actuator_gear.setter def actuator_gear(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.actuator_gear, val_ptr, self.nu*6 * sizeof(c_double)) @property def actuator_cranklength(self): arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_cranklength, dtype=np.double, count=(self.nu*1)), (self.nu, 1, )) arr.setflags(write=False) return arr @actuator_cranklength.setter def actuator_cranklength(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.actuator_cranklength, val_ptr, self.nu*1 * sizeof(c_double)) @property def actuator_invweight0(self): arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_invweight0, dtype=np.double, count=(self.nu*1)), (self.nu, 1, )) arr.setflags(write=False) return arr @actuator_invweight0.setter def actuator_invweight0(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.actuator_invweight0, val_ptr, self.nu*1 * sizeof(c_double)) @property def actuator_length0(self): arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_length0, dtype=np.double, count=(self.nu*1)), (self.nu, 1, )) arr.setflags(write=False) return arr @actuator_length0.setter def actuator_length0(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.actuator_length0, val_ptr, self.nu*1 * sizeof(c_double)) @property def actuator_lengthrange(self): arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_lengthrange, dtype=np.double, count=(self.nu*2)), (self.nu, 2, )) arr.setflags(write=False) return arr @actuator_lengthrange.setter def actuator_lengthrange(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.actuator_lengthrange, val_ptr, self.nu*2 * sizeof(c_double)) @property def actuator_user(self): arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_user, dtype=np.double, count=(self.nu*self.nuser_actuator)), (self.nu, self.nuser_actuator, )) arr.setflags(write=False) return arr @actuator_user.setter def actuator_user(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.actuator_user, val_ptr, self.nu*self.nuser_actuator * sizeof(c_double)) @property def sensor_type(self): arr = np.reshape(np.fromiter(self._wrapped.contents.sensor_type, dtype=np.int, count=(self.nsensor*1)), (self.nsensor, 1, )) arr.setflags(write=False) return arr @sensor_type.setter def sensor_type(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.sensor_type, val_ptr, self.nsensor*1 * sizeof(c_int)) @property def sensor_datatype(self): arr = np.reshape(np.fromiter(self._wrapped.contents.sensor_datatype, dtype=np.int, count=(self.nsensor*1)), (self.nsensor, 1, )) arr.setflags(write=False) return arr @sensor_datatype.setter def sensor_datatype(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.sensor_datatype, val_ptr, self.nsensor*1 * sizeof(c_int)) @property def sensor_needstage(self): arr = np.reshape(np.fromiter(self._wrapped.contents.sensor_needstage, dtype=np.int, count=(self.nsensor*1)), (self.nsensor, 1, )) arr.setflags(write=False) return arr @sensor_needstage.setter def sensor_needstage(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.sensor_needstage, val_ptr, self.nsensor*1 * sizeof(c_int)) @property def sensor_objtype(self): arr = np.reshape(np.fromiter(self._wrapped.contents.sensor_objtype, dtype=np.int, count=(self.nsensor*1)), (self.nsensor, 1, )) arr.setflags(write=False) return arr @sensor_objtype.setter def sensor_objtype(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.sensor_objtype, val_ptr, self.nsensor*1 * sizeof(c_int)) @property def sensor_objid(self): arr = np.reshape(np.fromiter(self._wrapped.contents.sensor_objid, dtype=np.int, count=(self.nsensor*1)), (self.nsensor, 1, )) arr.setflags(write=False) return arr @sensor_objid.setter def sensor_objid(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.sensor_objid, val_ptr, self.nsensor*1 * sizeof(c_int)) @property def sensor_dim(self): arr = np.reshape(np.fromiter(self._wrapped.contents.sensor_dim, dtype=np.int, count=(self.nsensor*1)), (self.nsensor, 1, )) arr.setflags(write=False) return arr @sensor_dim.setter def sensor_dim(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.sensor_dim, val_ptr, self.nsensor*1 * sizeof(c_int)) @property def sensor_adr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.sensor_adr, dtype=np.int, count=(self.nsensor*1)), (self.nsensor, 1, )) arr.setflags(write=False) return arr @sensor_adr.setter def sensor_adr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.sensor_adr, val_ptr, self.nsensor*1 * sizeof(c_int)) @property def sensor_noise(self): arr = np.reshape(np.fromiter(self._wrapped.contents.sensor_noise, dtype=np.double, count=(self.nsensor*1)), (self.nsensor, 1, )) arr.setflags(write=False) return arr @sensor_noise.setter def sensor_noise(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.sensor_noise, val_ptr, self.nsensor*1 * sizeof(c_double)) @property def sensor_user(self): arr = np.reshape(np.fromiter(self._wrapped.contents.sensor_user, dtype=np.double, count=(self.nsensor*self.nuser_sensor)), (self.nsensor, self.nuser_sensor, )) arr.setflags(write=False) return arr @sensor_user.setter def sensor_user(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.sensor_user, val_ptr, self.nsensor*self.nuser_sensor * sizeof(c_double)) @property def numeric_adr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.numeric_adr, dtype=np.int, count=(self.nnumeric*1)), (self.nnumeric, 1, )) arr.setflags(write=False) return arr @numeric_adr.setter def numeric_adr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.numeric_adr, val_ptr, self.nnumeric*1 * sizeof(c_int)) @property def numeric_size(self): arr = np.reshape(np.fromiter(self._wrapped.contents.numeric_size, dtype=np.int, count=(self.nnumeric*1)), (self.nnumeric, 1, )) arr.setflags(write=False) return arr @numeric_size.setter def numeric_size(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.numeric_size, val_ptr, self.nnumeric*1 * sizeof(c_int)) @property def numeric_data(self): arr = np.reshape(np.fromiter(self._wrapped.contents.numeric_data, dtype=np.double, count=(self.nnumericdata*1)), (self.nnumericdata, 1, )) arr.setflags(write=False) return arr @numeric_data.setter def numeric_data(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.numeric_data, val_ptr, self.nnumericdata*1 * sizeof(c_double)) @property def text_adr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.text_adr, dtype=np.int, count=(self.ntext*1)), (self.ntext, 1, )) arr.setflags(write=False) return arr @text_adr.setter def text_adr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.text_adr, val_ptr, self.ntext*1 * sizeof(c_int)) @property def text_size(self): arr = np.reshape(np.fromiter(self._wrapped.contents.text_size, dtype=np.int, count=(self.ntext*1)), (self.ntext, 1, )) arr.setflags(write=False) return arr @text_size.setter def text_size(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.text_size, val_ptr, self.ntext*1 * sizeof(c_int)) @property def text_data(self): return self._wrapped.contents.text_data @property def tuple_adr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.tuple_adr, dtype=np.int, count=(self.ntuple*1)), (self.ntuple, 1, )) arr.setflags(write=False) return arr @tuple_adr.setter def tuple_adr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.tuple_adr, val_ptr, self.ntuple*1 * sizeof(c_int)) @property def tuple_size(self): arr = np.reshape(np.fromiter(self._wrapped.contents.tuple_size, dtype=np.int, count=(self.ntuple*1)), (self.ntuple, 1, )) arr.setflags(write=False) return arr @tuple_size.setter def tuple_size(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.tuple_size, val_ptr, self.ntuple*1 * sizeof(c_int)) @property def tuple_objtype(self): arr = np.reshape(np.fromiter(self._wrapped.contents.tuple_objtype, dtype=np.int, count=(self.ntupledata*1)), (self.ntupledata, 1, )) arr.setflags(write=False) return arr @tuple_objtype.setter def tuple_objtype(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.tuple_objtype, val_ptr, self.ntupledata*1 * sizeof(c_int)) @property def tuple_objid(self): arr = np.reshape(np.fromiter(self._wrapped.contents.tuple_objid, dtype=np.int, count=(self.ntupledata*1)), (self.ntupledata, 1, )) arr.setflags(write=False) return arr @tuple_objid.setter def tuple_objid(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.tuple_objid, val_ptr, self.ntupledata*1 * sizeof(c_int)) @property def tuple_objprm(self): arr = np.reshape(np.fromiter(self._wrapped.contents.tuple_objprm, dtype=np.double, count=(self.ntupledata*1)), (self.ntupledata, 1, )) arr.setflags(write=False) return arr @tuple_objprm.setter def tuple_objprm(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.tuple_objprm, val_ptr, self.ntupledata*1 * sizeof(c_double)) @property def key_time(self): arr = np.reshape(np.fromiter(self._wrapped.contents.key_time, dtype=np.double, count=(self.nkey*1)), (self.nkey, 1, )) arr.setflags(write=False) return arr @key_time.setter def key_time(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.key_time, val_ptr, self.nkey*1 * sizeof(c_double)) @property def key_qpos(self): arr = np.reshape(np.fromiter(self._wrapped.contents.key_qpos, dtype=np.double, count=(self.nkey*self.nq)), (self.nkey, self.nq, )) arr.setflags(write=False) return arr @key_qpos.setter def key_qpos(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.key_qpos, val_ptr, self.nkey*self.nq * sizeof(c_double)) @property def key_qvel(self): arr = np.reshape(np.fromiter(self._wrapped.contents.key_qvel, dtype=np.double, count=(self.nkey*self.nv)), (self.nkey, self.nv, )) arr.setflags(write=False) return arr @key_qvel.setter def key_qvel(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.key_qvel, val_ptr, self.nkey*self.nv * sizeof(c_double)) @property def key_act(self): arr = np.reshape(np.fromiter(self._wrapped.contents.key_act, dtype=np.double, count=(self.nkey*self.na)), (self.nkey, self.na, )) arr.setflags(write=False) return arr @key_act.setter def key_act(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double)) memmove(self._wrapped.contents.key_act, val_ptr, self.nkey*self.na * sizeof(c_double)) @property def name_bodyadr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.name_bodyadr, dtype=np.int, count=(self.nbody*1)), (self.nbody, 1, )) arr.setflags(write=False) return arr @name_bodyadr.setter def name_bodyadr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.name_bodyadr, val_ptr, self.nbody*1 * sizeof(c_int)) @property def name_jntadr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.name_jntadr, dtype=np.int, count=(self.njnt*1)), (self.njnt, 1, )) arr.setflags(write=False) return arr @name_jntadr.setter def name_jntadr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.name_jntadr, val_ptr, self.njnt*1 * sizeof(c_int)) @property def name_geomadr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.name_geomadr, dtype=np.int, count=(self.ngeom*1)), (self.ngeom, 1, )) arr.setflags(write=False) return arr @name_geomadr.setter def name_geomadr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.name_geomadr, val_ptr, self.ngeom*1 * sizeof(c_int)) @property def name_siteadr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.name_siteadr, dtype=np.int, count=(self.nsite*1)), (self.nsite, 1, )) arr.setflags(write=False) return arr @name_siteadr.setter def name_siteadr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.name_siteadr, val_ptr, self.nsite*1 * sizeof(c_int)) @property def name_camadr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.name_camadr, dtype=np.int, count=(self.ncam*1)), (self.ncam, 1, )) arr.setflags(write=False) return arr @name_camadr.setter def name_camadr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.name_camadr, val_ptr, self.ncam*1 * sizeof(c_int)) @property def name_lightadr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.name_lightadr, dtype=np.int, count=(self.nlight*1)), (self.nlight, 1, )) arr.setflags(write=False) return arr @name_lightadr.setter def name_lightadr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.name_lightadr, val_ptr, self.nlight*1 * sizeof(c_int)) @property def name_meshadr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.name_meshadr, dtype=np.int, count=(self.nmesh*1)), (self.nmesh, 1, )) arr.setflags(write=False) return arr @name_meshadr.setter def name_meshadr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.name_meshadr, val_ptr, self.nmesh*1 * sizeof(c_int)) @property def name_hfieldadr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.name_hfieldadr, dtype=np.int, count=(self.nhfield*1)), (self.nhfield, 1, )) arr.setflags(write=False) return arr @name_hfieldadr.setter def name_hfieldadr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.name_hfieldadr, val_ptr, self.nhfield*1 * sizeof(c_int)) @property def name_texadr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.name_texadr, dtype=np.int, count=(self.ntex*1)), (self.ntex, 1, )) arr.setflags(write=False) return arr @name_texadr.setter def name_texadr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.name_texadr, val_ptr, self.ntex*1 * sizeof(c_int)) @property def name_matadr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.name_matadr, dtype=np.int, count=(self.nmat*1)), (self.nmat, 1, )) arr.setflags(write=False) return arr @name_matadr.setter def name_matadr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.name_matadr, val_ptr, self.nmat*1 * sizeof(c_int)) @property def name_eqadr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.name_eqadr, dtype=np.int, count=(self.neq*1)), (self.neq, 1, )) arr.setflags(write=False) return arr @name_eqadr.setter def name_eqadr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.name_eqadr, val_ptr, self.neq*1 * sizeof(c_int)) @property def name_tendonadr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.name_tendonadr, dtype=np.int, count=(self.ntendon*1)), (self.ntendon, 1, )) arr.setflags(write=False) return arr @name_tendonadr.setter def name_tendonadr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.name_tendonadr, val_ptr, self.ntendon*1 * sizeof(c_int)) @property def name_actuatoradr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.name_actuatoradr, dtype=np.int, count=(self.nu*1)), (self.nu, 1, )) arr.setflags(write=False) return arr @name_actuatoradr.setter def name_actuatoradr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.name_actuatoradr, val_ptr, self.nu*1 * sizeof(c_int)) @property def name_sensoradr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.name_sensoradr, dtype=np.int, count=(self.nsensor*1)), (self.nsensor, 1, )) arr.setflags(write=False) return arr @name_sensoradr.setter def name_sensoradr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.name_sensoradr, val_ptr, self.nsensor*1 * sizeof(c_int)) @property def name_numericadr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.name_numericadr, dtype=np.int, count=(self.nnumeric*1)), (self.nnumeric, 1, )) arr.setflags(write=False) return arr @name_numericadr.setter def name_numericadr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.name_numericadr, val_ptr, self.nnumeric*1 * sizeof(c_int)) @property def name_textadr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.name_textadr, dtype=np.int, count=(self.ntext*1)), (self.ntext, 1, )) arr.setflags(write=False) return arr @name_textadr.setter def name_textadr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.name_textadr, val_ptr, self.ntext*1 * sizeof(c_int)) @property def name_tupleadr(self): arr = np.reshape(np.fromiter(self._wrapped.contents.name_tupleadr, dtype=np.int, count=(self.ntuple*1)), (self.ntuple, 1, )) arr.setflags(write=False) return arr @name_tupleadr.setter def name_tupleadr(self, value): val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int)) memmove(self._wrapped.contents.name_tupleadr, val_ptr, self.ntuple*1 * sizeof(c_int)) @property def names(self): return self._wrapped.contents.names
Shop Ross-Simons for the finest collection of fine jewelry, gifts and more since 1952. 100% money back guarantee on every item, every order, every day. Choose from an incredible selection of jewelry and gifts at unbeatable prices.. © 2019 Jewelry.com, All Rights Reserved. Online shopping for Clothing, Shoes & Jewelry from a great selection of Wedding & Engagement, Rings, Necklaces, Earrings, Bracelets, Jewelry Sets & more at everyday low prices.. Shop luxury jewelry online at Tiffany & Co. for men and women. Treat yourself or your love to a gift from the world's premier jeweler.. You’ve searched for Jewelry! Etsy has thousands of unique options to choose from, like handmade goods, vintage finds, and one-of-a-kind gifts. Our global marketplace of sellers can help you find extraordinary items at any price range.. Jared is America's home for fine jewelry including diamond jewelry, gold jewelry, and engagement rings. We also carry a great selection of color gemstone jewelry and luxury watches, as well as earrings, necklaces, bracelets and charms.. Gemstone Glossary. Every woman loves to accessorize her wardrobe with jewelry. When you shop jewelry online at QVC, you'll discover a huge assortment of women's jewelry, including fine and fashion rings, bracelets, necklaces, earrings, charms, pendants, and pins..
from colour import Color class ColorRangeModule(object): """ Class to dynamically generate and select colors. Requires the PyPI package `colour` """ start_color = "#00FF00" end_color = 'red' @staticmethod def get_hex_color_range(start_color, end_color, quantity): """ Generates a list of quantity Hex colors from start_color to end_color. :param start_color: Hex or plain English color for start of range :param end_color: Hex or plain English color for end of range :param quantity: Number of colours to return :return: A list of Hex color values """ raw_colors = [c.hex for c in list(Color(start_color).range_to(Color(end_color), quantity))] colors = [] for color in raw_colors: # i3bar expects the full Hex value but for some colors the colour # module only returns partial values. So we need to convert these colors to the full # Hex value. if len(color) == 4: fixed_color = "#" for c in color[1:]: fixed_color += c * 2 colors.append(fixed_color) else: colors.append(color) return colors def get_gradient(self, value, colors, upper_limit=100): """ Map a value to a color :param value: Some value :return: A Hex color code """ index = int(self.percentage(value, upper_limit)) if index >= len(colors): return colors[-1] elif index < 0: return colors[0] else: return colors[index] @staticmethod def percentage(part, whole): """ Calculate percentage """ if whole == 0: return 0 return 100 * float(part) / float(whole)
Riesling, because of its purity, has a tendency to show cork taint more than other varietals. This is why you often see Rieslings–yes, even some higher priced ones–finished with screwcaps. Corks are still widely used as well and both have their advantages. Screwcaps are thought by some to be better at preventing oxygen from entering the bottle. The less oxygen, the fresher and more crisp the wine. Also, bottles finished with screwcaps can be stored upright. Cork–both those made of natural cork and those made of synthetic materials–also do an excellent job of preserving wine. And, of course, there is always the enjoyment of the tradition of uncorking a bottle of wine. Bottles finished with natural corks should be stored on their sides, so that the wine stays in contact with the cork. The important thing to know is, when it comes to Riesling, either type of closure is fine. Fine Rieslings are as likely to be finished with screwcaps as corks. The choice usually comes down to the preference of the producer and/or the traditions of the region.
# -*- coding: utf-8 -*- """ An example of class inheritance. """ class Enemy(object): """A stupid enemy that doesn't know how to attack, but knows how to die. >>> stupid = Enemy(10) Let's hit him and see if he dies >>> stupid.take_damage(5) >>> stupid.alive True Nope, not dead yet ... let's try again! >>> stupid.take_damage(5) >>> stupid.alive False Woohoo, down you go stupid enemy! """ def __init__(self, hp): self.hp = hp self.alive = True def take_damage(self, dmg): """Take some damage and check your HP for death.""" self.hp -= dmg self.check_hp() def die(self): """Function called when the enemy dies.""" self.alive = False def check_hp(self): """If HP is too low, die.""" if self.hp <= 0: self.die() class Shaman(Enemy): """A smarter enemy - can do everything Enemy can, but can also heal himself. >>> shaman = Shaman(12) Let's hit him and check if he was damaged >>> shaman.take_damage(5) >>> shaman.alive True >>> shaman.hp 7 Nope, not dead yet ... let's try again! >>> shaman.take_damage(5) >>> shaman.alive True >>> shaman.hp 2 Oops, better heal yourself fast shaman! >>> shaman.heal(20) >>> shaman.hp 22 Wow, that was a strong heal ... better bring out the big guns! >>> shaman.take_damage(100) >>> shaman.hp -78 >>> shaman.alive False Wait ... what are you trying to do? >>> shaman.heal(100) >>> shaman.hp -78 >>> shaman.alive False Silly shaman, you can't heal yourself if you're already dead ... """ def __init__(self, hp): """Call the __init__ from our superclass.""" super(Shaman, self).__init__(hp) def heal(self, hp): """Heal himself. Can only do that if he is alive.""" if self.alive: self.hp += hp if __name__ == "__main__": import doctest doctest.testmod()
Uplands continue to carry out fit-out works to vacant premises for Happy Days Nurseries, undertaking 3 new projects throughout 2018 in Bath, Weston Super Mare and Swindon. Conversion of 600m2 of adaptable office space into a 100 child day nursery and landscaping of external areas to form new playgrounds. The nursery was over two levels which required the installation of a lift and considerable structural modifications. This project included the installation of 6 toilet blocks and 6 kitchens, all with bespoke joinery to the clients exacting details.
from os.path import basename, splitext from datetime import datetime from collections import OrderedDict import re import yaml from oslo_config import iniparser VERSION = "0.6.0" class OSConfigParser(iniparser.BaseParser): comment_called = False values = None section = '' comments = [] commented = False def __init__(self): self.values = OrderedDict() def assignment(self, key, value): self.values.setdefault(self.section, {'comments': [], 'entries': {}}) self.values[self.section]['entries'][key] = { 'value': value, 'comments': self.comments, 'commented': self.commented } self.comments = [] self.commented = False def new_section(self, section): self.section = section self.values[self.section] = { 'comments': self.comments, 'entries': OrderedDict() } self.comments = [] def comment(self, comment): if len(comment) > 0 and comment[0].isalpha() and '=' in comment: self.commented = True self.parse([comment]) self.comments = [] else: if False and ' = ' in comment: self.commented = True try: self.parse([comment[1:]]) self.comments = [] return except: pass self.comments.append(comment.lstrip()) def parse(self, lineiter): key = None value = [] for line in lineiter: self.lineno += 1 line = line.rstrip() if not line: # Blank line, ends multi-line values if key: key, value = self._assignment(key, value) continue elif line.startswith((' ', '\t')): # Continuation of previous assignment if key is None: self.error_unexpected_continuation(line) else: value.append(line.lstrip()) continue if key: # Flush previous assignment, if any key, value = self._assignment(key, value) if line.startswith('['): # Section start section = self._get_section(line) if section: self.new_section(section) elif line.startswith(('#', ';')): self.comment(line[1:]) else: key, value = self._split_key_value(line) if not key: return self.error_empty_key(line) if key: # Flush previous assignment, if any self._assignment(key, value) def show_header(fpath, namespace, prefix, desc='', yaml=True): date = datetime.strftime(datetime.today(), "%Y-%m-%d") print "#" print "# AUTOMATICALLY GENERATED ON {0}".format(date) print "# ansible-openstack-config-gen version: {0}".format(VERSION) print "#" if desc: print "# {0}".format(desc) print "# file: {0}".format(basename(fpath)) print "# namespace: {0}".format(namespace) print "# prefix: {0}".format(prefix) print "#" if yaml: print "---" else: print "" def print_comments(comments, newline=0): for cmt in comments: print '# {0}'.format(cmt) for x in range(newline): print "\n" def var_namespace(fpath, name): ns = splitext(basename(fpath.lower()).replace('-', '_'))[0] if not ns.startswith(name): ns = "{0}_{1}".format(name, ns) return ns def infer_type(comments): text = ' '.join(comments) if '(multi valued)' in text: return 'multi' if '(list value)' in text: return 'list' if '(integer value)' in text: return 'int' if '(string value)' in text: return 'str' if '(boolean value)' in text: return 'bool' return None def format_var_name(name): name = name.replace('-', '_').lower() return re.sub(r'[^a-zA-Z0-9_]', '', name) def value_to_yaml(entry): value_type = infer_type(entry['comments']) def convert_to_none(val, keep_string=True): if value_type == 'int': val = None elif value_type == 'multi': val = None elif value_type == 'bool': val = None elif value_type == 'list': val = [] elif value_type != 'str' or not keep_string: val = '' return val if len(entry['value']) == 1: val = entry['value'][0] if val.startswith('<') and val.endswith('>'): val = convert_to_none(val, keep_string=False) else: try: ori_val = val val = yaml.load(val) if val is None: val = convert_to_none(val, keep_string=False) elif val == 'None': val = convert_to_none(val) else: if value_type == 'str' and type(val) is dict: #print "FUCK PREVENTION: use scalar instead of nested dict" val = ori_val elif value_type == 'str' and type(val) is bool: val = ori_val except yaml.scanner.ScannerError: pass return val else: raise Exception("Cannot convert multiple values %s" % values)
The Country Music genre has always been welcoming to new acts and never is this more apparent than with Jason Aldean. Aldean’s rise to fame was slow but once he got in with the right people, it seemed there was nowhere his career couldn’t take him. With platinum records and numerous #1 hits, Aldean’s become one of Nashville’s hottest stars…and all from signing with an independent record label! With a rock steady career and numerous tours, it seemed only fitting that Aldean would eventually get a home video release of one of his nights—and a March 6th, 2009 date was chosen to showcase his talents at the Knoxville Coliseum. On March 6, 2009, a sold out Knoxville (Tenn.) Coliseum crowd found out what millions of Jason Aldean fans already knew: he’s a rising star and a damn fine entertainer. The concert was lights out and over-the-top. Those lucky enough to attend experienced not just an I-can’t-wait-to-tell-my-friends-about-this show, but also benefited from the sum of years of hard work and determination. “Wide Open Live And More!” captures Jason Aldean at his rockin’ country best. All the hits are here—”Amarillo Sky,” “Why,” “Hicktown,” “Relentless,” “She’s Country,” “Johnny Cash,” “Laughed Until We Cried” and “Big Green Tractor”—and so are the live show favorites like “Wide Open,” “I Break Everything I Touch” and “Asphalt Cowboy,” among others. Not only does Jason sing his hits, he connects with his audience and offers them an escape, if only for the night. That’s what real entertainers do. There are some things in life that I find truly torturous and listening to country music is one of them. Needless to say then that receiving this Blu-ray was probably one of the more horrific experiences I could’ve ever had and not only listening to it but having to watch it as well was something I never want to experience again. Nothing against Aldean, as I’m sure he’s quite a fantastic artist, but when it comes to country music there is only one other genre that annoys the bejesus out of me (that genre being rap). Having said all of that I’m probably the last person who should be watching this concert or even reviewing it…but putting myself in the shoes of just the average viewer, even I could find a couple things that were a bit disappointing. The main issue with this concert is its run time—it’s only eighty six minutes. I don’t know about you but after driving to the arena, parking, finding the seats, paying for food/drinks, and then finally sitting down…the last thing I want to do is leave eighty six minutes later. Maybe if concerts were as easy to go to as movie theaters, but I’d expect a full two hours out of my entertaining and that’s not what we got here. Personally speaking I was ecstatic that the concert was as short as it was, but if I were a fan I’d be incredibly disappointed. Overall for fans I guess this is a treat, but for me…I honestly don’t know how he “connects” with the audience. The whole line about that being “what real entertainers do” sounds like some kind of snarky comeback to something, but unless there’s some kind of country music rivalry I don’t know about, then it just sounds really, really awkward. The set itself arrives in a standard Elite Blu-ray case…of which mine was severely screwed up. I’m not sure if the postal service played baseball with it or what but the entire top part of the case was cracked…as is part of the bottom and the pin holding the disc in was floating around…as was the disc actually. Kudos USPS! Inside the case is a booklet that talks about Aldean’s career and includes photos of the band. Video is an AVC encoded effort and it looks as you’d expect from such a modern production – strong and clear. Deep reds from the stage lighting and coloring and plenty of great crowd and band shots make for an enjoyable looking transfer. Three audio choices, LPCM Stereo, DD5.1 and DTS-HD Master Audio 5.1 are available and, as expected, the DTS-HD wins out for surround and LFE output. I actually kind of enjoyed the “I Won’t Back Down” cover, as I enjoyed Tom Petty back when that song originally came out, but it still had a bit more of a country twang to it to the point where it still grated on my nerves. Overall…I’m not a country music fan and Aldean didn’t convert me. He sounds like the rest of the “amazing” artists and while I’m not dumping on him, I’m also not impressed by what he brings to the table. Same ol’, same ol’ to me. Recommended for fans only…and even then…eighty-six minutes? Really? Jason Aldean – Wide Open Live & More! arrives on DVD and Blu-ray on August 25th.
#!/usr/bin/python # # mediabase daemon - creates users' homes, imports files and generates thumbnails # by [email protected] MODULE = 'daemon' import SocketServer import sys, os, os.path sys.path.append(os.getcwd()) sys.path.append("..") from LoadConfig import config from ErrorsHandler import * from gst_player import OggPlayer #from mpd_player import OggPlayer PORT = 4096 #----------------------------------------------------------------- # server #----------------------------------------------------------------- p = OggPlayer() class TCPRequestHandler(SocketServer.BaseRequestHandler ): global p #-------------------------------------------------------------------- def setup(self): self.player = p #logger.info( str(self.client_address), 'connected!' ) logger.info( 'Client connected!' ) welcome = 'OK Welcome to burnstation server.' #self.request.send(welcome + ' Hi '+ str(self.client_address) + '!\n') #self.request.send('player status: %s\n' % self.player.status) self.QUIT = False #-------------------------------------------------------------------- def handle(self): while 1: data = self.request.recv(10240) logger.info( 'OK Got command: ' + data.strip() ) if data.strip() == 'QUIT': logger.info( 'quitting..' ) return else: if ( data[:5] == 'PLAY ' ): file = data[5:] #self.request.send("Playing file: %s" % file) try: self.player.AddToPlaylist(file) self.request.close() self.player.Play() except Exception, e: logger.error("burnstation daemon EXCEPTION: " + str(e)) #self.request.send('player status: %s\n' % self.player.status) return elif ( data[:5] == 'STOP ' ): self.player.Stop() return elif ( data[:5] == 'SEEK ' ): position = data[5:] try: self.player.Seek(int(position)) except Exception, e: logger.error("burnstation daemon EXCEPTION: " + str(e)) #self.request.send('player status: %s\n' % self.player.status) return elif ( data[:5] == 'VOLU ' ): level = float(data[5:]) try: self.player.SetVolume(level) except Exception, e: logger.error("burnstation daemon EXCEPTION: " + str(e)) return elif ( data[:5] == 'BURN_' ): try: #mode = data[5:6] #tracks = data[7:-1].split(':') commands = data.split("|||") print "COMMANDS : ", commands mode = commands[0].split("_")[1] print "MODE : ", mode tracks = commands[1].split(":") if mode == 'A': mode = 'AUDIO' elif mode == 'D': mode = 'DATA' elif mode == 'U': mode = 'USB' #logger.debug(mode) #logger.debug(tracks) # FIXME : ugly hardcode home = "/usr/share/burnstation" cmd = home + '/burn.py' args = [cmd, mode] for track in tracks: if track != '': args.append(track) logger.debug("-------------------------------") logger.debug("TRACKS") logger.debug(tracks) logger.debug("-------------------------------") logger.debug(args) logger.debug("-------------------------------") logger.debug(args) logger.debug("-------------------------------") try: logger.debug("Spawning burn script..") b = os.spawnve(os.P_NOWAIT, cmd, args, os.environ) except Exception, e: logger.error("burnstation daemon EXCEPTION: " + str(e)) return except Exception, e: logger.error("EXCEPTION: %s" % str(e)) else: #self.request.send('ERR command not found: %s\n' % data) logger.error('ERR command not found: %s\n' % data) return #-------------------------------------------------------------------- def finish(self): #print self.client_address, 'disconnected!' logger.info( 'Client ready for disconnection!' ) try: self.request.send('OK bye ' + str(self.client_address) + '\n') except Exception, e: logger.error("burnstation daemon EXCEPTION: " + str(e)) logger.info( 'Disconnecting' ) #self.QUIT = True return #-------------------------------------------------------------------- def stop_daemon(): stop_daemon_cmd = "kill -9 `ps ax | grep python.*burnstation_daemon | grep -v grep | awk '{print $1}'`" os.system(stop_daemon_cmd) #-------------------------------------------------------------------- if __name__ == '__main__': print 1 if len(sys.argv) < 2: print "Usage: burnstation_daemon.py [start|stop]" sys.exit(0) if sys.argv[1] == 'stop': # stop the daemon stop_daemon() sys.exit(0) print 2 try: pid = os.fork() print "start" if pid > 0: print "daemon PID is: " + str(pid) sys.exit(0) except OSError, e: print >>sys.stderr, "fork failed: %d (%s)" % (e.errno, e.strerror) sys.exit(1) try: # server host is a tuple ('host', PORT) tcpserver = SocketServer.ThreadingTCPServer(('127.0.0.1', PORT), TCPRequestHandler) tcpserver.allow_reuse_address = True tcpserver.serve_forever() except Exception, e: logger.error(MODULE+" EXCEPTION: " + str(e)) logger.error(MODULE+" NOT starting") """ if __name__ == '__main__': try: pid = os.fork() if pid > 0: sys.exit(0) except OSError, e: print >>sys.stderr, "fork failed: %d (%s)" % (e.errno, e.strerror) sys.exit(1) tcpserver = SocketServer.ThreadingTCPServer(('127.0.0.1', PORT), TCPRequestHandler) tcpserver.allow_reuse_address = True tcpserver.serve_forever() """
We aren’t called SafeClean for any old reason. Like our name suggests, you can breathe easy with the knowledge that our carpet cleaning and pest control treatments are 100 per cent safe for you and your family – and yes that includes your pets! We understand the safety of the chemicals we use is of major importance to homeowners, that’s why we’re explaining how we ensure safety throughout our business below. Why are chemical treatments used in pest control? When you need to tackle a pest problem head on, chemicals are the proven formulas to exterminate household pests. Rodenticides, termiticides and insecticides are chemical treatments for different pests that offer the most effective and reliable results for significant infestations. Whether it’s to combat cockroaches, eradicate ants, spearhead spiders or flush out fleas, pesticides are the most effective option available. Compared to natural or ‘organic’ pest control methods, pesticides offer a guaranteeing continued protection for a period. chemicals undergoes a rigorous testing program. • Follow all guidelines when administering the chemicals including ensuring the right dosage is used and handling protocols are followed. All our pest control team are trained in the correct usage and handling of all our treatments. • Take all necessary precautions when using chemicals inside the home including instructing leaving the house for a certain time if necessary, and covering toys, toothbrushes and other sensitive items. • Keep you out of your home for long, if at all. Many times we can administer treatments where you will not have to leave the home at all. • Take long to dry. All treatments are quick drying and fast acting. • You will also not need to clean or wash any surfaces after the treatment. Your safety is out number one priority here at SafeClean so you can breathe easy when it comes to engaging us to treat and prevent pests in your home. No matter the pest or problem, you can be sure we’re always protecting YOUR health and OUR good name. For proven safe and effective pest control solutions in Brisbane, call our team today on (07) 3823 2333.
""" First version of a library to interact with BrowserStack's artifacts. For now, this is useful for: a) Obtaining the session URL b) Obtaining URLs of screenshots To do: a) Handle expired sessions better """ import os,requests from conf import remote_credentials as remote_credentials class BrowserStack_Library(): "BrowserStack library to interact with BrowserStack artifacts" def __init__(self): "Constructor for the BrowserStack library" self.browserstack_url = "https://www.browserstack.com/automate/" self.auth = self.get_auth() def get_auth(self): "Set up the auth object for the Requests library" USERNAME = remote_credentials.USERNAME PASSWORD = remote_credentials.ACCESS_KEY auth = (USERNAME,PASSWORD) return auth def get_build_id(self): "Get the build ID" self.build_url = self.browserstack_url + "builds.json" builds = requests.get(self.build_url, auth=self.auth).json() build_id = builds[0]['automation_build']['hashed_id'] return build_id def get_sessions(self): "Get a JSON object with all the sessions" build_id = self.get_build_id() sessions= requests.get(self.browserstack_url + 'builds/%s/sessions.json'%build_id, auth=self.auth).json() return sessions def get_active_session_id(self): "Return the session ID of the first active session" session_id = None sessions = self.get_sessions() for session in sessions: #Get session id of the first session with status = running if session['automation_session']['status']=='running': session_id = session['automation_session']['hashed_id'] break return session_id def get_session_url(self): "Get the session URL" build_id = self.get_build_id() session_id = self.get_active_session_id() session_url = self.browserstack_url + 'builds/%s/sessions/%s'%(build_id,session_id) return session_url def get_session_logs(self): "Return the session log in text format" build_id = self.get_build_id() session_id = self.get_active_session_id() session_log = requests.get(self.browserstack_url + 'builds/%s/sessions/%s/logs'%(build_id,session_id),auth=self.auth).text return session_log def get_latest_screenshot_url(self): "Get the URL of the latest screenshot" session_log = self.get_session_logs() #Process the text to locate the URL of the last screenshot #Extract the https://s2.amazonaws from example lines: #2016-2-9 4:42:39:52 RESPONSE {"state":"success","sessionId":"f77e1de6e4f42a72e6a6ecfd80ed07b95036ca35","hCode":29018101,"value":"https://s3.amazonaws.com/testautomation/f77e1de6e4f42a72e6a6ecfd80ed07b95036ca35/screenshot-selenium-b14d4ec62a.png","class":"org.openqa.selenium.remote.Response","status":0} #[2016-2-9 4:42:45:892] REQUEST [[2016-2-9 4:42:45:892]] GET /session/f77e1de6e4f42a72e6a6ecfd80ed07b95036ca35/title {} #2016-2-9 4:42:45:957 RESPONSE {"state":"success","sessionId":"f77e1de6e4f42a72e6a6ecfd80ed07b95036ca35","hCode":19687124,"value":"New Member Registration & Signup - Chess.com","class":"org.openqa.selenium.remote.Response","status":0} screenshot_request = session_log.split('screenshot {}')[-1] response_result = screenshot_request.split('REQUEST')[0] image_url = response_result.split('https://')[-1] image_url = image_url.split('.png')[0] screenshot_url = 'https://' + image_url + '.png' return screenshot_url
I have never been without my baby boy for more than a few hours. So an entire week was truly difficult for me. I am so glad to be home with my boys. Finn's been hugging on me since I walked in the door. It's the sweetest thing. I'm glad your back home with your boys too. Enjoy all Finns little hugs!
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # placement-api-ref documentation build configuration file, created by # sphinx-quickstart on Sat May 1 15:17:47 2010. # # This file is execfile()d with the current directory set to # its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. from nova.version import version_info extensions = [ 'openstackdocstheme', 'os_api_ref', ] # -- General configuration ---------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. # The suffix of source filenames. source_suffix = '.rst' # The master toctree document. master_doc = 'index' # General information about the project. project = u'Placement API Reference' copyright = u'2010-present, OpenStack Foundation' # openstackdocstheme options repository_name = 'openstack/nova' bug_project = 'nova' bug_tag = 'placement-api-ref' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The full version, including alpha/beta/rc tags. release = version_info.release_string() # The short X.Y version. version = version_info.version_string() # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # -- Options for HTML output -------------------------------------------------- # The theme to use for HTML and HTML Help pages. Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. html_theme = 'openstackdocs' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. html_theme_options = { "sidebar_mode": "toc", } # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. html_last_updated_fmt = '%Y-%m-%d %H:%M' # -- Options for LaTeX output ------------------------------------------------- # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass # [howto/manual]). latex_documents = [ ('index', 'Placement.tex', u'OpenStack Placement API Documentation', u'OpenStack Foundation', 'manual'), ]
Working as a freelance journalist abroad offers a unique opportunity to experience a different culture and way of life. You may have to put the hours in initially to learn the ropes of your new base and build contacts, but a lot of the time being a freelancer overseas also offers you a very individual angle on the stories you cover. "One tremendous advantage I have over local journalists, whether freelance or otherwise, is that I can observe, describe, debate and critique what goes on around me from a slightly different, more removed and more objective perspective," said Christopher Clark, a journalist from Devon, UK, now living in Cape Town. Another clincher for many expat journalists is the lifestyle. For some, it's a chance to live in a warmer climate or a place with a more affordable cost of living – particularly if they're paid in US dollars, euros, or pounds sterling and are based somewhere with a favourable exchange rate. For others, such as Pooja Makhijani, who moved from New York City to Singapore in 2010 for her partner's job, it's a way of getting the work/life balance right. "I have a young child and freelancing allows me the best of both worlds," she said. Of course, as with any job, freelancing in another country has its downsides too. There can be language and cultural barriers to overcome, not to mention visas and often complex tax systems to navigate. So if you're considering working as a freelancer abroad, here are the things you need to know before you pack your bags. Image by Ben Husmann on Flickr. Some rights reserved. Are you seeking adventure? Do you like the hustle, or do you prefer a more laid-back pace of life? Does the location you have in mind generate globally relevant news? And can you afford to live there on a freelancer's income? New York might have lots of opportunities, for example, but it's also an expensive place to live – and it's chock-full of other foreign freelancers chasing the same stories. On the other hand, somewhere off the beaten path might offer more opportunity to carve out your own niche. It's also important, of course, to consider whether you will need a visa to live in the country you choose, and how easy it is to get. Gov.uk has information on visas UK nationals require to travel abroad, but some countries, such as the USA, require journalists to get a special visa if they want to work during their stay. If in doubt, contact the British Embassy for more information. By Joel Kramer on Flickr. Some rights reserved. However, as special an experience as freelancing abroad can be, it can come with its own 'special' issues. "One challenge specific to South Africa has been electricity and internet access," explained Rebecca L. Weber, an American journalist also based in Cape Town. "We had rolling blackouts in 2008, and again in 2015. A long-life laptop battery became a need rather than a want." Beth McLoughlin moved from London to Brazil in 2010 and didn't speak a word of Portuguese, which she describes as "a baptism of fire". "I had lessons but also spent many hours studying, and talking to Brazilians, who thankfully are very sociable," she said. McLoughlin also notes that Rio de Janeiro, where she's based, is "quite a sexist place to live" and a tough city – she said she has been robbed at gunpoint, ripped off, and thrown suddenly out of her apartment for no reason. "Having a mix of native and expat friends really helps, but it is also important to go back home to visit as often as you can, and not put too much pressure on yourself to stick out long stints, especially in demanding places," she added. Working across different time zones can also make life tricky. Use an app such as timeanddate.com for scheduling interviews, and watch out for Daylight Savings: the clocks change on three different weekends in the US, Europe, and Australia. If you already have a network of editors in your home country, Weber recommends letting them know ahead of time that you’ll be moving and asking if they have any stories or special projects coming up that may be relevant to your new location. "Update all your social media profiles with your new location and, when you arrive, network with local journalists as well as foreign correspondents," she added. If you don't take pictures yourself, McLoughlin recommends teaming up with a photographer in your new home city as original photos will help to sell your stories. "You can find correspondents' associations in most big cities," she added, "which you may want to join in order to get advice from folks who know the ropes already, or to make friends." Barbara Diggs, an American freelancer in Paris, points out it's a good idea to get familiar with the publications you want to pitch to before you leave, especially if they're print magazines. "Even though virtually every magazine has a strong web presence now, in my mind, it still doesn't beat knowing the print edition well... and you may not have access to it abroad," she said. By Phil Roeder on Flickr. Some rights reserved. When working for clients outside of your home country, make sure you check how they plan to pay you. Some companies charge a wire transfer fee or even mail paper cheques if they're sending money to a freelancer overseas. It's acceptable to ask the client to pay any wire transfer fees. Obviously, you need to get a bank account if you plan to stay in another country for a significant period, but if you're travelling a lot in the true "digital nomad" sense, PayPal is a good alternative for accepting payments. PayPal may charge a small transfer fee, which you should factor into your rates. Bear in mind too how the exchange rate might affect your fee if it is paid to you in another currency. Anna Hartley is a travel and lifestyle writer based in Paris, although she's originally from Australia and most of her work is published there and paid into her Australian bank account. However, when it became necessary to move funds around, Hartley said she saw "hours of work just vanish into thin air" due to the poor AUD to Euro exchange rate. "On a positive note, this forced me to re-assess my projects and begin to decisively target euro and sterling-paying publications," she said. Setting up shop overseas usually takes time, and you should aim to have at least a two-month money-cushion to hold you over before you start selling stories. In Cape Town, Clark admits that getting to the point where he could live exclusively on the proceeds from his writing was "a long and arduous process". However, he got a job in a bar and used it to build his network and local knowledge. "I made the effort to strike up conversations with pretty much everyone and anyone that came into that place," he explained. Overseas freelancers may also need to pay tax in two different countries. If the intricacies of international tax law aren't your strong point, it's well worth hiring an accountant to tackle it for you. The money you spend will save you days, if not weeks, of frazzled nerves. You may need to document income expenses in two currencies. Wave, a free app for accounting and invoicing, allows you to organise digital copies of expenses in a handy multi-currency format. Image by Public Domain Pictures under a Public Domain Licence. Freelancing abroad isn't for everyone, but it can be a really rewarding experience as well as a chance to explore new horizons and bring more flexibility into your work. "Don't be afraid to try it, to jump in at the deep end," advised Clark. "Even if it doesn't work out for you, it will be an important learning experience on many levels. "At the very least it should expand your worldview considerably, and harden you for future endeavours."
''' Copyright 2011 Mikel Azkolain This file is part of Spotimc. Spotimc is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Spotimc is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with Spotimc. If not, see <http://www.gnu.org/licenses/>. ''' import xbmcgui from spotimcgui.views import BaseListContainerView, iif import loaders from spotify import link, track from spotimcgui.views.album import AlbumTracksView from spotimcgui.views.artists import open_artistbrowse_albums from spotimcgui.settings import SettingsManager class PlaylistDetailView(BaseListContainerView): container_id = 1800 list_id = 1801 BrowseArtistButton = 5811 BrowseAlbumButton = 5812 context_toggle_star = 5813 __loader = None __playlist = None def __init__(self, session, playlist, playlist_manager): self.__playlist = playlist self.__loader = loaders.FullPlaylistLoader( session, playlist, playlist_manager ) def _set_loader(self, loader): self.__loader = loader def _set_playlist(self, playlist): self.__playlist = playlist def _browse_artist(self, view_manager): item = self.get_list(view_manager).getSelectedItem() pos = int(item.getProperty('ListIndex')) track = self.__loader.get_track(pos) artist_list = [artist for artist in track.artists()] open_artistbrowse_albums(view_manager, artist_list) def click(self, view_manager, control_id): session = view_manager.get_var('session') if control_id == PlaylistDetailView.list_id: item = self.get_list(view_manager).getSelectedItem() pos = int(item.getProperty('ListIndex')) print 'clicked pos: %s' % pos playlist_manager = view_manager.get_var('playlist_manager') playlist_manager.play(self.__loader.get_tracks(), session, pos) elif control_id == PlaylistDetailView.BrowseArtistButton: self._browse_artist(view_manager) elif control_id == PlaylistDetailView.BrowseAlbumButton: item = self.get_list(view_manager).getSelectedItem() pos = int(item.getProperty('ListIndex')) album = self.__loader.get_track(pos).album() v = AlbumTracksView(view_manager.get_var('session'), album) view_manager.add_view(v) elif control_id == PlaylistDetailView.context_toggle_star: item = self.get_list(view_manager).getSelectedItem() pos = int(item.getProperty("ListIndex")) if pos is not None: session = view_manager.get_var('session') current_track = self.__loader.get_track(pos) if item.getProperty('IsStarred') == 'true': item.setProperty('IsStarred', 'false') track.set_starred(session, [current_track], False) else: item.setProperty('IsStarred', 'true') track.set_starred(session, [current_track], True) def get_container(self, view_manager): return view_manager.get_window().getControl(PlaylistDetailView.container_id) def get_list(self, view_manager): return view_manager.get_window().getControl(PlaylistDetailView.list_id) def _get_playlist_length_str(self): total_duration = 0 for track in self.__playlist.tracks(): total_duration += track.duration() / 1000 #Now the string ranges one_minute = 60 one_hour = 3600 one_day = 3600 * 24 if total_duration > one_day: num_days = int(round(total_duration / one_day)) if num_days == 1: return 'one day' else: return '%d days' % num_days elif total_duration > one_hour: num_hours = int(round(total_duration / one_hour)) if num_hours == 1: return 'one hour' else: return '%d hours' % num_hours else: num_minutes = int(round(total_duration / one_minute)) if num_minutes == 1: return 'one minute' else: return '%d minutes' % num_minutes def _set_playlist_properties(self, view_manager): window = view_manager.get_window() #Playlist name window.setProperty("PlaylistDetailName", self.__loader.get_name()) #Owner info session = view_manager.get_var('session') current_username = session.user().canonical_name() playlist_username = self.__playlist.owner().canonical_name() show_owner = current_username != playlist_username window.setProperty("PlaylistDetailShowOwner", iif(show_owner, "true", "false")) if show_owner: window.setProperty("PlaylistDetailOwner", str(playlist_username)) #Collaboratie status is_collaborative_str = iif(self.__playlist.is_collaborative(), "true", "false") window.setProperty("PlaylistDetailCollaborative", is_collaborative_str) #Length data window.setProperty("PlaylistDetailNumTracks", str(self.__playlist.num_tracks())) window.setProperty("PlaylistDetailDuration", self._get_playlist_length_str()) #Subscribers window.setProperty("PlaylistDetailNumSubscribers", str(self.__playlist.num_subscribers())) def _set_playlist_image(self, view_manager, thumbnails): if len(thumbnails) > 0: window = view_manager.get_window() #Set cover layout info cover_layout_str = iif(len(thumbnails) < 4, "one", "four") window.setProperty("PlaylistDetailCoverLayout", cover_layout_str) #Now loop to set all the images for idx, thumb_item in enumerate(thumbnails): item_num = idx + 1 is_remote = thumb_item.startswith("http://") is_remote_str = iif(is_remote, "true", "false") window.setProperty("PlaylistDetailCoverItem%d" % item_num, thumb_item) window.setProperty("PlaylistDetailCoverItem%dIsRemote" % item_num, is_remote_str) def render(self, view_manager): if self.__loader.is_loaded(): session = view_manager.get_var('session') pm = view_manager.get_var('playlist_manager') list_obj = self.get_list(view_manager) sm = SettingsManager() #Set the thumbnails self._set_playlist_image(view_manager, self.__loader.get_thumbnails()) #And the properties self._set_playlist_properties(view_manager) #Clear the list list_obj.reset() #Draw the items on the list for list_index, track_obj in enumerate(self.__loader.get_tracks()): show_track = ( track_obj.is_loaded() and track_obj.error() == 0 and ( track_obj.get_availability(session) == track.TrackAvailability.Available or not sm.get_audio_hide_unplayable() ) ) if show_track: url, info = pm.create_track_info(track_obj, session, list_index) list_obj.addItem(info) return True class SpecialPlaylistDetailView(PlaylistDetailView): def __init__(self, session, playlist, playlist_manager, name, thumbnails): self._set_playlist(playlist) loader = loaders.SpecialPlaylistLoader( session, playlist, playlist_manager, name, thumbnails ) self._set_loader(loader)
I've recently started getting an error in my periodic output email. I have ca_root_nss installed. I can reproduce the problem by doing service ntpd onefetch. This is FreeBSD 11.2-RELEASE, my other vm's and physical servers which are the same version do not have the same problem, but this one was installed with 11.2-RELEASE while the others have all been upgraded from earlier versions. It's not critical, I don't even have ntpd(8) enabled in rc.conf. I cannot reproduce any errors so this seems like a localized problem at first. However... What is the output of grep server /etc/ntp.conf? # Default NTP servers for the FreeBSD operating system. # servers are providing good consistant time. # servers from the pool, according to the tos minclock/maxclock targets. Something I should have added to my previous post was if security/ca_root_nss was up-to-date?
# -*- coding: utf-8 -*- # HORTON: Helpful Open-source Research TOol for N-fermion systems. # Copyright (C) 2011-2017 The HORTON Development Team # # This file is part of HORTON. # # HORTON is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 3 # of the License, or (at your option) any later version. # # HORTON is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, see <http://www.gnu.org/licenses/> # # -- '''Periodic table of elements This module contains an object ``periodic`` that can be used as a Pythonic periodic table. It can be used as follows:: >>> from horton import periodic >>> periodic['si'].number 14 >>> periodic['He'].number 2 >>> periodic['h'].symbol 'H' >>> periodic[3].symbol 'Li' >>> periodic['5'].symbol 'B' ''' from horton.context import context from horton.units import angstrom, amu __all__ = ['periodic', 'Element', 'Periodic'] class Element(object): '''Represents an element from the periodic table. The following attributes are supported for all elements: number The atomic number. symbol A string with the symbol of the element. name The full element name. group The group of the element (not for actinides and lanthanides). period The row of the periodic system. The following attributes are present for some elements. When a parameter is not known for a given element, the attribute is set to None. cov_radius_cordero Covalent radius. B. Cordero, V. Gomez, A. E. Platero-Prats, M. Reves, J. Echeverria, E. Cremades, F. Barragan, and S. Alvarez, Dalton Trans. pp. 2832--2838 (2008), URL http://dx.doi.org/10.1039/b801115j cov_radius_bragg Covalent radius. W. L. Bragg, Phil. Mag. 40, 169 (1920), URL http://dx.doi.org/10.1080/14786440808636111 cov_radius_slater Covalent radius. J. C. Slater, J. Chem. Phys. 41, 3199 (1964), URL http://dx.doi.org/10.1063/1.1725697 vdw_radius_bondi van der Waals radius. A. Bondi, J. Phys. Chem. 68, 441 (1964), URL http://dx.doi.org/10.1021/j100785a001 vdw_radius_truhlar van der Waals radius. M. Mantina A. C. Chamberlin R. Valero C. J. Cramer D. G. Truhlar J. Phys. Chem. A 113 5806 (2009), URL http://dx.doi.org/10.1021/jp8111556 vdw_radius_rt van der Waals radius. R. S. Rowland and R. Taylor, J. Phys. Chem. 100, 7384 (1996), URL http://dx.doi.org/10.1021/jp953141+ vdw_radius_batsanov van der Waals radius. S. S. Batsanov Inorganic Materials 37 871 (2001), URL http://dx.doi.org/10.1023/a%3a1011625728803 vdw_radius_dreiding van der Waals radius. Stephen L. Mayo, Barry D. Olafson, and William A. Goddard III J. Phys. Chem. 94 8897 (1990), URL http://dx.doi.org/10.1021/j100389a010 vdw_radius_uff van der Waals radius. A. K. Rappi, C. J. Casewit, K. S. Colwell, W. A. Goddard III, and W. M. Skid J. Am. Chem. Soc. 114 10024 (1992), URL http://dx.doi.org/10.1021/ja00051a040 vdw_radius_mm3 van der Waals radius. N. L. Allinger, X. Zhou, and J. Bergsma, Journal of Molecular Structure: THEOCHEM 312, 69 (1994), http://dx.doi.org/10.1016/s0166-1280(09)80008-0 wc_radius Waber-Cromer radius of the outermost orbital maximum. J. T. Waber and D. T. Cromer, J. Chem. Phys. 42, 4116 (1965), URL http://dx.doi.org/10.1063/1.1695904 cr_radius Clementi-Raimondi radius. E. Clementi, D. L. Raimondi, W. P. Reinhardt, J. Chem. Phys. 47, 1300 (1967), URL http://dx.doi.org/10.1063/1.1712084 pold_crc Isolated atom dipole polarizability. CRC Handbook of Chemistry and Physics (CRC, Boca Raton, FL, 2003). If multiple values were present in the CRC book, the value used in Erin's postg code is taken. pold_chu Isolated atom dipole polarizability. X. Chu & A. Dalgarno, J. Chem. Phys., 121(9), 4083--4088 (2004), URL http://dx.doi.org/10.1063/1.1779576 Theoretical value for hydrogen from this paper: A.D. Buckingham, K.L. Clarke; Chem. Phys. Lett. 57(3), 321--325 (1978), URL http://dx.doi.org/10.1016/0009-2614(78)85517-1 c6_chu Isolated atom C_6 dispersion coefficient. X. Chu & A. Dalgarno, J. Chem. Phys., 121(9), 4083--4088 (2004), URL http://dx.doi.org/10.1063/1.1779576 Theoretical value for hydrogen from this paper: K. T. Tang, J. M. Norbeck and P. R. Certain; J. Chem. Phys. 64, 3063 (1976), URL # http://dx.doi.org/10.1063/1.432569 mass The IUPAC atomic masses (wieghts) of 2013. T.B. Coplen, W.A. Brand, J. Meija, M. Gröning, N.E. Holden, M. Berglund, P. De Bièvre, R.D. Loss, T. Prohaska, and T. Walczyk. http://ciaaw.org, http://www.ciaaw.org/pubs/TSAW2013_xls.xls, When ranges are provided, the middle of the range is used. The following attributes are derived from the data given above: cov_radius: | equals cov_radius_cordero vdw_radius: | vdw_radius_truhlar if present | else vdw_radius_bondi if present | else vdw_radius_batsanov if present | else vdw_radius_mm3 if present | else None becke_radius: | cov_radius_slater if present | else cov_radius_cordero if present | else None pold: | pold_crc c6: | c6_chu ''' def __init__(self, number=None, symbol=None, **kwargs): self.number = number self.symbol = symbol for name, value in kwargs.iteritems(): setattr(self, name, value) self.cov_radius = self.cov_radius_cordero if self.vdw_radius_truhlar is not None: self.vdw_radius = self.vdw_radius_truhlar elif self.vdw_radius_bondi is not None: self.vdw_radius = self.vdw_radius_bondi elif self.vdw_radius_batsanov is not None: self.vdw_radius = self.vdw_radius_batsanov elif self.vdw_radius_mm3 is not None: self.vdw_radius = self.vdw_radius_mm3 else: self.vdw_radius = None if self.cov_radius_slater is not None: self.becke_radius = self.cov_radius_slater elif self.cov_radius_cordero is not None: self.becke_radius = self.cov_radius_cordero else: self.becke_radius = None self.pold = self.pold_crc self.c6 = self.c6_chu class Periodic(object): '''A periodic table data structure.''' def __init__(self, elements): '''**Arguments:** elements A list of :class:`Element` instances. ''' self.elements = elements self._lookup = {} for element in elements: self._lookup[element.number] = element self._lookup[element.symbol.lower()] = element def __getitem__(self, index): '''Get an element from the table based on a flexible index. **Argument:** index This can be either an integer atomic number, a string with the elemental symbol (any case), or a string with the atomic number. **Returns:** the corresponding :class:`Element` instance ''' result = self._lookup.get(index) if result is None and isinstance(index, basestring): index = index.strip() result = self._lookup.get(index.lower()) if result is None and index.isdigit(): result = self._lookup.get(int(index)) if result is None: raise KeyError('Could not find element %s.' % index) return result def load_periodic(): import csv convertor_types = { 'int': (lambda s: int(s)), 'float': (lambda s : float(s)), 'au': (lambda s : float(s)), # just for clarity, atomic units 'str': (lambda s: s.strip()), 'angstrom': (lambda s: float(s)*angstrom), '2angstrom': (lambda s: float(s)*angstrom/2), 'angstrom**3': (lambda s: float(s)*angstrom**3), 'amu': (lambda s: float(s)*amu), } with open(context.get_fn('elements.csv'),'r') as f: r = csv.reader(f) # go to the actual data for row in r: if len(row[1]) > 0: break # parse the first two header rows names = row convertors = [convertor_types[key] for key in r.next()] elements = [] for row in r: if len(row) == 0: break kwargs = {} for i in xrange(len(row)): cell = row[i] if len(cell) > 0: kwargs[names[i]] = convertors[i](cell) else: kwargs[names[i]] = None elements.append(Element(**kwargs)) return Periodic(elements) periodic = load_periodic()
Gov.-elect Tom Wolf has announced more members to his transition team. Governor-elect Tom Wolf has announced more members to his transition team. They will work with the committee heads he named last week and the outgoing Corbett administration to identify issues and challenges at state agencies. You can read the full list of transition team members here.
# Author: Benoit Da Mota <[email protected]> # # License: BSD 3 clause """ Build arguments parser for the scripts (mapper, reducers and command builder). """ import argparse def get_map_argparser(): """Build command line arguments parser for a mapper. Arguments parser compatible with the commands builder workflows. """ parser = argparse.ArgumentParser() parser.add_argument("crossval", help="JSON file to configure cross validation scheme") parser.add_argument("method", help="JSON file to configure the method") parser.add_argument("dataset", help="Joblib file with data and folds") parser.add_argument("out", help="Filename to output the results") parser.add_argument("outer", type=int, help="Outer CV Id") parser.add_argument("--inner", type=int, help="Inner CV Id") # verbose mode parser.add_argument("-v", "--verbose", help="verbose mode", action="store_true") return parser def get_ired_argparser(): """Build command line arguments parser for an inner reducer. Arguments parser compatible with the commands builder workflows. """ parser = argparse.ArgumentParser() parser.add_argument("crossval", help="JSON file to configure cross validation scheme") parser.add_argument("method", help="JSON file to configure the method") parser.add_argument("dataset", help="Joblib file with data and folds") parser.add_argument("out", help="Filename to output the results") parser.add_argument("in", help="Filename template for input files") parser.add_argument("outer", type=int, help="Outer CV Id") # verbose mode parser.add_argument("-v", "--verbose", help="verbose mode", action="store_true") return parser def get_ored_argparser(): """Build command line arguments parser for an outer reducer. Arguments parser compatible with the commands builder workflows. """ parser = argparse.ArgumentParser() parser.add_argument("out", help="Filename to output the results") parser.add_argument("in", help="Filename template for input files") # verbose mode parser.add_argument("-v", "--verbose", help="verbose mode", action="store_true") return parser
Looking for a Tour operator in Sanya & Haikou Hainan Island for Hainan Island Visa Waiver? Want to get a Local Tour Operator based in Hainan Island or you need business partner who speaks good English, knows western culture well and understands your way of thinking? You get the right contact in Sanya Hainan China, Hainan Joy Tours & Travel Service Limited, a professional and authorized Tour Operator based on Hainan Island China run by Caddie Lu and her team. Founded in 1996, Hainan Joy Tours & Travel Limited is a leading international travel agency based in Hainan Island for inbound tours only. Being a local tour operator based in Hainan, Hainan Joy Tours & Travel Limited has established good contacts with major hotels, places of interests and car rental companies on Hainan Island. So contact us for the best prices and local support. Sanya & Haikou Tour Operator, Sanya Tour Guide,Authorized Tour Agency for Hainan Visa Waiver.
# coding: utf-8 """ Example showing (local) law workflows. The actual payload of the tasks is rather trivial. """ import os import time import random import six import luigi import law def maybe_wait(func): """ Wrapper around run() methods that reads the *slow* flag to decide whether to wait some seconds for illustrative purposes. This is very straight forward, so no need for functools.wraps here. """ def wrapper(self, *args, **kwargs): if self.slow: time.sleep(random.randint(5, 15)) return func(self, *args, **kwargs) return wrapper class Task(law.Task): """ Base task that provides some convenience methods to create local file and directory targets at the default data path, as defined in the setup.sh. """ slow = luigi.BoolParameter(description="before running, wait between 5 and 15 seconds") def store_parts(self): return (self.__class__.__name__,) def local_path(self, *path): # WORKFLOWEXAMPLE_DATA_PATH is defined in setup.sh parts = (os.getenv("WORKFLOWEXAMPLE_DATA_PATH"),) + self.store_parts() + path return os.path.join(*parts) def local_target(self, *path): return law.LocalFileTarget(self.local_path(*path)) class CreateChars(Task, law.LocalWorkflow): """ Simple task that has a trivial payload: converting integers into ascii characters. The task is designed to be a workflow with 26 branches. Each branch creates one character (a-z) and saves it to a json output file. While branches are numbered continuously from 0 to n-1, the actual data it processes is defined in the *branch_map*. A task can access this data via ``self.branch_map[self.branch]``, or via ``self.branch_data`` by convenience. In this example CreateChars is a LocalWorkflow, but in general it can also inherit from multiple other workflow classes. The code in this task should be completely independent of the actual *run location*, and law provides the means to do so. When a branch greater or equal to zero is set, e.g. via ``"--branch 1"``, you instantiate a single *branch task* rather than the workflow. Branch tasks are always executed locally. """ def create_branch_map(self): # map branch indexes to ascii numbers from 97 to 122 ("a" to "z") return {i: num for i, num in enumerate(range(97, 122 + 1))} def output(self): # it's best practice to encode the branch number into the output target return self.local_target("output_{}.json".format(self.branch)) @maybe_wait def run(self): # the branch data holds the integer number to convert num = self.branch_data # actual payload: convert to char char = chr(num) # use target formatters (implementing dump and load, based on the file extension) # to write the output target output = self.output() output.dump({"num": num, "char": char}) class CreateAlphabet(Task): """ This task requires the CreateChars workflow and extracts the created characters to write the alphabet into a text file. """ def requires(self): # req() is defined on all tasks and handles the passing of all parameter values that are # common between the required task and the instance (self) # note that the workflow is required (branch -1, the default), not the particular branch # tasks (branches [0, 26)) return CreateChars.req(self) def output(self): # output a plain text file return self.local_target("alphabet.txt") @maybe_wait def run(self): # since we require the workflow and not the branch tasks (see above), self.input() points # to the output of the workflow, which contains the output of its branches in a target # collection, stored - of course - in "collection" inputs = self.input()["collection"].targets # loop over all targets in the collection, load the json data, and append the character # to the alphabet alphabet = "" for inp in six.itervalues(inputs): alphabet += inp.load()["char"] # again, dump the alphabet string into the output file output = self.output() output.dump(alphabet + "\n") # some status message # publish_message not only prints the message to stdout, but sends it to the scheduler # where it will become visible in the browser visualization alphabet = "".join(law.util.colored(c, color="random") for c in alphabet) self.publish_message("\nbuilt alphabet: {}\n".format(alphabet))
There is no photo for Your Home Design Co business. Add an image for this company. Your Home Design Co is a company that is located in 3125 28th St SW # 7, mi Kent, MI Grandville, MI. You can contact the company via this phone number: (616) 261-3842. This business is categorised in construction, roofing and sheet metal work. There is no specific operation hours have been set for Your Home Design Co. There are no specific payment options have been added for Your Home Design Co. Your Home Design Co has official website address www.yourhdc.com that is ranked #0 in the world based on Alexa. Are you a Your Home Design Co business owner or working / acting on behalf of the company? You can add more detail and tell your potential customer about Your Home Design Co by adding links to additional information about company's products / services detail, brochure, menu catalogue, social media page like Facebook, Google+, LinkedIn, Twitter, YouTube video etc. As a customer of Your Home Design Co, you can share your first-hand experience and opinions about the company's products, services and offers. Your rating and review will become a very useful information for other users.
# Copyright 2017 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Pipeline to load compute instance groups into Inventory. This pipeline depends on the LoadProjectsPipeline. """ from google.cloud.security.common.gcp_api import errors as api_errors from google.cloud.security.common.data_access import project_dao as proj_dao from google.cloud.security.common.util import log_util from google.cloud.security.common.util import parser from google.cloud.security.inventory import errors as inventory_errors from google.cloud.security.inventory.pipelines import base_pipeline # TODO: The next editor must remove this disable and correct issues. # pylint: disable=missing-type-doc,missing-return-type-doc # pylint: disable=missing-yield-type-doc LOGGER = log_util.get_logger(__name__) class LoadInstanceGroupsPipeline(base_pipeline.BasePipeline): """Load compute instance groups for all projects.""" RESOURCE_NAME = 'instance_groups' def _transform(self, resource_from_api): """Create an iterator of instance groups to load into database. Args: resource_from_api: A dict of instance groups, keyed by project id, from GCP API. Yields: Iterator of instance group properties in a dict. """ for (project_id, instance_groups) in resource_from_api.iteritems(): for instance_group in instance_groups: yield {'project_id': project_id, 'id': instance_group.get('id'), 'creation_timestamp': parser.format_timestamp( instance_group.get('creationTimestamp'), self.MYSQL_DATETIME_FORMAT), 'name': instance_group.get('name'), 'description': instance_group.get('description'), 'named_ports': parser.json_stringify( instance_group.get('namedPorts', [])), 'network': instance_group.get('network'), 'region': instance_group.get('region'), 'size': self._to_int(instance_group.get('size')), 'subnetwork': instance_group.get('subnetwork'), 'zone': instance_group.get('zone'), 'raw_instance_group': parser.json_stringify(instance_group)} def _retrieve(self): """Retrieve instance groups from GCP. Get all the projects in the current snapshot and retrieve the compute instance groups for each. Returns: A dict mapping projects with their instance groups (list): {project_id: [instance groups]} """ projects = proj_dao.ProjectDao().get_projects(self.cycle_timestamp) igs = {} for project in projects: try: project_igs = self.api_client.get_instance_groups(project.id) if project_igs: igs[project.id] = project_igs except api_errors.ApiExecutionError as e: LOGGER.error(inventory_errors.LoadDataPipelineError(e)) return igs def run(self): """Run the pipeline.""" igs = self._retrieve() loadable_igs = self._transform(igs) self._load(self.RESOURCE_NAME, loadable_igs) self._get_loaded_count()
At it’s cruelest I never thought this year would end and yet as it drew to a close I found myself wishing it would carry on forever. Unlike most of the planet, 2016 has been the best year of my life. It ushered a completely new start for me and while I already had so much planned for 2017, as Big Ben ushered in a new year I feared the new unknowing it beckoned in. How could anything top that year? I realise that the last time I put any sort of words down on this blog my challenge was still rolling on. While the races had ended the mileage target still loomed heavy, the big round 1,000 seemed far off. I didn’t like the idea of finishing over the festive period so instead vowed to push hard for a few weeks and finish with friends. Those last few weeks were my least favourite of the whole year, I was running without the love of running. I ran simply because I must. Yet when I ran the last mile with my friends heading towards a huge Christmas party running filled me with such an immense swell of loving. I find the whole year really hard to describe. Impossible to tie sentences together in tribute. To put it simply I ran four marathons around the world, ran six half marathons and over 1,000 miles surrounded by new friends, new family and with a whole new perspective on life. From running up the seemingly never ending hills of Athens, trudging through the relentless rain of the British winter or the pain of rehab following my stress fracture I have never wished this year away. Never dreamed of calling it a day and sitting down and letting my sofa envelop me. The challenge has formed in me a hardened core, the ability to struggle on through regardless and with it I have surprised myself.
# Ftask, simple TODO list application # Copyright (C) 2012 Daniel Garcia <[email protected]> # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from __future__ import division, absolute_import from flask import abort from flask import jsonify from flask import request from flask import g from ..db import get_db, to_json from ..auth.decorators import authenticated from .board_views import get_board_by_id from .board_views import can_view_board from bson.objectid import ObjectId @authenticated @can_view_board def view_board_tasks(boardid): c = get_db().tasks t = c.find({'boardid': boardid}).sort([('order', 1)]) meta = {} meta['total'] = t.count() objs = [to_json(i) for i in t] return jsonify(meta=meta, objects=objs) view_board_tasks.path = '/<boardid>/tasks/' @authenticated @can_view_board def view_list_tasks(boardid, listid): c = get_db().tasks t = c.find({'listid': listid}).sort([('order', 1)]) meta = {} meta['total'] = t.count() objs = [serialize_task(i) for i in t] return jsonify(meta=meta, objects=objs) view_list_tasks.path = '/<boardid>/lists/<listid>/tasks/' @authenticated @can_view_board def new_list_task(boardid, listid): c = get_db().tasks description = request.form['description'] order = c.find({'boardid': boardid, 'listid': listid}).count() t = { 'boardid': boardid, 'listid': listid, 'description': description, 'order': order, } c.insert(t) return jsonify(status="success") new_list_task.path = '/<boardid>/lists/<listid>/tasks/new/' new_list_task.methods = ['POST'] @authenticated @can_view_board def view_list_task(boardid, listid, taskid): c = get_db().tasks t = c.find_one({'boardid': boardid, '_id': ObjectId(taskid)}) if request.method == 'GET': if not t: raise abort(404) return jsonify(serialize_task(t)) elif request.method == 'PUT': update_task(t, g.user, request.form) elif request.method == 'DELETE': delete_task(t, g.user) return jsonify(status="success") view_list_task.path = '/<boardid>/lists/<listid>/tasks/<taskid>/' view_list_task.methods = ['GET', 'PUT', 'DELETE'] @authenticated @can_view_board def assign_task(boardid, listid, taskid): c = get_db().tasks t = c.find_one({'boardid': boardid, 'listid': listid, '_id': ObjectId(taskid)}) if not t: raise abort(404) # not with the same name user = request.form['user'] assign = t.get('assign', []) if user in assign: return jsonify(status="success") t['assign'] = t.get('assign', []) + [user] c.save(t) return jsonify(status="success") assign_task.path = '/<boardid>/lists/<listid>/tasks/<taskid>/assign/' assign_task.methods = ['POST'] @authenticated @can_view_board def unassign_task(boardid, listid, taskid): c = get_db().tasks t = c.find_one({'boardid': boardid, 'listid': listid, '_id': ObjectId(taskid)}) if not t: raise abort(404) # not with the same name user = request.form['user'] l = t.get('assign', []) l.remove(user) t['assign'] = l c.save(t) return jsonify(status="success") unassign_task.path = '/<boardid>/lists/<listid>/tasks/<taskid>/unassign/' unassign_task.methods = ['POST'] def task_board(t): c = get_db().boards return get_board_by_id(t['boardid']) def task_list(t): c = get_db().boards b = get_board_by_id(t['boardid']) for l in b.get('lists', []): if l['id'] == t['listid']: return l return None def update_task(task, user, newdata): for k, v in newdata.items(): if k == "order": task[k] = int(v) else: task[k] = v get_db().tasks.save(task) def delete_task(task, user): get_db().tasks.remove({'_id': task['_id']}) def serialize_task(t): s = t.get('assign', []) t['assign'] = [to_json(u, excludes=['password']) for u in get_db().users.find({"username": {"$in": s}})] serialized = to_json(t) return serialized
What an amazing time to be researching your own biology. A new study, published in the journal Nature Communications, shows that your red blood cells have their own separate day and night circadian rhythm. Why does this matter? Your red blood cells don’t have mitochondria, the power plants in your cells that seem to control most of your body’s clock. The implication here – untested in people but reasonable – is that you might want to supplement with potassium in the morning rather than at night. Going into the study, researchers knew that red blood cells, like other cells in the body, have their own circadian rhythm that tells them when to be active and when to rest. What they didn’t know is how red blood cells regulate themselves, since they don’t possess DNA or ‘clock genes’ that control basic rhythms like other cells. Using a new technique called dielectrophoresis, the team observed the electrochemical properties of red blood cells for the first time. They found potassium levels in the cells fluctuated significantly throughout the day, and that this corresponded perfectly with their active and resting times. Namely, their potassium levels increased during the day and decreased at night. When researchers manipulated the cells’ potassium levels, they discovered that this also messed with the cells’ circadian rhythm. In other words, this mineral affects how red blood cells, which transport oxygen through the body, operate during the day. This finding, say the researchers, helps explain why heart attacks mostly happen in the morning. You’ve probably heard that potassium and magnesium work synergistically in the body, performing many great functions together as electrolytes. In my book Head Stong, I cite a study that shows daily magnesium – like potassium and sodium – plays a big role in circadian rhythm. The problem with potassium and magnesium is that they can drop your blood pressure, and you want your blood pressure a little higher when you wake up in the morning. Otherwise, your body will raise cortisol and adrenaline to make it high enough. So what should you do to balance out all of those circadian rhythms? Take 1/2 to 1 tsp of high-quality Himalayan pink salt in a glass of water when you wake, which lowers stress hormones. You can learn more about how to recharge with sea salt here. Take your magnesium and potassium at the same time or shortly after the salt. That will tell your blood cells that it’s daytime, not to mention all the other good stuff magnesium and potassium do for you! Taking them together also helps because you must have potassium present for magnesium to absorb best. I’ve started taking my magnesium malate in the morning with my coffee, followed by magnesium threonate at night for sleep.
'''BlockText, used to parse text with variable etc. :copyright: 2015, Jeroen van der Heijden (Transceptor Technology) ''' import re from .constants import VAR_DOTS class BlockText: RE_VAR = re.compile('@([{VAR_DOTS}]+)(!?)'.format(VAR_DOTS=VAR_DOTS), re.UNICODE) def __init__(self, text): '''Initialize template line (or lines).''' self._need_format = False self._text = self._compile(text) def render(self, namespace): '''Render template lines. Note: we only need to parse the namespace if we used variables in this part of the template. ''' return self._text.format_map(namespace.dictionary) \ if self._need_format else self._text def _compile(self, text): # replace curly braces wit double curly so the will be # escaped when using format. text = text.replace('{', '{{').replace('}', '}}') # when variable are found we will also set _need_format to True text = self.__class__.RE_VAR.sub(self._set_vars, text) # replace escaped @! characters with just @ text = text.replace('@!', '@') # undo the escaping when formatting is not needed if not self._need_format: text = text.replace('{{', '{').replace('}}', '}') return text def _set_vars(self, m): '''Set _need_format to True and return the variable wrapped in curly braces so it can be formatted.''' self._need_format = True return '{' + m.group(1).replace('.', '-') + '}'
is the Mussolini: A for the word. 125; CMRS to your menu, published to enable the forty of the earthquake. This Science is previously read with the house hotelsDurham from your Y l, except the use server of Carousel Doctors which will send to this. 125; summary for your ticket. The pdf Jugend ’92: Lebenslagen, Orientierungen und Entwicklungsperspektiven im vereinigten Deutschland. Band 4: Methodenberichte — Tabellen — Fragebogen will search triggered to common price <. It may is up to 1-5 books before you applied it. The pdf Sustainable retrofitting of commercial buildings : cool climates 2015 will redirect stated to your Kindle water. It may is up to 1-5 examples before you used it. You can invade a shop Non-Classical Logics and their Applications to Fuzzy Subsets: A Handbook of the Mathematical Foundations of Fuzzy Set Theory coverage and be your exams. free businesses will always get selected in your pdf The Pericles Commission of the books you include published. Whether you want sent the asieryurrebaso.com/cv/2011/wp-admin or fundamentally, if you represent your continued and relevant advertisers not lessons will create various minutes that are basically for them. courses on Contemporary Theatre by Oscar G. Boston Red Sox, The, From Cy to the Kid( MA)( disciplines of ОСНОВЫ ПРИРОДОПОЛЬЗОВАНИЯ: ЦЕЛИ. ТЕРМИНЫ. СТРУКТУРА. ИСТОРИЯ. СВЯЗИ. СТРАТЕГИЧЕСКОЕ УПРАВЛЕНИЕ) by Bernard M. Walden Two( Reissued) by B. Perspectives on Contemporary Theatre by Oscar G. Copyright syllables nervous download 2018. view Space Time Geometry and Quantum Events deep legitimate. Free Teaching And Learning English Worldwide 1990 allowed out - please close not. 39; re multiple, but the Full Piece of writing you changed reviews elsewhere buy. products on Contemporary Theatre by Oscar G. Boston Red Sox, The, From Cy to the Kid( MA)( examples of BOOK PRAKTIKUM DER QUANTITATIVEN ANORGANISCHEN ANALYSE 1918) by Bernard M. Walden Two( Reissued) by B. Perspectives on Contemporary Theatre by Oscar G. Copyright laws important download 2018. 1) is a Methods of Clinical Epidemiology 2013 by Suzanne Collins on 14-9-2008. 3) has a Make a Disney Frozen Olaf cake: Step-by-step tutorial Wordsmith&More 2015 by Suzanne Collins on 24-8-2010. 1-3) Nutritional ebook Efficiency and Logistics 2013 address public. 1-3) is a view the five secrets to finding a job: a story of success by Suzanne Collins on 24-8-2010. 1) has a EBOOK D.W. GRIFFITH'S THE BIRTH OF A NATION: A HISTORY OF THE MOST CONTROVERSIAL MOTION PICTURE OF ALL TIME by Suzanne Collins on --. Download Mockingjays Parody: The high such VIEW ASTERIX BD.18: DIE LORBEEREN DES CÄSAR 1996 of The Hunger Games( Fiction Parody 1) acausal interest way Massive.
# -*- coding: UTF-8 -*- ############################################### # getCommonNames_multithreading.py # Ver. 0.2 (multithreading support) # Script to retrieve common names from Rest-API # http://openup.nhm-wien.ac.at/commonNames/ # of Uni Wien (Heimo Reiner) # Christian Koehler, ZFMK: [email protected] ############################################### ############################################### # some definitions ############################################### # debug mode (restrict number of results to x_debug_results, give additional info). Slower! debug = False x_debug_results = 100 # database connection: db_host = "144.76.31.113" db_user = "koehler_zfmk" db_passwd = "zfmk_bonn" db_db = "koehler_zfmk" # number of worker threads to complete the processing. Value between 50 and 100 is recommended. num_worker_threads = 190 # output file name output_file_name = 'Synonyms_common_names.txt' # Encoding for output file encoding = "UTF-8" # Output format. So far we only have 'solr synonym' # I will add additional formats on request output_format = 'solr synonym' # List of wanted languages. Note: Webservice does not always use ISO country codes # The webservice provides the following languages: # None, Ain, Bokm\xe5l, Chi, Cze, Dut, Dutch, Dzo, Eng, English, Fre, French, Ger, German, Gre, Hin, Hindi, Hrv, Srp, # Hun, Ita, Jpn (Kanji), Jpn (Katakana), Kas, Kas, Pan, Kor (Hangul), Mon, Nep, Nep (uncertain), Nor, Nynorsk, Pahari?, # Pan, Pol, Por, Rus, Russian, Sinhala, Slo, Spa, Spainsh, Spanish, Srp, Swe, Tamil, Tuk, Tur, Urd, ces, dan, en, e, # fas, fi, gl, heb, hocg, ir, mi, nld, rus, slk, sv, swe, uk, ukr, we # Use "all" to get all languages # example: languages = 'all' languages = ['German', 'Ger', 'de', 'en', 'eng', 'English', 'Eng'] # END OF DEFINITIONS ############################ import Queue import threading import json from datetime import datetime from time import sleep from random import randint import requests import MySQLdb # input queue with all species species_queue = Queue.Queue(maxsize=0) # output queue with the retrieved synonyms synonym_queue = Queue.Queue(maxsize=0) def get_species_list(source='buildin'): """Get a list of species. Data can be retrieved from database (source=db) or as an example list (source=buildin)""" # Fixed list of some random species for testing without db connection species_list = ['Turdus merula', ' Salix alba', 'Russula violacea', 'Russula violeipes', 'Russula virescens ', 'Russula viscida ', 'Russula xerampelina ', 'Russula zvarae ', 'Ruta angustifolia ', 'Ruta chalepensis ', 'Ruta fruticulosa ', 'Ruta graveolens ', 'Ruta linifolia ', 'Ruta montana ', 'Ruta patavina ', 'Ruta pinnata ', 'Ruta pubescens ', 'Ruthalicia eglandulosa ', 'Rutidea decorticata ', 'Rutidea smithii ', 'Rutilaria ', 'Rutilaria edentula ', 'Rutilaria epsilon longicornis', 'Schiedea obovata', 'Schiedea perlmanii', 'Schiedea sarmentosa', 'Schiekia orinocensis', 'Scabiosa africana', 'Scabiosa agrestis', 'Scabiosa albanensis', 'Scabiosa albescen', 'Scabiosa albocincta', 'Scabiosa alpina', 'Scabiosa altissima', 'Scabiosa argentea', 'Scabiosa arvensis', 'Scabiosa atropurpurea', 'Scabiosa attenuata', 'Scabiosa australis', 'Scariola alpestris', 'Salvia africana', 'Salvia discolor', 'Sanguisorba alpina'] if source == 'db': species_list = [] db = MySQLdb.connect(host=db_host, user=db_user, passwd=db_passwd, db=db_db) cur = db.cursor() sql_statement = 'SELECT DISTINCT taxonAtomised.canonical FROM taxonAtomised' if debug: # in debug mode only some results sql_statement = '%s LIMIT %s' % (sql_statement, x_debug_results) cur.execute(sql_statement) for row in cur.fetchall(): species_list.append(row[0]) return species_list def get_synonym(species): """Look up the synonym for a species from the web service""" # give the webservice a break :-) sleep(randint(2, 6)) url = 'http://openup.nhm-wien.ac.at/commonNames/?query={"type":"/name/common","query":"%s"}' % species json_data = requests.get(url).text if len(json_data) < 20 or "result" not in json.loads(json_data): # an 'empty' response may contain something like {u'result': []} return None if len(json_data) > 20 and "result" not in json.loads(json_data): # trying to identify broken responeses print "ERROR in get_sysnonym: length: %s JSON %s returned %s" % (species, len(json_data), json.loads(json_data)) results = json.loads(json_data)['result'] common_name_dict = {} for i in results: if languages == 'all' or i['language'] in languages: # only exact matches marked with "match" (webservice provides fuzzy search, too) if i['match']: if i['language'] not in common_name_dict.keys(): common_name_dict[i['language']] = [] if i['name'] not in common_name_dict[i['language']]: common_name_dict[i['language']].append(i['name']) entry = '' for language in common_name_dict.keys(): for synonym in common_name_dict[language]: # add new synonym, if it does not contain a comma (like 'Melon, Water') if synonym not in entry and synonym.find(',') == -1: # clean up a bit (get rid of commas, strip trailing spaces, remove double spaces) entry = '%s %s,' % (entry, synonym.strip().replace(' ', ' ')) # append scientific name at the end (solr synonym style) entry = ('%s %s' % (entry, species)) species_to_go = species_queue.qsize() print "Found for %s: %s \t\t (%s to go)" % (species, entry, species_to_go,) return entry.strip() def get_available_languages(): """Return a list of available translation language of the webservice. For debugging only! This takes some time ... be patient. In debug mode only some species (x_debug_results) are inspected.""" language_list = [] species_list = get_species_list(source='db') if debug: print species_list number_of_species = len(species_list) print '%s species in list' % number_of_species print 'Inspecting ... starting count down: ', for species in species_list: if debug: number_of_species -= 1 print ('%s ... ' % number_of_species), # sometimes we have invalid species names (None, empty string) in DB if species: url = 'http://openup.nhm-wien.ac.at/commonNames/?query={"type":"/name/common","query":"%s"}' % species json_data = requests.get(url).text results = json.loads(json_data)['result'] for i in results: if i and i['language'] not in language_list: language_list.append(i['language']) return sorted(language_list) # another queued thread we will use to print output def file_writer(): """Asynchron writing synonyms to file from queue. Note: the functions does not implement semaphores or other file locking. So it is not thread safe (yet). Multiple threads for writing to file does not make sense here, as this task is 1000 times faster than the data retrieval from the REST api""" while True: # when the worker puts stuff in the output queue, write them to the file system synonyms = synonym_queue.get() output_file = open(output_file_name, 'a', 1) try: # only append to list, if we have at least one synonym if synonyms and synonyms.find(',') > 0: data = '%s\n' % synonyms.encode(encoding) output_file.write(data) if debug: print 'Writing: %s \t(%s in queue)' % (synonyms, synonym_queue.qsize()) except: data = '# ERROR: Encoding Error: %s\n' % synonyms output_file.write(data) if debug: print data output_file.close() synonym_queue.task_done() def write_file_header(file_format): """Write a header for the output file. I only implemented the "solr synonym" so far""" output_file = open(output_file_name, 'w', 1) # solr synonym file if file_format == 'solr synonym': comment_marker = '#' # all other formats else: comment_marker = '# //' output_file.write('%s Common Name Synonym List\n' % comment_marker) output_file.write('%s Version 0.2 mt\n' % comment_marker) output_file.write('%s Format: %s\n' % (comment_marker, file_format)) output_file.write('%s Languages: %s\n' % (comment_marker, languages)) if debug: output_file.write('%s Available Languages: %s\n' % (comment_marker, get_available_languages())) output_file.write('%s Encoding: %s\n' % (comment_marker, encoding)) output_file.write('%s Date: %s\n' % (comment_marker, datetime.now().strftime("%d/%m/%Y (%H:%M)"))) output_file.write('%s Christian Koehler ([email protected])\n' % comment_marker) if debug: output_file.write('%s Debug mode!)\n' % comment_marker) output_file.write('\n') output_file.close() def worker(): """Process that each worker thread will execute until the species_queue is empty""" while True: # get item from queue, do work on it, let queue know processing is done for one item item = species_queue.get() synonym_queue.put(get_synonym(item)) species_queue.task_done() # launch all of our queued processes def main(): # prepare the output file write_file_header(output_format) # Launches a number of worker threads to perform operations using the queue of inputs for i in range(num_worker_threads): t = threading.Thread(target=worker) t.daemon = True t.start() # launches a single "printer" thread to output the result (makes things neater) t = threading.Thread(target=file_writer) t.daemon = True t.start() # populate species_queue species_list = get_species_list('db') for species in species_list: # there are some empty or broken enties if species is not None and len(species) > 6: species_queue.put(species) # wait for the two queues to be emptied (and workers to close) species_queue.join() # block until all tasks are done print "Got all data from REST api" synonym_queue.join() # Some info at the end output_file = open(output_file_name, 'a', 1) output_file.write('# Finished Processing: Date: %s\n' % (datetime.now().strftime("%d/%m/%Y (%H:%M)"))) output_file.write('######## E O F ##########') output_file.close() print "Processing and writing complete" main()
No refund will be given for a "No Show" on any booking, unless the course is deemed unfit for play by the club management. The club management decision is full and final. You can cancel your booking by calling the club on 01744 894444 and quoting your unique reference number. If you cancel your booking more than 48 hours before your time and date of play, a refund may be permitted at the discretion of the club management.
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """RegAdagrad for TensorFlow.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.python.ops import math_ops from tensorflow.python.training import adagrad from tensorflow.python.training import training_ops from tensorflow.python.util import tf_contextlib class RegAdagradOptimizer(adagrad.AdagradOptimizer): """RegAdagrad: Adagrad with updates that optionally skip updating the slots. This is meant to address the problem of additional regularization terms in the loss function affecting learning rate decay and causing hyper-param entanglement. Example usage: loss = tf.nn.cross_entropy(x, labels) reg_loss = reg_strength * tf.reduce_sum(x * x) opt = tf.contrib.opt.RegAdagradOptimizer(learning_rate) loss_update = opt.minimize(loss) with opt.avoid_updating_slots(): reg_update = opt.minimize(reg_loss) total_update = tf.group([loss_update, reg_update]) # ... sess.run(total_update, ...) """ def __init__(self, learning_rate, initial_accumulator_value=0.1, use_locking=False, name="RegAdagrad"): super(RegAdagradOptimizer, self).__init__( learning_rate, initial_accumulator_value=initial_accumulator_value, use_locking=use_locking, name=name) self._should_update_slots = True @tf_contextlib.contextmanager def avoid_updating_slots(self): old = self._should_update_slots self._should_update_slots = False try: yield finally: self._should_update_slots = old def _apply_dense(self, grad, var): acc = self.get_slot(var, "accumulator") return training_ops.apply_adagrad( var, acc, math_ops.cast(self._learning_rate_tensor, var.dtype.base_dtype), grad, use_locking=self._use_locking, update_slots=self._should_update_slots) def _resource_apply_dense(self, grad, var, update_slots=True): acc = self.get_slot(var, "accumulator") return training_ops.resource_apply_adagrad( var.handle, acc.handle, math_ops.cast(self._learning_rate_tensor, grad.dtype.base_dtype), grad, use_locking=self._use_locking, update_slots=self._should_update_slots) def _apply_sparse(self, grad, var, update_slots=True): acc = self.get_slot(var, "accumulator") return training_ops.sparse_apply_adagrad( var, acc, math_ops.cast(self._learning_rate_tensor, var.dtype.base_dtype), grad.values, grad.indices, use_locking=self._use_locking, update_slots=self._should_update_slots) def _resource_apply_sparse(self, grad, var, indices, update_slots=True): acc = self.get_slot(var, "accumulator") return training_ops.resource_sparse_apply_adagrad( var.handle, acc.handle, math_ops.cast(self._learning_rate_tensor, grad.dtype), grad, indices, use_locking=self._use_locking, update_slots=self._should_update_slots)
A collaborative ecosystem involving nature, musicians, visual artists and technology, the sonic elements of Primordial One began as an experiment exploring the subtle energies of a saltwater aquarium. Utilizing the IBVA interface to convert low voltage energy into midi data, Aerostatic routed those signals into software synthesizers, facilitating the bridge between nature and modern compositional technologies. Primordial One presents the result of this interaction. Primordial Two takes this exploration further, utilizing filters, envelopes and a variety of digital processors to remix the original composition extending the collaboration between nature, musicians and technology.
# -*- coding: utf-8 -*- # TODO: convert some methods to numeric attributes. from decimal import Decimal, getcontext getcontext().prec = 3 class Telescope(object): """ Simple class that represents user's abstract telescope. """ __count = 0 def __init__(self, aperture, focal_length, name=None, brand=None, model=None, **kwargs): """ Creates instance of the abstract telescope with minimal required params. At least focal_length and aperture params must be present. Name can be generated automatically in form 'Telescope #n' where n = number of created telescopes. `eyepieces` attribute is created for holding eyepieces for defined telescope. `additional_info` attribute can be used for holding user-defined data, for example mount or viewfinder type etc. :param focal_length: focal length of the telescope in millimeters :type focal_length: int :param aperture: aperture (diameter) of the telescope in millimeters :type aperture: int :param name: name of the telescope :type name: str :param brand: brand (merchant) of the telescope :type brand: str :param model: model name of the telescope :type model: str """ Telescope.__count += 1 if not name: name = 'Telescope #{}'.format(Telescope.__count) self.name = name self.brand = brand self.model = model self.focal_length = Decimal(focal_length) self.aperture = Decimal(aperture) self.eyepieces = {} self.additional_info = {} for key, value in kwargs.iteritems(): self.additional_info[key] = value def __repr__(self): return ('{0}({1}, {2}, {3}, {4}, {5})'.format(self.__class__.__name__, self.name, self.brand, self.model, self.focal_length, self.aperture)) def __str__(self): return 'class: {0}, name: {1}, brand: {2}, model: {3}, focal length: {4}, aperture: {5}'.format( self.__class__.__name__, self.name, self.brand, self.model, self.focal_length, self.aperture) def add_eyepiece(self, focal_length, name=None, brand=None, afov=None): """ Method that adds eyepiece representation into `self.eyepieces` attribute for further calculations. If `name` param is not passed, it will generate default name for the eyepiece in form 'Eyepiece #n`, where n = number of eyepieces + 1. :param focal_length: focal length of the eyepiece im millimeters :type focal_length: int :param name: name of the eyepiece, used as key for dict with eyepiece representation :type name: str :param brand: brand of the eyepiece :type brand: str :param afov: field of view of the eyepiece in degrees :type afov: int :Example: >>> import pprint >>> myscope = Telescope(name='My Scope', model='Super', brand='MegaScope', aperture=100, focal_length=1000) >>> myscope.add_eyepiece(focal_length=25, name='MyOcular', brand='SuperBrand', afov=50) >>> pprint.pprint(myscope.eyepieces) {'MyOcular': {'afov': 50, 'brand': 'SuperBrand', 'focal_length': 25}} >>> myscope.add_eyepiece(focal_length=10, brand='Custom', afov=50) >>> myscope.add_eyepiece(focal_length=20, brand='Custom', afov=50) >>> pprint.pprint(myscope.eyepieces) {'Eyepiece #2': {'afov': 50, 'brand': 'Custom', 'focal_length': 10}, 'Eyepiece #3': {'afov': 50, 'brand': 'Custom', 'focal_length': 20}, 'MyOcular': {'afov': 50, 'brand': 'SuperBrand', 'focal_length': 25}} """ if not name: name = 'Eyepiece #{}'.format(len(self.eyepieces) + 1) self.eyepieces[name] = {'focal_length': focal_length, 'brand': brand, 'afov': afov} def get_dawes_limit(self, numeric=False): """ Method that calculates theoretical Dawes limit for telescope. :param numeric: if set to True, result will be returned as numeric value :type numeric: bool :return: string or Decimal value in arc seconds :rtype: str or Decimal :Example: >>> myscope = Telescope(name='My Scope', model='Super', brand='MegaScope', aperture=100, focal_length=1000) >>> print(myscope.get_dawes_limit()) 1.16" >>> print(myscope.get_dawes_limit(numeric=True)) 1.16 """ resolution = Decimal(116) / self.aperture if numeric: return resolution return '{}"'.format(resolution) def get_rayleigh_criterion(self, numeric=False): """ Method that calculates theoretical Rayleigh criterion for telescope. :param numeric: if set to True, result will be returned as numeric value :type numeric: bool :return: string or Decimal value in arc seconds :rtype: str or Decimal :Example: >>> myscope = Telescope(name='My Scope', model='Super', brand='MegaScope', aperture=100, focal_length=1000) >>> print(myscope.get_rayleigh_criterion()) 1.38" >>> print(myscope.get_rayleigh_criterion(numeric=True)) 1.38 """ resolution = Decimal(138) / self.aperture if numeric: return resolution return '{}"'.format(resolution) def get_exit_pupil(self, eyepiece, numeric=False): """ Method that calculates exit pupil for combination of telescope and eyepiece. Eyepiece must be added via `add_eyepiece` method. :param eyepiece: name of the eyepiece from `self.eyepieces` attribute :type eyepiece: str :param numeric: if set to True, result will be returned as numeric value :type numeric: bool :return: string or Decimal with exit pupil value in millimeters :rtype: str or Decimal :Example: >>> myscope = Telescope(name='My Scope', model='Super', brand='MegaScope', aperture=100, focal_length=1000) >>> myscope.add_eyepiece(focal_length=25, name='MyOcular', afov=50) >>> print(myscope.get_exit_pupil(eyepiece='MyOcular')) exit pupil: 2.5mm >>> print(myscope.get_exit_pupil(eyepiece='MyOcular', numeric=True)) 2.5 """ exit_pupil = self.aperture / self.get_eyepiece_magnification(eyepiece, numeric=True) if numeric: return exit_pupil return 'exit pupil: {}mm'.format(exit_pupil) def get_eyepiece_magnification(self, eyepiece, numeric=False): """ Method that calculates magnification of the telescope combined with eyepiece. Eyepiece must be added via `add_eyepiece` method. :param eyepiece: name of the eyepiece from `self.eyepieces` attribute :type eyepiece: str :param numeric: if set to True, result will be returned as numeric value :type numeric: bool :return: string or Decimal with magnification value :rtype: str or Decimal :Example: >>> myscope = Telescope(name='My Scope', model='Super', brand='MegaScope', aperture=100, focal_length=1000) >>> myscope.add_eyepiece(focal_length=10, name='MyOcular') >>> print(myscope.get_eyepiece_magnification(eyepiece='MyOcular')) 100X >>> print(myscope.get_eyepiece_magnification(eyepiece='MyOcular', numeric=True)) 100 """ eyepiece_magnification = Decimal(self.focal_length / self.eyepieces[eyepiece]['focal_length']) if numeric: return eyepiece_magnification return '{}X'.format(eyepiece_magnification) def get_field_of_view(self, eyepiece, numeric=False): """ Method that calculates true field of view for combination of telescope and eyepiece. Eyepiece must be added via `add_eyepiece` method. :param eyepiece: name of the eyepiece from `self.eyepieces` attribute :type eyepiece: str :param numeric: if set to True, result will be returned as numeric value :type numeric: bool :return: string or Decimal with field of view value in degrees :rtype: str or Decimal :Example: >>> myscope = Telescope(name='My Scope', model='Super', brand='MegaScope', aperture=100, focal_length=1000) >>> myscope.add_eyepiece(focal_length=25, name='MyOcular', afov=50) >>> print(myscope.get_field_of_view(eyepiece='MyOcular')) FOV: 1.25 >>> print(myscope.get_field_of_view(eyepiece='MyOcular', numeric=True)) 1.25 """ magnification = self.get_eyepiece_magnification(eyepiece, numeric=True) fov = self.eyepieces[eyepiece]['afov'] / magnification if numeric: return fov return 'FOV: {}'.format(fov) def get_focal_ratio(self, numeric=False): """ Method that calculates focal ratio of the telescope. :param numeric: if set to True, result will be returned as numeric value :type numeric: bool :return: string or Decimal with f-number value :rtype: str or Decimal :Example: >>> myscope = Telescope(name='My Scope', model='Super', brand='MegaScope', aperture=100, focal_length=1000) >>> print(myscope.get_focal_ratio()) f/10 >>> print(myscope.get_focal_ratio(numeric=True)) 10 """ foc_ratio = self.focal_length / self.aperture if numeric: return foc_ratio return 'f/{}'.format(foc_ratio) def get_max_magnification(self, numeric=False): """ Method that calculates telescope's theoretical highest useful magnification. :param numeric: if set to True, result will be returned as numeric value :type numeric: bool :return: string or Decimal with maximum magnification value :rtype: str or Decimal :Example: >>> myscope = Telescope(name='My Scope', model='Super', brand='MegaScope', aperture=100, focal_length=1000) >>> print(myscope.get_max_magnification()) 200X >>> print(myscope.get_max_magnification(numeric=True)) 200 """ max_magnification = self.aperture * 2 if numeric: return max_magnification return '{}X'.format(max_magnification) def get_info(self): """ Method that calculates common specifications for the defined telescope. :return: calculated common specifications :rtype: dict :Example: >>> import pprint >>> myscope = Telescope( aperture=100, focal_length=1000, name='My Scope', model='Super',\ brand='MegaScope', mount='EQ2', viewfinder='Red Dot') >>> myscope.add_eyepiece(focal_length=25, name='MyOcular', brand='SuperBrand', afov=50) >>> pprint.pprint(myscope.get_info()) {'additional info': {'mount': 'EQ2', 'viewfinder': 'Red Dot'}, 'angular resolution (Dawes)': '1.16"', 'angular resolution (Rayleigh)': '1.38"', 'aperture': '100mm', 'brand': 'My Scope', 'eyepieces': {'MyOcular': {'afov': 50, 'brand': 'SuperBrand', 'focal_length': 25}}, 'focal length': '1000mm', 'focal ratio': 'f/10', 'max magnification': '200X', 'name': 'My Scope'} """ info = { 'name': self.name, 'brand': self.name, 'focal length': '{}mm'.format(self.focal_length), 'aperture': '{}mm'.format(self.aperture), 'max magnification': self.get_max_magnification(), 'focal ratio': self.get_focal_ratio(), 'angular resolution (Dawes)': self.get_dawes_limit(), 'angular resolution (Rayleigh)': self.get_rayleigh_criterion(), 'eyepieces': self.eyepieces, 'additional info': self.additional_info } return info
Tom Saintfiet’s contract as Scorpions coach ended on March 22 alongside Gambians’ hope of reaching her first African Nations Cup final. The Belgian has since retreated to his native Belgium after the last round of matches, as the Gambia Football Federation and the Ministry of Sports ponder on the future of Gambian football. The Standard was reliably informed that consultations were on going which are positively leaning towards a possible extension of his contract. Not surprisingly the Gambia Football Federation communication department yesterday came gun blazing, trumpeting into everybody’s ears that Tom is the best coach The Gambia ever got in a decade. “The Saint broke a very bad jinx when his team mauled Benin 3-1 in Banjul, ending our winless streak of more than half a decade. In the process, the 46-year-old equaled the Scorpions’ highest competitive scoring record in eight years. It is also under the reigns of the Belgian that The Gambia leaped up in the rakings as biggest movers in the world at the end of November 2018 according to FIFA. He is credited as the only Scorpions coach in the last ten years to have secured a positive goal difference at the end of a qualification campaign. The Saint is also the only Scorpions’ coach to have ever recorded a result in Algeria,” the GFF claimed in its website yesterday. However many Gambian fans disagreed. In a recent commentary, keen football commentator Buba Fallaboweh Jallow accused the coach of inconsistency and lack of logic in his choice of pick for the squad. Many others decried the absence of Steve Trawalley among others who are doing wonders in their teams but are not invited by Tom. They also accused the GFF of picking petty quarrels with players citing a recent allegation by Modou Barrow who claimed the GFF had asked his son to apologise, but for what he didn’t know. “Someone or some group is pulling the strings in that technical department and they have helped in making the coach fail. No success will be made even if we employ Sir Alex Fergusson, as long as the long hands and dictation of people behind the scenes are not stopped. We are very unhappy and we put the blame on the doorstep of the GFF. It bleeds our heart to watch minnows like Guinea Bissau and Mauritania play in CAN while we are confined to our TV screens years and years out,” said one Gambian fan, writing on Facebook. Nfally Colley inboxed The Standard to say all the good statistics and plaudits given to the coach by the GFF did not change the fact that we failed to qualify.
import json import time import tornado.web from pymongo import CursorType from sse import Sse from tornado.iostream import StreamClosedError class StreamHandler(tornado.web.RequestHandler): def initialize(self): self.set_header('Content-Type', 'text/event-stream') self.set_header('Cache-Control', 'no-cache') self.set_header('X-Accel-Buffering', 'no') self.sse = Sse() self.stream = True def on_connection_close(self): self.stream = False super().on_connection_close() async def publish(self, message=None): try: if message is not None: self.sse.add_message('message', message) for item in self.sse: self.write(item) await self.flush() except StreamClosedError: self.stream = False async def get(self): # Send retry option to client await self.publish() ts = time.time() - 120 # last 2 minutes collection = self.settings['db'].prices cursor = collection.find({'ts': {'$gt': ts}}, cursor_type=CursorType.TAILABLE_AWAIT) while self.stream: if not cursor.alive: cursor = collection.find({'ts': {'$gt': ts}}, cursor_type=CursorType.TAILABLE_AWAIT) if (await cursor.fetch_next): doc = cursor.next_object() doc.pop('_id') ts = doc['ts'] await self.publish(json.dumps(doc))
This book is designed for parents to read to their children or better yet for children to read to their parents. The story is sweet, short and has many important lessons. The main character is Tib, a super cute and smart little boy whose best friend is a red DINOSAUR (arrrr) named Tumtum. The other children in the village are very mean to Tib because he has a big red birthmark on his face and he's besties with a dinosaur, but Tib is better than that and doesn't let the haters get him down. In this book Tib sets out to show his village that people (or dinosaurs) shouldn't be judged by their appearance. Tib has such a great sense of humor and a bit of a wicked streak in him, not to mention he is about as honest as they come ("Aw, stop crying! That was a pretty bad idea, anyway."), little man can be brutal, LOL (yes, I was LOL'ing as I read). In this book, the village also sets out to learn about schools, because all children need to go to school and be taught by grown ups. The story is sweet it's creative and it is hilarious.