<SYSTEM_TASK:>
Deletes a user from the server.
<END_TASK>
<USER_TASK:>
Description:
def delete_user(self, username):
"""
Deletes a user from the server.
:param string username: Name of the user to delete from the server.
""" |
path = Client.urls['users_by_name'] % username
return self._call(path, 'DELETE') |
<SYSTEM_TASK:>
Redirects to the default wiki index name.
<END_TASK>
<USER_TASK:>
Description:
def index(request):
"""
Redirects to the default wiki index name.
""" |
kwargs = {'slug': getattr(settings, 'WAKAWAKA_DEFAULT_INDEX', 'WikiIndex')}
redirect_to = reverse('wakawaka_page', kwargs=kwargs)
return HttpResponseRedirect(redirect_to) |
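The redirect relies on named URL patterns. A minimal urls.py sketch wiring up the names these views reverse ('wakawaka_index', 'wakawaka_page', 'wakawaka_edit'); the slug regex is an assumption, not taken from the original project:

# Minimal urls.py sketch for the URL names reversed in these views.
# The slug pattern is an assumption.
from django.conf.urls import url

from . import views

urlpatterns = [
    url(r'^$', views.index, name='wakawaka_index'),
    url(r'^(?P<slug>[\w-]+)/$', views.page, name='wakawaka_page'),
    url(r'^(?P<slug>[\w-]+)/edit/$', views.edit, name='wakawaka_edit'),
]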
<SYSTEM_TASK:>
Displays a wiki page. Redirects to the edit view if the page doesn't exist.
<END_TASK>
<USER_TASK:>
Description:
def page(
request,
slug,
rev_id=None,
template_name='wakawaka/page.html',
extra_context=None,
):
"""
Displays a wiki page. Redirects to the edit view if the page doesn't exist.
""" |
try:
queryset = WikiPage.objects.all()
page = queryset.get(slug=slug)
rev = page.current
# Display an older revision if rev_id is given
if rev_id:
revision_queryset = Revision.objects.all()
rev_specific = revision_queryset.get(pk=rev_id)
if rev.pk != rev_specific.pk:
rev_specific.is_not_current = True
rev = rev_specific
# The page does not exist: redirect to the edit form, or
# deny if the user has no permission to add pages
except WikiPage.DoesNotExist:
if request.user.is_authenticated:
kwargs = {'slug': slug}
redirect_to = reverse('wakawaka_edit', kwargs=kwargs)
return HttpResponseRedirect(redirect_to)
raise Http404
template_context = {'page': page, 'rev': rev}
template_context.update(extra_context or {})
return render(request, template_name, template_context) |
<SYSTEM_TASK:>
Displays the form for editing and deleting a page.
<END_TASK>
<USER_TASK:>
Description:
def edit(
request,
slug,
rev_id=None,
template_name='wakawaka/edit.html',
extra_context=None,
wiki_page_form=WikiPageForm,
wiki_delete_form=DeleteWikiPageForm,
):
"""
Displays the form for editing and deleting a page.
""" |
# Get the page for slug and get a specific revision, if given
try:
queryset = WikiPage.objects.all()
page = queryset.get(slug=slug)
rev = page.current
initial = {'content': page.current.content}
# Do not allow editing wiki pages if the user has no permission
if not request.user.has_perms(
('wakawaka.change_wikipage', 'wakawaka.change_revision')
):
return HttpResponseForbidden(
ugettext('You don\'t have permission to edit pages.')
)
if rev_id:
# There is a specific revision, fetch this
rev_specific = Revision.objects.get(pk=rev_id)
if rev.pk != rev_specific.pk:
rev = rev_specific
rev.is_not_current = True
initial = {
'content': rev.content,
'message': _('Reverted to "%s"') % rev.message,
}
# This page does not exist, create a dummy page
# Note that it's not saved here
except WikiPage.DoesNotExist:
# Do not allow adding wiki pages if the user has no permission
if not request.user.has_perms(
('wakawaka.add_wikipage', 'wakawaka.add_revision')
):
return HttpResponseForbidden(
ugettext('You don\'t have permission to add wiki pages.')
)
page = WikiPage(slug=slug)
page.is_initial = True
rev = None
initial = {
'content': _('Describe your new page %s here...') % slug,
'message': _('Initial revision'),
}
# Don't display the delete form if the user has no permission
delete_form = None
# The user has permission, so build the delete form
if request.user.has_perm(
'wakawaka.delete_wikipage'
) or request.user.has_perm('wakawaka.delete_revision'):
delete_form = wiki_delete_form(request)
if request.method == 'POST' and request.POST.get('delete'):
delete_form = wiki_delete_form(request, request.POST)
if delete_form.is_valid():
return delete_form.delete_wiki(request, page, rev)
# Page add/edit form
form = wiki_page_form(initial=initial)
if request.method == 'POST':
form = wiki_page_form(data=request.POST)
if form.is_valid():
# Check whether the content has changed, unless a rev_id was given
# and the user may only have reverted HEAD to it
if (
not rev_id
and initial['content'] == form.cleaned_data['content']
):
form.errors['content'] = (_('You have made no changes!'),)
# Save the form and redirect to the page view
else:
try:
# Check whether the page already exists
queryset = WikiPage.objects.all()
page = queryset.get(slug=slug)
except WikiPage.DoesNotExist:
# Must be a new one, create that page
page = WikiPage(slug=slug)
page.save()
form.save(request, page)
kwargs = {'slug': page.slug}
redirect_to = reverse('wakawaka_page', kwargs=kwargs)
messages.success(
request,
ugettext('Your changes to %s were saved') % page.slug,
)
return HttpResponseRedirect(redirect_to)
template_context = {
'form': form,
'delete_form': delete_form,
'page': page,
'rev': rev,
}
template_context.update(extra_context or {})
return render(request, template_name, template_context) |
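The view treats `wiki_page_form` as having `content` and `message` fields plus a `save(request, page)` method that records a new revision. A minimal sketch of that contract; the field names and the Revision call are assumptions inferred from how the view uses the form, not the project's real implementation:

# Sketch of the form contract edit() depends on (assumed, illustrative).
from django import forms


class WikiPageForm(forms.Form):
    content = forms.CharField(widget=forms.Textarea)
    message = forms.CharField(max_length=255, required=False)

    def save(self, request, page):
        # Every successful save appends a new Revision to the page.
        # The Revision field names here are assumptions.
        Revision.objects.create(
            page=page,
            creator=request.user,
            content=self.cleaned_data['content'],
            message=self.cleaned_data['message'],
        )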
<SYSTEM_TASK:>
Displays the list of all revisions for a specific WikiPage
<END_TASK>
<USER_TASK:>
Description:
def revisions(
request, slug, template_name='wakawaka/revisions.html', extra_context=None
):
"""
Displays the list of all revisions for a specific WikiPage
""" |
queryset = WikiPage.objects.all()
page = get_object_or_404(queryset, slug=slug)
template_context = {'page': page}
template_context.update(extra_context or {})
return render(request, template_name, template_context) |
<SYSTEM_TASK:>
Displays the changes between two revisions.
<END_TASK>
<USER_TASK:>
Description:
def changes(
request, slug, template_name='wakawaka/changes.html', extra_context=None
):
"""
Displays the changes between two revisions.
""" |
rev_a_id = request.GET.get('a', None)
rev_b_id = request.GET.get('b', None)
# Some stinky fingers manipulated the url
if not rev_a_id or not rev_b_id:
return HttpResponseBadRequest('Bad Request')
try:
revision_queryset = Revision.objects.all()
wikipage_queryset = WikiPage.objects.all()
rev_a = revision_queryset.get(pk=rev_a_id)
rev_b = revision_queryset.get(pk=rev_b_id)
page = wikipage_queryset.get(slug=slug)
except ObjectDoesNotExist:
raise Http404
if rev_a.content != rev_b.content:
d = difflib.unified_diff(
rev_b.content.splitlines(),
rev_a.content.splitlines(),
'Original',
'Current',
lineterm='',
)
difftext = '\n'.join(d)
else:
difftext = _(u'No changes were made between these two revisions.')
template_context = {
'page': page,
'diff': difftext,
'rev_a': rev_a,
'rev_b': rev_b,
}
template_context.update(extra_context or {})
return render(request, template_name, template_context) |
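For reference, `difflib.unified_diff` with these arguments yields output like the following (a standalone illustration, not part of the view):

import difflib

old = 'Hello world\nSecond line'
new = 'Hello wiki\nSecond line'
diff = difflib.unified_diff(
    old.splitlines(), new.splitlines(), 'Original', 'Current', lineterm='')
print('\n'.join(diff))
# --- Original
# +++ Current
# @@ -1,2 +1,2 @@
# -Hello world
# +Hello wiki
#  Second line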
<SYSTEM_TASK:>
Displays a list of all recent revisions.
<END_TASK>
<USER_TASK:>
Description:
def revision_list(
request, template_name='wakawaka/revision_list.html', extra_context=None
):
"""
Displays a list of all recent revisions.
""" |
revision_list = Revision.objects.all()
template_context = {'revision_list': revision_list}
template_context.update(extra_context or {})
return render(request, template_name, template_context) |
<SYSTEM_TASK:>
Deletes the page with all revisions or the revision, based on the
<END_TASK>
<USER_TASK:>
Description:
def delete_wiki(self, request, page, rev):
"""
Deletes the page with all revisions or the revision, based on the
users choice.
Returns a HttpResponseRedirect.
""" |
# Delete the page
if (
self.cleaned_data.get('delete') == 'page'
and request.user.has_perm('wakawaka.delete_revision')
and request.user.has_perm('wakawaka.delete_wikipage')
):
self._delete_page(page)
messages.success(
request, ugettext('The page %s was deleted') % page.slug
)
return HttpResponseRedirect(reverse('wakawaka_index'))
# Revision handling
if self.cleaned_data.get('delete') == 'rev':
revision_length = len(page.revisions.all())
# Delete the revision if there are more than 1 and the user has permission
if revision_length > 1 and request.user.has_perm(
'wakawaka.delete_revision'
):
self._delete_revision(rev)
messages.success(
request,
ugettext('The revision for %s was deleted') % page.slug,
)
return HttpResponseRedirect(
reverse('wakawaka_page', kwargs={'slug': page.slug})
)
# Do not allow deleting the revision if it's the only one and the user
# has no permission to delete the page.
if revision_length <= 1 and not request.user.has_perm(
'wakawaka.delete_wikipage'
):
messages.error(
request,
ugettext(
'You cannot delete this revision for %s because it\'s the '
'only one and you have no permission to delete the whole page.'
) % page.slug,
)
return HttpResponseRedirect(
reverse('wakawaka_page', kwargs={'slug': page.slug})
)
# Delete the page and the revision if the user has both permissions
if (
revision_length <= 1
and request.user.has_perm('wakawaka.delete_revision')
and request.user.has_perm('wakawaka.delete_wikipage')
):
self._delete_page(page)
messages.success(
request,
ugettext(
'The page for %s was deleted because you deleted the only revision'
) % page.slug,
)
return HttpResponseRedirect(reverse('wakawaka_index')) |
<SYSTEM_TASK:>
Decorator to check if Smappee's access token has expired.
<END_TASK>
<USER_TASK:>
Description:
def authenticated(func):
"""
Decorator to check if Smappee's access token has expired.
If it has, use the refresh token to request a new access token
""" |
@wraps(func)
def wrapper(*args, **kwargs):
self = args[0]
if self.refresh_token is not None and \
self.token_expiration_time <= dt.datetime.utcnow():
self.re_authenticate()
return func(*args, **kwargs)
return wrapper |
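Typical usage applies the decorator to API methods so an expired token is refreshed transparently before the request runs. A sketch; the class and method shown are illustrative, not part of the original API surface:

import datetime as dt


class ExampleClient(object):
    # Illustrative attributes matching what the decorator inspects.
    refresh_token = 'some-refresh-token'
    token_expiration_time = dt.datetime.min  # already expired

    def re_authenticate(self):
        self.token_expiration_time = dt.datetime.utcnow() + dt.timedelta(hours=1)

    @authenticated
    def get_data(self):
        # Runs only after re_authenticate() refreshed the expired token.
        return {}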
<SYSTEM_TASK:>
Join terms together with forward slashes
<END_TASK>
<USER_TASK:>
Description:
def urljoin(*parts):
"""
Join terms together with forward slashes
Parameters
----------
parts
Returns
-------
str
""" |
# first strip extra forward slashes (except http:// and the likes) and
# create list
part_list = []
for part in parts:
p = str(part)
if p.endswith('//'):
p = p[0:-1]
else:
p = p.strip('/')
part_list.append(p)
# join everything together
url = '/'.join(part_list)
return url |
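Because only a trailing double slash keeps one of its slashes, scheme prefixes survive while stray separators elsewhere are collapsed. A few demonstrable cases:

# Behaviour of urljoin() as implemented above.
assert urljoin('http://example.com/', 'servicelocation', 123, 'info') == \
    'http://example.com/servicelocation/123/info'
assert urljoin('http://', 'example.com') == 'http://example.com'
assert urljoin('a/', '/b/', 'c') == 'a/b/c'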
<SYSTEM_TASK:>
Uses a Smappee username and password to request an access token,
<END_TASK>
<USER_TASK:>
Description:
def authenticate(self, username, password):
"""
Uses a Smappee username and password to request an access token,
refresh token and expiry date.
Parameters
----------
username : str
password : str
Returns
-------
requests.Response
access token is saved in self.access_token
refresh token is saved in self.refresh_token
expiration time is set in self.token_expiration_time as
datetime.datetime
""" |
url = URLS['token']
data = {
"grant_type": "password",
"client_id": self.client_id,
"client_secret": self.client_secret,
"username": username,
"password": password
}
r = requests.post(url, data=data)
r.raise_for_status()
j = r.json()
self.access_token = j['access_token']
self.refresh_token = j['refresh_token']
self._set_token_expiration_time(expires_in=j['expires_in'])
return r |
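A typical authentication flow looks like this (an illustration only: the client construction and credentials are placeholders, and the asserts require a successful call against the live service):

# Illustrative flow; construction arguments and credentials are placeholders.
client = Smappee(client_id='my-client-id', client_secret='my-secret')
r = client.authenticate(username='user@example.com', password='correct horse')
assert client.access_token is not None
assert client.token_expiration_time > dt.datetime.utcnow()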
<SYSTEM_TASK:>
Request service location info
<END_TASK>
<USER_TASK:>
Description:
def get_service_location_info(self, service_location_id):
"""
Request service location info
Parameters
----------
service_location_id : int
Returns
-------
dict
""" |
url = urljoin(URLS['servicelocation'], service_location_id, "info")
headers = {"Authorization": "Bearer {}".format(self.access_token)}
r = requests.get(url, headers=headers)
r.raise_for_status()
return r.json() |
<SYSTEM_TASK:>
Request electricity consumption and solar production
<END_TASK>
<USER_TASK:>
Description:
def get_consumption(self, service_location_id, start, end, aggregation, raw=False):
"""
Request electricity consumption and solar production
for a given service location.
Parameters
----------
service_location_id : int
start : int | dt.datetime | pd.Timestamp
end : int | dt.datetime | pd.Timestamp
start and end support epoch (in milliseconds),
datetime and Pandas Timestamp
aggregation : int
1 = 5 min values (only available for the last 14 days)
2 = hourly values
3 = daily values
4 = monthly values
5 = quarterly values
raw : bool
default False
if True: Return the data "as is" from the server
if False: convert the 'alwaysOn' value to Wh.
(the server returns this value as the sum of the power,
measured in 5 minute blocks. This means that it is 12 times
higher than the consumption in Wh.
See https://github.com/EnergieID/smappy/issues/24)
Returns
-------
dict
""" |
url = urljoin(URLS['servicelocation'], service_location_id,
"consumption")
d = self._get_consumption(url=url, start=start, end=end,
aggregation=aggregation)
if not raw:
for block in d['consumptions']:
if 'alwaysOn' not in block.keys():
break
block.update({'alwaysOn': block['alwaysOn'] / 12})
return d |
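The division by 12 falls out of the 5-minute sampling: an hour holds twelve 5-minute blocks, so a constant 50 W standby load sums to 600 on the server while only 50 Wh were actually consumed. A quick check of the arithmetic for one hourly block:

# Why 'alwaysOn' is divided by 12 (illustration for one hourly block).
standby_power_w = 50
blocks_per_hour = 60 // 5                         # twelve 5-minute blocks
server_value = standby_power_w * blocks_per_hour  # 600, as returned raw
assert server_value / 12 == standby_power_w       # 50 Wh consumed that hour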
<SYSTEM_TASK:>
Request consumption for a given sensor in a given service location
<END_TASK>
<USER_TASK:>
Description:
def get_sensor_consumption(self, service_location_id, sensor_id, start,
end, aggregation):
"""
Request consumption for a given sensor in a given service location
Parameters
----------
service_location_id : int
sensor_id : int
start : int | dt.datetime | pd.Timestamp
end : int | dt.datetime | pd.Timestamp
start and end support epoch (in milliseconds),
datetime and Pandas Timestamp
timezone-naive datetimes are assumed to be in UTC
aggregation : int
1 = 5 min values (only available for the last 14 days)
2 = hourly values
3 = daily values
4 = monthly values
5 = quarterly values
Returns
-------
dict
""" |
url = urljoin(URLS['servicelocation'], service_location_id, "sensor",
sensor_id, "consumption")
return self._get_consumption(url=url, start=start, end=end,
aggregation=aggregation) |
<SYSTEM_TASK:>
Request for both the get_consumption and
<END_TASK>
<USER_TASK:>
Description:
def _get_consumption(self, url, start, end, aggregation):
"""
Request for both the get_consumption and
get_sensor_consumption methods.
Parameters
----------
url : str
start : dt.datetime
end : dt.datetime
aggregation : int
Returns
-------
dict
""" |
start = self._to_milliseconds(start)
end = self._to_milliseconds(end)
headers = {"Authorization": "Bearer {}".format(self.access_token)}
params = {
"aggregation": aggregation,
"from": start,
"to": end
}
r = requests.get(url, headers=headers, params=params)
r.raise_for_status()
return r.json() |
<SYSTEM_TASK:>
Request events for a given appliance
<END_TASK>
<USER_TASK:>
Description:
def get_events(self, service_location_id, appliance_id, start, end,
max_number=None):
"""
Request events for a given appliance
Parameters
----------
service_location_id : int
appliance_id : int
start : int | dt.datetime | pd.Timestamp
end : int | dt.datetime | pd.Timestamp
start and end support epoch (in milliseconds),
datetime and Pandas Timestamp
timezone-naive datetimes are assumed to be in UTC
max_number : int, optional
The maximum number of events that should be returned by this query
Default returns all events in the selected period
Returns
-------
dict
""" |
start = self._to_milliseconds(start)
end = self._to_milliseconds(end)
url = urljoin(URLS['servicelocation'], service_location_id, "events")
headers = {"Authorization": "Bearer {}".format(self.access_token)}
params = {
"from": start,
"to": end,
"applianceId": appliance_id,
"maxNumber": max_number
}
r = requests.get(url, headers=headers, params=params)
r.raise_for_status()
return r.json() |
<SYSTEM_TASK:>
Turn actuator on
<END_TASK>
<USER_TASK:>
Description:
def actuator_on(self, service_location_id, actuator_id, duration=None):
"""
Turn actuator on
Parameters
----------
service_location_id : int
actuator_id : int
duration : int, optional
300, 900, 1800 or 3600, specifying the time in seconds the actuator
should be turned on. Any other value results in turning on for an
undetermined period of time.
Returns
-------
requests.Response
""" |
return self._actuator_on_off(
on_off='on', service_location_id=service_location_id,
actuator_id=actuator_id, duration=duration) |
<SYSTEM_TASK:>
Turn actuator off
<END_TASK>
<USER_TASK:>
Description:
def actuator_off(self, service_location_id, actuator_id, duration=None):
"""
Turn actuator off
Parameters
----------
service_location_id : int
actuator_id : int
duration : int, optional
300, 900, 1800 or 3600, specifying the time in seconds the actuator
should be turned off. Any other value results in turning off for an
undetermined period of time.
Returns
-------
requests.Response
""" |
return self._actuator_on_off(
on_off='off', service_location_id=service_location_id,
actuator_id=actuator_id, duration=duration) |
<SYSTEM_TASK:>
Turn actuator on or off
<END_TASK>
<USER_TASK:>
Description:
def _actuator_on_off(self, on_off, service_location_id, actuator_id,
duration=None):
"""
Turn actuator on or off
Parameters
----------
on_off : str
'on' or 'off'
service_location_id : int
actuator_id : int
duration : int, optional
300, 900, 1800 or 3600, specifying the time in seconds the actuator
should stay in the requested state. Any other value results in
switching for an undetermined period of time.
Returns
-------
requests.Response
""" |
url = urljoin(URLS['servicelocation'], service_location_id,
"actuator", actuator_id, on_off)
headers = {"Authorization": "Bearer {}".format(self.access_token)}
if duration is not None:
data = {"duration": duration}
else:
data = {}
r = requests.post(url, headers=headers, json=data)
r.raise_for_status()
return r |
<SYSTEM_TASK:>
Converts a datetime-like object to epoch, in milliseconds
<END_TASK>
<USER_TASK:>
Description:
def _to_milliseconds(self, time):
"""
Converts a datetime-like object to epoch, in milliseconds
Timezone-naive datetime objects are assumed to be in UTC
Parameters
----------
time : dt.datetime | pd.Timestamp | int
Returns
-------
int
epoch milliseconds
""" |
if isinstance(time, dt.datetime):
if time.tzinfo is None:
time = time.replace(tzinfo=pytz.UTC)
return int(time.timestamp() * 1e3)
elif isinstance(time, numbers.Number):
return time
else:
raise NotImplementedError("Time format not supported. Use milliseconds since epoch, datetime, or Pandas Timestamp") |
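For example, a timezone-naive datetime is pinned to UTC before conversion:

# Naive datetimes are treated as UTC; 2019-01-01T00:00:00Z is
# 1546300800 seconds since the epoch.
import datetime as dt

import pytz

aware = dt.datetime(2019, 1, 1).replace(tzinfo=pytz.UTC)
assert int(aware.timestamp() * 1e3) == 1546300800000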
<SYSTEM_TASK:>
Because basically every post request is the same
<END_TASK>
<USER_TASK:>
Description:
def _basic_post(self, url, data=None):
"""
Because basically every post request is the same
Parameters
----------
url : str
data : str, optional
Returns
-------
requests.Response
""" |
_url = urljoin(self.base_url, url)
r = self.session.post(_url, data=data, headers=self.headers, timeout=5)
r.raise_for_status()
return r |
<SYSTEM_TASK:>
Takes the sum of all instantaneous active power values
<END_TASK>
<USER_TASK:>
Description:
def active_power(self):
"""
Takes the sum of all instantaneous active power values
Returns the total in kW
Returns
-------
float
""" |
inst = self.load_instantaneous()
values = [float(i['value']) for i in inst if i['key'].endswith('ActivePower')]
return sum(values) / 1000 |
<SYSTEM_TASK:>
Takes the average of all instantaneous cosfi values
<END_TASK>
<USER_TASK:>
Description:
def active_cosfi(self):
"""
Takes the average of all instantaneous cosfi values
Returns
-------
float
""" |
inst = self.load_instantaneous()
values = [float(i['value']) for i in inst if i['key'].endswith('Cosfi')]
return sum(values) / len(values) |
<SYSTEM_TASK:>
Produce similar fs, hs and ss interface and endpoint descriptors.
<END_TASK>
<USER_TASK:>
Description:
def getInterfaceInAllSpeeds(interface, endpoint_list, class_descriptor_list=()):
"""
Produce similar fs, hs and ss interface and endpoint descriptors.
Should be useful for devices desiring to work in all 3 speeds with maximum
endpoint wMaxPacketSize. Reduces data duplication from descriptor
declarations.
Not intended to cover fancy combinations.
interface (dict):
Keyword arguments for
getDescriptor(USBInterfaceDescriptor, ...)
in all speeds.
bNumEndpoints must not be provided.
endpoint_list (list of dicts)
Each dict represents an endpoint, and may contain the following items:
- "endpoint": required, contains keyword arguments for
getDescriptor(USBEndpointDescriptorNoAudio, ...)
or
getDescriptor(USBEndpointDescriptor, ...)
The with-audio variant is picked when its extra fields are assigned a
value.
wMaxPacketSize may be missing, in which case it will be set to the
maximum size for given speed and endpoint type.
bmAttributes must be provided.
If bEndpointAddress is zero (excluding direction bit) on the first
endpoint, endpoints will be assigned their rank in this list,
starting at 1. Their direction bit is preserved.
If bInterval is present on an INT or ISO endpoint, it must be in
millisecond units (it need not be an integer), and will be converted
to the nearest integer millisecond for full-speed descriptor, and
nearest possible interval for high- and super-speed descriptors.
If bInterval is present on a BULK endpoint, it is set to zero on
full-speed descriptor and used as provided on high- and super-speed
descriptors.
- "superspeed": optional, contains keyword arguments for
getDescriptor(USBSSEPCompDescriptor, ...)
- "superspeed_iso": optional, contains keyword arguments for
getDescriptor(USBSSPIsocEndpointDescriptor, ...)
Must be provided and non-empty only when endpoint is isochronous and
"superspeed" dict has "bmAttributes" bit 7 set.
class_descriptor_list (list of descriptors of any type)
Descriptors to insert in all speeds between the interface descriptor and
endpoint descriptors.
Returns a 3-tuple of lists:
- fs descriptors
- hs descriptors
- ss descriptors
""" |
interface = getDescriptor(
USBInterfaceDescriptor,
bNumEndpoints=len(endpoint_list),
**interface
)
class_descriptor_list = list(class_descriptor_list)
fs_list = [interface] + class_descriptor_list
hs_list = [interface] + class_descriptor_list
ss_list = [interface] + class_descriptor_list
need_address = (
endpoint_list[0]['endpoint'].get(
'bEndpointAddress',
0,
) & ~ch9.USB_DIR_IN == 0
)
for index, endpoint in enumerate(endpoint_list, 1):
endpoint_kw = endpoint['endpoint'].copy()
transfer_type = endpoint_kw[
'bmAttributes'
] & ch9.USB_ENDPOINT_XFERTYPE_MASK
fs_max, hs_max, ss_max = _MAX_PACKET_SIZE_DICT[transfer_type]
if need_address:
endpoint_kw['bEndpointAddress'] = index | (
endpoint_kw.get('bEndpointAddress', 0) & ch9.USB_DIR_IN
)
klass = (
USBEndpointDescriptor
if 'bRefresh' in endpoint_kw or 'bSynchAddress' in endpoint_kw else
USBEndpointDescriptorNoAudio
)
interval = endpoint_kw.pop('bInterval', _MARKER)
if interval is _MARKER:
fs_interval = hs_interval = 0
else:
if transfer_type == ch9.USB_ENDPOINT_XFER_BULK:
fs_interval = 0
hs_interval = interval
else: # USB_ENDPOINT_XFER_ISOC or USB_ENDPOINT_XFER_INT
fs_interval = max(1, min(255, round(interval)))
# 8 is the number of microframes in a millisecond
hs_interval = max(
1,
min(16, int(round(1 + math.log(interval * 8, 2)))),
)
packet_size = endpoint_kw.pop('wMaxPacketSize', _MARKER)
if packet_size is _MARKER:
fs_packet_size = fs_max
hs_packet_size = hs_max
ss_packet_size = ss_max
else:
fs_packet_size = min(fs_max, packet_size)
hs_packet_size = min(hs_max, packet_size)
ss_packet_size = min(ss_max, packet_size)
fs_list.append(getDescriptor(
klass,
wMaxPacketSize=fs_max,
bInterval=fs_interval,
**endpoint_kw
))
hs_list.append(getDescriptor(
klass,
wMaxPacketSize=hs_max,
bInterval=hs_interval,
**endpoint_kw
))
ss_list.append(getDescriptor(
klass,
wMaxPacketSize=ss_max,
bInterval=hs_interval,
**endpoint_kw
))
ss_companion_kw = endpoint.get('superspeed', _EMPTY_DICT)
ss_list.append(getDescriptor(
USBSSEPCompDescriptor,
**ss_companion_kw
))
ssp_iso_kw = endpoint.get('superspeed_iso', _EMPTY_DICT)
if bool(ssp_iso_kw) != (
endpoint_kw.get('bmAttributes', 0) &
ch9.USB_ENDPOINT_XFERTYPE_MASK ==
ch9.USB_ENDPOINT_XFER_ISOC and
bool(ch9.USB_SS_SSP_ISOC_COMP(
ss_companion_kw.get('bmAttributes', 0),
))
):
raise ValueError('Inconsistent isochronous companion')
if ssp_iso_kw:
ss_list.append(getDescriptor(
USBSSPIsocEndpointDescriptor,
**ssp_iso_kw
))
return (fs_list, hs_list, ss_list) |
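As a worked example of the interval conversion: a 1 ms interrupt endpoint gets bInterval 1 at full speed (frame units) and bInterval 4 at high/super speed, since 2^(4-1) microframes × 125 µs = 1 ms:

# Worked example of the bInterval conversion above for a 1 ms INT endpoint.
import math

interval = 1.0  # milliseconds
fs_interval = max(1, min(255, round(interval)))
hs_interval = max(1, min(16, int(round(1 + math.log(interval * 8, 2)))))
assert (fs_interval, hs_interval) == (1, 4)  # 2 ** (4 - 1) microframes = 1 ms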
<SYSTEM_TASK:>
Automatically fills bLength and bDescriptorType.
<END_TASK>
<USER_TASK:>
Description:
def getDescriptor(klass, **kw):
"""
Automatically fills bLength and bDescriptorType.
""" |
# XXX: ctypes Structure.__init__ ignores arguments which do not exist
# as structure fields. So check it.
# This is annoying, but not doing it is a huge waste of time for the
# developer.
empty = klass()
assert hasattr(empty, 'bLength')
assert hasattr(empty, 'bDescriptorType')
unknown = [x for x in kw if not hasattr(empty, x)]
if unknown:
raise TypeError('Unknown fields %r' % (unknown, ))
# XXX: not very pythonic...
return klass(
bLength=ctypes.sizeof(klass),
# pylint: disable=protected-access
bDescriptorType=klass._bDescriptorType,
# pylint: enable=protected-access
**kw
) |
<SYSTEM_TASK:>
Returns the host-visible interface number, or None if there is no such
<END_TASK>
<USER_TASK:>
Description:
def getRealInterfaceNumber(self, interface):
"""
Returns the host-visible interface number, or None if there is no such
interface.
""" |
try:
return self._ioctl(INTERFACE_REVMAP, interface)
except IOError as exc:
if exc.errno == errno.EDOM:
return None
raise |
<SYSTEM_TASK:>
Close all endpoint file descriptors.
<END_TASK>
<USER_TASK:>
Description:
def close(self):
"""
Close all endpoint file descriptors.
""" |
ep_list = self._ep_list
while ep_list:
ep_list.pop().close()
self._closed = True |
<SYSTEM_TASK:>
Called when a setup USB transaction was received.
<END_TASK>
<USER_TASK:>
Description:
def onSetup(self, request_type, request, value, index, length):
"""
Called when a setup USB transaction was received.
Default implementation:
- handles USB_REQ_GET_STATUS on interface and endpoints
- handles USB_REQ_CLEAR_FEATURE(USB_ENDPOINT_HALT) on endpoints
- handles USB_REQ_SET_FEATURE(USB_ENDPOINT_HALT) on endpoints
- halts on everything else
If this method raises anything, endpoint 0 is halted by its caller and
exception is let through.
May be overridden in subclass.
""" |
if (request_type & ch9.USB_TYPE_MASK) == ch9.USB_TYPE_STANDARD:
recipient = request_type & ch9.USB_RECIP_MASK
is_in = (request_type & ch9.USB_DIR_IN) == ch9.USB_DIR_IN
if request == ch9.USB_REQ_GET_STATUS:
if is_in and length == 2:
if recipient == ch9.USB_RECIP_INTERFACE:
if value == 0:
status = 0
if index == 0:
if self.function_remote_wakeup_capable:
status |= 1 << 0
if self.function_remote_wakeup:
status |= 1 << 1
self.ep0.write(struct.pack('<H', status)[:length])
return
elif recipient == ch9.USB_RECIP_ENDPOINT:
if value == 0:
try:
endpoint = self.getEndpoint(index)
except IndexError:
pass
else:
status = 0
if endpoint.isHalted():
status |= 1 << 0
self.ep0.write(
struct.pack('<H', status)[:length],
)
return
elif request == ch9.USB_REQ_CLEAR_FEATURE:
if not is_in and length == 0:
if recipient == ch9.USB_RECIP_ENDPOINT:
if value == ch9.USB_ENDPOINT_HALT:
try:
endpoint = self.getEndpoint(index)
except IndexError:
pass
else:
endpoint.clearHalt()
self.ep0.read(0)
return
elif recipient == ch9.USB_RECIP_INTERFACE:
if value == ch9.USB_INTRF_FUNC_SUSPEND:
if self.function_remote_wakeup_capable:
self.disableRemoteWakeup()
self.ep0.read(0)
return
elif request == ch9.USB_REQ_SET_FEATURE:
if not is_in and length == 0:
if recipient == ch9.USB_RECIP_ENDPOINT:
if value == ch9.USB_ENDPOINT_HALT:
try:
endpoint = self.getEndpoint(index)
except IndexError:
pass
else:
endpoint.halt()
self.ep0.read(0)
return
elif recipient == ch9.USB_RECIP_INTERFACE:
if value == ch9.USB_INTRF_FUNC_SUSPEND:
if self.function_remote_wakeup_capable:
self.enableRemoteWakeup()
self.ep0.read(0)
return
self.ep0.halt(request_type) |
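The status word written on ep0 is a little-endian 16-bit value; for endpoints, bit 0 set means halted:

# GET_STATUS payload layout: little-endian 16 bits, bit 0 = halted.
import struct

assert struct.pack('<H', 1 << 0) == b'\x01\x00'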
<SYSTEM_TASK:>
The configuration containing this function has been enabled by host.
<END_TASK>
<USER_TASK:>
Description:
def onEnable(self):
"""
The configuration containing this function has been enabled by host.
Endpoints become working files, so submit some read operations.
""" |
trace('onEnable')
self._disable()
self._aio_context.submit(self._aio_recv_block_list)
self._real_onCanSend()
self._enabled = True |
<SYSTEM_TASK:>
The configuration containing this function has been disabled by host.
<END_TASK>
<USER_TASK:>
Description:
def _disable(self):
"""
The configuration containing this function has been disabled by host.
Endpoints do not work anymore, so cancel AIO operation blocks.
""" |
if self._enabled:
self._real_onCannotSend()
has_cancelled = 0
for block in self._aio_recv_block_list + self._aio_send_block_list:
try:
self._aio_context.cancel(block)
except OSError as exc:
trace(
'cancelling %r raised: %s' % (block, exc),
)
else:
has_cancelled += 1
if has_cancelled:
noIntr(functools.partial(self._aio_context.getEvents, min_nr=None))
self._enabled = False |
<SYSTEM_TASK:>
Call when the eventfd signals that events are available.
<END_TASK>
<USER_TASK:>
Description:
def onAIOCompletion(self):
"""
Call when the eventfd signals that events are available.
""" |
event_count = self.eventfd.read()
trace('eventfd reports %i events' % event_count)
# Even though eventfd signaled activity and may report a number of
# pending events, some events seem to have been processed already
# (maybe during an io_cancel call?).
# So do not trust the eventfd value, and do not assume there is even
# one event to process.
self._aio_context.getEvents(0) |
<SYSTEM_TASK:>
Perform additional validation not possible merely with JSON schemas.
<END_TASK>
<USER_TASK:>
Description:
def _iter_errors_custom(instance, checks, options):
"""Perform additional validation not possible merely with JSON schemas.
Args:
instance: The STIX object to be validated.
checks: A sequence of callables which do the checks. Each callable
may be written to accept 1 arg, which is the object to check,
or 2 args, which are the object and a ValidationOptions instance.
options: ValidationOptions instance with settings affecting how
validation should be done.
""" |
# Perform validation
for v_function in checks:
try:
result = v_function(instance)
except TypeError:
result = v_function(instance, options)
if isinstance(result, Iterable):
for x in result:
yield x
elif result is not None:
yield result
# Validate any child STIX objects
for field in instance:
if type(instance[field]) is list:
for obj in instance[field]:
if _is_stix_obj(obj):
for err in _iter_errors_custom(obj, checks, options):
yield err |
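The TypeError fallback is what lets checks declare either one or two parameters. Sketches of both shapes; the names, messages, and the `max_props` option are illustrative, not library code:

# Illustrative check callables for the 1-arg and 2-arg dispatch above.
def check_has_name(instance):
    # 1-arg form: called as v_function(instance); may return one error.
    if 'name' not in instance:
        return JSONError("Object is missing a 'name' property.",
                         instance.get('id', ''))


def check_property_count(instance, options):
    # 2-arg form: reached via the TypeError fallback; may yield errors.
    # 'max_props' is a made-up option, purely for illustration.
    if len(instance) > getattr(options, 'max_props', 100):
        yield JSONError("Object has an unusually large number of "
                        "properties.", instance.get('id', ''))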
<SYSTEM_TASK:>
Return a list of file paths for JSON files within `directory`.
<END_TASK>
<USER_TASK:>
Description:
def list_json_files(directory, recursive=False):
"""Return a list of file paths for JSON files within `directory`.
Args:
directory: A path to a directory.
recursive: If ``True``, this function will descend into all
subdirectories.
Returns:
A list of JSON file paths found under `directory`.
""" |
json_files = []
for top, dirs, files in os.walk(directory):
dirs.sort()
# Get paths to each file in `files`
paths = (os.path.join(top, f) for f in sorted(files))
# Add all the .json files to our return collection
json_files.extend(x for x in paths if is_json(x))
if not recursive:
break
return json_files |
<SYSTEM_TASK:>
Return a list of files to validate from `files`. If a member of `files`
<END_TASK>
<USER_TASK:>
Description:
def get_json_files(files, recursive=False):
"""Return a list of files to validate from `files`. If a member of `files`
is a directory, its children with a ``.json`` extension will be added to
the return value.
Args:
files: A list of file paths and/or directory paths.
recursive: If ``True``, this will descend into any subdirectories
of input directories.
Returns:
A list of file paths to validate.
""" |
json_files = []
if not files:
return json_files
for fn in files:
if os.path.isdir(fn):
children = list_json_files(fn, recursive)
json_files.extend(children)
elif is_json(fn):
json_files.append(fn)
else:
continue
if not json_files:
raise NoJSONFileFoundError("No JSON files found!")
return json_files |
<SYSTEM_TASK:>
Validate files based on command line options.
<END_TASK>
<USER_TASK:>
Description:
def run_validation(options):
"""Validate files based on command line options.
Args:
options: An instance of ``ValidationOptions`` containing options for
this validation run.
""" |
if options.files == sys.stdin:
results = validate(options.files, options)
return [FileValidationResults(is_valid=results.is_valid,
filepath='stdin',
object_results=results)]
files = get_json_files(options.files, options.recursive)
results = [validate_file(fn, options) for fn in files]
return results |
<SYSTEM_TASK:>
Validate objects from parsed JSON. This supports a single object, or a
<END_TASK>
<USER_TASK:>
Description:
def validate_parsed_json(obj_json, options=None):
"""
Validate objects from parsed JSON. This supports a single object, or a
list of objects. If a single object is given, a single result is
returned. Otherwise, a list of results is returned.
If an error occurs, a ValidationErrorResults instance, or a list
which includes one of these instances, is returned.
:param obj_json: The parsed json
:param options: Validation options
:return: An ObjectValidationResults instance, or a list of such.
""" |
validating_list = isinstance(obj_json, list)
if not options:
options = ValidationOptions()
if not options.no_cache:
init_requests_cache(options.refresh_cache)
results = None
if validating_list:
results = []
for obj in obj_json:
try:
results.append(validate_instance(obj, options))
except SchemaInvalidError as ex:
error_result = ObjectValidationResults(is_valid=False,
object_id=obj.get('id', ''),
errors=[str(ex)])
results.append(error_result)
else:
try:
results = validate_instance(obj_json, options)
except SchemaInvalidError as ex:
error_result = ObjectValidationResults(is_valid=False,
object_id=obj_json.get('id', ''),
errors=[str(ex)])
results = error_result
if not options.no_cache and options.clear_cache:
clear_requests_cache()
return results |
<SYSTEM_TASK:>
Validate objects from JSON data in a textual stream.
<END_TASK>
<USER_TASK:>
Description:
def validate(in_, options=None):
"""
Validate objects from JSON data in a textual stream.
:param in_: A textual stream of JSON data.
:param options: Validation options
:return: An ObjectValidationResults instance, or a list of such.
""" |
obj_json = json.load(in_)
results = validate_parsed_json(obj_json, options)
return results |
<SYSTEM_TASK:>
Validate the input document `fn` according to the options passed in.
<END_TASK>
<USER_TASK:>
Description:
def validate_file(fn, options=None):
"""Validate the input document `fn` according to the options passed in.
If any exceptions are raised during validation, no further validation
will take place.
Args:
fn: The filename of the JSON file to be validated.
options: An instance of ``ValidationOptions``.
Returns:
An instance of FileValidationResults.
""" |
file_results = FileValidationResults(filepath=fn)
output.info("Performing JSON schema validation on %s" % fn)
if not options:
options = ValidationOptions(files=fn)
try:
with open(fn) as instance_file:
file_results.object_results = validate(instance_file, options)
except Exception as ex:
if 'Expecting value' in str(ex):
line_no = str(ex).split()[3]
file_results.fatal = ValidationErrorResults(
'Invalid JSON input on line %s' % line_no
)
else:
file_results.fatal = ValidationErrorResults(ex)
msg = ("Unexpected error occurred with file '{fn}'. No further "
"validation will be performed: {error}")
output.info(msg.format(fn=fn, error=str(ex)))
file_results.is_valid = (all(object_result.is_valid
for object_result in file_results.object_results)
and not file_results.fatal)
return file_results |
<SYSTEM_TASK:>
Validate the input `string` according to the options passed in.
<END_TASK>
<USER_TASK:>
Description:
def validate_string(string, options=None):
"""Validate the input `string` according to the options passed in.
If any exceptions are raised during validation, no further validation
will take place.
Args:
string: The string containing the JSON to be validated.
options: An instance of ``ValidationOptions``.
Returns:
An ObjectValidationResults instance, or a list of such.
""" |
output.info("Performing JSON schema validation on input string: " + string)
stream = io.StringIO(string)
return validate(stream, options) |
<SYSTEM_TASK:>
Create a JSON schema validator for the given schema.
<END_TASK>
<USER_TASK:>
Description:
def load_validator(schema_path, schema):
"""Create a JSON schema validator for the given schema.
Args:
schema_path: The filename of the JSON schema.
schema: A Python object representation of the same schema.
Returns:
An instance of Draft4Validator.
""" |
# Get correct prefix based on OS
if os.name == 'nt':
file_prefix = 'file:///'
else:
file_prefix = 'file:'
resolver = RefResolver(file_prefix + schema_path.replace("\\", "/"), schema)
validator = Draft4Validator(schema, resolver=resolver)
return validator |
<SYSTEM_TASK:>
Search the `schema_dir` directory for a schema called `obj_type`.json.
<END_TASK>
<USER_TASK:>
Description:
def find_schema(schema_dir, obj_type):
"""Search the `schema_dir` directory for a schema called `obj_type`.json.
Return the file path of the first match it finds.
""" |
schema_filename = obj_type + '.json'
for root, dirnames, filenames in os.walk(schema_dir):
if schema_filename in filenames:
return os.path.join(root, schema_filename) |
<SYSTEM_TASK:>
Load the JSON schema at the given path as a Python object.
<END_TASK>
<USER_TASK:>
Description:
def load_schema(schema_path):
"""Load the JSON schema at the given path as a Python object.
Args:
schema_path: A filename for a JSON schema.
Returns:
A Python object representation of the schema.
""" |
try:
with open(schema_path) as schema_file:
schema = json.load(schema_file)
except ValueError as e:
raise SchemaInvalidError('Invalid JSON in schema or included schema: '
'%s\n%s' % (schema_file.name, str(e)))
return schema |
<SYSTEM_TASK:>
Get a generator for validating against the schema for the given object type.
<END_TASK>
<USER_TASK:>
Description:
def _get_error_generator(type, obj, schema_dir=None, version=DEFAULT_VER, default='core'):
"""Get a generator for validating against the schema for the given object type.
Args:
type (str): The object type to find the schema for.
obj: The object to be validated.
schema_dir (str): The path in which to search for schemas.
version (str): The version of the STIX specification to validate
against. Only used to find base schemas when schema_dir is None.
default (str): If the schema for the given type cannot be found, use
the one with this name instead.
Returns:
A generator for errors found when validating the object against the
appropriate schema, or None if schema_dir is None and the schema
cannot be found.
""" |
# If no schema directory given, use default for the given STIX version,
# which comes bundled with this package
if schema_dir is None:
schema_dir = os.path.abspath(os.path.dirname(__file__) + '/schemas-'
+ version + '/')
try:
schema_path = find_schema(schema_dir, type)
schema = load_schema(schema_path)
except (KeyError, TypeError):
# Assume a custom object with no schema
try:
schema_path = find_schema(schema_dir, default)
schema = load_schema(schema_path)
except (KeyError, TypeError):
# Only raise an error when checking against default schemas, not custom
if schema_dir is not None:
return None
raise SchemaInvalidError("Cannot locate a schema for the object's "
"type, nor the base schema ({}.json).".format(default))
if type == 'observed-data' and schema_dir is None:
# Validate against schemas for specific observed data object types later.
# If schema_dir is not None the schema is custom and won't need to be modified.
schema['allOf'][1]['properties']['objects'] = {
"objects": {
"type": "object",
"minProperties": 1
}
}
# Don't use custom validator; only check schemas, no additional checks
validator = load_validator(schema_path, schema)
try:
error_gen = validator.iter_errors(obj)
except schema_exceptions.RefResolutionError:
raise SchemaInvalidError('Invalid JSON schema: a JSON '
'reference failed to resolve')
return error_gen |
<SYSTEM_TASK:>
Return the list of 'MUST' validators for the correct version of STIX.
<END_TASK>
<USER_TASK:>
Description:
def _get_musts(options):
"""Return the list of 'MUST' validators for the correct version of STIX.
Args:
options: ValidationOptions instance with validation options for this
validation run, including the STIX spec version.
""" |
if options.version == '2.0':
return musts20.list_musts(options)
else:
return musts21.list_musts(options) |
<SYSTEM_TASK:>
Return the list of 'SHOULD' validators for the correct version of STIX.
<END_TASK>
<USER_TASK:>
Description:
def _get_shoulds(options):
"""Return the list of 'SHOULD' validators for the correct version of STIX.
Args:
options: ValidationOptions instance with validation options for this
validation run, including the STIX spec version.
""" |
if options.version == '2.0':
return shoulds20.list_shoulds(options)
else:
return shoulds21.list_shoulds(options) |
<SYSTEM_TASK:>
Set up validation of a single STIX object against its type's schema.
<END_TASK>
<USER_TASK:>
Description:
def _schema_validate(sdo, options):
"""Set up validation of a single STIX object against its type's schema.
This does no actual validation; it just returns generators which must be
iterated to trigger the actual validation.
This function first creates generators for the built-in schemas, then adds
generators for additional schemas from the options, if specified.
Do not call this function directly; use validate_instance() instead, as it
calls this one. This function does not perform any custom checks.
""" |
error_gens = []
if 'id' in sdo:
try:
error_prefix = sdo['id'] + ": "
except TypeError:
error_prefix = 'unidentifiable object: '
else:
error_prefix = ''
# Get validator for built-in schema
base_sdo_errors = _get_error_generator(sdo['type'], sdo, version=options.version)
if base_sdo_errors:
error_gens.append((base_sdo_errors, error_prefix))
# Get validator for any user-supplied schema
if options.schema_dir:
custom_sdo_errors = _get_error_generator(sdo['type'], sdo, options.schema_dir)
if custom_sdo_errors:
error_gens.append((custom_sdo_errors, error_prefix))
# Validate each cyber observable object separately
if sdo['type'] == 'observed-data' and 'objects' in sdo:
# Check if observed data property is in dictionary format
if not isinstance(sdo['objects'], dict):
error_gens.append(([schema_exceptions.ValidationError("Observed Data objects must be in dict format.", error_prefix)],
error_prefix))
return error_gens
for key, obj in iteritems(sdo['objects']):
if 'type' not in obj:
error_gens.append(([schema_exceptions.ValidationError("Observable object must contain a 'type' property.", error_prefix)],
error_prefix + 'object \'' + key + '\': '))
continue
# Get validator for built-in schemas
base_obs_errors = _get_error_generator(obj['type'],
obj,
None,
options.version,
'cyber-observable-core')
if base_obs_errors:
error_gens.append((base_obs_errors,
error_prefix + 'object \'' + key + '\': '))
# Get validator for any user-supplied schema
custom_obs_errors = _get_error_generator(obj['type'],
obj,
options.schema_dir,
options.version,
'cyber-observable-core')
if custom_obs_errors:
error_gens.append((custom_obs_errors,
error_prefix + 'object \'' + key + '\': '))
return error_gens |
<SYSTEM_TASK:>
Perform STIX JSON Schema validation against STIX input.
<END_TASK>
<USER_TASK:>
Description:
def validate_instance(instance, options=None):
"""Perform STIX JSON Schema validation against STIX input.
Find the correct schema by looking at the 'type' property of the
`instance` JSON object.
Args:
instance: A Python dictionary representing a STIX object with a
'type' property.
options: ValidationOptions instance with validation options for this
validation run.
Returns:
A dictionary of validation results
""" |
if 'type' not in instance:
raise ValidationError("Input must be an object with a 'type' property.")
if not options:
options = ValidationOptions()
error_gens = []
# Schema validation
if instance['type'] == 'bundle' and 'objects' in instance:
# Validate each object in a bundle separately
for sdo in instance['objects']:
if 'type' not in sdo:
raise ValidationError("Each object in bundle must have a 'type' property.")
error_gens += _schema_validate(sdo, options)
else:
error_gens += _schema_validate(instance, options)
# Custom validation
must_checks = _get_musts(options)
should_checks = _get_shoulds(options)
output.info("Running the following additional checks: %s."
% ", ".join(x.__name__ for x in chain(must_checks, should_checks)))
try:
errors = _iter_errors_custom(instance, must_checks, options)
warnings = _iter_errors_custom(instance, should_checks, options)
if options.strict:
chained_errors = chain(errors, warnings)
warnings = []
else:
chained_errors = errors
warnings = [pretty_error(x, options.verbose) for x in warnings]
except schema_exceptions.RefResolutionError:
raise SchemaInvalidError('Invalid JSON schema: a JSON reference '
'failed to resolve')
# List of error generators and message prefixes (to denote which object the
# error comes from)
error_gens += [(chained_errors, '')]
# Prepare the list of errors (this actually triggers the custom validation
# functions).
error_list = []
for gen, prefix in error_gens:
for error in gen:
msg = prefix + pretty_error(error, options.verbose)
error_list.append(SchemaError(msg))
if error_list:
valid = False
else:
valid = True
return ObjectValidationResults(is_valid=valid, object_id=instance.get('id', ''),
errors=error_list, warnings=warnings) |
<SYSTEM_TASK:>
Ensure custom content follows strict naming style conventions.
<END_TASK>
<USER_TASK:>
Description:
def custom_prefix_strict(instance):
"""Ensure custom content follows strict naming style conventions.
""" |
for error in chain(custom_object_prefix_strict(instance),
custom_property_prefix_strict(instance),
custom_observable_object_prefix_strict(instance),
custom_object_extension_prefix_strict(instance),
custom_observable_properties_prefix_strict(instance)):
yield error |
<SYSTEM_TASK:>
Ensure custom content follows lenient naming style conventions
<END_TASK>
<USER_TASK:>
Description:
def custom_prefix_lax(instance):
"""Ensure custom content follows lenient naming style conventions
for forward-compatibility.
""" |
for error in chain(custom_object_prefix_lax(instance),
custom_property_prefix_lax(instance),
custom_observable_object_prefix_lax(instance),
custom_object_extension_prefix_lax(instance),
custom_observable_properties_prefix_lax(instance)):
yield error |
<SYSTEM_TASK:>
Ensure custom objects follow strict naming style conventions.
<END_TASK>
<USER_TASK:>
Description:
def custom_object_prefix_strict(instance):
"""Ensure custom objects follow strict naming style conventions.
""" |
if (instance['type'] not in enums.TYPES and
instance['type'] not in enums.RESERVED_OBJECTS and
not CUSTOM_TYPE_PREFIX_RE.match(instance['type'])):
yield JSONError("Custom object type '%s' should start with 'x-' "
"followed by a source unique identifier (like a "
"domain name with dots replaced by hyphens), a hyphen "
"and then the name." % instance['type'],
instance['id'], 'custom-prefix') |
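The strict regex requires a source-identifier segment between the 'x-' prefix and the name, while the lax variant only requires the prefix. Plausible definitions, stated here as assumptions since the actual constants are defined elsewhere in the package:

# Assumed shapes of the prefix regexes these checks rely on; the real
# constants live elsewhere in the package.
import re

CUSTOM_TYPE_PREFIX_RE = re.compile(r"^x\-.+\-.+$")  # x-<source>-<name>
CUSTOM_TYPE_LAX_PREFIX_RE = re.compile(r"^x\-.+$")  # x-<anything>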
<SYSTEM_TASK:>
Ensure custom objects follow lenient naming style conventions
<END_TASK>
<USER_TASK:>
Description:
def custom_object_prefix_lax(instance):
"""Ensure custom objects follow lenient naming style conventions
for forward-compatibility.
""" |
if (instance['type'] not in enums.TYPES and
instance['type'] not in enums.RESERVED_OBJECTS and
not CUSTOM_TYPE_LAX_PREFIX_RE.match(instance['type'])):
yield JSONError("Custom object type '%s' should start with 'x-' in "
"order to be compatible with future versions of the "
"STIX 2 specification." % instance['type'],
instance['id'], 'custom-prefix-lax') |
<SYSTEM_TASK:>
Ensure custom properties follow strict naming style conventions.
<END_TASK>
<USER_TASK:>
Description:
def custom_property_prefix_strict(instance):
"""Ensure custom properties follow strict naming style conventions.
Does not check property names in custom objects.
""" |
for prop_name in instance.keys():
if (instance['type'] in enums.PROPERTIES and
prop_name not in enums.PROPERTIES[instance['type']] and
prop_name not in enums.RESERVED_PROPERTIES and
not CUSTOM_PROPERTY_PREFIX_RE.match(prop_name)):
yield JSONError("Custom property '%s' should have a type that "
"starts with 'x_' followed by a source unique "
"identifier (like a domain name with dots "
"replaced by hyphen), a hyphen and then the name."
% prop_name, instance['id'],
'custom-prefix') |
<SYSTEM_TASK:>
Ensure custom properties follow lenient naming style conventions
<END_TASK>
<USER_TASK:>
Description:
def custom_property_prefix_lax(instance):
"""Ensure custom properties follow lenient naming style conventions
for forward-compatibility.
Does not check property names in custom objects.
""" |
for prop_name in instance.keys():
if (instance['type'] in enums.PROPERTIES and
prop_name not in enums.PROPERTIES[instance['type']] and
prop_name not in enums.RESERVED_PROPERTIES and
not CUSTOM_PROPERTY_LAX_PREFIX_RE.match(prop_name)):
yield JSONError("Custom property '%s' should have a type that "
"starts with 'x_' in order to be compatible with "
"future versions of the STIX 2 specification." %
prop_name, instance['id'],
'custom-prefix-lax') |
<SYSTEM_TASK:>
Ensure that the values of all properties which use open vocabularies are
<END_TASK>
<USER_TASK:>
Description:
def open_vocab_values(instance):
"""Ensure that the values of all properties which use open vocabularies are
in lowercase and use hyphens instead of spaces or underscores as word
separators.
""" |
if instance['type'] not in enums.VOCAB_PROPERTIES:
return
properties = enums.VOCAB_PROPERTIES[instance['type']]
for prop in properties:
if prop in instance:
if type(instance[prop]) is list:
values = instance[prop]
else:
values = [instance[prop]]
for v in values:
if not v.islower() or '_' in v or ' ' in v:
yield JSONError("Open vocabulary value '%s' should be all"
" lowercase and use hyphens instead of"
" spaces or underscores as word"
" separators." % v, instance['id'],
'open-vocab-format') |
<SYSTEM_TASK:>
Ensure the `kill_chain_name` and `phase_name` properties of
<END_TASK>
<USER_TASK:>
Description:
def kill_chain_phase_names(instance):
"""Ensure the `kill_chain_name` and `phase_name` properties of
`kill_chain_phase` objects follow naming style conventions.
""" |
if instance['type'] in enums.KILL_CHAIN_PHASE_USES and 'kill_chain_phases' in instance:
for phase in instance['kill_chain_phases']:
if 'kill_chain_name' not in phase:
# Since this field is required, schemas will already catch the error
return
chain_name = phase['kill_chain_name']
if not chain_name.islower() or '_' in chain_name or ' ' in chain_name:
yield JSONError("kill_chain_name '%s' should be all lowercase"
" and use hyphens instead of spaces or "
"underscores as word separators." % chain_name,
instance['id'], 'kill-chain-names')
phase_name = phase['phase_name']
if not phase_name.islower() or '_' in phase_name or ' ' in phase_name:
yield JSONError("phase_name '%s' should be all lowercase and "
"use hyphens instead of spaces or underscores "
"as word separators." % phase_name,
instance['id'], 'kill-chain-names') |
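A conforming entry, using the example kill chain from the STIX 2 specification:

# Passes the check above: all lowercase, hyphens as word separators.
kill_chain_phase = {
    "kill_chain_name": "lockheed-martin-cyber-kill-chain",
    "phase_name": "reconnaissance",
}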
<SYSTEM_TASK:>
Ensure that the open vocabulary specified by `vocab` is used properly.
<END_TASK>
<USER_TASK:>
Description:
def check_vocab(instance, vocab, code):
"""Ensure that the open vocabulary specified by `vocab` is used properly.
This checks properties of objects specified in the appropriate `_USES`
dictionary to determine which properties SHOULD use the given vocabulary,
then checks that the values in those properties are from the vocabulary.
""" |
vocab_uses = getattr(enums, vocab + "_USES")
for k in vocab_uses.keys():
if instance['type'] == k:
for prop in vocab_uses[k]:
if prop not in instance:
continue
vocab_ov = getattr(enums, vocab + "_OV")
if type(instance[prop]) is list:
is_in = set(instance[prop]).issubset(set(vocab_ov))
else:
is_in = instance[prop] in vocab_ov
if not is_in:
vocab_name = vocab.replace('_', '-').lower()
yield JSONError("%s contains a value not in the %s-ov "
"vocabulary." % (prop, vocab_name),
instance['id'], code) |
<SYSTEM_TASK:>
Ensure that the `definition_type` property of `marking-definition`
<END_TASK>
<USER_TASK:>
Description:
def vocab_marking_definition(instance):
"""Ensure that the `definition_type` property of `marking-definition`
objects is one of the values in the STIX 2.0 specification.
""" |
if (instance['type'] == 'marking-definition' and
'definition_type' in instance and not
instance['definition_type'] in enums.MARKING_DEFINITION_TYPES):
return JSONError("Marking definition `definition_type` should be one "
"of: %s." % ', '.join(enums.MARKING_DEFINITION_TYPES),
instance['id'], 'marking-definition-type') |
<SYSTEM_TASK:>
Ensure that only the relationship types defined in the specification are
<END_TASK>
<USER_TASK:>
Description:
def relationships_strict(instance):
"""Ensure that only the relationship types defined in the specification are
used.
""" |
# Don't check objects that aren't relationships or that are custom objects
if (instance['type'] != 'relationship' or
instance['type'] not in enums.TYPES):
return
if ('relationship_type' not in instance or 'source_ref' not in instance or
'target_ref' not in instance):
# Since these fields are required, schemas will already catch the error
return
r_type = instance['relationship_type']
try:
r_source = re.search(r"(.+)\-\-", instance['source_ref']).group(1)
r_target = re.search(r"(.+)\-\-", instance['target_ref']).group(1)
except (AttributeError, TypeError):
# Schemas already catch errors of these properties not being strings or
# not containing the string '--'.
return
if (r_type in enums.COMMON_RELATIONSHIPS or
r_source in enums.NON_SDOS or
r_target in enums.NON_SDOS):
# If all objects can have this relationship type, no more checks needed
# Schemas already catch if source/target type cannot have relationship
return
if r_source not in enums.RELATIONSHIPS:
return JSONError("'%s' is not a suggested relationship source object "
"for the '%s' relationship." % (r_source, r_type),
instance['id'], 'relationship-types')
if r_type not in enums.RELATIONSHIPS[r_source]:
return JSONError("'%s' is not a suggested relationship type for '%s' "
"objects." % (r_type, r_source), instance['id'],
'relationship-types')
if r_target not in enums.RELATIONSHIPS[r_source][r_type]:
return JSONError("'%s' is not a suggested relationship target object "
"for '%s' objects with the '%s' relationship."
% (r_target, r_source, r_type), instance['id'],
'relationship-types') |
<SYSTEM_TASK:>
Return true if given value is a valid, recommended hash name according
<END_TASK>
<USER_TASK:>
Description:
def valid_hash_value(hashname):
"""Return true if given value is a valid, recommended hash name according
to the STIX 2 specification.
""" |
custom_hash_prefix_re = re.compile(r"^x_")
if hashname in enums.HASH_ALGO_OV or custom_hash_prefix_re.match(hashname):
return True
else:
return False |
<SYSTEM_TASK:>
Ensure file objects with the windows-pebinary-ext extension have a
<END_TASK>
<USER_TASK:>
Description:
def vocab_windows_pebinary_type(instance):
"""Ensure file objects with the windows-pebinary-ext extension have a
'pe_type' property that is from the windows-pebinary-type-ov vocabulary.
""" |
for key, obj in instance['objects'].items():
if 'type' in obj and obj['type'] == 'file':
try:
pe_type = obj['extensions']['windows-pebinary-ext']['pe_type']
except KeyError:
continue
if pe_type not in enums.WINDOWS_PEBINARY_TYPE_OV:
yield JSONError("Object '%s' has a Windows PE Binary File "
"extension with a 'pe_type' of '%s', which is not a "
"value in the windows-pebinary-type-ov vocabulary."
% (key, pe_type), instance['id'],
'windows-pebinary-type') |
<SYSTEM_TASK:>
Ensure a user-account object's 'account_type' property is from the
<END_TASK>
<USER_TASK:>
Description:
def vocab_account_type(instance):
"""Ensure a user-account objects' 'account-type' property is from the
account-type-ov vocabulary.
""" |
for key, obj in instance['objects'].items():
if 'type' in obj and obj['type'] == 'user-account':
try:
acct_type = obj['account_type']
except KeyError:
continue
if acct_type not in enums.ACCOUNT_TYPE_OV:
yield JSONError("Object '%s' is a User Account Object "
"with an 'account_type' of '%s', which is not a "
"value in the account-type-ov vocabulary."
% (key, acct_type), instance['id'], 'account-type') |
<SYSTEM_TASK:>
Ensure observable-objects keys are non-negative integers.
<END_TASK>
<USER_TASK:>
Description:
def observable_object_keys(instance):
"""Ensure observable-objects keys are non-negative integers.
""" |
digits_re = re.compile(r"^\d+$")
for key in instance['objects']:
if not digits_re.match(key):
yield JSONError("'%s' is not a good key value. Observable Objects "
"should use non-negative integers for their keys."
% key, instance['id'], 'observable-object-keys') |
<SYSTEM_TASK:>
Ensure custom observable objects follow strict naming style conventions.
<END_TASK>
<USER_TASK:>
Description:
def custom_observable_object_prefix_strict(instance):
"""Ensure custom observable objects follow strict naming style conventions.
""" |
for key, obj in instance['objects'].items():
if ('type' in obj and obj['type'] not in enums.OBSERVABLE_TYPES and
obj['type'] not in enums.OBSERVABLE_RESERVED_OBJECTS and
not CUSTOM_TYPE_PREFIX_RE.match(obj['type'])):
yield JSONError("Custom Observable Object type '%s' should start "
"with 'x-' followed by a source unique identifier "
"(like a domain name with dots replaced by "
"hyphens), a hyphen and then the name."
% obj['type'], instance['id'],
'custom-prefix') |
<SYSTEM_TASK:>
Ensure custom observable object extensions follow strict naming style
<END_TASK>
<USER_TASK:>
Description:
def custom_object_extension_prefix_strict(instance):
"""Ensure custom observable object extensions follow strict naming style
conventions.
""" |
for key, obj in instance['objects'].items():
if not ('extensions' in obj and 'type' in obj and
obj['type'] in enums.OBSERVABLE_EXTENSIONS):
continue
for ext_key in obj['extensions']:
if (ext_key not in enums.OBSERVABLE_EXTENSIONS[obj['type']] and
not CUSTOM_TYPE_PREFIX_RE.match(ext_key)):
yield JSONError("Custom Cyber Observable Object extension type"
" '%s' should start with 'x-' followed by a source "
"unique identifier (like a domain name with dots "
"replaced by hyphens), a hyphen and then the name."
% ext_key, instance['id'],
'custom-prefix') |
<SYSTEM_TASK:>
Ensure custom observable object extensions follow naming style
<END_TASK>
<USER_TASK:>
Description:
def custom_object_extension_prefix_lax(instance):
"""Ensure custom observable object extensions follow naming style
conventions.
""" |
for key, obj in instance['objects'].items():
if not ('extensions' in obj and 'type' in obj and
obj['type'] in enums.OBSERVABLE_EXTENSIONS):
continue
for ext_key in obj['extensions']:
if (ext_key not in enums.OBSERVABLE_EXTENSIONS[obj['type']] and
not CUSTOM_TYPE_LAX_PREFIX_RE.match(ext_key)):
yield JSONError("Custom Cyber Observable Object extension type"
" '%s' should start with 'x-'."
% ext_key, instance['id'],
'custom-prefix-lax') |
<SYSTEM_TASK:>
Ensure network-traffic objects contain both src_port and dst_port.
<END_TASK>
<USER_TASK:>
Description:
def network_traffic_ports(instance):
"""Ensure network-traffic objects contain both src_port and dst_port.
""" |
for key, obj in instance['objects'].items():
if ('type' in obj and obj['type'] == 'network-traffic' and
('src_port' not in obj or 'dst_port' not in obj)):
yield JSONError("The Network Traffic object '%s' should contain "
"both the 'src_port' and 'dst_port' properties."
% key, instance['id'], 'network-traffic-ports') |
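A short illustrative call; the object below is hypothetical and omits 'dst_port' on purpose so the check yields exactly one warning:

instance = {
    'id': 'observed-data--11111111-1111-4111-8111-111111111111',
    'objects': {
        '0': {'type': 'network-traffic', 'src_port': 24678},  # no dst_port
    },
}
for error in network_traffic_ports(instance):
    print(error)  # asks for both 'src_port' and 'dst_port'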
<SYSTEM_TASK:>
Ensure the 'mime_type' property of file objects comes from the Template
<END_TASK>
<USER_TASK:>
Description:
def mime_type(instance):
"""Ensure the 'mime_type' property of file objects comes from the Template
column in the IANA media type registry.
""" |
mime_pattern = re.compile(r'^(application|audio|font|image|message|model'
'|multipart|text|video)/[a-zA-Z0-9.+_-]+')
for key, obj in instance['objects'].items():
if ('type' in obj and obj['type'] == 'file' and 'mime_type' in obj):
if enums.media_types():
if obj['mime_type'] not in enums.media_types():
yield JSONError("The 'mime_type' property of object '%s' "
"('%s') should be an IANA registered MIME "
"Type of the form 'type/subtype'."
% (key, obj['mime_type']), instance['id'],
'mime-type')
else:
info("Can't reach IANA website; using regex for mime types.")
if not mime_pattern.match(obj['mime_type']):
yield JSONError("The 'mime_type' property of object '%s' "
"('%s') should be an IANA MIME Type of the"
" form 'type/subtype'."
% (key, obj['mime_type']), instance['id'],
'mime-type') |
<SYSTEM_TASK:>
Ensure the 'protocols' property of network-traffic objects contains only
<END_TASK>
<USER_TASK:>
Description:
def protocols(instance):
"""Ensure the 'protocols' property of network-traffic objects contains only
values from the IANA Service Name and Transport Protocol Port Number
Registry.
""" |
for key, obj in instance['objects'].items():
if ('type' in obj and obj['type'] == 'network-traffic' and
'protocols' in obj):
for prot in obj['protocols']:
if enums.protocols():
if prot not in enums.protocols():
yield JSONError("The 'protocols' property of object "
"'%s' contains a value ('%s') not in "
"IANA Service Name and Transport "
"Protocol Port Number Registry."
% (key, prot), instance['id'],
'protocols')
else:
info("Can't reach IANA website; using regex for protocols.")
if not PROTOCOL_RE.match(prot):
yield JSONError("The 'protocols' property of object "
"'%s' contains a value ('%s') not in "
"IANA Service Name and Transport "
"Protocol Port Number Registry."
% (key, prot), instance['id'],
'protocols') |
<SYSTEM_TASK:>
Ensure the keys of the 'document_info_dict' property of the pdf-ext
<END_TASK>
<USER_TASK:>
Description:
def pdf_doc_info(instance):
"""Ensure the keys of the 'document_info_dict' property of the pdf-ext
extension of file objects are only valid PDF Document Information
Dictionary Keys.
""" |
for key, obj in instance['objects'].items():
if ('type' in obj and obj['type'] == 'file'):
try:
did = obj['extensions']['pdf-ext']['document_info_dict']
except KeyError:
continue
for elem in did:
if elem not in enums.PDF_DID:
yield JSONError("The 'document_info_dict' property of "
"object '%s' contains a key ('%s') that is"
" not a valid PDF Document Information "
"Dictionary key."
% (key, elem), instance['id'],
'pdf-doc-info') |
<SYSTEM_TASK:>
Ensure that the `country` property of `location` objects is a valid
<END_TASK>
<USER_TASK:>
Description:
def countries(instance):
"""Ensure that the `country` property of `location` objects is a valid
ISO 3166-1 ALPHA-2 Code.
""" |
    if (instance['type'] == 'location' and 'country' in instance and
            instance['country'].upper() not in enums.COUNTRY_CODES):
        return JSONError("Location `country` should be a valid ISO 3166-1 "
                         "ALPHA-2 Code.",
                         instance['id'], 'countries') |
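A quick sketch with a made-up location object; 'ZZ' is not an assigned ISO 3166-1 ALPHA-2 code, so the check should return an error:

location = {
    'type': 'location',
    'id': 'location--22222222-2222-4222-8222-222222222222',
    'country': 'ZZ',  # not a valid ALPHA-2 code
}
error = countries(location)
if error:
    print(error)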
<SYSTEM_TASK:>
Ensure the 'priority' property of windows-process-ext ends in '_CLASS'.
<END_TASK>
<USER_TASK:>
Description:
def windows_process_priority_format(instance):
"""Ensure the 'priority' property of windows-process-ext ends in '_CLASS'.
""" |
class_suffix_re = re.compile(r'.+_CLASS$')
for key, obj in instance['objects'].items():
if 'type' in obj and obj['type'] == 'process':
try:
priority = obj['extensions']['windows-process-ext']['priority']
except KeyError:
continue
if not class_suffix_re.match(priority):
yield JSONError("The 'priority' property of object '%s' should"
" end in '_CLASS'." % key, instance['id'],
'windows-process-priority-format') |
<SYSTEM_TASK:>
Ensure objects with duplicate IDs have different `modified` timestamps.
<END_TASK>
<USER_TASK:>
Description:
def duplicate_ids(instance):
"""Ensure objects with duplicate IDs have different `modified` timestamps.
""" |
if instance['type'] != 'bundle' or 'objects' not in instance:
return
unique_ids = {}
for obj in instance['objects']:
if 'id' not in obj or 'modified' not in obj:
continue
elif obj['id'] not in unique_ids:
unique_ids[obj['id']] = obj['modified']
elif obj['modified'] == unique_ids[obj['id']]:
yield JSONError("Duplicate ID '%s' has identical `modified` timestamp."
" If they are different versions of the same object, "
"they should have different `modified` properties."
% obj['id'], instance['id'], 'duplicate-ids') |
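An illustrative bundle where two objects share both 'id' and 'modified', which is exactly the case the generator above flags:

bundle = {
    'type': 'bundle',
    'id': 'bundle--33333333-3333-4333-8333-333333333333',
    'objects': [
        {'id': 'indicator--aaaaaaaa-aaaa-4aaa-8aaa-aaaaaaaaaaaa',
         'modified': '2017-01-01T00:00:00Z'},
        {'id': 'indicator--aaaaaaaa-aaaa-4aaa-8aaa-aaaaaaaaaaaa',
         'modified': '2017-01-01T00:00:00Z'},  # same id, same timestamp
    ],
}
for error in duplicate_ids(bundle):
    print(error)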
<SYSTEM_TASK:>
Ensure timestamps contain sane months, days, hours, minutes, seconds.
<END_TASK>
<USER_TASK:>
Description:
def timestamp(instance):
"""Ensure timestamps contain sane months, days, hours, minutes, seconds.
""" |
ts_re = re.compile(r"^[0-9]{4}-(0[1-9]|1[012])-(0[1-9]|[12][0-9]|3[01])T([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9]|60)(\.[0-9]+)?Z$")
timestamp_props = ['created', 'modified']
if instance['type'] in enums.TIMESTAMP_PROPERTIES:
timestamp_props += enums.TIMESTAMP_PROPERTIES[instance['type']]
for tprop in timestamp_props:
if tprop in instance and ts_re.match(instance[tprop]):
# Don't raise an error if schemas will catch it
try:
parser.parse(instance[tprop])
except ValueError as e:
yield JSONError("'%s': '%s' is not a valid timestamp: %s"
% (tprop, instance[tprop], str(e)), instance['id'])
if has_cyber_observable_data(instance):
for key, obj in instance['objects'].items():
if 'type' not in obj:
continue
if obj['type'] in enums.TIMESTAMP_OBSERVABLE_PROPERTIES:
for tprop in enums.TIMESTAMP_OBSERVABLE_PROPERTIES[obj['type']]:
if tprop in obj and ts_re.match(obj[tprop]):
# Don't raise an error if schemas will catch it
try:
parser.parse(obj[tprop])
except ValueError as e:
yield JSONError("'%s': '%s': '%s' is not a valid timestamp: %s"
% (obj['type'], tprop, obj[tprop], str(e)), instance['id'])
if obj['type'] in enums.TIMESTAMP_EMBEDDED_PROPERTIES:
for embed in enums.TIMESTAMP_EMBEDDED_PROPERTIES[obj['type']]:
if embed in obj:
for tprop in enums.TIMESTAMP_EMBEDDED_PROPERTIES[obj['type']][embed]:
if embed == 'extensions':
for ext in obj[embed]:
if tprop in obj[embed][ext] and ts_re.match(obj[embed][ext][tprop]):
try:
parser.parse(obj[embed][ext][tprop])
except ValueError as e:
yield JSONError("'%s': '%s': '%s': '%s' is not a valid timestamp: %s"
% (obj['type'], ext, tprop, obj[embed][ext][tprop], str(e)), instance['id'])
elif tprop in obj[embed] and ts_re.match(obj[embed][tprop]):
try:
parser.parse(obj[embed][tprop])
except ValueError as e:
yield JSONError("'%s': '%s': '%s' is not a valid timestamp: %s"
% (obj['type'], tprop, obj[embed][tprop], str(e)), instance['id']) |
<SYSTEM_TASK:>
`modified` property must be later than or equal to the `created` property
<END_TASK>
<USER_TASK:>
Description:
def modified_created(instance):
"""`modified` property must be later or equal to `created` property
""" |
if 'modified' in instance and 'created' in instance and \
instance['modified'] < instance['created']:
msg = "'modified' (%s) must be later or equal to 'created' (%s)"
return JSONError(msg % (instance['modified'], instance['created']),
instance['id']) |
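Because both properties use the same fixed-width timestamp format, the plain string comparison above is chronologically correct. A hypothetical failing object:

obj = {
    'id': 'indicator--44444444-4444-4444-8444-444444444444',
    'created': '2017-06-01T00:00:00Z',
    'modified': '2017-01-01T00:00:00Z',  # earlier than 'created'
}
error = modified_created(obj)
if error:
    print(error)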
<SYSTEM_TASK:>
Ensure selectors in granular markings refer to items which are actually
<END_TASK>
<USER_TASK:>
Description:
def marking_selector_syntax(instance):
"""Ensure selectors in granular markings refer to items which are actually
present in the object.
""" |
if 'granular_markings' not in instance:
return
list_index_re = re.compile(r"\[(\d+)\]")
for marking in instance['granular_markings']:
if 'selectors' not in marking:
continue
selectors = marking['selectors']
for selector in selectors:
segments = selector.split('.')
obj = instance
prev_segmt = None
for segmt in segments:
index_match = list_index_re.match(segmt)
if index_match:
try:
idx = int(index_match.group(1))
obj = obj[idx]
except IndexError:
yield JSONError("'%s' is not a valid selector because"
" %s is not a valid index."
% (selector, idx), instance['id'])
except KeyError:
yield JSONError("'%s' is not a valid selector because"
" '%s' is not a list."
% (selector, prev_segmt), instance['id'])
else:
try:
obj = obj[segmt]
except KeyError as e:
yield JSONError("'%s' is not a valid selector because"
" %s is not a property."
% (selector, e), instance['id'])
except TypeError:
yield JSONError("'%s' is not a valid selector because"
" '%s' is not a property."
% (selector, segmt), instance['id'])
prev_segmt = segmt |
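A sketch exercising both failure paths above with made-up selectors: an out-of-range list index and a property that does not exist:

obj = {
    'id': 'indicator--55555555-5555-4555-8555-555555555555',
    'labels': ['malicious-activity'],
    'granular_markings': [
        {'selectors': ['labels.[5]', 'no_such_property']},
    ],
}
for error in marking_selector_syntax(obj):
    print(error)  # one invalid-index error, one not-a-property error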
<SYSTEM_TASK:>
Ensure certain observable object properties reference the correct type
<END_TASK>
<USER_TASK:>
Description:
def observable_object_references(instance):
"""Ensure certain observable object properties reference the correct type
of object.
""" |
for key, obj in instance['objects'].items():
if 'type' not in obj:
continue
elif obj['type'] not in enums.OBSERVABLE_PROP_REFS:
continue
obj_type = obj['type']
for obj_prop in enums.OBSERVABLE_PROP_REFS[obj_type]:
if obj_prop not in obj:
continue
enum_prop = enums.OBSERVABLE_PROP_REFS[obj_type][obj_prop]
if isinstance(enum_prop, list):
refs = obj[obj_prop]
enum_vals = enum_prop
for x in check_observable_refs(refs, obj_prop, enum_prop, '',
enum_vals, key, instance):
yield x
elif isinstance(enum_prop, dict):
for embedded_prop in enum_prop:
if isinstance(obj[obj_prop], dict):
if embedded_prop not in obj[obj_prop]:
continue
embedded_obj = obj[obj_prop][embedded_prop]
for embed_obj_prop in embedded_obj:
if embed_obj_prop not in enum_prop[embedded_prop]:
continue
refs = embedded_obj[embed_obj_prop]
enum_vals = enum_prop[embedded_prop][embed_obj_prop]
for x in check_observable_refs(refs, obj_prop, enum_prop,
embed_obj_prop, enum_vals,
key, instance):
yield x
elif isinstance(obj[obj_prop], list):
for embedded_list_obj in obj[obj_prop]:
if embedded_prop not in embedded_list_obj:
continue
embedded_obj = embedded_list_obj[embedded_prop]
refs = embedded_obj
enum_vals = enum_prop[embedded_prop]
for x in check_observable_refs(refs, obj_prop, enum_prop,
embedded_prop, enum_vals,
key, instance):
yield x |
<SYSTEM_TASK:>
Ensure the 'mime_type' property of artifact objects comes from the
<END_TASK>
<USER_TASK:>
Description:
def artifact_mime_type(instance):
"""Ensure the 'mime_type' property of artifact objects comes from the
Template column in the IANA media type registry.
""" |
    mime_re = re.compile(r'^(application|audio|font|image|message|model'
                         '|multipart|text|video)/[a-zA-Z0-9.+_-]+')
    for key, obj in instance['objects'].items():
        if ('type' in obj and obj['type'] == 'artifact' and 'mime_type' in obj):
            if enums.media_types():
                if obj['mime_type'] not in enums.media_types():
                    yield JSONError("The 'mime_type' property of object '%s' "
                                    "('%s') must be an IANA registered MIME "
                                    "Type of the form 'type/subtype'."
                                    % (key, obj['mime_type']), instance['id'])
            else:
                info("Can't reach IANA website; using regex for mime types.")
                if not mime_re.match(obj['mime_type']):
                    yield JSONError("The 'mime_type' property of object '%s' "
                                    "('%s') should be an IANA MIME Type of the"
                                    " form 'type/subtype'."
                                    % (key, obj['mime_type']), instance['id']) |
<SYSTEM_TASK:>
Ensure certain properties of cyber observable objects come from the IANA
<END_TASK>
<USER_TASK:>
Description:
def character_set(instance):
"""Ensure certain properties of cyber observable objects come from the IANA
Character Set list.
""" |
char_re = re.compile(r'^[a-zA-Z0-9_\(\)-]+$')
for key, obj in instance['objects'].items():
if ('type' in obj and obj['type'] == 'directory' and 'path_enc' in obj):
if enums.char_sets():
if obj['path_enc'] not in enums.char_sets():
yield JSONError("The 'path_enc' property of object '%s' "
"('%s') must be an IANA registered "
"character set."
% (key, obj['path_enc']), instance['id'])
else:
info("Can't reach IANA website; using regex for character_set.")
if not char_re.match(obj['path_enc']):
yield JSONError("The 'path_enc' property of object '%s' "
"('%s') must be an IANA registered "
"character set."
% (key, obj['path_enc']), instance['id'])
if ('type' in obj and obj['type'] == 'file' and 'name_enc' in obj):
if enums.char_sets():
if obj['name_enc'] not in enums.char_sets():
yield JSONError("The 'name_enc' property of object '%s' "
"('%s') must be an IANA registered "
"character set."
% (key, obj['name_enc']), instance['id'])
else:
info("Can't reach IANA website; using regex for character_set.")
if not char_re.match(obj['name_enc']):
yield JSONError("The 'name_enc' property of object '%s' "
"('%s') must be an IANA registered "
"character set."
% (key, obj['name_enc']), instance['id']) |
<SYSTEM_TASK:>
Ensure the 'languages' property of software objects contains only valid ISO 639-2
<END_TASK>
<USER_TASK:>
Description:
def software_language(instance):
"""Ensure the 'language' property of software objects is a valid ISO 639-2
language code.
""" |
for key, obj in instance['objects'].items():
if ('type' in obj and obj['type'] == 'software' and
'languages' in obj):
for lang in obj['languages']:
if lang not in enums.SOFTWARE_LANG_CODES:
yield JSONError("The 'languages' property of object '%s' "
"contains an invalid ISO 639-2 language "
" code ('%s')."
% (key, lang), instance['id']) |
<SYSTEM_TASK:>
Ensure that no custom object types are used, but only the official ones
<END_TASK>
<USER_TASK:>
Description:
def types_strict(instance):
"""Ensure that no custom object types are used, but only the official ones
from the specification.
""" |
if instance['type'] not in enums.TYPES:
yield JSONError("Object type '%s' is not one of those defined in the"
" specification." % instance['type'], instance['id'])
if has_cyber_observable_data(instance):
for key, obj in instance['objects'].items():
if 'type' in obj and obj['type'] not in enums.OBSERVABLE_TYPES:
yield JSONError("Observable object %s is type '%s' which is "
"not one of those defined in the "
"specification."
% (key, obj['type']), instance['id']) |
<SYSTEM_TASK:>
Ensure that no custom properties are used, but only the official ones
<END_TASK>
<USER_TASK:>
Description:
def properties_strict(instance):
"""Ensure that no custom properties are used, but only the official ones
from the specification.
""" |
if instance['type'] not in enums.TYPES:
return # only check properties for official objects
defined_props = enums.PROPERTIES.get(instance['type'], [])
for prop in instance.keys():
if prop not in defined_props:
yield JSONError("Property '%s' is not one of those defined in the"
" specification." % prop, instance['id'])
if has_cyber_observable_data(instance):
for key, obj in instance['objects'].items():
type_ = obj.get('type', '')
if type_ not in enums.OBSERVABLE_PROPERTIES:
continue # custom observable types handled outside this function
observable_props = enums.OBSERVABLE_PROPERTIES.get(type_, [])
embedded_props = enums.OBSERVABLE_EMBEDDED_PROPERTIES.get(type_, {})
extensions = enums.OBSERVABLE_EXTENSIONS.get(type_, [])
for prop in obj.keys():
if prop not in observable_props:
yield JSONError("Property '%s' is not one of those defined in the"
" specification for %s objects."
% (prop, type_), instance['id'])
# Check properties of embedded cyber observable types
elif prop in embedded_props:
embedded_prop_keys = embedded_props.get(prop, [])
for embedded_key in obj[prop]:
if isinstance(embedded_key, dict):
for embedded in embedded_key:
if embedded not in embedded_prop_keys:
yield JSONError("Property '%s' is not one of those defined in the"
" specification for the %s property in %s objects."
% (embedded, prop, type_), instance['id'])
elif embedded_key not in embedded_prop_keys:
yield JSONError("Property '%s' is not one of those defined in the"
" specification for the %s property in %s objects."
% (embedded_key, prop, type_), instance['id'])
# Check properties of embedded cyber observable types
for ext_key in obj.get('extensions', {}):
if ext_key not in extensions:
continue # don't check custom extensions
extension_props = enums.OBSERVABLE_EXTENSION_PROPERTIES[ext_key]
for ext_prop in obj['extensions'][ext_key]:
if ext_prop not in extension_props:
yield JSONError("Property '%s' is not one of those defined in the"
" specification for the %s extension in %s objects."
% (ext_prop, ext_key, type_), instance['id'])
embedded_ext_props = enums.OBSERVABLE_EXTENSION_EMBEDDED_PROPERTIES.get(ext_key, {}).get(ext_prop, [])
if embedded_ext_props:
for embed_ext_prop in obj['extensions'][ext_key].get(ext_prop, []):
if embed_ext_prop not in embedded_ext_props:
yield JSONError("Property '%s' in the %s property of the %s extension "
"is not one of those defined in the specification."
% (embed_ext_prop, ext_prop, ext_key), instance['id']) |
<SYSTEM_TASK:>
Return a list of the IANA Character Sets, or an empty list if the
<END_TASK>
<USER_TASK:>
Description:
def char_sets():
"""Return a list of the IANA Character Sets, or an empty list if the
IANA website is unreachable.
Store it as a function attribute so that we only build the list once.
""" |
if not hasattr(char_sets, 'setlist'):
clist = []
try:
data = requests.get('http://www.iana.org/assignments/character-'
'sets/character-sets-1.csv')
except requests.exceptions.RequestException:
return []
for line in data.iter_lines():
if line:
line = line.decode("utf-8")
if line.count(',') > 0:
vals = line.split(',')
if vals[0]:
clist.append(vals[0])
else:
clist.append(vals[1])
char_sets.setlist = clist
return char_sets.setlist |
<SYSTEM_TASK:>
Return a list of values from the IANA Service Name and Transport
<END_TASK>
<USER_TASK:>
Description:
def protocols():
"""Return a list of values from the IANA Service Name and Transport
Protocol Port Number Registry, or an empty list if the IANA website is
unreachable.
Store it as a function attribute so that we only build the list once.
""" |
if not hasattr(protocols, 'protlist'):
plist = []
try:
data = requests.get('http://www.iana.org/assignments/service-names'
'-port-numbers/service-names-port-numbers.csv')
except requests.exceptions.RequestException:
return []
for line in data.iter_lines():
if line:
line = line.decode("utf-8")
if line.count(',') > 0:
vals = line.split(',')
if vals[0]:
plist.append(vals[0])
if len(vals) > 2 and vals[2] and vals[2] not in plist:
plist.append(vals[2])
plist.append('ipv4')
plist.append('ipv6')
plist.append('ssl')
plist.append('tls')
plist.append('dns')
protocols.protlist = plist
return protocols.protlist |
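Both char_sets() and protocols() memoize their result via a function attribute, so the CSV download happens at most once per process. The same pattern in isolation, with a cheap computation standing in for the network fetch:

def expensive_list():
    if not hasattr(expensive_list, 'cache'):
        # Stand-in for a slow operation such as an HTTP request
        expensive_list.cache = [n * n for n in range(5)]
    return expensive_list.cache

print(expensive_list())  # computed on the first call
print(expensive_list())  # served from the cached attribute

One design note: because the attribute is only set on success, a failed download returns an empty list without caching it, so the next call retries the fetch.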
<SYSTEM_TASK:>
Print a formatted message to stdout prepended by spaces. Useful for
<END_TASK>
<USER_TASK:>
Description:
def print_level(log_function, fmt, level, *args):
"""Print a formatted message to stdout prepended by spaces. Useful for
printing hierarchical information, like bullet lists.
Note:
If the application is running in "Silent Mode"
(i.e., ``_SILENT == True``), this function will return
immediately and no message will be printed.
Args:
log_function: The function that will be called to output the formatted
message.
fmt (str): A Python formatted string.
        level (int): Used to determine how many spaces to print. The formula
is ``' ' * level ``.
*args: Variable length list of arguments. Values are plugged into the
format string.
Examples:
>>> print_level("%s %d", 0, "TEST", 0)
TEST 0
>>> print_level("%s %d", 1, "TEST", 1)
TEST 1
>>> print_level("%s %d", 2, "TEST", 2)
TEST 2
""" |
if _SILENT:
return
msg = fmt % args
spaces = ' ' * level
log_function("%s%s" % (spaces, msg)) |
<SYSTEM_TASK:>
Print fatal errors that occurred during validation runs.
<END_TASK>
<USER_TASK:>
Description:
def print_fatal_results(results, level=0):
"""Print fatal errors that occurred during validation runs.
""" |
print_level(logger.critical, _RED + "[X] Fatal Error: %s", level, results.error) |
<SYSTEM_TASK:>
Print JSON Schema validation errors to stdout.
<END_TASK>
<USER_TASK:>
Description:
def print_schema_results(results, level=0):
"""Print JSON Schema validation errors to stdout.
Args:
results: An instance of ObjectValidationResults.
level: The level at which to print the results.
""" |
for error in results.errors:
print_level(logger.error, _RED + "[X] %s", level, error) |
<SYSTEM_TASK:>
Print warning messages found during validation.
<END_TASK>
<USER_TASK:>
Description:
def print_warning_results(results, level=0):
"""Print warning messages found during validation.
""" |
marker = _YELLOW + "[!] "
for warning in results.warnings:
print_level(logger.warning, marker + "Warning: %s", level, warning) |
<SYSTEM_TASK:>
Print a header for the results of either a file or an object.
<END_TASK>
<USER_TASK:>
Description:
def print_results_header(identifier, is_valid):
"""Print a header for the results of either a file or an object.
""" |
print_horizontal_rule()
print_level(logger.info, "[-] Results for: %s", 0, identifier)
if is_valid:
marker = _GREEN + "[+]"
verdict = "Valid"
log_func = logger.info
else:
marker = _RED + "[X]"
verdict = "Invalid"
log_func = logger.error
print_level(log_func, "%s STIX JSON: %s", 0, marker, verdict) |
<SYSTEM_TASK:>
Print the results of validating an object.
<END_TASK>
<USER_TASK:>
Description:
def print_object_results(obj_result):
"""Print the results of validating an object.
Args:
obj_result: An ObjectValidationResults instance.
""" |
print_results_header(obj_result.object_id, obj_result.is_valid)
if obj_result.warnings:
print_warning_results(obj_result, 1)
if obj_result.errors:
print_schema_results(obj_result, 1) |
<SYSTEM_TASK:>
Ensure file objects' 'encryption_algorithm' property is from the
<END_TASK>
<USER_TASK:>
Description:
def vocab_encryption_algo(instance):
"""Ensure file objects' 'encryption_algorithm' property is from the
encryption-algo-ov vocabulary.
""" |
for key, obj in instance['objects'].items():
if 'type' in obj and obj['type'] == 'file':
try:
enc_algo = obj['encryption_algorithm']
except KeyError:
continue
if enc_algo not in enums.ENCRYPTION_ALGO_OV:
yield JSONError("Object '%s' has an 'encryption_algorithm' of "
"'%s', which is not a value in the "
"encryption-algo-ov vocabulary."
% (key, enc_algo), instance['id'],
'encryption-algo') |
<SYSTEM_TASK:>
Ensures that all SDOs being referenced by the SRO are contained
<END_TASK>
<USER_TASK:>
Description:
def enforce_relationship_refs(instance):
"""Ensures that all SDOs being referenced by the SRO are contained
    within the same bundle.""" |
if instance['type'] != 'bundle' or 'objects' not in instance:
return
rel_references = set()
"""Find and store all ids"""
for obj in instance['objects']:
if obj['type'] != 'relationship':
rel_references.add(obj['id'])
"""Check if id has been encountered"""
for obj in instance['objects']:
if obj['type'] == 'relationship':
            if obj['source_ref'] not in rel_references:
                yield JSONError("Relationship object %s makes reference to %s,"
                                " which is not found in the current bundle."
                                % (obj['id'], obj['source_ref']),
                                instance['id'], 'enforce-relationship-refs')
            if obj['target_ref'] not in rel_references:
                yield JSONError("Relationship object %s makes reference to %s,"
                                " which is not found in the current bundle."
                                % (obj['id'], obj['target_ref']),
                                instance['id'], 'enforce-relationship-refs') |
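An illustrative bundle with a dangling 'target_ref'; only the ids of non-relationship objects are collected, so the reference to the absent malware object is reported:

bundle = {
    'type': 'bundle',
    'id': 'bundle--66666666-6666-4666-8666-666666666666',
    'objects': [
        {'type': 'identity',
         'id': 'identity--aaaaaaaa-aaaa-4aaa-8aaa-aaaaaaaaaaaa'},
        {'type': 'relationship',
         'id': 'relationship--bbbbbbbb-bbbb-4bbb-8bbb-bbbbbbbbbbbb',
         'source_ref': 'identity--aaaaaaaa-aaaa-4aaa-8aaa-aaaaaaaaaaaa',
         'target_ref': 'malware--cccccccc-cccc-4ccc-8ccc-cccccccccccc'},
    ],
}
for error in enforce_relationship_refs(bundle):
    print(error)  # flags the missing malware object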
<SYSTEM_TASK:>
Ensure timestamp properties with a comparison requirement are valid.
<END_TASK>
<USER_TASK:>
Description:
def timestamp_compare(instance):
"""Ensure timestamp properties with a comparison requirement are valid.
    E.g. `modified` must be later than or equal to `created`.
""" |
compares = [('modified', 'ge', 'created')]
additional_compares = enums.TIMESTAMP_COMPARE.get(instance.get('type', ''), [])
compares.extend(additional_compares)
for first, op, second in compares:
comp = getattr(operator, op)
comp_str = get_comparison_string(op)
if first in instance and second in instance and \
not comp(instance[first], instance[second]):
msg = "'%s' (%s) must be %s '%s' (%s)"
yield JSONError(msg % (first, instance[first], comp_str, second, instance[second]),
instance['id']) |
<SYSTEM_TASK:>
Ensure cyber observable timestamp properties with a comparison
<END_TASK>
<USER_TASK:>
Description:
def observable_timestamp_compare(instance):
"""Ensure cyber observable timestamp properties with a comparison
requirement are valid.
""" |
for key, obj in instance['objects'].items():
compares = enums.TIMESTAMP_COMPARE_OBSERVABLE.get(obj.get('type', ''), [])
for first, op, second in compares:
comp = getattr(operator, op)
comp_str = get_comparison_string(op)
if first in obj and second in obj and \
not comp(obj[first], obj[second]):
msg = "In object '%s', '%s' (%s) must be %s '%s' (%s)"
yield JSONError(msg % (key, first, obj[first], comp_str, second, obj[second]),
instance['id']) |
<SYSTEM_TASK:>
Ensure keys in Language Content's 'contents' dictionary are valid
<END_TASK>
<USER_TASK:>
Description:
def language_contents(instance):
"""Ensure keys in Language Content's 'contents' dictionary are valid
language codes, and that the keys in the sub-dictionaries match the rules
for object property names.
""" |
if instance['type'] != 'language-content' or 'contents' not in instance:
return
for key, value in instance['contents'].items():
if key not in enums.LANG_CODES:
yield JSONError("Invalid key '%s' in 'contents' property must be"
" an RFC 5646 code" % key, instance['id'])
for subkey, subvalue in value.items():
if not PROPERTY_FORMAT_RE.match(subkey):
yield JSONError("'%s' in '%s' of the 'contents' property is "
"invalid and must match a valid property name"
% (subkey, key), instance['id'], 'observable-dictionary-keys') |
<SYSTEM_TASK:>
Decorator for functions that require cyber observable data.
<END_TASK>
<USER_TASK:>
Description:
def cyber_observable_check(original_function):
"""Decorator for functions that require cyber observable data.
""" |
def new_function(*args, **kwargs):
if not has_cyber_observable_data(args[0]):
return
        result = original_function(*args, **kwargs)
        # Iterate over the single result; calling the wrapped function a
        # second time would repeat all of its work
        if isinstance(result, Iterable):
            for x in result:
                yield x
new_function.__name__ = original_function.__name__
return new_function |
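A hedged sketch of applying the decorator; example_check is hypothetical, while has_cyber_observable_data and JSONError come from the surrounding module:

@cyber_observable_check
def example_check(instance):
    # Only reached when the instance actually carries observable data
    for key, obj in instance['objects'].items():
        if 'type' not in obj:
            yield JSONError("Object '%s' has no 'type' property." % key,
                            instance['id'])

Because the wrapper is itself a generator, decorated checks compose cleanly with the validator's iteration over yielded errors.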
<SYSTEM_TASK:>
Initializes a cache which the ``requests`` library will consult for
<END_TASK>
<USER_TASK:>
Description:
def init_requests_cache(refresh_cache=False):
"""
Initializes a cache which the ``requests`` library will consult for
responses, before making network requests.
:param refresh_cache: Whether the cache should be cleared out
""" |
# Cache data from external sources; used in some checks
dirs = AppDirs("stix2-validator", "OASIS")
# Create cache dir if doesn't exist
try:
os.makedirs(dirs.user_cache_dir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
requests_cache.install_cache(
cache_name=os.path.join(dirs.user_cache_dir, 'py{}cache'.format(
sys.version_info[0])),
expire_after=datetime.timedelta(weeks=1))
if refresh_cache:
clear_requests_cache() |
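Typical startup usage is a single call before validation begins; passing refresh_cache=True drops previously cached IANA responses first:

init_requests_cache()                     # install a week-long cache
# ... run validations that may consult IANA registries ...
init_requests_cache(refresh_cache=True)   # same, but clear stale entries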
<SYSTEM_TASK:>
Parse a Marathon response into an object or list of objects.
<END_TASK>
<USER_TASK:>
Description:
def _parse_response(response, clazz, is_list=False, resource_name=None):
"""Parse a Marathon response into an object or list of objects.""" |
    body = response.json()
    target = body[resource_name] if resource_name else body
if is_list:
return [clazz.from_json(resource) for resource in target]
else:
return clazz.from_json(target) |
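A self-contained sketch with stand-in classes (FakeResponse and Item are hypothetical; any object with a json() method and any class with a from_json classmethod would do):

class FakeResponse:
    def __init__(self, payload):
        self._payload = payload

    def json(self):
        return self._payload


class Item:
    @classmethod
    def from_json(cls, attrs):
        obj = cls()
        obj.__dict__.update(attrs)
        return obj


resp = FakeResponse({'apps': [{'id': '/a'}, {'id': '/b'}]})
items = _parse_response(resp, Item, is_list=True, resource_name='apps')
print([item.id for item in items])  # ['/a', '/b']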
<SYSTEM_TASK:>
Query Marathon server.
<END_TASK>
<USER_TASK:>
Description:
def _do_request(self, method, path, params=None, data=None):
"""Query Marathon server.""" |
headers = {
'Content-Type': 'application/json', 'Accept': 'application/json'}
if self.auth_token:
headers['Authorization'] = "token={}".format(self.auth_token)
response = None
servers = list(self.servers)
while servers and response is None:
server = servers.pop(0)
url = ''.join([server.rstrip('/'), path])
try:
response = self.session.request(
method, url, params=params, data=data, headers=headers,
auth=self.auth, timeout=self.timeout, verify=self.verify)
marathon.log.info('Got response from %s', server)
except requests.exceptions.RequestException as e:
marathon.log.error(
'Error while calling %s: %s', url, str(e))
if response is None:
raise NoResponseError('No remaining Marathon servers to try')
if response.status_code >= 500:
marathon.log.error('Got HTTP {code}: {body}'.format(
code=response.status_code, body=response.text.encode('utf-8')))
raise InternalServerError(response)
elif response.status_code >= 400:
marathon.log.error('Got HTTP {code}: {body}'.format(
code=response.status_code, body=response.text.encode('utf-8')))
if response.status_code == 404:
raise NotFoundError(response)
elif response.status_code == 409:
raise ConflictError(response)
else:
raise MarathonHttpError(response)
elif response.status_code >= 300:
        marathon.log.warning('Got HTTP {code}: {body}'.format(
code=response.status_code, body=response.text.encode('utf-8')))
else:
marathon.log.debug('Got HTTP {code}: {body}'.format(
code=response.status_code, body=response.text.encode('utf-8')))
return response |
<SYSTEM_TASK:>
Query Marathon server for events.
<END_TASK>
<USER_TASK:>
Description:
def _do_sse_request(self, path, params=None):
"""Query Marathon server for events.""" |
urls = [''.join([server.rstrip('/'), path]) for server in self.servers]
while urls:
url = urls.pop()
try:
# Requests does not set the original Authorization header on cross origin
# redirects. If set allow_redirects=True we may get a 401 response.
response = self.sse_session.get(
url,
params=params,
stream=True,
headers={'Accept': 'text/event-stream'},
auth=self.auth,
verify=self.verify,
allow_redirects=False
)
except Exception as e:
marathon.log.error(
                    'Error while calling %s: %s', url, str(e))
else:
if response.is_redirect and response.next:
urls.append(response.next.url)
marathon.log.debug("Got redirect to {}".format(response.next.url))
elif response.ok:
return response.iter_lines()
raise MarathonError('No remaining Marathon servers to try') |
<SYSTEM_TASK:>
Create and start an app.
<END_TASK>
<USER_TASK:>
Description:
def create_app(self, app_id, app, minimal=True):
"""Create and start an app.
:param str app_id: application ID
:param :class:`marathon.models.app.MarathonApp` app: the application to create
:param bool minimal: ignore nulls and empty collections
:returns: the created app (on success)
:rtype: :class:`marathon.models.app.MarathonApp` or False
""" |
app.id = app_id
data = app.to_json(minimal=minimal)
response = self._do_request('POST', '/v2/apps', data=data)
if response.status_code == 201:
return self._parse_response(response, MarathonApp)
else:
return False |
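A hedged end-to-end sketch, assuming a Marathon instance is reachable at the URL below:

from marathon import MarathonClient
from marathon.models.app import MarathonApp

client = MarathonClient('http://localhost:8080')
app = MarathonApp(cmd='sleep 3600', mem=16, cpus=0.1)
created = client.create_app('sleep-app', app)
print(created.id if created else 'creation failed')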