text_prompt
stringlengths 100
17.7k
⌀ | code_prompt
stringlengths 7
9.86k
⌀ |
---|---|
<SYSTEM_TASK:>
Get the current API key.
<END_TASK>
<USER_TASK:>
Description:
def get(cls):
    """Get the current API key.

    If one has not been given via 'set' the env var STEAMODD_API_KEY will
    be checked instead.

    :raises APIKeyMissingError: if no key is available from either source.
    """
    # Prefer the explicitly-set key; fall back to the env-var-backed one.
    # NOTE(review): assumes cls.__api_key_env_var already holds the value
    # read from STEAMODD_API_KEY (not the variable name) -- confirm where
    # it is populated.
    apikey = cls.__api_key or cls.__api_key_env_var
    if apikey:
        return apikey
    else:
        raise APIKeyMissingError("API key not set")
<SYSTEM_TASK:>
Make the API call again and fetch fresh data.
<END_TASK>
<USER_TASK:>
Description:
def call(self):
    """Make the API call again and fetch fresh data.

    Downloads the raw payload, decodes it as UTF-8, merges the parsed
    JSON into this object via ``self.update`` and marks it fetched.
    """
    data = self._downloader.download()
    # FIX: the original compared sys.version (a *string*) to "2.7",
    # which misorders versions like "2.10" and is fragile across
    # Python 3. Use the version_info tuple. bytes.decode gained the
    # ``errors`` keyword argument in Python 2.7.
    if sys.version_info >= (2, 7):
        data = data.decode("utf-8", errors="ignore")
    else:
        data = data.decode("utf-8")
    self.update(json.loads(data))
    self._fetched = True
<SYSTEM_TASK:>
Returns the attribute definition dict of a given attribute
<END_TASK>
<USER_TASK:>
Description:
def _attribute_definition(self, attrid):
""" Returns the attribute definition dict of a given attribute
ID, can be the name or the integer ID """ |
attrs = self._schema["attributes"]
try:
# Make a new dict to avoid side effects
return dict(attrs[attrid])
except KeyError:
attr_names = self._schema["attribute_names"]
attrdef = attrs.get(attr_names.get(str(attrid).lower()))
if not attrdef:
return None
else:
return dict(attrdef) |
<SYSTEM_TASK:>
Returns the ID and localized name of the given quality, can be either ID type
<END_TASK>
<USER_TASK:>
Description:
def _quality_definition(self, qid):
""" Returns the ID and localized name of the given quality, can be either ID type """ |
qualities = self._schema["qualities"]
try:
return qualities[qid]
except KeyError:
qid = self._schema["quality_names"].get(str(qid).lower(), 0)
return qualities.get(qid, (qid, "normal", "Normal")) |
<SYSTEM_TASK:>
Returns all attributes in the schema
<END_TASK>
<USER_TASK:>
Description:
def attributes(self):
    """Return every attribute in the schema as item_attribute
    objects, ordered by defindex."""
    ordered = sorted(self._schema["attributes"].values(),
                     key=operator.itemgetter("defindex"))
    return [item_attribute(entry) for entry in ordered]
<SYSTEM_TASK:>
Returns a localized origin name for a given ID
<END_TASK>
<USER_TASK:>
Description:
def origin_id_to_name(self, origin):
    """Map an origin ID to its localized name.

    Returns None when the argument isn't integer-like or the ID is
    unknown.
    """
    try:
        key = int(origin)
    except (ValueError, TypeError):
        return None
    return self.origins.get(key)
<SYSTEM_TASK:>
Returns a list of attributes
<END_TASK>
<USER_TASK:>
Description:
def attributes(self):
    """Return this item's attributes as item_attribute objects.

    Sorted by effect type (neutral, positive, negative; unknown types
    last) and, within an effect type, by defindex.
    """
    effect_order = {"neutral": 1, "positive": 2, "negative": 3}

    def sort_key(attr):
        # Equivalent to two stable sorts: defindex first, effect last.
        return (effect_order.get(attr.get("effect_type", "neutral"), 99),
                attr["defindex"])

    ordered = sorted(self._attributes.values(), key=sort_key)
    return [item_attribute(attr) for attr in ordered]
<SYSTEM_TASK:>
Returns a dict of classes that have the item equipped and in what slot
<END_TASK>
<USER_TASK:>
Description:
def equipped(self):
    """Map each class that has this item equipped to its slot.

    Entries with class 0 (likely an off-by-one artifact) or slot
    65535 (a sentinel Valve uses deliberately) are filtered out.
    """
    result = {}
    for entry in self._item.get("equipped", []):
        if entry["class"] != 0 and entry["slot"] != 65535:
            result[entry["class"]] = entry["slot"]
    return result
<SYSTEM_TASK:>
Returns a list of classes that _can_ use the item.
<END_TASK>
<USER_TASK:>
Description:
def equipable_classes(self):
    """List the classes that _can_ use the item.

    Falls back to the currently-equipped classes when the schema item
    doesn't declare "used_by_classes"; falsy entries are dropped.
    """
    schema_item = self._schema_item
    # dict.get evaluates its default eagerly, so self.equipped is
    # always computed here even when the key exists.
    candidates = schema_item.get("used_by_classes", self.equipped.keys())
    return [class_name for class_name in candidates if class_name]
<SYSTEM_TASK:>
Returns the item in the container, if there is one.
<END_TASK>
<USER_TASK:>
Description:
def contents(self):
    """Return the contained item as a standard item object, or None
    when the container is empty."""
    raw = self._item.get("contained_item")
    return self.__class__(raw, self._schema) if raw else None
<SYSTEM_TASK:>
The full name of the item, generated depending
<END_TASK>
<USER_TASK:>
Description:
def full_name(self):
    """
    The full name of the item, generated depending
    on things such as its quality, rank, the schema language,
    and so on.
    """
    qid, quality_str, pretty_quality_str = self.quality
    # NOTE(review): custom_name is fetched but never used below --
    # confirm whether it should feed into the result.
    custom_name = self.custom_name
    item_name = self.name
    english = (self._language == "en_US")
    rank = self.rank
    prefixed = self._schema_item.get("proper_name", False)
    prefix = ''
    suffix = ''
    pfinal = ''
    # "Proper" names carry their own article, so drop a leading "The ".
    if item_name.startswith("The ") and prefixed:
        item_name = item_name[4:]
    # Non-unique, non-normal items show their quality (e.g. "Strange").
    if quality_str != "unique" and quality_str != "normal":
        pfinal = pretty_quality_str
    if english:
        if prefixed:
            # English proper-named uniques get "The" restored as prefix.
            if quality_str == "unique":
                pfinal = "The"
        elif quality_str == "unique":
            pfinal = ''
    # Strange items display their rank name instead of the quality.
    if rank and quality_str == "strange":
        pfinal = rank["name"]
    # English places the qualifier before the name; other languages
    # append it in parentheses after the name.
    if english:
        prefix = pfinal
    elif pfinal:
        suffix = '(' + pfinal + ') ' + suffix
    return (prefix + " " + item_name + " " + suffix).strip()
<SYSTEM_TASK:>
Returns a list of all styles defined for the item
<END_TASK>
<USER_TASK:>
Description:
def available_styles(self):
    """Return the names of all styles defined for this item."""
    return [style["name"] for style in self._schema_item.get("styles", [])]
<SYSTEM_TASK:>
Returns a formatted value as a string
<END_TASK>
<USER_TASK:>
Description:
def formatted_value(self):
    """Render the attribute's value as a display string.

    Percentages are shown as their delta relative to 100%, dates as
    UTC timestamps, and whole-number additive values drop their
    fractional part.
    """
    raw = self.value
    kind = self.value_type
    shown = raw
    if kind == "percentage":
        shown = int(round(raw * 100))
        # Negative-effect percentages display how far below 100 they are.
        if self.type == "negative":
            shown = 0 - (100 - shown)
        else:
            shown -= 100
    elif kind == "additive_percentage":
        shown = int(round(raw * 100))
    elif kind == "inverted_percentage":
        shown = 100 - int(round(raw * 100))
        # Historical workaround of unclear origin; kept for parity.
        if self.type == "negative" and self.value > 1:
            shown = 0 - shown
    elif kind in ("additive", "particle_index", "account_id"):
        # Drop a redundant ".0" on whole numbers.
        if int(raw) == raw:
            shown = int(raw)
    elif kind == "date":
        shown = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(int(raw)))
    return u"{0}".format(shown)
<SYSTEM_TASK:>
Certain attributes have a user's account information
<END_TASK>
<USER_TASK:>
Description:
def account_info(self):
    """Account information attached to this attribute, e.g. for a
    gifted or crafted item.

    Returns a dict with keys 'persona' and 'id64', or None when the
    attribute carries no account information.
    """
    info = self._attribute.get("account_info")
    if not info:
        return None
    return {"persona": info.get("personaname", ""),
            "id64": info["steamid"]}
<SYSTEM_TASK:>
Returns a dict containing tags and their localized labels as values
<END_TASK>
<USER_TASK:>
Description:
def tags(self):
    """Map each of the asset's tags to its localized label, falling
    back to the raw tag when no label is known."""
    labels = self._catalog.tags
    return {tag: labels.get(tag, tag) for tag in self._asset.get("tags", [])}
<SYSTEM_TASK:>
Returns the user's vanity url if it exists, None otherwise
<END_TASK>
<USER_TASK:>
Description:
def vanity(self):
    """Return the user's vanity URL name if it exists, None otherwise.

    Vanity profiles live under /id/<name>; ID64 profiles use a
    different path and yield None.
    """
    url = self.profile_url.strip('/')
    if "/id/" in url:
        return os.path.basename(url)
    return None
<SYSTEM_TASK:>
Returns the account creation date as a localtime time.struct_time
<END_TASK>
<USER_TASK:>
Description:
def creation_date(self):
    """Return the account creation date as a localtime
    time.struct_time, or None when the profile is private."""
    created = self._prof.get("timecreated")
    return time.localtime(created) if created else None
<SYSTEM_TASK:>
Returns the user's profile level; note that this runs a separate
<END_TASK>
<USER_TASK:>
Description:
def level(self):
    """
    Return the user's profile level.

    The level isn't part of the standard player summary, so a separate
    IPlayerService.GetSteamLevel request is made (and cached back into
    self._api) the first time it's needed. Returns -1 when the lookup
    fails for any reason.
    """
    level_key = "player_level"
    response = self._api["response"]
    if level_key in response:
        return response[level_key]
    try:
        lvl = api.interface("IPlayerService").GetSteamLevel(
            steamid=self.id64)["response"][level_key]
        # Cache so repeated accesses don't re-issue the request.
        response[level_key] = lvl
        return lvl
    except Exception:
        # FIX: was a bare ``except:``, which also swallowed
        # KeyboardInterrupt/SystemExit. Best-effort -1 is preserved.
        return -1
<SYSTEM_TASK:>
Builds a profile object from a raw player summary object
<END_TASK>
<USER_TASK:>
Description:
def from_def(cls, obj):
    """Build a profile object from a raw player summary dict,
    pre-seeding its cache so no fetch is needed."""
    profile = cls(obj["steamid"])
    profile._cache = obj
    return profile
<SYSTEM_TASK:>
Called when a status is withheld
<END_TASK>
<USER_TASK:>
Description:
def on_status_withheld(self, status_id, user_id, countries):
    """Called when a status is withheld.

    Logs the withheld status; returning True keeps the stream alive.
    """
    logger.info('Status %s withheld for user %s', status_id, user_id)
    return True
<SYSTEM_TASK:>
Called when a disconnect is received
<END_TASK>
<USER_TASK:>
Description:
def on_disconnect(self, code, stream_name, reason):
    """Called when a disconnect is received.

    Logs the disconnect details; returning True keeps the stream alive.
    """
    logger.error('Disconnect message: %s %s %s', code, stream_name, reason)
    return True
<SYSTEM_TASK:>
Called when a non-200 status code is returned
<END_TASK>
<USER_TASK:>
Description:
def on_error(self, status_code):
    """Called when a non-200 status code is returned.

    Records the status code on self.error; returning False stops the
    stream.
    """
    logger.error('Twitter returned error code %s', status_code)
    self.error = status_code
    return False
<SYSTEM_TASK:>
An exception occurred in the streaming thread
<END_TASK>
<USER_TASK:>
Description:
def on_exception(self, exception):
    """An exception occurred in the streaming thread.

    Stores the exception so the supervising thread can inspect it.
    """
    logger.error('Exception from stream!', exc_info=True)
    self.streaming_exception = exception
<SYSTEM_TASK:>
Checks if the list of tracked terms has changed.
<END_TASK>
<USER_TASK:>
Description:
def check(self):
    """
    Refresh the tracked-terms set and report whether it changed.

    Returns True when terms were added or removed since the last
    check, False otherwise. The new set is stored either way.
    """
    latest = self.update_tracking_terms()
    previous = self._tracking_terms_set
    # Proper sub/superset comparisons detect removals and additions.
    # NOTE(review): a disjoint replacement (e.g. {a} -> {b}) compares
    # False on both and is reported as unchanged -- confirm intended.
    removed = previous > latest
    added = previous < latest
    if removed:
        logging.debug("Some tracking terms removed")
    elif added:
        logging.debug("Some tracking terms added")
    # Store the fresh set for the next comparison.
    self._tracking_terms_set = latest
    # A changed list means the stream has to be restarted.
    return removed or added
<SYSTEM_TASK:>
Break into a debugger if receives the SIGUSR1 signal
<END_TASK>
<USER_TASK:>
Description:
def set_debug_listener(stream):
    """Break into a debugger if the process receives SIGUSR1.

    No-op (with a warning) on platforms without SIGUSR1, e.g. Windows.
    """
    def debugger(sig, frame):
        launch_debugger(frame, stream)
    if hasattr(signal, 'SIGUSR1'):
        signal.signal(signal.SIGUSR1, debugger)
    else:
        # FIX: Logger.warn is a deprecated alias of Logger.warning.
        logger.warning("Cannot set SIGUSR1 signal for debug mode.")
<SYSTEM_TASK:>
Create the listener that prints tweets
<END_TASK>
<USER_TASK:>
Description:
def construct_listener(outfile=None):
    """Create the listener that prints tweets.

    When *outfile* is given, refuse to overwrite an existing file and
    stream into it instead of stdout.
    """
    out_stream = None
    if outfile is not None:
        if os.path.exists(outfile):
            raise IOError("File %s already exists" % outfile)
        out_stream = open(outfile, 'wb')
    return PrintingListener(out=out_stream)
<SYSTEM_TASK:>
Start and maintain the streaming connection...
<END_TASK>
<USER_TASK:>
Description:
def begin_stream_loop(stream, poll_interval):
    """Start and maintain the streaming connection, restarting after
    any error for as long as should_continue() allows."""
    while should_continue():
        try:
            stream.start_polling(poll_interval)
        except Exception:
            # Deliberate infinite restart: log and retry after a beat.
            logger.error("Exception while polling. Restarting in 1 second.",
                         exc_info=True)
            time.sleep(1)
<SYSTEM_TASK:>
Print out the current tweet rate and reset the counter
<END_TASK>
<USER_TASK:>
Description:
def print_status(self):
    """Log the current tweet rate and reset the counter."""
    count = self.received
    now = time.time()
    elapsed = now - self.since
    # Reset the window before logging so a slow log call doesn't
    # inflate the next measurement.
    self.since = now
    self.received = 0
    if elapsed > 0:
        logger.info("Receiving tweets at %s tps", count / elapsed)
<SYSTEM_TASK:>
Start polling for term updates and streaming.
<END_TASK>
<USER_TASK:>
Description:
def start_polling(self, interval):
    """
    Poll for term-list updates and keep the stream in sync.

    Loops until self.polling is cleared, restarting the stream when
    the tracked terms change and sleeping out the remainder of each
    interval (at least 0.1s) between checks.
    """
    interval = float(interval)
    self.polling = True
    # Forget the stored terms so the first check treats everything as new.
    self.term_checker.reset()
    logger.info("Starting polling for changes to the track list")
    while self.polling:
        started_at = time()
        self.update_stream()
        self.handle_exceptions()
        # Sleep whatever is left of the interval, compensating for
        # time spent in the loop body; never less than 0.1s.
        spent = time() - started_at
        sleep(max(0.1, interval - spent))
    logger.warning("Term poll ceased!")
<SYSTEM_TASK:>
Restarts the stream with the current list of tracking terms.
<END_TASK>
<USER_TASK:>
Description:
def update_stream(self):
    """
    Restart the stream when needed: after a streaming-thread failure,
    when the tracked terms change, or to launch an initial unfiltered
    stream.
    """
    restart = False
    # The streaming thread can die behind our back; clear its error
    # state and schedule a restart.
    if self.stream is not None and not self.stream.running:
        logger.warning("Stream exists but isn't running")
        self.listener.error = False
        self.listener.streaming_exception = None
        restart = True
    # A changed term list also forces a restart.
    if self.term_checker.check():
        logger.info("Terms have changed")
        restart = True
    # No stream yet, but unfiltered streaming is allowed: start one.
    if self.stream is None and self.unfiltered:
        restart = True
    if restart:
        logger.info("Restarting stream...")
        # Stop any old stream before starting a new one.
        self.stop_stream()
        self.start_stream()
<SYSTEM_TASK:>
Starts a stream with the current tracking terms
<END_TASK>
<USER_TASK:>
Description:
def start_stream(self):
    """Start a stream with the current tracking terms, or an
    unfiltered sample stream when no terms are set and unfiltered
    streaming is enabled."""
    tracking_terms = self.term_checker.tracking_terms()
    if len(tracking_terms) > 0 or self.unfiltered:
        # We have terms to track (or sampling is allowed), so build a
        # new stream.
        self.stream = tweepy.Stream(self.auth, self.listener,
                                    stall_warnings=True,
                                    timeout=90,
                                    retry_count=self.retry_count)
        if len(tracking_terms) > 0:
            logger.info("Starting new twitter stream with %s terms:", len(tracking_terms))
            logger.info("  %s", repr(tracking_terms))
            # FIX: ``async`` became a reserved word in Python 3.7, so
            # ``async=True`` is a SyntaxError there; tweepy >= 3.7
            # renamed the keyword argument to ``is_async``.
            self.stream.filter(track=tracking_terms, is_async=True,
                               languages=self.languages)
        else:
            logger.info("Starting new unfiltered stream")
            self.stream.sample(is_async=True, languages=self.languages)
<SYSTEM_TASK:>
Stops the current stream. Blocks until this is done.
<END_TASK>
<USER_TASK:>
Description:
def stop_stream(self):
    """
    Stop the current stream, if any. Blocks until the streaming
    thread has had time to wind down.
    """
    if self.stream is None:
        return
    logger.warning("Stopping twitter stream...")
    self.stream.disconnect()
    self.stream = None
    # disconnect() is asynchronous; give the thread a moment to die.
    sleep(self.STOP_TIMEOUT)
<SYSTEM_TASK:>
Apply the local presentation logic to the fetched data.
<END_TASK>
<USER_TASK:>
Description:
def enrich(self, tweet):
    """Apply the local presentation logic to the fetched data."""
    tweet = urlize_tweet(expand_tweet_urls(tweet))
    # Twitter timestamps look like "Wed Aug 27 13:08:45 +0000 2008".
    parsed = datetime.strptime(tweet['created_at'],
                               '%a %b %d %H:%M:%S +0000 %Y')
    if settings.USE_TZ:
        # Attach UTC so the value is timezone-aware under USE_TZ.
        parsed = parsed.replace(tzinfo=timezone.utc)
    tweet['datetime'] = parsed
    return tweet
<SYSTEM_TASK:>
Replace shortened URLs with long URLs in the twitter status, and add the "RT" flag.
<END_TASK>
<USER_TASK:>
Description:
def expand_tweet_urls(tweet):
    """Replace shortened URLs with long URLs in the twitter status, and add the "RT" flag.
    Should be used before urlize_tweet
    """
    if 'retweeted_status' in tweet:
        retweet = tweet['retweeted_status']
        text = 'RT @{user}: {text}'.format(user=retweet['user']['screen_name'],
                                           text=retweet['text'])
        urls = retweet['entities']['urls']
    else:
        text = tweet['text']
        urls = tweet['entities']['urls']
    # Swap each shortened link for an anchor showing the display URL.
    for url in urls:
        anchor = '<a href="%s">%s</a>' % (url['expanded_url'], url['display_url'])
        text = text.replace(url['url'], anchor)
    tweet['html'] = text
    return tweet
<SYSTEM_TASK:>
Performs a rabin-miller primality test
<END_TASK>
<USER_TASK:>
Description:
def rabin_miller(p):
    """
    Performs a rabin-miller primality test (10 random rounds).
    :param p: Number to test
    :return: Bool of whether num is prime
    """
    # Based on: https://codegolf.stackexchange.com/questions/26739/super-speedy-totient-function
    if p < 2:
        return False
    # 2 is prime; every other even number is composite.
    if p != 2 and p & 1 == 0:
        return False
    # Strip factors of two from p - 1 to get the odd part.
    odd_part = p - 1
    while odd_part & 1 == 0:
        odd_part >>= 1
    for _ in range(10):
        witness = random.randrange(p - 1) + 1
        exponent = odd_part
        mod = pow(witness, exponent, p)
        # Square repeatedly until we hit a terminating condition.
        while exponent != p - 1 and mod != 1 and mod != p - 1:
            mod = (mod * mod) % p
            exponent *= 2
        if mod != p - 1 and exponent % 2 == 0:
            return False
    return True
<SYSTEM_TASK:>
Split a string on a regex that only matches zero-width strings
<END_TASK>
<USER_TASK:>
Description:
def zero_width_split(pattern, string):
    """
    Split a string on a regex that only matches zero-width strings
    :param pattern: Regex pattern that matches zero-width strings
    :param string: String to split on.
    :return: Split array
    """
    # Each zero-width match is a cut point; pair the end of one match
    # with the start of the next to delimit the pieces.
    cuts = [(m.start(), m.end()) for m in
            regex.finditer(pattern, string, regex.VERBOSE)]
    piece_starts = [0] + [end for _, end in cuts]
    piece_ends = [start for start, _ in cuts] + [len(string)]
    return [string[a:b] for a, b in zip(piece_starts, piece_ends)]
<SYSTEM_TASK:>
Rolls a group of dice in 2d6, 3d10, d12, etc. format
<END_TASK>
<USER_TASK:>
Description:
def roll_group(group):
    """
    Rolls a group of dice in 2d6, 3d10, d12, etc. format
    :param group: String of dice group
    :return: Array of results
    """
    parsed = regex.match(r'^(\d*)d(\d+)$', group, regex.IGNORECASE)
    # A bare "d6" means one die.
    count = int(parsed[1]) if parsed[1] != '' else 1
    sides = int(parsed[2])
    assert count > 0
    return [random.randint(1, sides) for _ in range(count)]
<SYSTEM_TASK:>
Returns the number of elements in a list that pass a comparison
<END_TASK>
<USER_TASK:>
Description:
def num_equal(result, operator, comparator):
    """
    Count the elements of a dice-roll result that pass a comparison.

    :param result: The list of results of a dice roll
    :param operator: Comparison operator as a string: '<', '>' or '='
    :param comparator: The value to compare against
    :return: Number of elements satisfying the comparison
    :raises ValueError: on an unknown operator
    """
    # FIX: the docstring previously documented '+', '-', '*' although
    # the implementation accepts '<', '>', '='; also raise ValueError
    # with a message instead of bare.
    if operator == '<':
        return len([x for x in result if x < comparator])
    elif operator == '>':
        return len([x for x in result if x > comparator])
    elif operator == '=':
        return len([x for x in result if x == comparator])
    else:
        raise ValueError("Unknown comparison operator: %r" % (operator,))
<SYSTEM_TASK:>
Evaluates an expression
<END_TASK>
<USER_TASK:>
Description:
def eval(self, expr):
    """
    Evaluates an expression
    :param expr: Expression to evaluate
    :return: Result of expression
    """
    # Keep the raw source text around for nicer error messages.
    self.expr = expr
    tree = ast.parse(expr.strip())
    # Evaluate the first (and only) expression statement in the parse.
    return self._eval(tree.body[0].value)
<SYSTEM_TASK:>
Evaluate a numerical node
<END_TASK>
<USER_TASK:>
Description:
def _eval_num(self, node):
"""
Evaluate a numerical node
:param node: Node to eval
:return: Result of node
""" |
if self.floats:
return node.n
else:
return int(node.n) |
<SYSTEM_TASK:>
Rolls dicebag and sets last_roll and last_explanation to roll results
<END_TASK>
<USER_TASK:>
Description:
def roll_dice(self):
    """
    Roll this dicebag's stored roll, updating last_roll and
    last_explanation with the outcome.
    :return: Roll results.
    """
    # Delegate to the module-level roll_dice() with our settings.
    outcome = roll_dice(self.roll, floats=self.floats,
                        functions=self.functions)
    self._last_roll = outcome[0]
    self._last_explanation = outcome[1]
    return self.last_roll, self.last_explanation
<SYSTEM_TASK:>
Setter for roll, verifies the roll is valid
<END_TASK>
<USER_TASK:>
Description:
def roll(self, value):
    """
    Setter for roll; verifies the roll is a valid dice-notation string.
    :param value: Roll
    :return: None
    :raises TypeError: if value is not a string
    :raises ValueError: if value does not parse as dice notation
    """
    # FIX: use isinstance instead of ``type(value) != str`` so str
    # subclasses are accepted too.
    if not isinstance(value, str):
        raise TypeError('Dice roll must be a string in dice notation')
    try:
        # Parse once up front so invalid rolls fail fast here.
        roll_dice(value)
    except Exception as e:
        raise ValueError('Dice roll specified was not a valid diceroll.\n%s\n' % str(e))
    else:
        self._roll = value
<SYSTEM_TASK:>
An iterator of valid checks that are in the installed checkers package.
<END_TASK>
<USER_TASK:>
Description:
def checks():
    """
    An iterator of valid checks that are in the installed checkers package.
    yields check name, check module
    """
    checkers_dir = os.path.dirname(checkers.__file__)
    for _, mod_name, _ in pkgutil.iter_modules([checkers_dir]):
        mod = importlib.import_module("checkers.{0}".format(mod_name))
        # Only modules exposing a real ``run`` function count as checks.
        run_attr = getattr(mod, 'run', None)
        if isinstance(run_attr, types.FunctionType):
            yield getattr(mod, 'CHECK_NAME', mod_name), mod
<SYSTEM_TASK:>
Validate the commit diff.
<END_TASK>
<USER_TASK:>
Description:
def files_to_check(commit_only):
    """
    Validate the commit diff.

    Copies the staged changes into a temporary folder (also published
    via the TEMP_FOLDER global), yields the file list, and removes the
    folder when the caller is done.
    """
    global TEMP_FOLDER
    scratch = tempfile.mkdtemp()
    TEMP_FOLDER = scratch
    files = get_files(commit_only=commit_only, copy_dest=scratch)
    try:
        yield files
    finally:
        # Always clean up the temporary copies.
        shutil.rmtree(scratch)
<SYSTEM_TASK:>
Run the configured code checks.
<END_TASK>
<USER_TASK:>
Description:
def main(commit_only=True):
    """
    Run the configured code checks.
    Return system exit code.
        1 - reject commit
        0 - accept commit
    """
    global TEMP_FOLDER
    exit_code = 0
    hook_checks = HookConfig(get_config_file())
    with files_to_check(commit_only) as files:
        for name, mod in checks():
            # Checks are off unless enabled by config or their own DEFAULT.
            default = getattr(mod, 'DEFAULT', 'off')
            if hook_checks.is_enabled(name, default=default):
                # Some checks need support files (e.g. config files)
                # copied alongside the staged sources.
                if hasattr(mod, 'REQUIRED_FILES'):
                    for filename in mod.REQUIRED_FILES:
                        if os.path.isfile(filename):
                            try:
                                shutil.copy(filename, TEMP_FOLDER)
                            except shutil.Error:
                                # Copied over by a previous check
                                continue
                args = hook_checks.arguments(name)
                tmp_files = [os.path.join(TEMP_FOLDER, f) for f in files]
                if args:
                    errors = mod.run(tmp_files, TEMP_FOLDER, args)
                else:
                    errors = mod.run(tmp_files, TEMP_FOLDER)
                if errors:
                    title_print("Checking {0}".format(name))
                    # Strip the temp-folder prefix so reported paths
                    # read as repo-relative.
                    print((errors.replace(TEMP_FOLDER + "/", '')))
                    print("")
                    exit_code = 1
    if exit_code == 1:
        title_print("Rejecting commit")
    return exit_code
<SYSTEM_TASK:>
Create copies of the given list of files in the destination given.
<END_TASK>
<USER_TASK:>
Description:
def create_fake_copies(files, destination):
    """
    Create copies of the given list of files in the destination given.

    Creates copies of the actual files to be committed using
    ``git show :<filename>`` (i.e. the staged content, not the working
    tree), mirroring each file's directory layout under *destination*.

    Return a list of destination files.
    """
    dest_files = []
    for filename in files:
        leaf_dest_folder = os.path.join(destination, os.path.dirname(filename))
        if not os.path.exists(leaf_dest_folder):
            os.makedirs(leaf_dest_folder)
        dest_file = os.path.join(destination, filename)
        # FIX: the format string had lost its {filename} placeholder,
        # so the command ran "git show :" with no path and wrote
        # nothing useful into the copy.
        bash("git show :{filename} > {dest_file}".format(
            filename=filename,
            dest_file=dest_file)
        )
        dest_files.append(os.path.realpath(dest_file))
    return dest_files
<SYSTEM_TASK:>
Check isort errors in the code base.
<END_TASK>
<USER_TASK:>
Description:
def run(files, temp_folder):
    """Check isort errors in the code base.
    For the --quiet option, at least isort >= 4.1.1 is required.
    https://github.com/timothycrosley/isort/blob/develop/CHANGELOG.md#411
    """
    try:
        import isort  # NOQA
    except ImportError:
        return NO_ISORT_MSG
    py_files = filter_python_files(files)
    # --quiet suppresses the isort banner (printed by default >= 4.1).
    command = 'isort -df --quiet {0}'.format(' '.join(py_files))
    return bash(command).value()
<SYSTEM_TASK:>
Helper function to parse the output file of SDPA.
<END_TASK>
<USER_TASK:>
Description:
def read_sdpa_out(filename, solutionmatrix=False, status=False,
                  sdp=None):
    """Helper function to parse the output file of SDPA.
    :param filename: The name of the SDPA output file.
    :type filename: str.
    :param solutionmatrix: Optional parameter for retrieving the solution.
    :type solutionmatrix: bool.
    :param status: Optional parameter for retrieving the status.
    :type status: bool.
    :param sdp: Optional parameter to add the solution to a
                relaxation.
    :type sdp: sdp.
    :returns: tuple of two floats and optionally two lists of `numpy.array` and
              a status string
    """
    primal = None
    dual = None
    x_mat = None
    y_mat = None
    status_string = None
    with open(filename, 'r') as file_:
        for line in file_:
            # Objective values appear as e.g. "objValPrimal = <float>";
            # the value is the third whitespace-separated token.
            if line.find("objValPrimal") > -1:
                primal = float((line.split())[2])
            if line.find("objValDual") > -1:
                dual = float((line.split())[2])
            if solutionmatrix:
                # Solution matrices follow the "xMat =" / "yMat =" markers.
                if line.find("xMat =") > -1:
                    x_mat = parse_solution_matrix(file_)
                if line.find("yMat =") > -1:
                    y_mat = parse_solution_matrix(file_)
            if line.find("phase.value") > -1:
                # Map SDPA's phase codes to readable statuses. The test
                # order matters because the codes share substrings
                # (e.g. "pdOPT" must be matched before the *FEAS codes).
                if line.find("pdOPT") > -1:
                    status_string = 'optimal'
                elif line.find("pFEAS") > -1:
                    status_string = 'primal feasible'
                elif line.find("pdFEAS") > -1:
                    status_string = 'primal-dual feasible'
                elif line.find("dFEAS") > -1:
                    status_string = 'dual feasible'
                elif line.find("INF") > -1:
                    status_string = 'infeasible'
                elif line.find("UNBD") > -1:
                    status_string = 'unbounded'
                else:
                    status_string = 'unknown'
    # Any missing piece marks the whole result invalid.
    for var in [primal, dual, status_string]:
        if var is None:
            status_string = 'invalid'
            break
    if solutionmatrix:
        for var in [x_mat, y_mat]:
            if var is None:
                status_string = 'invalid'
                break
    # Optionally attach everything to the relaxation object.
    if sdp is not None:
        sdp.primal = primal
        sdp.dual = dual
        sdp.x_mat = x_mat
        sdp.y_mat = y_mat
        sdp.status = status_string
    # Return shape depends on which optional pieces were requested.
    if solutionmatrix and status:
        return primal, dual, x_mat, y_mat, status_string
    elif solutionmatrix:
        return primal, dual, x_mat, y_mat
    elif status:
        return primal, dual, status_string
    else:
        return primal, dual
<SYSTEM_TASK:>
Helper function to write out the SDP problem to a temporary
<END_TASK>
<USER_TASK:>
Description:
def solve_with_sdpa(sdp, solverparameters=None):
    """Helper function to write out the SDP problem to a temporary
    file, call the solver, and parse the output.
    :param sdp: The SDP relaxation to be solved.
    :type sdp: :class:`ncpol2sdpa.sdp`.
    :param solverparameters: Optional parameters to SDPA.
    :type solverparameters: dict of str.
    :returns: tuple of float and list -- the primal and dual solution of the
              SDP, respectively, and a status string.
    """
    solverexecutable = detect_sdpa(solverparameters)
    if solverexecutable is None:
        raise OSError("SDPA is not in the path or the executable provided is" +
                      " not correct")
    primal, dual = 0, 0
    # NOTE(review): grabbing a name via NamedTemporaryFile + close is
    # racy (another process may claim the name before SDPA writes it)
    # -- confirm this is acceptable here.
    tempfile_ = tempfile.NamedTemporaryFile()
    tmp_filename = tempfile_.name
    tempfile_.close()
    tmp_dats_filename = tmp_filename + ".dat-s"
    tmp_out_filename = tmp_filename + ".out"
    write_to_sdpa(sdp, tmp_dats_filename)
    command_line = [solverexecutable, "-ds", tmp_dats_filename,
                    "-o", tmp_out_filename]
    if solverparameters is not None:
        for key, value in list(solverparameters.items()):
            if key == "executable":
                # Already consumed by detect_sdpa above.
                continue
            elif key == "paramsfile":
                command_line.extend(["-p", value])
            else:
                raise ValueError("Unknown parameter for SDPA: " + key)
    if sdp.verbose < 1:
        # Quiet mode: discard the solver's console output.
        with open(os.devnull, "w") as fnull:
            call(command_line, stdout=fnull, stderr=fnull)
    else:
        call(command_line)
    primal, dual, x_mat, y_mat, status = read_sdpa_out(tmp_out_filename, True,
                                                       True)
    # Keep the intermediate files only at high verbosity (for debugging).
    if sdp.verbose < 2:
        os.remove(tmp_dats_filename)
        os.remove(tmp_out_filename)
    # Add the relaxation's constant term back onto both objectives.
    return primal+sdp.constant_term, \
        dual+sdp.constant_term, x_mat, y_mat, status
<SYSTEM_TASK:>
Convert the SDP relaxation to a human-readable format.
<END_TASK>
<USER_TASK:>
Description:
def convert_to_human_readable(sdp):
    """Convert the SDP relaxation to a human-readable format.
    :param sdp: The SDP relaxation to write.
    :type sdp: :class:`ncpol2sdpa.sdp`.
    :returns: tuple of the objective function in a string and a matrix of
              strings as the symbolic representation of the moment matrix
    """
    objective = ""
    # NOTE(review): indices_in_objective is collected but never
    # returned or read -- confirm whether it is still needed.
    indices_in_objective = []
    # Build the objective string term by term; monomial_index maps
    # monomials to 1-based variable indices, hence the i+1 lookup.
    for i, tmp in enumerate(sdp.obj_facvar):
        candidates = [key for key, v in
                      sdp.monomial_index.items() if v == i+1]
        if len(candidates) > 0:
            monomial = convert_monomial_to_string(candidates[0])
        else:
            monomial = ""
        if tmp > 0:
            # Positive coefficients need an explicit '+' sign.
            objective += "+"+str(tmp)+monomial
            indices_in_objective.append(i)
        elif tmp < 0:
            objective += str(tmp)+monomial
            indices_in_objective.append(i)
    # Precompute per-block offsets: row_offsets indexes into the
    # flattened sparse rows, block_offset into the dense output matrix.
    matrix_size = 0
    cumulative_sum = 0
    row_offsets = [0]
    block_offset = [0]
    for bs in sdp.block_struct:
        matrix_size += abs(bs)
        cumulative_sum += bs ** 2
        row_offsets.append(cumulative_sum)
        block_offset.append(matrix_size)
    # Start from an all-"0" dense matrix of strings.
    matrix = []
    for i in range(matrix_size):
        matrix_line = ["0"] * matrix_size
        matrix.append(matrix_line)
    # Walk the sparse F structure and render each stored entry.
    for row in range(len(sdp.F.rows)):
        if len(sdp.F.rows[row]) > 0:
            col_index = 0
            for k in sdp.F.rows[row]:
                value = sdp.F.data[row][col_index]
                col_index += 1
                block_index, i, j = convert_row_to_sdpa_index(
                    sdp.block_struct, row_offsets, row)
                # Recover the monomial belonging to variable index k.
                candidates = [key for key, v in
                              sdp.monomial_index.items()
                              if v == k]
                if len(candidates) > 0:
                    monomial = convert_monomial_to_string(candidates[0])
                else:
                    monomial = ""
                offset = block_offset[block_index]
                if matrix[offset+i][offset+j] == "0":
                    matrix[offset+i][offset+j] = ("%s%s" % (value, monomial))
                else:
                    # Append subsequent terms with an explicit sign.
                    if value.real > 0:
                        matrix[offset+i][offset+j] += ("+%s%s" % (value,
                                                                  monomial))
                    else:
                        matrix[offset+i][offset+j] += ("%s%s" % (value,
                                                                 monomial))
    return objective, matrix
<SYSTEM_TASK:>
Write the SDP relaxation to a human-readable format.
<END_TASK>
<USER_TASK:>
Description:
def write_to_human_readable(sdp, filename):
    """Write the SDP relaxation to a human-readable format.

    :param sdp: The SDP relaxation to write.
    :type sdp: :class:`ncpol2sdpa.sdp`.
    :param filename: The name of the file.
    :type filename: str.
    """
    objective, matrix = convert_to_human_readable(sdp)
    # FIX: use a context manager so the file handle is closed even if
    # a write fails part-way through (the original leaked it on error).
    with open(filename, 'w') as f:
        f.write("Objective:" + objective + "\n")
        for matrix_line in matrix:
            # Render each row as comma-separated entries without
            # brackets or quote characters.
            f.write(str(list(matrix_line)).replace('[', '').replace(']', '')
                    .replace('\'', ''))
            f.write('\n')
<SYSTEM_TASK:>
Adds bytes to be internal buffer to be read
<END_TASK>
<USER_TASK:>
Description:
def unget_bytes(self, string):
    """Push bytes onto the internal buffer to be read later.

    This method is for reporting bytes from an in_stream read not
    initiated by this Input object. The input is stored as one-byte
    slices so downstream processing sees individual bytes.
    """
    one_byte_chunks = (string[i:i + 1] for i in range(len(string)))
    self.unprocessed_bytes.extend(one_byte_chunks)
<SYSTEM_TASK:>
Returns tuple of whether stdin is ready to read and an event.
<END_TASK>
<USER_TASK:>
Description:
def _wait_for_read_ready_or_timeout(self, timeout):
    """Returns tuple of whether stdin is ready to read and an event.
    If an event is returned, that event is more pressing than reading
    bytes on stdin to create a keyboard input event.
    If stdin is ready, either there are bytes to read or a SIGTSTP
    triggered by dsusp has been received
    """
    remaining_timeout = timeout
    t0 = time.time()
    while True:
        try:
            # Wait on stdin plus any registered wake-up pipes.
            (rs, _, _) = select.select(
                [self.in_stream.fileno()] + self.readers,
                [], [], remaining_timeout)
            if not rs:
                # select timed out with nothing ready.
                return False, None
            r = rs[0]  # if there's more than one, get it in the next loop
            if r == self.in_stream.fileno():
                return True, None
            else:
                # A wake-up pipe fired: drain it, then deliver any
                # queued interrupting event.
                os.read(r, 1024)
                if self.queued_interrupting_events:
                    return False, self.queued_interrupting_events.pop(0)
                elif remaining_timeout is not None:
                    # Keep waiting for whatever is left of the timeout.
                    remaining_timeout = max(0, t0 + timeout - time.time())
                    continue
                else:
                    continue
        except select.error:
            # select was interrupted by a signal; report a pending
            # SIGINT as an event, otherwise retry with a reduced timeout.
            if self.sigints:
                return False, self.sigints.pop()
            if remaining_timeout is not None:
                remaining_timeout = max(timeout - (time.time() - t0), 0)
<SYSTEM_TASK:>
Returns an event or None if no events occur before timeout.
<END_TASK>
<USER_TASK:>
Description:
def send(self, timeout=None):
    """Return an event, or None if no event occurs before timeout."""
    if not (self.sigint_event and is_main_thread()):
        return self._send(timeout)
    # On the main thread, temporarily install our SIGINT handler so
    # Ctrl-C is delivered as an event instead of KeyboardInterrupt.
    with ReplacedSigIntHandler(self.sigint_handler):
        return self._send(timeout)
<SYSTEM_TASK:>
Returns the number of characters read and adds them to self.unprocessed_bytes
<END_TASK>
<USER_TASK:>
Description:
def _nonblocking_read(self):
    """Returns the number of characters read and adds them to self.unprocessed_bytes.

    Returns 0 when no data is available instead of blocking.
    """
    # Temporarily switch the stream to non-blocking mode so an empty
    # pipe yields 0 instead of hanging.
    with Nonblocking(self.in_stream):
        if PY3:
            try:
                data = os.read(self.in_stream.fileno(), READ_SIZE)
            except BlockingIOError:
                # Nothing available right now.
                return 0
            if data:
                # Store as individual one-byte slices, since iterating
                # Python 3 bytes yields ints rather than bytes.
                self.unprocessed_bytes.extend(data[i:i+1] for i in range(len(data)))
                return len(data)
            else:
                return 0
        else:
            try:
                data = os.read(self.in_stream.fileno(), READ_SIZE)
            except OSError:
                # On Python 2 a would-block read raises OSError (EAGAIN).
                return 0
            else:
                # Python 2 str iteration already yields single bytes.
                self.unprocessed_bytes.extend(data)
                return len(data)
<SYSTEM_TASK:>
Returns a callback that creates events.
<END_TASK>
<USER_TASK:>
Description:
def event_trigger(self, event_type):
    """Return a callback that constructs and enqueues events.

    Calling the returned function with keyword arguments builds an
    ``event_type(**kwargs)`` instance and appends it to
    self.queued_events, which is checked the next time an event is
    requested.
    """
    def enqueue(**kwargs):
        event = event_type(**kwargs)
        self.queued_events.append(event)
    return enqueue
<SYSTEM_TASK:>
Returns a callback that schedules events for the future.
<END_TASK>
<USER_TASK:>
Description:
def scheduled_event_trigger(self, event_type):
    """Return a callback that schedules events for a future time.

    Calling the returned function with a ``when`` timestamp plus keyword
    arguments appends ``(when, event_type(when=when, **kwargs))`` to
    self.queued_scheduled_events, which is consulted the next time an
    event is requested.
    """
    def schedule(when, **kwargs):
        event = event_type(when=when, **kwargs)
        self.queued_scheduled_events.append((when, event))
    return schedule
<SYSTEM_TASK:>
Returns a callback to creates events, interrupting current event requests.
<END_TASK>
<USER_TASK:>
Description:
def threadsafe_event_trigger(self, event_type):
    """Return a callback that creates events, interrupting waits.

    The returned function enqueues an ``event_type(**kwargs)`` event
    and writes to a pipe whose read end has been registered with this
    object's readers; a concurrently blocked event request therefore
    wakes up and delivers the event, otherwise the event simply waits
    in the queue for the next request.
    """
    read_fd, write_fd = os.pipe()
    self.readers.append(read_fd)

    def interrupt(**kwargs):
        # TODO: use a threadsafe queue instead of a plain list
        self.queued_interrupting_events.append(event_type(**kwargs))
        logger.warning('added event to events list %r', self.queued_interrupting_events)
        # Wake up any select() currently waiting on the read end.
        os.write(write_fd, b'interrupting event!')
    return interrupt
<SYSTEM_TASK:>
Call a solver on the SDP relaxation. Upon successful solution, it
<END_TASK>
<USER_TASK:>
Description:
def solve_sdp(sdp, solver=None, solverparameters=None):
    """Call a solver on the SDP relaxation. Upon successful solution, it
    returns the primal and dual objective values along with the solution
    matrices. The results (and the solution time and status) are also
    stored on the ``sdp`` object.

    :param sdp: The SDP relaxation to be solved.
    :type sdp: :class:`ncpol2sdpa.SdpRelaxation`.
    :param solver: The solver to be called, either `None`, "sdpa", "mosek",
                   "cvxpy", "scs", or "cvxopt". The default is `None`,
                   which triggers autodetect.
    :type solver: str.
    :param solverparameters: Parameters to be passed to the solver. Actual
                             options depend on the solver:

                             SDPA:

                               - `"executable"`:
                                 Specify the executable for SDPA. E.g.,
                                 `"executable":"/usr/local/bin/sdpa"`, or
                                 `"executable":"sdpa_gmp"`
                               - `"paramsfile"`: Specify the parameter file

                             Mosek:
                               Refer to the Mosek documentation. All
                               arguments are passed on.

                             Cvxopt:
                               Refer to the PICOS documentation. All
                               arguments are passed on.

                             Cvxpy:
                               Refer to the Cvxpy documentation. All
                               arguments are passed on.

                             SCS:
                               Refer to the Cvxpy documentation. All
                               arguments are passed on.
    :type solverparameters: dict of str.
    :returns: tuple of the primal and dual optimum, and the solutions for the
              primal and dual.
    :rtype: (float, float, list of `numpy.array`, list of `numpy.array`)
    """
    solvers = autodetect_solvers(solverparameters)
    # Normalize the requested solver name for case-insensitive matching.
    solver = solver.lower() if solver is not None else solver
    if solvers == []:
        raise Exception("Could not find any SDP solver. Please install SDPA," +
                        " Mosek, Cvxpy, or Picos with Cvxopt")
    elif solver is not None and solver not in solvers:
        print("Available solvers: " + str(solvers))
        if solver == "cvxopt":
            # Distinguish "cvxopt missing" from "cvxopt present but its
            # Picos front-end missing" to give a more helpful error.
            try:
                import cvxopt
            except ImportError:
                pass
            else:
                raise Exception("Cvxopt is detected, but Picos is not. "
                                "Please install Picos to use Cvxopt")
        raise Exception("Could not detect requested " + solver)
    elif solver is None:
        solver = solvers[0]
    primal, dual, x_mat, y_mat, status = None, None, None, None, None
    tstart = time.time()
    if solver == "sdpa":
        primal, dual, x_mat, y_mat, status = \
            solve_with_sdpa(sdp, solverparameters)
    elif solver == "cvxpy":
        primal, dual, x_mat, y_mat, status = \
            solve_with_cvxpy(sdp, solverparameters)
    elif solver == "scs":
        # SCS is driven through the Cvxpy front-end; force its solver
        # option without mutating the caller's parameter dict.
        if solverparameters is None:
            solverparameters_ = {"solver": "SCS"}
        else:
            solverparameters_ = solverparameters.copy()
            solverparameters_["solver"] = "SCS"
        primal, dual, x_mat, y_mat, status = \
            solve_with_cvxpy(sdp, solverparameters_)
    elif solver == "mosek":
        primal, dual, x_mat, y_mat, status = \
            solve_with_mosek(sdp, solverparameters)
    elif solver == "cvxopt":
        primal, dual, x_mat, y_mat, status = \
            solve_with_cvxopt(sdp, solverparameters)
        # We have to compensate for the equality constraints
        for constraint in sdp.constraints[sdp._n_inequalities:]:
            idx = sdp._constraint_to_block_index[constraint]
            sdp._constraint_to_block_index[constraint] = (idx[0],)
    else:
        # Fixed typo in the error message ("Unkown" -> "Unknown").
        raise Exception("Unknown solver: " + solver)
    sdp.solution_time = time.time() - tstart
    sdp.primal = primal
    sdp.dual = dual
    sdp.x_mat = x_mat
    sdp.y_mat = y_mat
    sdp.status = status
    return primal, dual, x_mat, y_mat
<SYSTEM_TASK:>
Given a solution of the dual problem, it returns the SOS
<END_TASK>
<USER_TASK:>
Description:
def get_sos_decomposition(sdp, y_mat=None, threshold=0.0):
    """Given a solution of the dual problem, return the SOS
    decomposition.

    :param sdp: The SDP relaxation to be solved.
    :type sdp: :class:`ncpol2sdpa.sdp`.
    :param y_mat: Optional parameter providing the dual solution of the
                  moment matrix. If not provided, the solution is extracted
                  from the sdp object.
    :type y_mat: :class:`numpy.array`.
    :param threshold: Optional parameter for specifying the threshold value
                      below which the eigenvalues and entries of the
                      eigenvectors are disregarded.
    :type threshold: float.
    :returns: The SOS decomposition of [sigma_0, sigma_1, ..., sigma_m]
    :rtype: list of :class:`sympy.core.exp.Expr`.
    """
    if len(sdp.monomial_sets) != 1:
        raise Exception("Cannot automatically match primal and dual " +
                        "variables.")
    elif len(sdp.y_mat[1:]) != len(sdp.constraints):
        raise Exception("Cannot automatically match constraints with blocks " +
                        "in the dual solution.")
    elif sdp.status == "unsolved" and y_mat is None:
        raise Exception("The SDP relaxation is unsolved and dual solution " +
                        "is not provided!")
    elif sdp.status != "unsolved" and y_mat is None:
        y_mat = sdp.y_mat
    sos = []
    for y_mat_block in y_mat:
        term = 0
        # Each positive eigenvalue/eigenvector pair of the dual block
        # contributes val * (v . monomials)^2 to the SOS certificate.
        vals, vecs = np.linalg.eigh(y_mat_block)
        for j, val in enumerate(vals):
            if val < -0.001:
                # Bug fix: 'val' is a float and cannot be concatenated
                # with str; previously this line raised TypeError
                # instead of the intended diagnostic.
                raise Exception("Large negative eigenvalue: " + str(val) +
                                ". Matrix cannot be positive.")
            elif val > 0:
                sub_term = 0
                for i, entry in enumerate(vecs[:, j]):
                    sub_term += entry * sdp.monomial_sets[0][i]
                term += val * sub_term**2
        term = expand(term)
        new_term = 0
        if term.is_Mul:
            elements = [term]
        else:
            elements = term.as_coeff_mul()[1][0].as_coeff_add()[1]
        # Drop terms whose scalar coefficient falls below the threshold.
        for element in elements:
            _, coeff = separate_scalar_factor(element)
            if abs(coeff) > threshold:
                new_term += element
        sos.append(new_term)
    return sos
<SYSTEM_TASK:>
Given a solution of the dual problem and a monomial, it returns the
<END_TASK>
<USER_TASK:>
Description:
def extract_dual_value(sdp, monomial, blocks=None):
    """Given a solution of the dual problem and a monomial, return the
    inner product of the corresponding coefficient matrix and the dual
    solution. It can be restricted to certain blocks.

    :param sdp: The SDP relaxation.
    :type sdp: :class:`ncpol2sdpa.sdp`.
    :param monomial: The monomial for which the value is requested.
    :type monomial: :class:`sympy.core.exp.Expr`.
    :param blocks: Optional parameter to specify the blocks to be included.
    :type blocks: list of `int`.
    :returns: The value of the monomial in the solved relaxation.
    :rtype: float.
    """
    if sdp.status == "unsolved":
        raise Exception("The SDP relaxation is unsolved!")
    if blocks is None:
        blocks = [i for i, _ in enumerate(sdp.block_struct)]
    if is_number_type(monomial):
        # Constants map to column 0 of F.
        index = 0
    else:
        index = sdp.monomial_index[monomial]
    # Offset of each block's first row in the flattened F matrix.
    row_offsets = [0]
    cumulative_sum = 0
    for block_size in sdp.block_struct:
        cumulative_sum += block_size ** 2
        row_offsets.append(cumulative_sum)
    result = 0
    # sdp.F is a lil_matrix: F.rows[row] lists the stored column indices
    # of that row and F.data[row] holds the matching values.
    for row in range(len(sdp.F.rows)):
        for col_index, k in enumerate(sdp.F.rows[row]):
            if k != index:
                continue
            # Bug fix: col_index must be the position of k within
            # rows[row]. The previous code only incremented it after a
            # match, so data[row][0] was read whenever the monomial's
            # column was not the first stored entry of the row.
            value = sdp.F.data[row][col_index]
            block_index, i, j = convert_row_to_sdpa_index(
                sdp.block_struct, row_offsets, row)
            if block_index in blocks:
                result += -value*sdp.y_mat[block_index][i][j]
    return result
<SYSTEM_TASK:>
load_module is always called with the same argument as finder's
<END_TASK>
<USER_TASK:>
Description:
def load_module(self, fullname):
    """
    load_module is always called with the same argument as finder's
    find_module, see "How Import Works"

    Loads the module through the parent loader, then overlays the
    key/value pairs from the JSON config file onto the module's
    namespace.

    :raises InvalidJsonError: if the config file is not valid JSON.
    """
    mod = super(JsonLoader, self).load_module(fullname)
    try:
        with codecs.open(self.cfg_file, 'r', 'utf-8') as f:
            mod.__dict__.update(json.load(f))
    except ValueError:
        # Bug fix: the error used to be recorded on self.e/self.err_msg
        # and checked *after* the try block. That state persisted across
        # calls, so a failure on one load made every later (successful)
        # load raise too, and self.e could be unset on first use. Raise
        # directly here instead; the attributes are still set for any
        # external code that inspects them.
        self.e = "ValueError"
        self.err_msg = sys.exc_info()[1]
        err_msg = "\nJson file not valid: "
        err_msg += self.cfg_file + '\n'
        err_msg += str(self.err_msg)
        raise InvalidJsonError(err_msg)
    return mod
<SYSTEM_TASK:>
Returns a message to be displayed if game is over, else None
<END_TASK>
<USER_TASK:>
Description:
def tick(self):
    """Advance the game by one turn.

    Returns a game-over message string if the game ended this turn,
    otherwise returns None implicitly.
    """
    # Every NPC takes a step toward the player.
    for npc in self.npcs:
        step = npc.towards(self.player)
        self.move_entity(npc, *step)
    # Resolve collisions: if the player shares a square with anything,
    # the game is lost; any other colliding pair kills both entities.
    for first, second in itertools.combinations(self.entities, 2):
        if (first.x, first.y) != (second.x, second.y):
            continue
        if self.player in (first, second):
            return 'you lost on turn %d' % self.turn
        first.die()
        second.die()
    if all(npc.speed == 0 for npc in self.npcs):
        return 'you won on turn %d' % self.turn
    self.turn += 1
    # Every 20 turns the player's speed value drops (floor of 1) and the
    # on-screen speed indicator is refreshed.
    # NOTE(review): presumably a lower speed number means faster turns;
    # confirm against the scheduler that consumes .speed.
    if self.turn % 20 == 0:
        self.player.speed = max(1, self.player.speed - 1)
        self.player.display = on_blue(green(bold(unicode_str(self.player.speed))))
<SYSTEM_TASK:>
Returns a FSArray of the size of the window containing msg
<END_TASK>
<USER_TASK:>
Description:
def array_from_text(self, msg):
    """Return an FSArray sized to the current terminal containing msg."""
    return self.array_from_text_rc(msg, self.t.height, self.t.width)
<SYSTEM_TASK:>
Returns the how far down the cursor moved since last render.
<END_TASK>
<USER_TASK:>
Description:
def get_cursor_vertical_diff(self):
    """Return how far down the cursor moved since the last render.

    Note:
        If another get_cursor_vertical_diff call is already in progress,
        immediately returns zero. (This situation is likely if
        get_cursor_vertical_diff is called from a SIGWINCH signal
        handler, since sigwinches can happen in rapid succession and
        terminal emulators seem not to respond to cursor position
        queries before the next sigwinch occurs.)
    """
    if self.in_get_cursor_diff:
        # A cursor query is already in flight; don't interleave another
        # one -- just ask the active call to repeat its query.
        self.another_sigwinch = True
        return 0
    total_dy = 0
    keep_querying = True
    while keep_querying:
        self.in_get_cursor_diff = True
        self.another_sigwinch = False
        total_dy += self._get_cursor_vertical_diff_once()
        self.in_get_cursor_diff = False
        # Repeat the query if a SIGWINCH arrived while it was running.
        keep_querying = self.another_sigwinch
    return total_dy
<SYSTEM_TASK:>
Returns the how far down the cursor moved.
<END_TASK>
<USER_TASK:>
Description:
def _get_cursor_vertical_diff_once(self):
    """Query the terminal once and return how far down the cursor moved.

    Also shifts self.top_usable_row to absorb the movement: scrolling
    down shrinks the usable region from the top, moving up grows it.
    The first call after a render (no recorded row) reports 0.
    """
    old_top_usable_row = self.top_usable_row
    row, col = self.get_cursor_position()
    if self._last_cursor_row is None:
        # No reference point yet, so no movement can be measured.
        cursor_dy = 0
    else:
        cursor_dy = row - self._last_cursor_row
    logger.info('cursor moved %d lines down' % cursor_dy)
    # Absorb downward movement into top_usable_row (bounded below at -1
    # by the loop condition) and upward movement back out (bounded at 1).
    while self.top_usable_row > -1 and cursor_dy > 0:
        self.top_usable_row += 1
        cursor_dy -= 1
    while self.top_usable_row > 1 and cursor_dy < 0:
        self.top_usable_row -= 1
        cursor_dy += 1
    logger.info('top usable row changed from %d to %d', old_top_usable_row,
                self.top_usable_row)
    logger.info('returning cursor dy of %d from curtsies' % cursor_dy)
    # Remember where the cursor is for the next measurement.
    self._last_cursor_row = row
    return cursor_dy
<SYSTEM_TASK:>
Renders array to terminal, returns the number of lines scrolled offscreen
<END_TASK>
<USER_TASK:>
Description:
def render_to_terminal(self, array, cursor_pos=(0, 0)):
    """Renders array to terminal, returns the number of lines scrolled offscreen

    Only rows that changed since the previous render are rewritten;
    rows rendered last time but absent from `array` are cleared, and
    rows that do not fit on screen cause the terminal to scroll.

    Returns:
        Number of times scrolled

    Args:
        array (FSArray): Grid of styled characters to be rendered.

            If array received is of width too small, render it anyway

            if array received is of width too large, render it anyway

            if array received is of height too small, render it anyway

            if array received is of height too large, render it, scroll down,
            and render the rest of it, then return how much we scrolled down
    """
    for_stdout = self.fmtstr_to_stdout_xform()
    # caching of write and tc (avoiding the self. lookups etc) made
    # no significant performance difference here
    if not self.hide_cursor:
        self.write(self.t.hide_cursor)

    # TODO race condition here?
    height, width = self.t.height, self.t.width

    if (height != self._last_rendered_height or
            width != self._last_rendered_width):
        self.on_terminal_size_change(height, width)

    current_lines_by_row = {}
    rows_for_use = list(range(self.top_usable_row, height))

    # rows which we have content for and don't require scrolling
    # TODO rename shared
    shared = min(len(array), len(rows_for_use))
    for row, line in zip(rows_for_use[:shared], array[:shared]):
        current_lines_by_row[row] = line
        # Skip rows identical to what is already on screen.
        if line == self._last_lines_by_row.get(row, None):
            continue
        self.write(self.t.move(row, 0))
        self.write(for_stdout(line))
        if len(line) < width:
            self.write(self.t.clear_eol)

    # rows already on screen that we don't have content for
    rest_of_lines = array[shared:]
    rest_of_rows = rows_for_use[shared:]
    for row in rest_of_rows:  # if array too small
        if self._last_lines_by_row and row not in self._last_lines_by_row:
            continue
        self.write(self.t.move(row, 0))
        self.write(self.t.clear_eol)
        # TODO probably not necessary - is first char cleared?
        self.write(self.t.clear_bol)
        current_lines_by_row[row] = None

    # lines for which we need to scroll down to render
    offscreen_scrolls = 0
    for line in rest_of_lines:  # if array too big
        self.scroll_down()
        if self.top_usable_row > 0:
            # Scrolling consumed a row above the usable region.
            self.top_usable_row -= 1
        else:
            # Content scrolled off the top of the screen; everything we
            # rendered shifts up one row.
            offscreen_scrolls += 1
            current_lines_by_row = dict(
                (k - 1, v) for k, v in current_lines_by_row.items()
            )
        logger.debug('new top_usable_row: %d' % self.top_usable_row)
        # since scrolling moves the cursor
        self.write(self.t.move(height - 1, 0))
        self.write(for_stdout(line))
        current_lines_by_row[height - 1] = line

    logger.debug(
        'lines in last lines by row: %r' % self._last_lines_by_row.keys()
    )
    logger.debug(
        'lines in current lines by row: %r' % current_lines_by_row.keys()
    )
    # Track where the cursor ends up, compensating for any scrolling.
    self._last_cursor_row = max(
        0, cursor_pos[0] - offscreen_scrolls + self.top_usable_row
    )
    self._last_cursor_column = cursor_pos[1]
    self.write(
        self.t.move(self._last_cursor_row, self._last_cursor_column)
    )
    self._last_lines_by_row = current_lines_by_row
    if not self.hide_cursor:
        self.write(self.t.normal_cursor)
    return offscreen_scrolls
<SYSTEM_TASK:>
Call a solver on the SDP relaxation. Upon successful solution, it
<END_TASK>
<USER_TASK:>
Description:
def solve(self, solver=None, solverparameters=None):
    """Call a solver on the SDP relaxation and store the solution.

    On success, the primal and dual objective values and the solution
    matrices are written back to this object, along with some status
    information. The relaxation must have been generated first.

    :param solver: The solver to be called, either `None`, "sdpa",
                   "mosek", "cvxpy", "scs", or "cvxopt". The default is
                   `None`, which triggers autodetect.
    :type solver: str.
    :param solverparameters: Parameters to be passed to the solver; the
                             accepted options depend on the solver (see
                             :func:`solve_sdp` for details).
    :type solverparameters: dict of str.
    """
    if self.F is not None:
        solve_sdp(self, solver, solverparameters)
        return
    raise Exception("Relaxation is not generated yet. Call "
                    "'SdpRelaxation.get_relaxation' first")
<SYSTEM_TASK:>
Calculate the sparse vector representation of a polynomial
<END_TASK>
<USER_TASK:>
Description:
def __push_facvar_sparse(self, polynomial, block_index, row_offset, i, j):
    """Calculate the sparse vector representation of a polynomial
    and push it into entry (i, j) of the given block of the F structure.

    Each monomial of the polynomial is mapped to its SDP variable index
    k, and its coefficient is accumulated into F at the flattened row
    for (block_index, i, j) and column k.
    """
    width = self.block_struct[block_index - 1]
    # Preprocess the polynomial for uniform handling later
    # DO NOT EXPAND THE POLYNOMIAL HERE!!!!!!!!!!!!!!!!!!!
    # The simplify_polynomial bypasses the problem.
    # Simplifying here will trigger a bug in SymPy related to
    # the powers of daggered variables.
    # polynomial = polynomial.expand()
    if is_number_type(polynomial) or polynomial.is_Mul:
        # A bare number or a single product is already one "element".
        elements = [polynomial]
    else:
        # Split a sum into its additive terms.
        elements = polynomial.as_coeff_mul()[1][0].as_coeff_add()[1]
    # Identify its constituent monomials
    for element in elements:
        results = self._get_index_of_monomial(element)
        # k identifies the mapped value of a word (monomial) w
        for (k, coeff) in results:
            # k == -1 marks monomials with no assigned variable.
            if k > -1 and coeff != 0:
                self.F[row_offset + i * width + j, k] += coeff
<SYSTEM_TASK:>
Attempt to remove equalities by solving the linear equations.
<END_TASK>
<USER_TASK:>
Description:
def __remove_equalities(self, equalities, momentequalities):
    """Attempt to remove equality constraints by solving the linear
    equations and restricting the problem to the resulting basis.

    Transforms the objective, the moment matrix and the localizing
    matrices in place; the originals are stashed on the object
    (_original_obj_facvar, _original_constant_term, _original_F) so
    they can be restored later.
    """
    A = self.__process_equalities(equalities, momentequalities)
    # Bug fix: the dependence test must compare min(A.shape) against
    # the matrix rank. The previous code computed
    # min(A.shape != np.linalg.matrix_rank(A)), i.e. min() of a single
    # bool, which raises TypeError at runtime.
    if min(A.shape) != np.linalg.matrix_rank(A):
        print("Warning: equality constraints are linearly dependent! "
              "Results might be incorrect.", file=sys.stderr)
    if A.shape[0] == 0:
        return
    c = np.array(self.obj_facvar)
    if self.verbose > 0:
        print("QR decomposition...")
    Q, R = np.linalg.qr(A[:, 1:].T, mode='complete')
    # n = number of nonzero rows of R, i.e. the rank of the constraint
    # part; Q[:, n:] spans the null space used as the new basis.
    n = np.max(np.nonzero(np.sum(np.abs(R), axis=1) > 0)) + 1
    x = np.dot(Q[:, :n], np.linalg.solve(np.transpose(R[:n, :]), -A[:, 0]))
    self._new_basis = lil_matrix(Q[:, n:])
    # Transforming the objective function
    self._original_obj_facvar = self.obj_facvar
    self._original_constant_term = self.constant_term
    self.obj_facvar = self._new_basis.T.dot(c)
    self.constant_term += c.dot(x)
    x = np.append(1, x)
    # Transforming the moment matrix and localizing matrices
    new_F = lil_matrix((self.F.shape[0], self._new_basis.shape[1] + 1))
    new_F[:, 0] = self.F[:, :self.n_vars+1].dot(x).reshape((new_F.shape[0],
                                                            1))
    new_F[:, 1:] = self.F[:, 1:self.n_vars+1].\
        dot(self._new_basis)
    self._original_F = self.F
    self.F = new_F
    self.n_vars = self._new_basis.shape[1]
    if self.verbose > 0:
        print("Number of variables after solving the linear equations: %d"
              % self.n_vars)
<SYSTEM_TASK:>
Process the constraints and generate localizing matrices. Useful
<END_TASK>
<USER_TASK:>
Description:
def process_constraints(self, inequalities=None, equalities=None,
                        momentinequalities=None, momentequalities=None,
                        block_index=0, removeequalities=False):
    """Process the constraints and generate localizing matrices. Useful
    only if the moment matrix already exists. Call it if you want to
    replace your constraints. The number of the respective types of
    constraints and the maximum degree of each constraint must remain the
    same.

    :param inequalities: Optional parameter to list inequality constraints.
    :type inequalities: list of :class:`sympy.core.exp.Expr`.
    :param equalities: Optional parameter to list equality constraints.
    :type equalities: list of :class:`sympy.core.exp.Expr`.
    :param momentinequalities: Optional parameter of inequalities defined
                               on moments.
    :type momentinequalities: list of :class:`sympy.core.exp.Expr`.
    :param momentequalities: Optional parameter of equalities defined
                             on moments.
    :type momentequalities: list of :class:`sympy.core.exp.Expr`.
    :param block_index: Optional parameter specifying the block to start
                        processing from; 0 means a full reprocessing.
    :type block_index: int.
    :param removeequalities: Optional parameter to attempt removing the
                             equalities by solving the linear equations.
    :type removeequalities: bool.
    """
    self.status = "unsolved"
    if block_index == 0:
        # Restore the pre-equality-elimination state if equalities had
        # previously been removed, then wipe the constraint part of F.
        if self._original_F is not None:
            self.F = self._original_F
            self.obj_facvar = self._original_obj_facvar
            self.constant_term = self._original_constant_term
            self.n_vars = len(self.obj_facvar)
            self._new_basis = None
        block_index = self.constraint_starting_block
        self.__wipe_F_from_constraints()
    self.constraints = flatten([inequalities])
    # Map each constraint to the dual block(s) it will occupy.
    self._constraint_to_block_index = {}
    for constraint in self.constraints:
        self._constraint_to_block_index[constraint] = (block_index, )
        block_index += 1
    if momentinequalities is not None:
        for mineq in momentinequalities:
            self.constraints.append(mineq)
            self._constraint_to_block_index[mineq] = (block_index, )
            block_index += 1
    if not (removeequalities or equalities is None):
        # Equalities are converted to pairs of inequalities
        for k, equality in enumerate(equalities):
            if equality.is_Relational:
                equality = convert_relational(equality)
            self.constraints.append(equality)
            self.constraints.append(-equality)
            ln = len(self.localizing_monomial_sets[block_index-
                                                   self.constraint_starting_block])
            # Each direction of the equality occupies ln*(ln+1)//2
            # blocks; the tuple records the first block of each half.
            self._constraint_to_block_index[equality] = (block_index,
                                                         block_index+ln*(ln+1)//2)
            block_index += ln*(ln+1)
    if momentequalities is not None and not removeequalities:
        for meq in momentequalities:
            # A moment equality becomes a (>=, <=) pair of blocks.
            self.constraints += [meq, flip_sign(meq)]
            self._constraint_to_block_index[meq] = (block_index,
                                                    block_index+1)
            block_index += 2
    block_index = self.constraint_starting_block
    self.__process_inequalities(block_index)
    if removeequalities:
        self.__remove_equalities(equalities, momentequalities)
<SYSTEM_TASK:>
Given a solution of the dual problem and a constraint of any type,
<END_TASK>
<USER_TASK:>
Description:
def get_dual(self, constraint, ymat=None):
    """Given a solution of the dual problem and a constraint of any type,
    return the corresponding block in the dual solution. If it is an
    equality constraint that was converted to a pair of inequalities, it
    returns a two-tuple of the matching dual blocks.

    :param constraint: The constraint.
    :type constraint: `sympy.core.exp.Expr`
    :param ymat: Optional parameter providing the dual solution of the
                 SDP. If not provided, the solution is extracted
                 from the sdpRelaxation object.
    :type ymat: :class:`numpy.array`.
    :returns: The corresponding block in the dual solution.
    :rtype: :class:`numpy.array` or a tuple thereof.
    """
    if not isinstance(constraint, Expr):
        raise Exception("Not a monomial or polynomial!")
    elif self.status == "unsolved" and ymat is None:
        raise Exception("SDP relaxation is not solved yet!")
    elif ymat is None:
        ymat = self.y_mat
    index = self._constraint_to_block_index.get(constraint)
    if index is None:
        raise Exception("Constraint is not in the dual!")
    if len(index) == 2:
        # Bug fix: use the supplied ymat consistently for both blocks.
        # Previously the second block was always read from self.y_mat,
        # silently ignoring a user-provided dual solution.
        return ymat[index[0]], ymat[index[1]]
    else:
        return ymat[index[0]]
<SYSTEM_TASK:>
Get the SDP relaxation of a noncommutative polynomial optimization
<END_TASK>
<USER_TASK:>
Description:
def get_relaxation(self, level, objective=None, inequalities=None,
                   equalities=None, substitutions=None,
                   momentinequalities=None, momentequalities=None,
                   momentsubstitutions=None,
                   removeequalities=False, extramonomials=None,
                   extramomentmatrices=None, extraobjexpr=None,
                   localizing_monomials=None, chordal_extension=False):
    """Get the SDP relaxation of a noncommutative polynomial optimization
    problem.

    :param level: The level of the relaxation. The value -1 will skip
                  automatic monomial generation and use only the monomials
                  supplied by the option `extramonomials`.
    :type level: int.
    :param objective: Optional parameter to describe the objective function.
    :type objective: :class:`sympy.core.exp.Expr`.
    :param inequalities: Optional parameter to list inequality constraints.
    :type inequalities: list of :class:`sympy.core.exp.Expr`.
    :param equalities: Optional parameter to list equality constraints.
    :type equalities: list of :class:`sympy.core.exp.Expr`.
    :param substitutions: Optional parameter containing monomials that can
                          be replaced (e.g., idempotent variables).
    :type substitutions: dict of :class:`sympy.core.exp.Expr`.
    :param momentinequalities: Optional parameter of inequalities defined
                               on moments.
    :type momentinequalities: list of :class:`sympy.core.exp.Expr`.
    :param momentequalities: Optional parameter of equalities defined
                             on moments.
    :type momentequalities: list of :class:`sympy.core.exp.Expr`.
    :param momentsubstitutions: Optional parameter containing moments that
                                can be replaced.
    :type momentsubstitutions: dict of :class:`sympy.core.exp.Expr`.
    :param removeequalities: Optional parameter to attempt removing the
                             equalities by solving the linear equations.
    :type removeequalities: bool.
    :param extramonomials: Optional parameter of monomials to be included,
                           on top of the requested level of relaxation.
    :type extramonomials: list of :class:`sympy.core.exp.Expr`.
    :param extramomentmatrices: Optional parameter of duplicating or adding
                                moment matrices. A new moment matrix can be
                                unconstrained (""), a copy of the first one
                                ("copy"), and satisfying a partial positivity
                                constraint ("ppt"). Each new moment matrix is
                                requested as a list of string of these options.
                                For instance, adding a single new moment matrix
                                as a copy of the first would be
                                ``extramomentmatrices=[["copy"]]``.
    :type extramomentmatrices: list of list of str.
    :param extraobjexpr: Optional parameter of a string expression of a
                         linear combination of moment matrix elements to be
                         included in the objective function.
    :type extraobjexpr: str.
    :param localizing_monomials: Optional parameter to specify sets of
                                 localizing monomials for each constraint.
                                 The internal order of constraints is
                                 inequalities first, followed by the
                                 equalities. If the parameter is specified,
                                 but for a certain constraint the automatic
                                 localization is requested, leave None in
                                 its place in this parameter.
    :type localizing_monomials: list of list of `sympy.core.exp.Expr`.
    :param chordal_extension: Optional parameter to request a sparse
                              chordal extension.
    :type chordal_extension: bool.
    """
    # Bug fix: validate the *requested* level. The previous code checked
    # self.level before assigning it, so an invalid `level` argument was
    # never caught.
    if level < -1:
        raise Exception("Invalid level of relaxation")
    self.level = level
    if substitutions is None:
        self.substitutions = {}
    else:
        self.substitutions = substitutions
        for lhs, rhs in substitutions.items():
            if not is_pure_substitution_rule(lhs, rhs):
                self.pure_substitution_rules = False
            if iscomplex(lhs) or iscomplex(rhs):
                self.complex_matrix = True
    if momentsubstitutions is not None:
        self.moment_substitutions = momentsubstitutions.copy()
        # If we have a real-valued problem, the moment matrix is symmetric
        # and moment substitutions also apply to the conjugate monomials
        if not self.complex_matrix:
            for key, val in self.moment_substitutions.copy().items():
                adjoint_monomial = apply_substitutions(key.adjoint(),
                                                       self.substitutions)
                self.moment_substitutions[adjoint_monomial] = val
    if chordal_extension:
        self.variables = find_variable_cliques(self.variables, objective,
                                               inequalities, equalities,
                                               momentinequalities,
                                               momentequalities)
    self.__generate_monomial_sets(extramonomials)
    self.localizing_monomial_sets = localizing_monomials

    # Figure out basic structure of the SDP
    self._calculate_block_structure(inequalities, equalities,
                                    momentinequalities, momentequalities,
                                    extramomentmatrices,
                                    removeequalities)
    self._estimate_n_vars()
    if extramomentmatrices is not None:
        for parameters in extramomentmatrices:
            copy = False
            for parameter in parameters:
                if parameter == "copy":
                    copy = True
            if copy:
                self.n_vars += self.n_vars + 1
            else:
                # NOTE(review): true division makes n_vars a float here;
                # integer division may have been intended -- confirm.
                self.n_vars += (self.block_struct[0]**2)/2

    if self.complex_matrix:
        dtype = np.complex128
    else:
        dtype = np.float64
    self.F = lil_matrix((sum([bs**2 for bs in self.block_struct]),
                         self.n_vars + 1), dtype=dtype)

    if self.verbose > 0:
        print(('Estimated number of SDP variables: %d' % self.n_vars))
        print('Generating moment matrix...')
    # Generate moment matrices
    new_n_vars, block_index = self.__add_parameters()
    self._time0 = time.time()
    new_n_vars, block_index = \
        self._generate_all_moment_matrix_blocks(new_n_vars, block_index)
    if extramomentmatrices is not None:
        new_n_vars, block_index = \
            self.__add_extra_momentmatrices(extramomentmatrices,
                                            new_n_vars, block_index)
    # The initial estimate for the size of F was overly generous.
    self.n_vars = new_n_vars
    # We don't correct the size of F, because that would trigger
    # memory copies, and extra columns in lil_matrix are free anyway.
    # self.F = self.F[:, 0:self.n_vars + 1]
    if self.verbose > 0:
        print(('Reduced number of SDP variables: %d' % self.n_vars))
    # Objective function
    self.set_objective(objective, extraobjexpr)
    # Process constraints
    self.constraint_starting_block = block_index
    self.process_constraints(inequalities, equalities, momentinequalities,
                             momentequalities, block_index,
                             removeequalities)
<SYSTEM_TASK:>
Flatten a list of lists to a list.
<END_TASK>
<USER_TASK:>
Description:
def flatten(lol):
    """Recursively flatten arbitrarily nested lists/tuples into a list.

    `None` entries and empty sub-sequences are dropped; every other
    non-list, non-tuple element is kept as-is.

    :param lol: A list of lists in arbitrary depth.
    :type lol: list of list.
    :returns: flat list of elements.
    """
    flat = []
    for item in lol:
        if item is None:
            continue
        if isinstance(item, (list, tuple)):
            if item:
                flat.extend(flatten(item))
        else:
            flat.append(item)
    return flat
<SYSTEM_TASK:>
Simplify a polynomial for uniform handling later.
<END_TASK>
<USER_TASK:>
Description:
def simplify_polynomial(polynomial, monomial_substitutions):
    """Simplify a polynomial for uniform handling later.

    Expands the polynomial, applies the monomial substitution rules to
    every constituent monomial, and rebuilds the sum. Plain Python
    numbers pass through unchanged.
    """
    if isinstance(polynomial, (int, float, complex)):
        return polynomial
    # Multiplying by 1.0 coerces to a SymPy expression before expanding.
    polynomial = (1.0 * polynomial).expand(mul=True,
                                           multinomial=True)
    if is_number_type(polynomial):
        return polynomial
    if polynomial.is_Mul:
        # A single product is treated as one term.
        elements = [polynomial]
    else:
        # Split a sum into its additive terms.
        elements = polynomial.as_coeff_mul()[1][0].as_coeff_add()[1]
    new_polynomial = 0
    # Identify its constituent monomials
    for element in elements:
        monomial, coeff = separate_scalar_factor(element)
        monomial = apply_substitutions(monomial, monomial_substitutions)
        new_polynomial += coeff * monomial
    return new_polynomial
<SYSTEM_TASK:>
Separate the constant factor from a monomial.
<END_TASK>
<USER_TASK:>
Description:
def __separate_scalar_factor(monomial):
    """Separate the constant factor from a monomial.

    Returns a (monomial, scalar) pair such that
    monomial_in == monomial * scalar. Pure numbers are returned as
    (S.One, number).
    """
    scalar_factor = 1
    if is_number_type(monomial):
        return S.One, monomial
    # NOTE(review): this branch looks unreachable for plain numeric
    # zero (caught above); it presumably targets a SymPy Zero that
    # is_number_type does not recognize -- confirm.
    if monomial == 0:
        return S.One, 0
    comm_factors, _ = split_commutative_parts(monomial)
    if len(comm_factors) > 0:
        # Only a leading commutative Number is treated as the scalar.
        if isinstance(comm_factors[0], Number):
            scalar_factor = comm_factors[0]
    if scalar_factor != 1:
        return monomial / scalar_factor, scalar_factor
    else:
        return monomial, scalar_factor
<SYSTEM_TASK:>
Construct a monomial with the coefficient separated
<END_TASK>
<USER_TASK:>
Description:
def separate_scalar_factor(element):
    """Construct a monomial with the coefficient separated
    from an element in a polynomial.

    Returns a (monomial, coeff) pair where coeff collects the numeric
    (possibly imaginary) factors and monomial the remaining symbols.
    """
    coeff = 1.0
    monomial = S.One
    if isinstance(element, (int, float, complex)):
        coeff *= element
        return monomial, coeff
    for var in element.as_coeff_mul()[1]:
        if not (var.is_Number or var.is_imaginary):
            monomial = monomial * var
        else:
            if var.is_Number:
                # NOTE(review): this overwrites rather than multiplies
                # coeff; presumably as_coeff_mul() leaves at most one
                # Number among the factors -- confirm.
                coeff = float(var)
            # If not, then it is imaginary
            else:
                coeff = 1j * coeff
    coeff = float(element.as_coeff_mul()[0]) * coeff
    return monomial, coeff
<SYSTEM_TASK:>
Given a list of monomials, it counts those that have a certain degree,
<END_TASK>
<USER_TASK:>
Description:
def count_ncmonomials(monomials, degree):
    """Count the monomials whose degree does not exceed `degree`.

    Assumes `monomials` is ordered by nondecreasing degree: counting
    stops at the first monomial above the bound. The function is useful
    when certain monomials were eliminated from the basis.

    :param monomials: List of monomials (the monomial basis).
    :param degree: Maximum degree to count.
    :returns: The count of appropriate monomials.
    """
    n_matching = 0
    for mono in monomials:
        if ncdegree(mono) > degree:
            break
        n_matching += 1
    return n_matching
<SYSTEM_TASK:>
Helper function to remove monomials from the basis.
<END_TASK>
<USER_TASK:>
Description:
def apply_substitutions(monomial, monomial_substitutions, pure=False):
    """Helper function to remove monomials from the basis.

    Repeatedly applies the substitution rules until the monomial stops
    changing, i.e. a fixed point is reached.

    :param monomial: The monomial to rewrite.
    :param monomial_substitutions: dict mapping left-hand-side patterns to
                                   their replacements.
    :param pure: If True, pre-filter the rules to those whose left-hand
                 side shares at least one non-numeric atom with the
                 monomial.
    """
    if is_number_type(monomial):
        return monomial
    original_monomial = monomial
    changed = True
    if not pure:
        substitutions = monomial_substitutions
    else:
        # Keep only rules that can possibly match: every non-numeric atom
        # of the rule's LHS must occur in the monomial.
        substitutions = {}
        for lhs, rhs in monomial_substitutions.items():
            irrelevant = False
            for atom in lhs.atoms():
                if atom.is_Number:
                    continue
                if not monomial.has(atom):
                    irrelevant = True
                    break
            if not irrelevant:
                substitutions[lhs] = rhs
    # Iterate to a fixed point: one rule's output may enable another rule.
    while changed:
        for lhs, rhs in substitutions.items():
            monomial = fast_substitute(monomial, lhs, rhs)
        if original_monomial == monomial:
            changed = False
        original_monomial = monomial
    return monomial
<SYSTEM_TASK:>
Generates a number of commutative or noncommutative variables
<END_TASK>
<USER_TASK:>
Description:
def generate_variables(name, n_vars=1, hermitian=None, commutative=True):
    """Generates a number of commutative or noncommutative variables

    :param name: The prefix in the symbolic representation of the noncommuting
                 variables. This will be suffixed by a number from 0 to
                 n_vars-1 if n_vars > 1.
    :type name: str.
    :param n_vars: The number of variables.
    :type n_vars: int.
    :param hermitian: Optional parameter to request Hermitian variables .
    :type hermitian: bool.
    :param commutative: Optional parameter to request commutative variables.
                        Commutative variables are Hermitian by default.
    :type commutative: bool.
    :returns: list of :class:`sympy.physics.quantum.operator.Operator` or
              :class:`sympy.physics.quantum.operator.HermitianOperator`
              variables or `sympy.Symbol`

    :Example:

    >>> generate_variables('y', 2, commutative=True)
    [y0, y1]
    """
    generated = []
    for k in range(n_vars):
        # A single variable keeps the bare prefix; otherwise append index.
        label = '%s' % name if n_vars == 1 else '%s%s' % (name, k)
        if commutative:
            # Commutative variables are plain Symbols; hermiticity (the
            # default) maps to the "real" assumption, otherwise "complex".
            if hermitian is None or hermitian:
                generated.append(Symbol(label, real=True))
            else:
                generated.append(Symbol(label, complex=True))
        else:
            # hermitian=None or False both yield a generic Operator.
            factory = HermitianOperator if hermitian else Operator
            generated.append(factory(label))
    return generated
<SYSTEM_TASK:>
Generates a number of commutative or noncommutative operators
<END_TASK>
<USER_TASK:>
Description:
def generate_operators(name, n_vars=1, hermitian=None, commutative=False):
    """Generates a number of commutative or noncommutative operators

    :param name: The prefix in the symbolic representation of the noncommuting
                 variables. This will be suffixed by a number from 0 to
                 n_vars-1 if n_vars > 1.
    :type name: str.
    :param n_vars: The number of variables.
    :type n_vars: int.
    :param hermitian: Optional parameter to request Hermitian variables.
    :type hermitian: bool.
    :param commutative: Optional parameter to request commutative variables.
    :type commutative: bool.
    :returns: list of :class:`sympy.physics.quantum.operator.Operator` or
              :class:`sympy.physics.quantum.operator.HermitianOperator`
              variables

    :Example:

    >>> generate_operators('y', 2)
    [y0, y1]
    """
    variables = []
    for i in range(n_vars):
        if n_vars > 1:
            var_name = '%s%s' % (name, i)
        else:
            var_name = '%s' % name
        if hermitian is not None and hermitian:
            variables.append(HermitianOperator(var_name))
        else:
            variables.append(Operator(var_name))
        # Operators default to noncommutative; override the flag as asked.
        variables[-1].is_commutative = commutative
    return variables
<SYSTEM_TASK:>
Generates all noncommutative monomials up to a degree
<END_TASK>
<USER_TASK:>
Description:
def get_monomials(variables, degree):
    """Generates all noncommutative monomials up to a degree

    :param variables: The noncommutative variables to generate monomials from
    :type variables: list of :class:`sympy.physics.quantum.operator.Operator`
                     or
                     :class:`sympy.physics.quantum.operator.HermitianOperator`.
    :param degree: The maximum degree.
    :type degree: int.
    :returns: list of monomials.
    """
    if degree == -1:
        return []
    if not variables:
        return [S.One]
    else:
        _variables = variables[:]
        # Prepending the constant 1 keeps lower-degree monomials around
        # when multiplying through on each round below.
        _variables.insert(0, 1)
        ncmonomials = [S.One]
        ncmonomials.extend(var for var in variables)
        # Non-Hermitian variables also contribute their adjoints.
        for var in variables:
            if not is_hermitian(var):
                ncmonomials.append(var.adjoint())
        # Each pass raises the maximum degree by one.
        for _ in range(1, degree):
            temp = []
            for var in _variables:
                for new_var in ncmonomials:
                    temp.append(var * new_var)
                    if var != 1 and not is_hermitian(var):
                        temp.append(var.adjoint() * new_var)
            # Products can coincide; deduplicate while preserving order.
            ncmonomials = unique(temp[:])
        return ncmonomials
<SYSTEM_TASK:>
Returns the degree of a noncommutative polynomial.
<END_TASK>
<USER_TASK:>
Description:
def ncdegree(polynomial):
    """Returns the degree of a noncommutative polynomial.

    :param polynomial: Polynomial of noncommutive variables.
    :type polynomial: :class:`sympy.core.expr.Expr`.
    :returns: int -- the degree of the polynomial.
    """
    degree = 0
    if is_number_type(polynomial):
        return degree
    polynomial = polynomial.expand()
    # The degree of the polynomial is the maximum over its monomials.
    for monomial in polynomial.as_coefficients_dict():
        subdegree = 0
        for variable in monomial.as_coeff_mul()[1]:
            if isinstance(variable, Pow):
                subdegree += variable.exp
            elif not isinstance(variable, Number) and variable != I:
                # Numbers and the imaginary unit do not add to the degree.
                subdegree += 1
        if subdegree > degree:
            degree = subdegree
    return degree
<SYSTEM_TASK:>
Returns whether the polynomial has complex coefficients
<END_TASK>
<USER_TASK:>
Description:
def iscomplex(polynomial):
    """Returns whether the polynomial has complex coefficients

    :param polynomial: Polynomial of noncommutive variables.
    :type polynomial: :class:`sympy.core.expr.Expr`.
    :returns: bool -- whether there is a complex coefficient.
    """
    if isinstance(polynomial, (int, float)):
        return False
    if isinstance(polynomial, complex):
        return True
    polynomial = polynomial.expand()
    # Scan every factor of every monomial for the imaginary unit.
    for monomial in polynomial.as_coefficients_dict():
        for variable in monomial.as_coeff_mul()[1]:
            if isinstance(variable, complex) or variable == I:
                return True
    return False
<SYSTEM_TASK:>
Return the monomials of a certain degree.
<END_TASK>
<USER_TASK:>
Description:
def get_all_monomials(variables, extramonomials, substitutions, degree,
                      removesubstitutions=True):
    """Return the monomials of a certain degree.

    :param variables: Variables to build the monomial basis from.
    :param extramonomials: Optional extra monomials appended to the basis.
    :param substitutions: Rewriting rules applied to each monomial.
    :param degree: Maximum degree of the generated monomials.
    :param removesubstitutions: If True, drop monomials that appear as a
                                left-hand side of a substitution rule.
    """
    monomials = get_monomials(variables, degree)
    if extramonomials is not None:
        monomials.extend(extramonomials)
    if removesubstitutions and substitutions is not None:
        monomials = [monomial for monomial in monomials if monomial not
                     in substitutions]
    monomials = [remove_scalar_factor(apply_substitutions(monomial,
                                                          substitutions))
                 for monomial in monomials]
    # Substitution may map distinct monomials to the same one; deduplicate.
    monomials = unique(monomials)
    return monomials
<SYSTEM_TASK:>
Collect monomials up to a given degree.
<END_TASK>
<USER_TASK:>
Description:
def pick_monomials_up_to_degree(monomials, degree):
    """Collect monomials up to a given degree, grouped in ascending
    degree order, starting with the constant monomial."""
    collected = [S.One] if degree >= 0 else []
    for current_degree in range(1, degree + 1):
        collected += pick_monomials_of_degree(monomials, current_degree)
    return collected
<SYSTEM_TASK:>
Collect all monomials up of a given degree.
<END_TASK>
<USER_TASK:>
Description:
def pick_monomials_of_degree(monomials, degree):
    """Collect all monomials of exactly the given degree."""
    return [candidate for candidate in monomials
            if ncdegree(candidate) == degree]
<SYSTEM_TASK:>
Save a monomial dictionary for debugging purposes.
<END_TASK>
<USER_TASK:>
Description:
def save_monomial_index(filename, monomial_index):
    """Save a monomial dictionary for debugging purposes.

    Each output line holds an integer index followed by the string form of
    the corresponding monomial; slot 0 stays blank because the stored
    indices start at 1.

    :param filename: The name of the file to save to.
    :type filename: str.
    :param monomial_index: The monomial index of the SDP relaxation.
    :type monomial_index: dict of :class:`sympy.core.expr.Expr`.
    """
    monomial_translation = [''] * (len(monomial_index) + 1)
    for key, k in monomial_index.items():
        monomial_translation[k] = convert_monomial_to_string(key)
    # A context manager guarantees the file handle is closed even if a
    # write raises (the original open()/close() pair leaked on error).
    with open(filename, 'w') as file_:
        for k, translation in enumerate(monomial_translation):
            file_.write('%s %s\n' % (k, translation))
<SYSTEM_TASK:>
Helper function to include only unique monomials in a basis.
<END_TASK>
<USER_TASK:>
Description:
def unique(seq):
    """Return the elements of *seq* with duplicates removed, keeping the
    first occurrence of each element in its original order. Elements must
    be hashable."""
    seen = set()
    kept = []
    for element in seq:
        if element not in seen:
            seen.add(element)
            kept.append(element)
    return kept
<SYSTEM_TASK:>
Convert all inequalities to >=0 form.
<END_TASK>
<USER_TASK:>
Description:
def convert_relational(relational):
    """Convert all inequalities to >=0 form.

    :param relational: A relational expression exposing ``rel_op``, ``lhs``
                       and ``rhs`` attributes (e.g. a sympy relational).
    :returns: The expression constrained to be >= 0 (== 0 for equalities).
    :raises Exception: If the relational operator is not supported.
    """
    rel = relational.rel_op
    if rel in ['==', '>=', '>']:
        return relational.lhs - relational.rhs
    elif rel in ['<=', '<']:
        # Flip the sides so the constraint reads "expression >= 0".
        return relational.rhs - relational.lhs
    else:
        # Bug fix: the operator name was previously embedded inside the
        # string literal ("... ' + rel + ' ...") and never interpolated.
        raise Exception("The relational operation %s is not "
                        "implemented!" % rel)
<SYSTEM_TASK:>
Returns either x or o if one of them won, otherwise None
<END_TASK>
<USER_TASK:>
Description:
def winner(self):
    """Returns either x or o if one of them won, otherwise None"""
    lines = ((0, 3, 6), (1, 4, 7), (2, 5, 8),
             (0, 1, 2), (3, 4, 5), (6, 7, 8),
             (0, 4, 8), (2, 4, 6))
    # Check x first, then o, mirroring the original player priority.
    for player in ('x', 'o'):
        if any(all(self.spots[p] == player for p in line)
               for line in lines):
            return player
    return None
<SYSTEM_TASK:>
Get the sparse SDP relaxation of a Bell inequality.
<END_TASK>
<USER_TASK:>
Description:
def get_relaxation(self, A_configuration, B_configuration, I):
    """Get the sparse SDP relaxation of a Bell inequality.

    :param A_configuration: The definition of measurements of Alice.
    :type A_configuration: list of list of int.
    :param B_configuration: The definition of measurements of Bob.
    :type B_configuration: list of list of int.
    :param I: The matrix describing the Bell inequality in the
              Collins-Gisin picture.
    :type I: list of list of int.
    """
    coefficients = collinsgisin_to_faacets(I)
    M, ncIndices = get_faacets_moment_matrix(A_configuration,
                                             B_configuration, coefficients)
    # Entries of M are signed 1-based variable indices; index 1 appears to
    # be the constant, hence n_vars excludes it -- confirm against
    # get_faacets_moment_matrix.
    self.n_vars = M.max() - 1
    bs = len(M)  # The block size
    self.block_struct = [bs]
    self.F = lil_matrix((bs**2, self.n_vars + 1))
    # Constructing the internal representation of the constraint matrices
    # See Section 2.1 in the SDPA manual and also Yalmip's internal
    # representation
    for i in range(bs):
        for j in range(i, bs):
            if M[i, j] != 0:
                # The sign of M[i, j] becomes the coefficient's sign.
                self.F[i*bs+j, abs(M[i, j])-1] = copysign(1, M[i, j])
    self.obj_facvar = [0 for _ in range(self.n_vars)]
    # Accumulate the objective; ncIndices[0] is skipped (constant part).
    for i in range(1, len(ncIndices)):
        self.obj_facvar[abs(ncIndices[i])-2] += \
            copysign(1, ncIndices[i])*coefficients[i]
<SYSTEM_TASK:>
Returns list of users track has been shared with.
<END_TASK>
<USER_TASK:>
Description:
def share(track_id=None, url=None, users=None):
    """
    Returns list of users track has been shared with.

    Either track or url need to be provided. When ``users`` is given, the
    track's permissions are replaced with that list of usernames and the
    result of the update call is returned instead.
    """
    client = get_client()
    if url:
        # Resolve the public URL to the numeric track id.
        track_id = client.get('/resolve', url=url).id
    if not users:
        return client.get('/tracks/%d/permissions' % track_id)
    permissions = {'user_id': []}
    for username in users:
        # check cache for user
        user = settings.users.get(username, None)
        if user:
            permissions['user_id'].append(user['id'])
        else:
            # Cache miss: resolve the username and persist it for next time.
            user = client.get('/resolve', url='http://soundcloud.com/%s' % username)
            permissions['user_id'].append(user.id)
            settings.users[username] = user.obj
            settings.save()
    return client.put('/tracks/%d/permissions' % track_id, permissions=permissions)
<SYSTEM_TASK:>
Helper function to convert the SDP problem to PICOS
<END_TASK>
<USER_TASK:>
Description:
def solve_with_cvxopt(sdp, solverparameters=None):
    """Helper function to convert the SDP problem to PICOS
    and call CVXOPT solver, and parse the output.

    :param sdp: The SDP relaxation to be solved.
    :type sdp: :class:`ncpol2sdpa.sdp`.
    :param solverparameters: Optional extra options forwarded to PICOS.
    :type solverparameters: dict.
    :returns: tuple of primal value, dual value, x matrices, y matrices
              and the solver status string.
    """
    P = convert_to_picos(sdp)
    P.set_option("solver", "cvxopt")
    P.set_option("verbose", sdp.verbose)
    if solverparameters is not None:
        for key, value in solverparameters.items():
            P.set_option(key, value)
    solution = P.solve()
    x_mat = [np.array(P.get_valued_variable('X'))]
    y_mat = [np.array(P.get_constraint(i).dual)
             for i in range(len(P.constraints))]
    # NOTE(review): objective values are negated and shifted by the
    # relaxation's constant term -- presumably the PICOS model maximizes;
    # confirm against convert_to_picos.
    return -solution["cvxopt_sol"]["primal objective"] + \
        sdp.constant_term, \
        -solution["cvxopt_sol"]["dual objective"] + \
        sdp.constant_term, \
        x_mat, y_mat, solution["status"]
<SYSTEM_TASK:>
Helper function to convert the SDP problem to CVXPY
<END_TASK>
<USER_TASK:>
Description:
def solve_with_cvxpy(sdp, solverparameters=None):
    """Helper function to convert the SDP problem to CVXPY
    and call the solver, and parse the output.

    :param sdp: The SDP relaxation to be solved.
    :type sdp: :class:`ncpol2sdpa.sdp`.
    :param solverparameters: Optional dict of options; a 'solver' entry
                             selects the CVXPY backend (and is popped).
    :returns: tuple of primal value, dual value, x matrices, y matrices
              and the solver status string.
    """
    problem = convert_to_cvxpy(sdp)
    if solverparameters is not None and 'solver' in solverparameters:
        solver = solverparameters.pop('solver')
        v = problem.solve(solver=solver, verbose=(sdp.verbose > 0))
    else:
        v = problem.solve(verbose=(sdp.verbose > 0))
    if v is None:
        status = "infeasible"
        x_mat, y_mat = [], []
    elif v == float("inf") or v == -float("inf"):
        status = "unbounded"
        x_mat, y_mat = [], []
    else:
        status = "optimal"
        # Reconstruct the block matrices from the flat solution vector:
        # columns 1..n_vars of F are the variable coefficients, column 0
        # the constant part.
        x_pre = sdp.F[:, 1:sdp.n_vars+1].dot(problem.variables()[0].value)
        x_pre += sdp.F[:, 0]
        row_offsets = [0]
        cumulative_sum = 0
        for block_size in sdp.block_struct:
            cumulative_sum += block_size ** 2
            row_offsets.append(cumulative_sum)
        x_mat = []
        for bi, bs in enumerate(sdp.block_struct):
            x = x_pre[row_offsets[bi]:row_offsets[bi+1]].reshape((bs, bs))
            # Symmetrize: add the transpose and subtract the doubled
            # diagonal introduced by the addition.
            x += x.T - np.diag(np.array(x.diagonal())[0])
            x_mat.append(x)
        y_mat = [constraint.dual_value for constraint in problem.constraints]
    return v+sdp.constant_term, v+sdp.constant_term, x_mat, y_mat, status
<SYSTEM_TASK:>
Convert an SDP relaxation to a CVXPY problem.
<END_TASK>
<USER_TASK:>
Description:
def convert_to_cvxpy(sdp):
    """Convert an SDP relaxation to a CVXPY problem.

    :param sdp: The SDP relaxation to convert.
    :type sdp: :class:`ncpol2sdpa.sdp`.
    :returns: :class:`cvxpy.Problem`.
    """
    from cvxpy import Minimize, Problem, Variable
    # row_offsets[i] is the first flattened row of block i within sdp.F.
    row_offsets = [0]
    cumulative_sum = 0
    for block_size in sdp.block_struct:
        cumulative_sum += block_size ** 2
        row_offsets.append(cumulative_sum)
    x = Variable(sdp.n_vars)
    # The moment matrices are the first blocks of identical size
    constraints = []
    for idx, bs in enumerate(sdp.block_struct):
        nonzero_set = set()
        # F[k] is the coefficient matrix of variable k in this block;
        # F[0] holds the constant part.
        F = [lil_matrix((bs, bs)) for _ in range(sdp.n_vars+1)]
        for ri, row in enumerate(sdp.F.rows[row_offsets[idx]:
                                            row_offsets[idx+1]],
                                 row_offsets[idx]):
            block_index, i, j = convert_row_to_sdpa_index(sdp.block_struct,
                                                          row_offsets, ri)
            for col_index, k in enumerate(row):
                value = sdp.F.data[ri][col_index]
                # Mirror the entry so F[k] stays symmetric.
                F[k][i, j] = value
                F[k][j, i] = value
                nonzero_set.add(k)
        if bs > 1:
            # Matrix block: impose positive semidefiniteness (>> 0).
            sum_ = sum(F[k]*x[k-1] for k in nonzero_set if k > 0)
            if not isinstance(sum_, (int, float)):
                if F[0].getnnz() > 0:
                    sum_ += F[0]
                constraints.append(sum_ >> 0)
        else:
            # 1x1 block: a scalar inequality suffices.
            sum_ = sum(F[k][0, 0]*x[k-1] for k in nonzero_set if k > 0)
            if not isinstance(sum_, (int, float)):
                sum_ += F[0][0, 0]
                constraints.append(sum_ >= 0)
    obj = sum(ci*xi for ci, xi in zip(sdp.obj_facvar, x) if ci != 0)
    problem = Problem(Minimize(obj), constraints)
    return problem
<SYSTEM_TASK:>
Get the forward neighbors of a site in a lattice.
<END_TASK>
<USER_TASK:>
Description:
def get_neighbors(index, lattice_length, width=0, periodic=False):
    """Get the forward neighbors of a site in a lattice.

    :param index: Linear index of operator.
    :type index: int.
    :param lattice_length: The size of the 2D lattice in either dimension
    :type lattice_length: int.
    :param width: Optional parameter to define width.
    :type width: int.
    :param periodic: Optional parameter to indicate periodic boundary
                     conditions.
    :type periodic: bool

    :returns: list of int -- the neighbors in linear index.
    """
    if width == 0:
        # Square lattice by default.
        width = lattice_length
    row, col = divmod(index, width)
    neighbors = []
    # Right neighbor, or wrap to the row start when periodic.
    if col + 1 < width:
        neighbors.append(index + 1)
    elif periodic and width > 1:
        neighbors.append(index - width + 1)
    # Downward neighbor, or wrap to the first row when periodic.
    if row + 1 < lattice_length:
        neighbors.append(index + width)
    elif periodic:
        neighbors.append(index - (lattice_length - 1) * width)
    return neighbors
<SYSTEM_TASK:>
Get the forward neighbors at a given distance of a site or set of sites
<END_TASK>
<USER_TASK:>
Description:
def get_next_neighbors(indices, lattice_length, width=0, distance=1,
                       periodic=False):
    """Get the forward neighbors at a given distance of a site or set of sites
    in a lattice.

    :param indices: Linear index (or list of indices) of operator(s).
    :type indices: int or list of int.
    :param lattice_length: The size of the 2D lattice in either dimension
    :type lattice_length: int.
    :param width: Optional parameter to define width.
    :type width: int.
    :param distance: Optional parameter to define distance.
    :type distance: int.
    :param periodic: Optional parameter to indicate periodic boundary
                     conditions.
    :type periodic: bool

    :returns: list of int -- the neighbors at given distance in linear index.
    """
    if not isinstance(indices, list):
        indices = [indices]
    if distance == 1:
        return flatten(get_neighbors(index, lattice_length, width, periodic)
                       for index in indices)
    else:
        # Sites at exactly `distance`: reachable in `distance` steps (s1)
        # minus those already reachable in `distance - 1` steps (s2).
        s1 = set(flatten(get_next_neighbors(get_neighbors(index,
                                                          lattice_length,
                                                          width, periodic),
                                            lattice_length, width, distance-1,
                                            periodic) for index in indices))
        s2 = set(get_next_neighbors(indices, lattice_length, width, distance-1,
                                    periodic))
        return list(s1 - s2)
<SYSTEM_TASK:>
Return a set of constraints that define Pauli spin operators.
<END_TASK>
<USER_TASK:>
Description:
def pauli_constraints(X, Y, Z):
    """Return a set of constraints that define Pauli spin operators.

    :param X: List of Pauli X operator on sites.
    :type X: list of :class:`sympy.physics.quantum.operator.HermitianOperator`.
    :param Y: List of Pauli Y operator on sites.
    :type Y: list of :class:`sympy.physics.quantum.operator.HermitianOperator`.
    :param Z: List of Pauli Z operator on sites.
    :type Z: list of :class:`sympy.physics.quantum.operator.HermitianOperator`.
    :returns: dict of substitution rules encoding the Pauli algebra.
    """
    substitutions = {}
    site_count = len(X)
    for i in range(site_count):
        xi, yi, zi = X[i], Y[i], Z[i]
        # Each Pauli operator squares to the identity.
        for op in (xi, yi, zi):
            substitutions[op * op] = 1
        # Anticommutation between distinct operators on the same site.
        substitutions[yi * xi] = -xi * yi
        substitutions[zi * xi] = -xi * zi
        substitutions[zi * yi] = -yi * zi
        # Operators acting on different sites commute.
        for j in range(i + 1, site_count):
            xj, yj, zj = X[j], Y[j], Z[j]
            substitutions[xj * xi] = xi * xj
            substitutions[yj * yi] = yi * yj
            substitutions[yj * xi] = xi * yj
            substitutions[yi * xj] = xj * yi
            substitutions[zj * zi] = zi * zj
            substitutions[zj * xi] = xi * zj
            substitutions[zi * xj] = xj * zi
            substitutions[zj * yi] = yi * zj
            substitutions[zi * yj] = yj * zi
    return substitutions
<SYSTEM_TASK:>
Generate variables that behave like measurements.
<END_TASK>
<USER_TASK:>
Description:
def generate_measurements(party, label):
    """Generate variables that behave like measurements.

    :param party: The list of number of measurement outputs a party has.
    :type party: list of int.
    :param label: The label to be given to the symbolic variables.
    :type label: str.
    :returns: list of list of
              :class:`sympy.physics.quantum.operator.HermitianOperator`.
    """
    # party[i] - 1 Hermitian operators per measurement -- presumably the
    # last outcome is fixed by normalization; confirm with callers.
    return [generate_operators(label + '%s' % i, n_outputs - 1,
                               hermitian=True)
            for i, n_outputs in enumerate(party)]
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.