Dataset schema (one row per source file):
  file_name : string, 3 to 137 chars
  prefix    : string, 0 to 918k chars
  suffix    : string, 0 to 962k chars
  middle    : string, 0 to 812k chars
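Each row below stores one source file split into prefix, suffix, and middle cells, with the middle cell stored last. A minimal sketch, assuming that column layout, of how a row maps back to the original file text (the helper name and the shortened cell values are illustrative only):

# A minimal sketch, assuming the column layout above: each row holds a file
# split for fill-in-the-middle training in the stored order prefix, suffix,
# middle, and the original text is recovered as prefix + middle + suffix.
def reassemble_file(row: dict) -> str:
    """Rebuild the original source text from one prefix/suffix/middle row."""
    return row["prefix"] + row["middle"] + row["suffix"]

# Toy row, with values shortened from the alternates.go entry below:
row = {
    "file_name": "alternates.go",
    "prefix": "if !stat.IsDir() ",
    "middle": "{ return &invalidAlternatesError{altContents: altContents} }",
    "suffix": " objectFiles, err := findObjectFiles(altDir)",
}
print(reassemble_file(row))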
squeak_admin_server_handler.py
# MIT License # # Copyright (c) 2020 Jonathan Zernik # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. import logging from proto import squeak_admin_pb2 from squeaknode.admin.messages import connected_peer_to_message from squeaknode.admin.messages import download_result_to_message from squeaknode.admin.messages import message_to_peer_address from squeaknode.admin.messages import message_to_received_payment from squeaknode.admin.messages import message_to_sent_payment from squeaknode.admin.messages import message_to_squeak_entry from squeaknode.admin.messages import optional_connected_peer_to_message from squeaknode.admin.messages import optional_received_offer_to_message from squeaknode.admin.messages import optional_sent_payment_to_message from squeaknode.admin.messages import optional_squeak_entry_to_message from squeaknode.admin.messages import optional_squeak_hash_to_hex from squeaknode.admin.messages import optional_squeak_peer_to_message from squeaknode.admin.messages import optional_squeak_profile_to_message from squeaknode.admin.messages import payment_summary_to_message from squeaknode.admin.messages import peer_address_to_message from squeaknode.admin.messages import received_offer_to_message from squeaknode.admin.messages import received_payment_to_message from squeaknode.admin.messages import sent_offer_to_message from squeaknode.admin.messages import sent_payment_to_message from squeaknode.admin.messages import squeak_entry_to_message from squeaknode.admin.messages import squeak_peer_to_message from squeaknode.admin.messages import squeak_profile_to_message from squeaknode.admin.messages import twitter_account_to_message from squeaknode.admin.profile_image_util import base64_string_to_bytes from squeaknode.lightning.lnd_lightning_client import LNDLightningClient from squeaknode.node.squeak_controller import SqueakController logger = logging.getLogger(__name__) class SqueakAdminServerHandler(object): """Handles admin server commands.""" def __init__( self, lightning_client: LNDLightningClient, squeak_controller: SqueakController, ): self.lightning_client = lightning_client self.squeak_controller = squeak_controller def handle_lnd_get_info(self, request): logger.info("Handle lnd get info") return self.lightning_client.stub.GetInfo(request) def handle_lnd_wallet_balance(self, request): logger.info("Handle lnd wallet balance") return self.lightning_client.stub.WalletBalance(request) def handle_lnd_new_address(self, request): logger.info("Handle lnd new address: {}".format(request)) return 
self.lightning_client.stub.NewAddress(request) def handle_lnd_list_channels(self, request): logger.info("Handle lnd list channels") return self.lightning_client.stub.ListChannels(request) def handle_lnd_pending_channels(self, request): logger.info("Handle lnd pending channels") return self.lightning_client.stub.PendingChannels(request) def handle_lnd_get_transactions(self, request): logger.info("Handle lnd get transactions") return self.lightning_client.stub.GetTransactions(request) def handle_lnd_list_peers(self, request): logger.info("Handle list peers") return self.lightning_client.stub.ListPeers(request) def handle_lnd_connect_peer(self, request): logger.info("Handle connect peer: {}".format(request)) return self.lightning_client.stub.ConnectPeer(request) def handle_lnd_disconnect_peer(self, request): logger.info("Handle disconnect peer: {}".format(request)) return self.lightning_client.stub.DisconnectPeer(request) def handle_lnd_open_channel_sync(self, request): logger.info("Handle open channel: {}".format(request)) return self.lightning_client.stub.OpenChannelSync(request) def handle_lnd_close_channel(self, request): logger.info("Handle close channel: {}".format(request)) return self.lightning_client.stub.CloseChannel(request) def handle_lnd_subscribe_channel_events(self, request): logger.info("Handle subscribe channel events") return self.lightning_client.stub.SubscribeChannelEvents(request) def handle_lnd_send_coins(self, request): logger.info("Handle send coins.") return self.lightning_client.stub.SendCoins(request) def handle_create_signing_profile(self, request): profile_name = request.profile_name logger.info( "Handle create signing profile with name: {}".format(profile_name)) profile_id = self.squeak_controller.create_signing_profile( profile_name) logger.info("New profile_id: {}".format(profile_id)) return squeak_admin_pb2.CreateSigningProfileReply( profile_id=profile_id, ) def handle_import_signing_profile(self, request): profile_name = request.profile_name private_key = request.private_key logger.info( "Handle import signing profile with name: {}".format(profile_name)) profile_id = self.squeak_controller.import_signing_profile( profile_name, private_key) logger.info("New profile_id: {}".format(profile_id)) return squeak_admin_pb2.ImportSigningProfileReply( profile_id=profile_id, ) def handle_create_contact_profile(self, request): profile_name = request.profile_name squeak_address = request.address logger.info( "Handle create contact profile with name: {}, address: {}".format( profile_name, squeak_address, ) ) profile_id = self.squeak_controller.create_contact_profile( profile_name, squeak_address ) logger.info("New profile_id: {}".format(profile_id)) return squeak_admin_pb2.CreateContactProfileReply( profile_id=profile_id, ) def handle_get_profiles(self, request): logger.info("Handle get profiles.") profiles = self.squeak_controller.get_profiles() logger.info("Got number of profiles: {}".format(len(profiles))) profile_msgs = [squeak_profile_to_message( profile) for profile in profiles] return squeak_admin_pb2.GetProfilesReply(squeak_profiles=profile_msgs) def handle_get_signing_profiles(self, request): logger.info("Handle get signing profiles.") profiles = self.squeak_controller.get_signing_profiles() logger.info("Got number of signing profiles: {}".format(len(profiles))) profile_msgs = [squeak_profile_to_message( profile) for profile in profiles] return squeak_admin_pb2.GetSigningProfilesReply(squeak_profiles=profile_msgs) def handle_get_contact_profiles(self, request): 
logger.info("Handle get contact profiles.") profiles = self.squeak_controller.get_contact_profiles() logger.info("Got number of contact profiles: {}".format(len(profiles))) profile_msgs = [squeak_profile_to_message( profile) for profile in profiles] return squeak_admin_pb2.GetContactProfilesReply(squeak_profiles=profile_msgs) def handle_get_squeak_profile(self, request): profile_id = request.profile_id logger.info("Handle get squeak profile with id: {}".format(profile_id)) squeak_profile = self.squeak_controller.get_squeak_profile(profile_id) squeak_profile_msg = optional_squeak_profile_to_message(squeak_profile) return squeak_admin_pb2.GetSqueakProfileReply( squeak_profile=squeak_profile_msg, ) def handle_get_squeak_profile_by_address(self, request): address = request.address logger.info( "Handle get squeak profile with address: {}".format(address)) squeak_profile = self.squeak_controller.get_squeak_profile_by_address( address) squeak_profile_msg = optional_squeak_profile_to_message(squeak_profile) return squeak_admin_pb2.GetSqueakProfileByAddressReply( squeak_profile=squeak_profile_msg ) def handle_get_squeak_profile_by_name(self, request): name = request.name logger.info("Handle get squeak profile with name: {}".format(name)) squeak_profile = self.squeak_controller.get_squeak_profile_by_name( name) squeak_profile_msg = optional_squeak_profile_to_message(squeak_profile) return squeak_admin_pb2.GetSqueakProfileByNameReply( squeak_profile=squeak_profile_msg ) def handle_set_squeak_profile_following(self, request): profile_id = request.profile_id following = request.following logger.info( "Handle set squeak profile following with profile id: {}, following: {}".format( profile_id, following, ) ) self.squeak_controller.set_squeak_profile_following( profile_id, following) return squeak_admin_pb2.SetSqueakProfileFollowingReply() def handle_rename_squeak_profile(self, request): profile_id = request.profile_id profile_name = request.profile_name logger.info( "Handle rename squeak profile with profile id: {}, new name: {}".format( profile_id, profile_name, ) ) self.squeak_controller.rename_squeak_profile(profile_id, profile_name) return squeak_admin_pb2.RenameSqueakProfileReply() def handle_delete_squeak_profile(self, request): profile_id = request.profile_id logger.info( "Handle delete squeak profile with id: {}".format(profile_id)) self.squeak_controller.delete_squeak_profile(profile_id) return squeak_admin_pb2.DeleteSqueakProfileReply() def handle_set_squeak_profile_image(self, request): profile_id = request.profile_id profile_image = request.profile_image logger.info( "Handle set squeak profile image with profile id: {}".format( profile_id, ) ) profile_image_bytes = base64_string_to_bytes(profile_image) self.squeak_controller.set_squeak_profile_image( profile_id, profile_image_bytes) return squeak_admin_pb2.SetSqueakProfileImageReply() def handle_clear_squeak_profile_image(self, request): profile_id = request.profile_id logger.info( "Handle clear squeak profile image with profile id: {}".format( profile_id, ) ) self.squeak_controller.clear_squeak_profile_image( profile_id, ) return squeak_admin_pb2.ClearSqueakProfileImageReply() def handle_get_squeak_profile_private_key(self, request): profile_id = request.profile_id logger.info( "Handle get squeak profile private key for id: {}".format(profile_id)) private_key = self.squeak_controller.get_squeak_profile_private_key( profile_id) return squeak_admin_pb2.GetSqueakProfilePrivateKeyReply( private_key=private_key ) def handle_make_squeak(self, 
request): profile_id = request.profile_id content_str = request.content replyto_hash_str = request.replyto replyto_hash = bytes.fromhex( replyto_hash_str) if replyto_hash_str else None logger.info("Handle make squeak profile with id: {}".format(profile_id)) inserted_squeak_hash = self.squeak_controller.make_squeak( profile_id, content_str, replyto_hash ) inserted_squeak_hash_str = optional_squeak_hash_to_hex( inserted_squeak_hash) return squeak_admin_pb2.MakeSqueakReply( squeak_hash=inserted_squeak_hash_str, ) def handle_get_squeak_display_entry(self, request): squeak_hash_str = request.squeak_hash squeak_hash = bytes.fromhex(squeak_hash_str) logger.info( "Handle get squeak display entry for hash: {}".format(squeak_hash_str)) squeak_entry = ( self.squeak_controller.get_squeak_entry( squeak_hash ) ) display_message = optional_squeak_entry_to_message( squeak_entry) return squeak_admin_pb2.GetSqueakDisplayReply( squeak_display_entry=display_message ) def handle_get_timeline_squeak_display_entries(self, request): limit = request.limit last_entry = message_to_squeak_entry(request.last_entry) if request.HasField( "last_entry") else None logger.info("""Handle get timeline squeak display entries with limit: {} last_entry: {} """.format( limit, last_entry, )) squeak_entries = ( self.squeak_controller.get_timeline_squeak_entries( limit, last_entry, ) ) logger.info( "Got number of timeline squeak entries: {}".format( len(squeak_entries) ) ) squeak_display_msgs = [ squeak_entry_to_message(entry) for entry in squeak_entries ] return squeak_admin_pb2.GetTimelineSqueakDisplaysReply( squeak_display_entries=squeak_display_msgs ) def handle_get_squeak_display_entries_for_address(self, request): address = request.address limit = request.limit last_entry = message_to_squeak_entry(request.last_entry) if request.HasField( "last_entry") else None logger.info("""Handle get squeak display entries for address: {} with limit: {} last_entry: {} """.format( address, limit, last_entry, )) squeak_entries = ( self.squeak_controller.get_squeak_entries_for_address( address, limit, last_entry, ) ) logger.info( "Got number of squeak entries for address: {}".format( len(squeak_entries) ) ) squeak_display_msgs = [ squeak_entry_to_message(entry) for entry in squeak_entries ] return squeak_admin_pb2.GetAddressSqueakDisplaysReply( squeak_display_entries=squeak_display_msgs ) def handle_get_squeak_display_entries_for_text_search(self, request): search_text = request.search_text limit = request.limit last_entry = message_to_squeak_entry(request.last_entry) if request.HasField( "last_entry") else None logger.info("""Handle get squeak display entries for search_text: {} with limit: {} last_entry: {} """.format( search_text, limit, last_entry, )) squeak_entries = ( self.squeak_controller.get_squeak_entries_for_text_search( search_text, limit, last_entry, ) ) logger.info( "Got number of squeak entries for text search: {}".format( len(squeak_entries) ) ) squeak_display_msgs = [ squeak_entry_to_message(entry) for entry in squeak_entries ] return squeak_admin_pb2.GetAddressSqueakDisplaysReply( squeak_display_entries=squeak_display_msgs ) def handle_get_ancestor_squeak_display_entries(self, request): squeak_hash_str = request.squeak_hash squeak_hash = bytes.fromhex(squeak_hash_str) logger.info( "Handle get ancestor squeak display entries for squeak hash: {}".format( squeak_hash_str ) ) squeak_entries = ( self.squeak_controller.get_ancestor_squeak_entries( squeak_hash, ) ) logger.info( "Got number of ancestor squeak entries: {}".format( 
len(squeak_entries) ) ) squeak_display_msgs = [ squeak_entry_to_message(entry) for entry in squeak_entries ] return squeak_admin_pb2.GetAncestorSqueakDisplaysReply( squeak_display_entries=squeak_display_msgs ) def handle_get_reply_squeak_display_entries(self, request): squeak_hash_str = request.squeak_hash squeak_hash = bytes.fromhex(squeak_hash_str) limit = request.limit last_entry = message_to_squeak_entry(request.last_entry) if request.HasField( "last_entry") else None logger.info("""Handle get reply squeak display entries for squeak hash: {} with limit: {} last_entry: {} """.format( squeak_hash_str, limit, last_entry, )) squeak_entries = ( self.squeak_controller.get_reply_squeak_entries( squeak_hash, limit, last_entry, ) ) logger.info( "Got number of reply squeak entries: {}".format( len(squeak_entries) ) ) squeak_display_msgs = [ squeak_entry_to_message(entry) for entry in squeak_entries ] return squeak_admin_pb2.GetReplySqueakDisplaysReply( squeak_display_entries=squeak_display_msgs ) def handle_delete_squeak(self, request): squeak_hash_str = request.squeak_hash squeak_hash = bytes.fromhex(squeak_hash_str) logger.info( "Handle delete squeak with hash: {}".format(squeak_hash_str)) self.squeak_controller.delete_squeak(squeak_hash) logger.info( "Deleted squeak entry with hash: {}".format(squeak_hash_str)) return squeak_admin_pb2.DeleteSqueakReply() def handle_create_peer(self, request): peer_name = request.peer_name peer_address = message_to_peer_address(request.peer_address) logger.info( "Handle create peer with name: {}, address: {}".format( peer_name, peer_address, ) ) peer_id = self.squeak_controller.create_peer( peer_name, peer_address, ) return squeak_admin_pb2.CreatePeerReply( peer_id=peer_id, ) def handle_get_squeak_peer(self, request): peer_id = request.peer_id logger.info("Handle get squeak peer with id: {}".format(peer_id)) squeak_peer = self.squeak_controller.get_peer(peer_id) logger.info("Got squeak peer: {}".format(squeak_peer)) squeak_peer_msg = optional_squeak_peer_to_message(squeak_peer) return squeak_admin_pb2.GetPeerReply( squeak_peer=squeak_peer_msg, ) def handle_get_squeak_peer_by_address(self, request): peer_address = message_to_peer_address(request.peer_address) logger.info( "Handle get squeak peer with address: {}".format(peer_address)) squeak_peer = self.squeak_controller.get_peer_by_address(peer_address) squeak_peer_msg = optional_squeak_peer_to_message(squeak_peer) return squeak_admin_pb2.GetPeerByAddressReply( squeak_peer=squeak_peer_msg, ) def handle_get_squeak_peers(self, request): logger.info("Handle get squeak peers") squeak_peers = self.squeak_controller.get_peers() squeak_peer_msgs = [ squeak_peer_to_message(squeak_peer) for squeak_peer in squeak_peers ] return squeak_admin_pb2.GetPeersReply( squeak_peers=squeak_peer_msgs, ) def handle_rename_squeak_peer(self, request): peer_id = request.peer_id peer_name = request.peer_name logger.info( "Handle rename peer with peer id: {}, new name: {}".format( peer_id, peer_name, ) ) self.squeak_controller.rename_peer(peer_id, peer_name) return squeak_admin_pb2.RenamePeerReply() def handle_set_squeak_peer_autoconnect(self, request): peer_id = request.peer_id autoconnect = request.autoconnect logger.info( "Handle set peer autoconnect with peer id: {}, autoconnect: {}".format( peer_id, autoconnect, ) ) self.squeak_controller.set_peer_autoconnect(peer_id, autoconnect) return squeak_admin_pb2.SetPeerAutoconnectReply() def handle_set_squeak_peer_share_for_free(self, request): peer_id = request.peer_id share_for_free = 
request.share_for_free logger.info( "Handle set peer share_for_free with peer id: {}, share_for_free: {}".format( peer_id, share_for_free, ) ) self.squeak_controller.set_peer_share_for_free(peer_id, share_for_free) return squeak_admin_pb2.SetPeerShareForFreeReply() def handle_delete_squeak_peer(self, request): peer_id = request.peer_id logger.info("Handle delete squeak peer with id: {}".format(peer_id)) self.squeak_controller.delete_peer(peer_id) return squeak_admin_pb2.DeletePeerReply() def handle_get_buy_offers(self, request): squeak_hash_str = request.squeak_hash squeak_hash = bytes.fromhex(squeak_hash_str) logger.info( "Handle get received offers for hash: {}".format(squeak_hash_str)) offers = self.squeak_controller.get_received_offers( squeak_hash) offer_msgs = [received_offer_to_message(offer) for offer in offers] return squeak_admin_pb2.GetBuyOffersReply( offers=offer_msgs, ) def handle_get_buy_offer(self, request): offer_id = request.offer_id logger.info("Handle get buy offer for hash: {}".format(offer_id)) received_offer = self.squeak_controller.get_received_offer(offer_id) received_offer_msg = optional_received_offer_to_message(received_offer) return squeak_admin_pb2.GetBuyOfferReply( offer=received_offer_msg, ) def handle_download_squeaks(self, request): addresses = request.addreses min_block = request.min_block_height max_block = request.max_block_height replyto_hash = request.replyto_squeak_hash logger.info("""Handle download squeaks for addreses: {} min_block: {} max_block: {} replyto_hash: {} """.format( addresses, min_block, max_block, replyto_hash, )) download_result = self.squeak_controller.download_squeaks( addresses, min_block, max_block, replyto_hash, ) logger.info("Download result: {}".format(download_result)) download_result_msg = download_result_to_message(download_result) return squeak_admin_pb2.DownloadSqueaksReply( download_result=download_result_msg, ) def handle_download_squeak(self, request): squeak_hash_str = request.squeak_hash squeak_hash = bytes.fromhex(squeak_hash_str) logger.info( "Handle download squeak with hash: {}".format(squeak_hash_str)) download_result = self.squeak_controller.download_single_squeak( squeak_hash) logger.info("Download result: {}".format(download_result)) download_result_msg = download_result_to_message(download_result) return squeak_admin_pb2.DownloadSqueakReply( download_result=download_result_msg, ) def handle_download_offers(self, request): squeak_hash_str = request.squeak_hash squeak_hash = bytes.fromhex(squeak_hash_str) logger.info( "Handle download offer for hash: {}".format(squeak_hash_str)) download_result = self.squeak_controller.download_offers(squeak_hash) logger.info("Download result: {}".format(download_result)) download_result_msg = download_result_to_message(download_result) return squeak_admin_pb2.DownloadOffersReply( download_result=download_result_msg, ) def handle_download_replies(self, request): squeak_hash_str = request.squeak_hash squeak_hash = bytes.fromhex(squeak_hash_str) logger.info( "Handle download replies for hash: {}".format(squeak_hash_str)) download_result = self.squeak_controller.download_replies(squeak_hash) logger.info("Download result: {}".format(download_result)) download_result_msg = download_result_to_message(download_result) return squeak_admin_pb2.DownloadRepliesReply( download_result=download_result_msg, ) def handle_download_address_squeaks(self, request): squeak_address = request.address logger.info( "Handle download address squeaks for address: {}".format(squeak_address)) 
download_result = self.squeak_controller.download_address_squeaks( squeak_address) logger.info("Download result: {}".format(download_result)) download_result_msg = download_result_to_message(download_result) return squeak_admin_pb2.DownloadAddressSqueaksReply( download_result=download_result_msg, ) def handle_pay_offer(self, request): offer_id = request.offer_id logger.info("Handle pay offer for offer id: {}".format(offer_id)) sent_payment_id = self.squeak_controller.pay_offer(offer_id) return squeak_admin_pb2.PayOfferReply( sent_payment_id=sent_payment_id, ) def handle_get_sent_payments(self, request): limit = request.limit last_sent_payment = message_to_sent_payment(request.last_sent_payment) if request.HasField( "last_sent_payment") else None logger.info("""Handle get sent payments with limit: {} last_sent_payment: {} """.format( limit, last_sent_payment, )) sent_payments = self.squeak_controller.get_sent_payments( limit, last_sent_payment, ) logger.info( "Got number of sent payments: {}".format( len(sent_payments) ) ) sent_payment_msgs = [ sent_payment_to_message(sent_payment) for sent_payment in sent_payments ] return squeak_admin_pb2.GetSentPaymentsReply( sent_payments=sent_payment_msgs, ) def handle_get_sent_payment(self, request): sent_payment_id = request.sent_payment_id logger.info( "Handle get sent payment with id: {}".format(sent_payment_id)) sent_payment = self.squeak_controller.get_sent_payment(sent_payment_id) sent_payment_msg = optional_sent_payment_to_message(sent_payment) return squeak_admin_pb2.GetSentPaymentReply( sent_payment=sent_payment_msg, ) def handle_get_sent_offers(self, request): logger.info("Handle get sent offers") sent_offers = self.squeak_controller.get_sent_offers() sent_offer_msgs = [ sent_offer_to_message(sent_offer) for sent_offer in sent_offers ] return squeak_admin_pb2.GetSentOffersReply( sent_offers=sent_offer_msgs, ) def handle_get_received_payments(self, request): limit = request.limit last_received_payment = message_to_received_payment(request.last_received_payment) if request.HasField( "last_received_payment") else None logger.info("""Handle get received payments with limit: {} last_received_payment: {} """.format( limit, last_received_payment, )) received_payments = self.squeak_controller.get_received_payments( limit, last_received_payment, ) logger.info( "Got number of received payments: {}".format( len(received_payments) ) ) received_payment_msgs = [ received_payment_to_message(received_payment) for received_payment in received_payments ] return squeak_admin_pb2.GetReceivedPaymentsReply( received_payments=received_payment_msgs, ) def handle_subscribe_received_payments(self, request, stopped): payment_index = request.payment_index logger.info( "Handle subscribe received payments with index: {}".format( payment_index) ) received_payments_stream = self.squeak_controller.subscribe_received_payments( payment_index, stopped, ) for received_payment in received_payments_stream: received_payment_msg = received_payment_to_message( received_payment) yield received_payment_msg def handle_get_network(self, request): logger.info("Handle get network") network = self.squeak_controller.get_network() return squeak_admin_pb2.GetNetworkReply( network=network, ) def handle_get_payment_summary(self, request): logger.info("Handle get payment summary") received_payment_summary = self.squeak_controller.get_received_payment_summary() sent_payment_summary = self.squeak_controller.get_sent_payment_summary() payment_summary_msg = payment_summary_to_message( 
received_payment_summary, sent_payment_summary, ) return squeak_admin_pb2.GetPaymentSummaryReply( payment_summary=payment_summary_msg, ) def handle_reprocess_received_payments(self, request): logger.info("Handle reprocess received payments") self.squeak_controller.reprocess_received_payments() return squeak_admin_pb2.ReprocessReceivedPaymentsReply() def handle_like_squeak(self, request: squeak_admin_pb2.LikeSqueakRequest): squeak_hash_str = request.squeak_hash squeak_hash = bytes.fromhex(squeak_hash_str) logger.info( "Handle like squeak with hash: {}".format(squeak_hash_str)) self.squeak_controller.like_squeak( squeak_hash ) return squeak_admin_pb2.LikeSqueakReply() def handle_unlike_squeak(self, request: squeak_admin_pb2.UnlikeSqueakRequest): squeak_hash_str = request.squeak_hash squeak_hash = bytes.fromhex(squeak_hash_str) logger.info( "Handle unlike squeak with hash: {}".format(squeak_hash_str)) self.squeak_controller.unlike_squeak( squeak_hash ) return squeak_admin_pb2.UnlikeSqueakReply() def handle_get_liked_squeak_display_entries(self, request):
def handle_connect_peer(self, request): logger.info("peer address msg: {}".format(request.peer_address)) peer_address = message_to_peer_address(request.peer_address) logger.info( "Handle connect peer with peer address: {}".format(peer_address)) self.squeak_controller.connect_peer(peer_address) return squeak_admin_pb2.ConnectPeerReply() def handle_get_connected_peers(self, request): logger.info("Handle get connected peers.") connected_peers = self.squeak_controller.get_connected_peers() logger.info("Connected peers: {}".format( connected_peers, )) connected_peers_display_msgs = [ connected_peer_to_message(peer) for peer in connected_peers ] return squeak_admin_pb2.GetConnectedPeersReply( connected_peers=connected_peers_display_msgs ) def handle_get_connected_peer(self, request): peer_address = message_to_peer_address(request.peer_address) logger.info("Handle get connected peer for address: {}".format( peer_address, )) connected_peer = self.squeak_controller.get_connected_peer( peer_address) logger.info("Connected peer: {}".format( connected_peer, )) connected_peers_display_msg = optional_connected_peer_to_message( connected_peer) return squeak_admin_pb2.GetConnectedPeerReply( connected_peer=connected_peers_display_msg ) def handle_disconnect_peer(self, request): peer_address = message_to_peer_address(request.peer_address) logger.info( "Handle disconnect peer with peer address: {}".format(peer_address)) self.squeak_controller.disconnect_peer(peer_address) return squeak_admin_pb2.DisconnectPeerReply() def handle_subscribe_connected_peers(self, request, stopped): logger.info("Handle subscribe connected peers") connected_peers_stream = self.squeak_controller.subscribe_connected_peers( stopped, ) for connected_peers in connected_peers_stream: connected_peers_display_msgs = [ connected_peer_to_message(peer) for peer in connected_peers ] yield squeak_admin_pb2.GetConnectedPeersReply( connected_peers=connected_peers_display_msgs ) def handle_subscribe_connected_peer(self, request, stopped): peer_address = message_to_peer_address(request.peer_address) logger.info( "Handle subscribe connected peer with peer address: {}".format(peer_address)) connected_peer_stream = self.squeak_controller.subscribe_connected_peer( peer_address, stopped, ) for connected_peer in connected_peer_stream: connected_peer_display_msg = optional_connected_peer_to_message( connected_peer) yield squeak_admin_pb2.GetConnectedPeerReply( connected_peer=connected_peer_display_msg, ) def handle_subscribe_buy_offers(self, request, stopped): squeak_hash_str = request.squeak_hash squeak_hash = bytes.fromhex(squeak_hash_str) logger.info( "Handle subscribe received offers for hash: {}".format(squeak_hash_str)) received_offer_stream = self.squeak_controller.subscribe_received_offers_for_squeak( squeak_hash, stopped, ) for offer in received_offer_stream: logger.info("Yielding received offer: {}".format(offer)) offer_msg = received_offer_to_message(offer) yield squeak_admin_pb2.GetBuyOfferReply( offer=offer_msg, ) def handle_subscribe_squeak_display(self, request, stopped): squeak_hash_str = request.squeak_hash squeak_hash = bytes.fromhex(squeak_hash_str) logger.info( "Handle subscribe squeak display for hash: {}".format(squeak_hash_str)) squeak_display_stream = self.squeak_controller.subscribe_squeak_entry( squeak_hash, stopped, ) for squeak_display in squeak_display_stream: display_message = optional_squeak_entry_to_message( squeak_display) yield squeak_admin_pb2.GetSqueakDisplayReply( squeak_display_entry=display_message ) def 
handle_subscribe_reply_squeak_displays(self, request, stopped): squeak_hash_str = request.squeak_hash squeak_hash = bytes.fromhex(squeak_hash_str) logger.info( "Handle subscribe reply squeak displays for hash: {}".format(squeak_hash_str)) squeak_display_stream = self.squeak_controller.subscribe_squeak_reply_entries( squeak_hash, stopped, ) for squeak_display in squeak_display_stream: display_message = optional_squeak_entry_to_message( squeak_display) yield squeak_admin_pb2.GetSqueakDisplayReply( squeak_display_entry=display_message ) def handle_subscribe_address_squeak_displays(self, request, stopped): squeak_address = request.address logger.info( "Handle subscribe address squeak displays for address: {}".format(squeak_address)) squeak_display_stream = self.squeak_controller.subscribe_squeak_address_entries( squeak_address, stopped, ) for squeak_display in squeak_display_stream: display_message = optional_squeak_entry_to_message( squeak_display) yield squeak_admin_pb2.GetSqueakDisplayReply( squeak_display_entry=display_message ) def handle_subscribe_ancestor_squeak_displays(self, request, stopped): squeak_hash_str = request.squeak_hash squeak_hash = bytes.fromhex(squeak_hash_str) logger.info( "Handle subscribe ancestor squeak displays for hash: {}".format(squeak_hash_str)) squeak_entries_stream = self.squeak_controller.subscribe_squeak_ancestor_entries( squeak_hash, stopped, ) for squeak_entries in squeak_entries_stream: logger.info( "Got number of ancestor squeak entries: {}".format( len(squeak_entries) ) ) squeak_display_msgs = [ squeak_entry_to_message(entry) for entry in squeak_entries ] yield squeak_admin_pb2.GetAncestorSqueakDisplaysReply( squeak_display_entries=squeak_display_msgs ) def handle_subscribe_squeak_displays(self, request, stopped): logger.info("Handle subscribe squeak displays") squeak_display_stream = self.squeak_controller.subscribe_squeak_entries( stopped, ) for squeak_display in squeak_display_stream: display_message = optional_squeak_entry_to_message( squeak_display) yield squeak_admin_pb2.GetSqueakDisplayReply( squeak_display_entry=display_message ) def handle_subscribe_timeline_squeak_displays(self, request, stopped): logger.info("Handle subscribe timeline squeak displays") squeak_display_stream = self.squeak_controller.subscribe_timeline_squeak_entries( stopped, ) for squeak_display in squeak_display_stream: display_message = optional_squeak_entry_to_message( squeak_display) yield squeak_admin_pb2.GetSqueakDisplayReply( squeak_display_entry=display_message ) def handle_get_external_address(self, request): logger.info("Handle get external address") external_address = self.squeak_controller.get_external_address() external_address_msg = peer_address_to_message(external_address) return squeak_admin_pb2.GetExternalAddressReply( peer_address=external_address_msg, ) def handle_get_default_peer_port(self, request): logger.info("Handle get default peer port") default_peer_port = self.squeak_controller.get_default_peer_port() return squeak_admin_pb2.GetDefaultPeerPortReply( port=default_peer_port, ) def handle_set_sell_price(self, request): sell_price_msat = request.price_msat logger.info("Handle set sell price msat to: {}".format( sell_price_msat, )) self.squeak_controller.set_sell_price_msat(sell_price_msat) return squeak_admin_pb2.SetSellPriceReply() def handle_clear_sell_price(self, request): logger.info("Handle clear sell price.") self.squeak_controller.clear_sell_price_msat() return squeak_admin_pb2.ClearSellPriceReply() def handle_get_sell_price(self, request): 
logger.info("Handle get sell price") sell_price_msat = self.squeak_controller.get_sell_price_msat() price_msat_is_set = sell_price_msat is not None default_sell_price_msat = self.squeak_controller.get_default_sell_price_msat() logger.info("sell price: {}".format(sell_price_msat)) logger.info("price_msat_is_set: {}".format(price_msat_is_set)) logger.info("default_sell_price_msat: {}".format( default_sell_price_msat)) return squeak_admin_pb2.GetSellPriceReply( price_msat=sell_price_msat, price_msat_is_set=price_msat_is_set, default_price_msat=default_sell_price_msat, ) def handle_set_twitter_bearer_token(self, request): twitter_bearer_token = request.bearer_token logger.info("Handle set twitter bearer token with value: {}".format( twitter_bearer_token, )) self.squeak_controller.set_twitter_bearer_token(twitter_bearer_token) return squeak_admin_pb2.SetTwitterBearerTokenReply() def handle_get_twitter_bearer_token(self, request): logger.info("Handle get twitter bearer token") twitter_bearer_token = self.squeak_controller.get_twitter_bearer_token() return squeak_admin_pb2.GetTwitterBearerTokenReply( bearer_token=twitter_bearer_token, ) def handle_add_twitter_account(self, request): handle = request.handle profile_id = request.profile_id logger.info("Handle add twitter account with handle: {} and profile_id: {}".format( handle, profile_id, )) twitter_account_id = self.squeak_controller.add_twitter_account( handle, profile_id, ) return squeak_admin_pb2.AddTwitterAccountReply( twitter_account_id=twitter_account_id, ) def handle_get_twitter_accounts(self, request): logger.info("Handle get twitter accounts") twitter_accounts = self.squeak_controller.get_twitter_accounts() logger.info("Got number of twitter accounts: {}".format( len(twitter_accounts))) twitter_account_msgs = [ twitter_account_to_message(twitter_account) for twitter_account in twitter_accounts ] return squeak_admin_pb2.GetTwitterAccountsReply( twitter_accounts=twitter_account_msgs, ) def handle_delete_twitter_account(self, request): twitter_account_id = request.twitter_account_id logger.info("Handle delete twitter account with id: {}".format( twitter_account_id, )) self.squeak_controller.delete_twitter_account( twitter_account_id, ) return squeak_admin_pb2.DeleteTwitterAccountReply()
limit = request.limit last_entry = message_to_squeak_entry(request.last_entry) if request.HasField( "last_entry") else None logger.info("""Handle get liked squeak display entries with limit: {} last_entry: {} """.format( limit, last_entry, )) squeak_entries = ( self.squeak_controller.get_liked_squeak_entries( limit, last_entry, ) ) logger.info( "Got number of liked squeak entries: {}".format( len(squeak_entries) ) ) squeak_display_msgs = [ squeak_entry_to_message(entry) for entry in squeak_entries ] return squeak_admin_pb2.GetLikedSqueakDisplaysReply( squeak_display_entries=squeak_display_msgs )
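Every handler in this file follows the same shape: log the request, delegate to the SqueakController (or the LND stub), then wrap the result in a squeak_admin_pb2 reply message. A minimal sketch of driving one handler with stubbed dependencies; the import path and the mock objects are assumptions for illustration, and it only runs inside a squeaknode checkout:

# Illustrative only: exercise handle_create_signing_profile with stubbed
# dependencies to show the log -> delegate -> wrap-in-reply pattern shared
# by the handlers above. The module path is assumed; the mocks stand in for
# the real LNDLightningClient and SqueakController.
from unittest.mock import Mock

from squeaknode.admin.squeak_admin_server_handler import SqueakAdminServerHandler  # assumed path

controller = Mock()
controller.create_signing_profile.return_value = 7  # pretend profile id from the db

handler = SqueakAdminServerHandler(
    lightning_client=Mock(),      # LNDLightningClient stand-in
    squeak_controller=controller,
)

request = Mock(profile_name="alice")  # stands in for CreateSigningProfileRequest
reply = handler.handle_create_signing_profile(request)
print(reply.profile_id)  # 7, carried back in the CreateSigningProfileReply message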
alternates.go
package objectpool import ( "context" "errors" "fmt" "io/ioutil" "os" "path/filepath" "sort" "strings" "time" "github.com/grpc-ecosystem/go-grpc-middleware/logging/logrus/ctxlogrus" "gitlab.com/gitlab-org/gitaly/v14/internal/git" "gitlab.com/gitlab-org/gitaly/v14/internal/helper" "gitlab.com/gitlab-org/gitaly/v14/internal/helper/text" "gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb" ) // DisconnectGitAlternates is a slightly dangerous RPC. It optimistically // hard-links all alternate objects we might need, and then temporarily // removes (renames) objects/info/alternates and runs 'git fsck'. If we // are unlucky that leaves the repository in a broken state during 'git // fsck'. If we are very unlucky and Gitaly crashes, the repository stays // in a broken state until an administrator intervenes and restores the // backed-up copy of objects/info/alternates. func (s *server) DisconnectGitAlternates(ctx context.Context, req *gitalypb.DisconnectGitAlternatesRequest) (*gitalypb.DisconnectGitAlternatesResponse, error) { repo := req.Repository if repo == nil { return nil, helper.ErrInvalidArgument(errors.New("no repository")) } if err := s.disconnectAlternates(ctx, repo); err != nil { return nil, helper.ErrInternal(err) } return &gitalypb.DisconnectGitAlternatesResponse{}, nil } func (s *server) disconnectAlternates(ctx context.Context, repo *gitalypb.Repository) error { repoPath, err := s.locator.GetRepoPath(repo) if err != nil { return err } altFile, err := s.locator.InfoAlternatesPath(repo) if err != nil { return err } altContents, err := ioutil.ReadFile(altFile) if err != nil { if os.IsNotExist(err) { return nil } return err } altDir := strings.TrimSpace(string(altContents)) if strings.Contains(altDir, "\n") { return &invalidAlternatesError{altContents: altContents} } if !filepath.IsAbs(altDir) { altDir = filepath.Join(repoPath, "objects", altDir) } stat, err := os.Stat(altDir) if err != nil { return err } if !stat.IsDir()
objectFiles, err := findObjectFiles(altDir) if err != nil { return err } for _, path := range objectFiles { source := filepath.Join(altDir, path) target := filepath.Join(repoPath, "objects", path) if err := os.MkdirAll(filepath.Dir(target), 0755); err != nil { return err } if err := os.Link(source, target); err != nil { if os.IsExist(err) { continue } return err } } backupFile, err := newBackupFile(altFile) if err != nil { return err } return s.removeAlternatesIfOk(ctx, repo, altFile, backupFile) } func newBackupFile(altFile string) (string, error) { randSuffix, err := text.RandomHex(6) if err != nil { return "", err } return fmt.Sprintf("%s.%d.%s", altFile, time.Now().Unix(), randSuffix), nil } func findObjectFiles(altDir string) ([]string, error) { var objectFiles []string if walkErr := filepath.Walk(altDir, func(path string, info os.FileInfo, err error) error { if err != nil { return err } rel, err := filepath.Rel(altDir, path) if err != nil { return err } if strings.HasPrefix(rel, "info/") { return nil } if info.IsDir() { return nil } objectFiles = append(objectFiles, rel) return nil }); walkErr != nil { return nil, walkErr } sort.Sort(objectPaths(objectFiles)) return objectFiles, nil } type fsckError struct{ error } func (fe *fsckError) Error() string { return fmt.Sprintf("git fsck error while disconnected: %v", fe.error) } type invalidAlternatesError struct { altContents []byte } func (e *invalidAlternatesError) Error() string { return fmt.Sprintf("invalid content in objects/info/alternates: %q", e.altContents) } // removeAlternatesIfOk is dangerous. We optimistically temporarily // rename objects/info/alternates, and run `git fsck` to see if the // resulting repo is connected. If this fails we restore // objects/info/alternates. If the repo is not connected for whatever // reason, then until this function returns, probably **all concurrent // RPC calls to the repo will fail**. Also, if Gitaly crashes in the // middle of this function, the repo is left in a broken state. We do // take care to leave a copy of the alternates file, so that it can be // manually restored by an administrator if needed. func (s *server) removeAlternatesIfOk(ctx context.Context, repo *gitalypb.Repository, altFile, backupFile string) error { if err := os.Rename(altFile, backupFile); err != nil { return err } rollback := true defer func() { if !rollback { return } logger := ctxlogrus.Extract(ctx) // If we would do a os.Rename, and then someone else comes and clobbers // our file, it's gone forever. This trick with os.Link and os.Rename // is equivalent to "cp $backupFile $altFile", meaning backupFile is // preserved for possible forensic use. 
tmp := backupFile + ".2" if err := os.Link(backupFile, tmp); err != nil { logger.WithError(err).Error("copy backup alternates file") return } if err := os.Rename(tmp, altFile); err != nil { logger.WithError(err).Error("restore backup alternates file") } }() cmd, err := s.gitCmdFactory.New(ctx, repo, git.SubCmd{ Name: "fsck", Flags: []git.Option{git.Flag{Name: "--connectivity-only"}}, }) if err != nil { return err } if err := cmd.Wait(); err != nil { return &fsckError{error: err} } rollback = false return nil } type objectPaths []string func (o objectPaths) Len() int { return len(o) } func (o objectPaths) Swap(i, j int) { o[i], o[j] = o[j], o[i] } func (o objectPaths) Less(i, j int) bool { return objectPriority(o[i]) <= objectPriority(o[j]) } // Based on pack_copy_priority in git/tmp-objdir.c func objectPriority(name string) int { if !strings.HasPrefix(name, "pack") { return 0 } if strings.HasSuffix(name, ".keep") { return 1 } if strings.HasSuffix(name, ".pack") { return 2 } if strings.HasSuffix(name, ".idx") { return 3 } return 4 }
{ return &invalidAlternatesError{altContents: altContents} }
configurable.py
# -*- coding: utf-8 -*- # Copyright 2015 Fanficdownloader team, 2019 FanFicFare team # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from __future__ import absolute_import import sys import re import codecs # py2 vs py3 transition from . import six from .six.moves import configparser from .six.moves.configparser import DEFAULTSECT, ParsingError if six.PY2: ConfigParser = configparser.SafeConfigParser else: # PY3 ConfigParser = configparser.ConfigParser from .six import string_types as basestring import logging logger = logging.getLogger(__name__) try: import chardet except ImportError: chardet = None from . import exceptions from . import fetcher from . import nsapa_proxy ## has to be up here for brotli-dict to load correctly. from .browsercache import BrowserCache # All of the writers(epub,html,txt) and adapters(ffnet,twlt,etc) # inherit from Configurable. The config file(s) uses ini format: # [sections] with key:value settings. # # [defaults] # titlepage_entries: category,genre, status # [www.whofic.com] # titlepage_entries: category,genre, status,dateUpdated,rating # [epub] # titlepage_entries: category,genre, status,datePublished,dateUpdated,dateCreated # [www.whofic.com:epub] # titlepage_entries: category,genre, status,datePublished # [overrides] # titlepage_entries: category # Work around for fact that py3 apparently doesn't allow/ignore # recursive imports like py2 does. try: from . import adapters except ImportError: if "fanficfare.adapters" in sys.modules: adapters = sys.modules["fanficfare.adapters"] elif "calibre_plugins.fanficfare_plugin.fanficfare.adapters" in sys.modules: adapters = sys.modules["calibre_plugins.fanficfare_plugin.fanficfare.adapters"] def re_compile(regex,line): try: return re.compile(regex,re.DOTALL) except Exception as e: raise exceptions.RegularExpresssionFailed(e,regex,line) # fall back labels. titleLabels = { 'category':'Category', 'genre':'Genre', 'language':'Language', 'status':'Status', 'series':'Series', 'characters':'Characters', 'ships':'Relationships', 'datePublished':'Published', 'dateUpdated':'Updated', 'dateCreated':'Packaged', 'rating':'Rating', 'warnings':'Warnings', 'numChapters':'Chapters', 'numWords':'Words', 'words_added':'Words Added', # logpage only 'site':'Site', 'publisher':'Publisher', 'storyId':'Story ID', 'authorId':'Author ID', 'extratags':'Extra Tags', 'title':'Title', 'storyUrl':'Story URL', 'sectionUrl':'Story URL Section', 'description':'Summary', 'author':'Author', 'authorUrl':'Author URL', 'formatname':'File Format', 'formatext':'File Extension', 'siteabbrev':'Site Abbrev', 'version':'Downloader Version' } formatsections = ['html','txt','epub','mobi'] othersections = ['defaults','overrides'] def get_valid_sections(): sites = adapters.getConfigSections() sitesections = list(othersections) for section in sites: sitesections.append(section) # also allows [www.base_efiction] and [www.base_xenforoforum]. Not # likely to matter. 
if section.startswith('www.'): # add w/o www if has www sitesections.append(section[4:]) else: # add w/ www if doesn't www sitesections.append('www.%s'%section) allowedsections = [] allowedsections.extend(formatsections) for section in sitesections: allowedsections.append(section) for f in formatsections: allowedsections.append('%s:%s'%(section,f)) return allowedsections def get_valid_list_entries(): return list(['category', 'genre', 'characters', 'ships', 'warnings', 'extratags', 'author', 'authorId', 'authorUrl', 'lastupdate', ]) boollist=['true','false'] base_xenforo2_list=['base_xenforo2forum', 'forums.sufficientvelocity.com', 'forums.spacebattles.com', 'www.alternatehistory.com', ] base_xenforo_list=base_xenforo2_list+['base_xenforoforum', 'forum.questionablequesting.com', ] def get_valid_set_options(): ''' dict() of names of boolean options, but as a tuple with valid sites, valid formats and valid values (None==all) This is to further restrict keywords to certain sections and/or values. get_valid_keywords() below is the list of allowed keywords. Any keyword listed here must also be listed there. This is what's used by the code when you save personal.ini in plugin that stops and points out possible errors in keyword *values*. It doesn't flag 'bad' keywords. Note that it's separate from color highlighting and most keywords need to be added to both. ''' valdict = {'collect_series':(None,None,boollist), 'include_titlepage':(None,None,boollist), 'include_tocpage':(None,None,boollist), 'is_adult':(None,None,boollist), 'keep_style_attr':(None,None,boollist), 'keep_title_attr':(None,None,boollist), 'make_firstimage_cover':(None,None,boollist), 'never_make_cover':(None,None,boollist), 'nook_img_fix':(None,None,boollist), 'replace_br_with_p':(None,None,boollist), 'replace_hr':(None,None,boollist), 'sort_ships':(None,None,boollist), 'strip_chapter_numbers':(None,None,boollist), 'mark_new_chapters':(None,None,boollist+['latestonly']), 'titlepage_use_table':(None,None,boollist), 'use_ssl_unverified_context':(None,None,boollist), 'use_cloudscraper':(None,None,boollist), 'use_basic_cache':(None,None,boollist), 'use_nsapa_proxy':(None,None,boollist), ## currently, browser_cache_path is assumed to be ## shared and only ffnet uses it so far 'browser_cache_path':(['defaults'],None,None), 'use_browser_cache':(['fanfiction.net'],None,boollist), 'use_browser_cache_only':(['fanfiction.net'],None,boollist), 'continue_on_chapter_error':(None,None,boollist), 'conditionals_use_lists':(None,None,boollist), 'dedup_chapter_list':(None,None,boollist), 'add_chapter_numbers':(None,None,boollist+['toconly']), 'check_next_chapter':(['fanfiction.net','fictionpress.com'],None,boollist), 'tweak_fg_sleep':(None,None,boollist), 'skip_author_cover':(['fanfiction.net','fictionpress.com'],None,boollist), 'fix_fimf_blockquotes':(['fimfiction.net'],None,boollist), 'fail_on_password':(['fimfiction.net'],None,boollist), 'keep_prequel_in_description':(['fimfiction.net'],None,boollist), 'include_author_notes':(['fimfiction.net','royalroad.com'],None,boollist), 'do_update_hook':(['fimfiction.net', 'archiveofourown.org'],None,boollist), 'always_login':(['archiveofourown.org']+base_xenforo_list,None,boollist), 'use_archived_author':(['archiveofourown.org'],None,boollist), 'use_view_full_work':(['archiveofourown.org'],None,boollist), 'remove_authorfootnotes_on_update':(['archiveofourown.org'],None,boollist), 'force_login':(['phoenixsong.net'],None,boollist), 'non_breaking_spaces':(['fictionmania.tv'],None,boollist), 
'download_text_version':(['fictionmania.tv'],None,boollist), 'universe_as_series':(['storiesonline.net','finestories.com','scifistories.com'],None,boollist), 'strip_text_links':(['bloodshedverse.com','asexstories.com'],None,boollist), 'centeredcat_to_characters':(['tthfanfic.org'],None,boollist), 'pairingcat_to_characters_ships':(['tthfanfic.org'],None,boollist), 'romancecat_to_characters_ships':(['tthfanfic.org'],None,boollist), 'use_meta_keywords':(['literotica.com'],None,boollist), 'chapter_categories_use_all':(['literotica.com'],None,boollist), 'clean_chapter_titles':(['literotica.com'],None,boollist), 'description_in_chapter':(['literotica.com'],None,boollist), 'inject_chapter_title':(['asianfanfics.com','storiesonline.net','finestories.com','scifistories.com'],None,boollist), 'append_datepublished_to_storyurl':(['storiesonline.net','finestories.com','scifistories.com'],None,boollist), 'auto_sub':(['asianfanfics.com'],None,boollist), # eFiction Base adapters allow bulk_load # kept forgetting to add them, so now it's automatic. 'bulk_load':(adapters.get_bulk_load_sites(), None,boollist), 'include_logpage':(None,['epub'],boollist+['smart']), 'logpage_at_end':(None,['epub'],boollist), 'calibre_series_meta':(None,['epub'],boollist), 'windows_eol':(None,['txt'],boollist), 'include_images':(None,['epub','html'],boollist), 'jpg_quality':(None,['epub','html'],None), 'additional_images':(None,['epub','html'],None), 'grayscale_images':(None,['epub','html'],boollist), 'no_image_processing':(None,['epub','html'],boollist), 'dedup_img_files':(None,['epub','html'],boollist), 'convert_inline_images':(None,['epub','html'],boollist), 'normalize_text_links':(None,['epub','html'],boollist), 'internalize_text_links':(None,['epub','html'],boollist), 'capitalize_forumtags':(base_xenforo_list,None,boollist), 'minimum_threadmarks':(base_xenforo_list,None,None), 'first_post_title':(base_xenforo_list,None,None), 'always_include_first_post':(base_xenforo_list,None,boollist), 'always_reload_first_chapter':(base_xenforo_list,None,boollist), 'always_use_forumtags':(base_xenforo_list,None,boollist), 'use_reader_mode':(base_xenforo_list,None,boollist), 'author_avatar_cover':(base_xenforo_list,None,boollist), 'remove_spoilers':(base_xenforo_list+['royalroad.com'],None,boollist), 'legend_spoilers':(base_xenforo_list+['royalroad.com', 'fiction.live'],None,boollist), 'apocrypha_to_omake':(base_xenforo_list,None,boollist), 'replace_failed_smilies_with_alt_text':(base_xenforo_list,None,boollist), 'use_threadmark_wordcounts':(base_xenforo_list,None,boollist), 'always_include_first_post_chapters':(base_xenforo_list,None,boollist), 'order_threadmarks_by_date':(base_xenforo_list,None,boollist), 'use_threadmarks_description':(base_xenforo2_list,None,boollist), 'use_threadmarks_status':(base_xenforo2_list,None,boollist), 'use_threadmarks_cover':(base_xenforo2_list,None,boollist), 'skip_sticky_first_posts':(base_xenforo2_list,None,boollist), 'include_dice_rolls':(base_xenforo2_list,None,boollist+['svg']), 'fix_pseudo_html': (['webnovel.com'], None, boollist), 'fix_excess_space': (['novelonlinefull.com', 'novelall.com'], ['epub', 'html'], boollist), 'dedup_order_chapter_list': (['wuxiaworld.co', 'novelupdates.cc'], None, boollist), 'show_nsfw_cover_images': (['fiction.live'], None, boollist), 'show_timestamps': (['fiction.live'], None, boollist), } return dict(valdict) def get_valid_scalar_entries(): return list(['series', 'seriesUrl', 'language', 'status', 'datePublished', 'dateUpdated', 'dateCreated', 'rating', 'numChapters', 
'numWords', 'words_added', # logpage only. 'site', 'publisher', 'storyId', 'title', 'titleHTML', 'storyUrl', 'sectionUrl', 'description', 'formatname', 'formatext', 'siteabbrev', 'version', # internal stuff. 'authorHTML', 'seriesHTML', 'langcode', 'output_css', 'cover_image', ]) def get_valid_entries(): return get_valid_list_entries() + get_valid_scalar_entries() # *known* keywords -- or rather regexps for them. def get_valid_keywords(): ''' Among other things, this list is used by the color highlighting in personal.ini editing in plugin. Note that it's separate from value checking and most keywords need to be added to both. ''' return list(['(in|ex)clude_metadata_(pre|post)', 'add_chapter_numbers', 'add_genre_when_multi_category', 'add_category_when_multi_category', 'adult_ratings', 'allow_unsafe_filename', 'always_overwrite', 'anthology_tags', 'anthology_title_pattern', 'background_color', 'bulk_load', 'chapter_end', 'chapter_start', 'chapter_title_strip_pattern', 'chapter_title_def_pattern', 'chapter_title_add_pattern', 'chapter_title_new_pattern', 'chapter_title_addnew_pattern', 'title_chapter_range_pattern', 'mark_new_chapters', 'check_next_chapter', 'skip_author_cover', 'collect_series', 'comma_entries', 'connect_timeout', 'convert_images_to', 'cover_content', 'cover_exclusion_regexp', 'custom_columns_settings', 'dateCreated_format', 'datePublished_format', 'dateUpdated_format', 'default_cover_image', 'description_limit', 'do_update_hook', 'use_archived_author', 'use_view_full_work', 'always_login', 'exclude_notes', 'remove_authorfootnotes_on_update', 'exclude_editor_signature', 'extra_logpage_entries', 'extra_subject_tags', 'extra_titlepage_entries', 'extra_valid_entries', 'extratags', 'extracategories', 'extragenres', 'extracharacters', 'extraships', 'extrawarnings', 'fail_on_password', 'file_end', 'file_start', 'fileformat', 'find_chapters', 'fix_fimf_blockquotes', 'keep_prequel_in_description', 'include_author_notes', 'force_login', 'generate_cover_settings', 'grayscale_images', 'image_max_size', 'include_images', 'jpg_quality', 'additional_images', 'include_logpage', 'logpage_at_end', 'calibre_series_meta', 'include_subject_tags', 'include_titlepage', 'include_tocpage', 'chardet_confidence_limit', 'is_adult', 'join_string_authorHTML', 'keep_style_attr', 'keep_title_attr', 'keep_html_attrs', 'replace_tags_with_spans', 'keep_empty_tags', 'remove_tags', 'keep_summary_html', 'logpage_end', 'logpage_entries', 'logpage_entry', 'logpage_start', 'logpage_update_end', 'logpage_update_start', 'make_directories', 'make_firstimage_cover', 'make_linkhtml_entries', 'max_fg_sleep', 'max_fg_sleep_at_downloads', 'min_fg_sleep', 'never_make_cover', 'cover_min_size', 'no_image_processing', 'dedup_img_files', 'convert_inline_images', 'non_breaking_spaces', 'download_text_version', 'nook_img_fix', 'output_css', 'output_filename', 'output_filename_safepattern', 'password', 'post_process_cmd', 'rating_titles', 'remove_transparency', 'replace_br_with_p', 'replace_hr', 'replace_xbr_with_hr', 'replace_metadata', 'slow_down_sleep_time', 'sort_ships', 'sort_ships_splits', 'strip_chapter_numbers', 'strip_chapter_numeral', 'strip_text_links', 'centeredcat_to_characters', 'pairingcat_to_characters_ships', 'romancecat_to_characters_ships', 'use_meta_keywords', 'chapter_categories_use_all', 'clean_chapter_titles', 'conditionals_use_lists', 'description_in_chapter', 'inject_chapter_title', 'append_datepublished_to_storyurl', 'auto_sub', 'titlepage_end', 'titlepage_entries', 'titlepage_entry', 
'titlepage_no_title_entry', 'titlepage_start', 'titlepage_use_table', 'titlepage_wide_entry', 'tocpage_end', 'tocpage_entry', 'tocpage_start', 'tweak_fg_sleep', 'universe_as_series', 'use_ssl_unverified_context', 'use_cloudscraper', 'use_basic_cache', 'use_browser_cache', 'use_browser_cache_only', 'use_nsapa_proxy', 'nsapa_proxy_address', 'nsapa_proxy_port', 'browser_cache_path', 'browser_cache_age_limit', 'user_agent', 'username', 'website_encodings', 'wide_titlepage_entries', 'windows_eol', 'wrap_width', 'zip_filename', 'zip_output', 'capitalize_forumtags', 'continue_on_chapter_error', 'chapter_title_error_mark', 'minimum_threadmarks', 'first_post_title', 'always_include_first_post', 'always_reload_first_chapter', 'always_use_forumtags', 'use_reader_mode', 'author_avatar_cover', 'reader_posts_per_page', 'remove_spoilers', 'legend_spoilers', 'apocrypha_to_omake', 'skip_threadmarks_categories', 'normalize_text_links', 'internalize_text_links', 'replace_failed_smilies_with_alt_text', 'use_threadmark_wordcounts', 'always_include_first_post_chapters', 'order_threadmarks_by_date', 'use_threadmarks_description', 'use_threadmarks_status', 'use_threadmarks_cover', 'skip_sticky_first_posts', 'include_dice_rolls', 'datethreadmark_format', 'fix_pseudo_html', 'fix_excess_space', 'dedup_order_chapter_list', 'ignore_chapter_url_list', 'dedup_chapter_list', 'show_timestamps', 'show_nsfw_cover_images', 'show_spoiler_tags', 'max_zalgo', 'epub_version', ]) # *known* entry keywords -- or rather regexps for them. def get_valid_entry_keywords(): return list(['%s_(label|format)', '(default_value|include_in|join_string|keep_in_order)_%s',]) # Moved here for test_config. def make_generate_cover_settings(param): vlist = [] for line in param.splitlines(): if "=>" in line: try: (template,regexp,setting) = [ x.strip() for x in line.split("=>") ] re_compile(regexp,line) vlist.append((template,regexp,setting)) except Exception as e: raise exceptions.PersonalIniFailed(e,line,param) return vlist class Configuration(ConfigParser): def __init__(self, sections, fileform, lightweight=False, basic_cache=None, browser_cache=None): site = sections[-1] # first section is site DN. ConfigParser.__init__(self) self.fetcher = None # the network layer for getting pages the self.sleeper = None # caching layer for getting pages, create one if not given. self.basic_cache = basic_cache or fetcher.BasicCache() # don't create a browser cache by default. self.browser_cache = browser_cache self.opener = None # used for _filelist self.lightweight = lightweight self.linenos=dict() # key by section or section,key -> lineno ## [injected] section has even less priority than [defaults] self.sectionslist = ['defaults','injected'] ## add other sections (not including site DN) after defaults, ## but before site-specific. for section in sections[:-1]: self.addConfigSection(section) if site.startswith("www."): sitewith = site sitewithout = site.replace("www.","") else: sitewith = "www."+site sitewithout = site self.addConfigSection(sitewith) self.addConfigSection(sitewithout) if fileform: self.addConfigSection(fileform) ## add other sections:fileform (not including site DN) ## after fileform, but before site-specific:fileform. 
for section in sections[:-1]: self.addConfigSection(section+":"+fileform) self.addConfigSection(sitewith+":"+fileform) self.addConfigSection(sitewithout+":"+fileform) self.addConfigSection("overrides") self.listTypeEntries = get_valid_list_entries() self.validEntries = get_valid_entries() self.url_config_set = False def section_url_names(self,domain,section_url_f): ## domain is passed as a method to limit the damage if/when an ## adapter screws up _section_url domain = domain.replace('www.','') ## let's not confuse the issue any more than it is. try: ## OrderDict (the default for ConfigParser) has to be ## reconstructed completely because removing and re-adding ## a section would mess up the order. ## assumes _dict and _sections from ConfigParser parent. self._sections = self._dict((section_url_f(k) if (domain in k and 'http' in k) else k, v) for k, v in six.viewitems(self._sections)) # logger.debug(self._sections.keys()) except Exception as e: logger.warning("Failed to perform section_url_names: %s"%e) def addUrlConfigSection(self,url): if not self.lightweight: # don't need when just checking for normalized URL. # replace if already set once. if self.url_config_set: self.sectionslist[self.sectionslist.index('overrides')+1]=url else: self.addConfigSection(url,'overrides') self.url_config_set=True def addConfigSection(self,section,before=None): if section not in self.sectionslist: # don't add if already present. if before is None: self.sectionslist.insert(0,section) else: ## because sectionslist is hi-pri first, lo-pri last, ## 'before' means after in the list. self.sectionslist.insert(self.sectionslist.index(before)+1,section) def isListType(self,key): return key in self.listTypeEntries or self.hasConfig("include_in_"+key) def isValidMetaEntry(self, key): return key in self.getValidMetaList() def getValidMetaList(self): return self.validEntries + self.getConfigList("extra_valid_entries") # used by adapters & writers, non-convention naming style def hasConfig(self, key): return self.has_config(self.sectionslist, key) def has_config(self, sections, key): for section in sections: try: self.get(section,key) #print("found %s in section [%s]"%(key,section)) return True except: try: self.get(section,key+"_filelist") #print("found %s_filelist in section [%s]"%(key,section)) return True except: try: self.get(section,"add_to_"+key) #print("found add_to_%s in section [%s]"%(key,section)) return True except: pass return False # used by adapters & writers, non-convention naming style def getConfig(self, key, default=""): return self.get_config(self.sectionslist,key,default) def get_config(self, sections, key, default=""): val = default val_files = [] if not key.endswith("_filelist"): ## <key>_filelist overrides <key>, but add_to_<key> is ## still used. By using self.get_config_list(), ## add_to_<key>_filelist also works. (But not ## <key>_filelist_filelist--that way lies madness--and ## infinite recursion.) self.get_config_list() also does ## the list split & clean up. val_files = self.get_config_list(sections, key+"_filelist") file_val = False if val_files: val = '' for v in val_files: try: val = val + self._read_file_opener(v) file_val = True except: pass if not file_val: logger.warning("All files for (%s) failed! Using (%s) instead. 
Filelist: (%s)"% (key+"_filelist",key,val_files)) if not file_val: for section in sections: try: val = self.get(section,key) if val and val.lower() == "false": val = False #print("getConfig(%s)=[%s]%s" % (key,section,val)) break except (configparser.NoOptionError, configparser.NoSectionError) as e: pass for section in sections[::-1]: # 'martian smiley' [::-1] reverses list by slicing whole list with -1 step. try: val = val + self.get(section,"add_to_"+key) #print("getConfig(add_to_%s)=[%s]%s" % (key,section,val)) except (configparser.NoOptionError, configparser.NoSectionError) as e: pass return val # split and strip each. def get_config_list(self, sections, key, default=[]): vlist = re.split(r'(?<!\\),',self.get_config(sections,key)) # don't split on \, vlist = [x for x in [ v.strip().replace(r'\,',',') for v in vlist ] if x !=''] if not vlist: return default else: return vlist # used by adapters & writers, non-convention naming style def getConfigList(self, key, default=[]): return self.get_config_list(self.sectionslist, key, default) # Moved here for test_config. def get_generate_cover_settings(self): return make_generate_cover_settings(self.getConfig('generate_cover_settings')) def get_lineno(self,section,key=None): if key: return self.linenos.get(section+','+key,None) else: return self.linenos.get(section,None) ## Copied from Python 2.7 library so as to make read utf8. def read(self, filenames): """Read and parse a filename or a list of filenames. Files that cannot be opened are silently ignored; this is designed so that you can specify a list of potential configuration file locations (e.g. current directory, user's home directory, systemwide directory), and all existing configuration files in the list will be read. A single filename may also be given. Return list of successfully read files. """ if isinstance(filenames, basestring): filenames = [filenames] read_ok = [] for filename in filenames: try: fp = codecs.open(filename,encoding='utf-8') except IOError: continue self._read(fp, filename) fp.close() read_ok.append(filename) return read_ok ## Copied from Python 2.7 library so as to make it save linenos too. # # Regular expressions for parsing section headers and options. # def _read(self, fp, fpname): """Parse a sectioned setup file. The sections in setup file contains a title line at the top, indicated by a name in square brackets (`[]'), plus key/value options lines, indicated by `name: value' format lines. Continuations are represented by an embedded newline then leading whitespace. Blank lines, lines beginning with a '#', and just about everything else are ignored. """ cursect = None # None, or a dictionary optname = None lineno = 0 e = None # None, or an exception while True: line = fp.readline() if not line: break lineno = lineno + 1 # comment or blank line? if line.strip() == '' or line[0] in '#;': continue if line.split(None, 1)[0].lower() == 'rem' and line[0] in "rR": # no leading whitespace continue # continuation line? if line[0].isspace() and cursect is not None and optname: value = line.strip() if value: cursect[optname] = "%s\n%s" % (cursect[optname], value) # a section header or option header? else: # is it a section header? 
mo = self.SECTCRE.match(line) if mo: sectname = mo.group('header') if sectname in self._sections: cursect = self._sections[sectname] elif sectname == DEFAULTSECT: cursect = self._defaults else: cursect = self._dict() cursect['__name__'] = sectname self._sections[sectname] = cursect self.linenos[sectname]=lineno # So sections can't start with a continuation line optname = None # no section header in the file? elif cursect is None: if not e: e = ParsingError(fpname) e.append(lineno, u'(Line outside section) '+line) #raise MissingSectionHeaderError(fpname, lineno, line) # an option line? else: mo = self.OPTCRE.match(line) # OPTCRE instead of # _optcre so it works # with python 2.6 if mo: optname, vi, optval = mo.group('option', 'vi', 'value') # This check is fine because the OPTCRE cannot # match if it would set optval to None if optval is not None: if vi in ('=', ':') and ';' in optval: # ';' is a comment delimiter only if it follows # a spacing character pos = optval.find(';') if pos != -1 and optval[pos-1].isspace(): optval = optval[:pos] optval = optval.strip() # allow empty values if optval == '""': optval = '' optname = self.optionxform(optname.rstrip()) cursect[optname] = optval self.linenos[cursect['__name__']+','+optname]=lineno else: # a non-fatal parsing error occurred. set up the # exception but keep going. the exception will be # raised at the end of the file and will contain a # list of all bogus lines if not e: e = ParsingError(fpname) e.append(lineno, line) # if any parsing errors occurred, raise an exception if e: raise e def test_config(self): errors=[] ## too complicated right now to enforce ## get_valid_set_options() warnings on teststory and ## [storyUrl] sections. allow_all_sections_re = re.compile(r'^(teststory:(defaults|[0-9]+)|https?://.*)$') allowedsections = get_valid_sections() clude_metadata_re = re.compile(r'(add_to_)?(in|ex)clude_metadata_(pre|post)$') replace_metadata_re = re.compile(r'(add_to_)?replace_metadata$') from .story import set_in_ex_clude, make_replacements custom_columns_settings_re = re.compile(r'(add_to_)?custom_columns_settings$') generate_cover_settings_re = re.compile(r'(add_to_)?generate_cover_settings$') valdict = get_valid_set_options() for section in self.sections(): allow_all_section = allow_all_sections_re.match(section) if section not in allowedsections and not allow_all_section: errors.append((self.get_lineno(section),"Bad Section Name: [%s]"%section)) else: sitename = section.replace('www.','') if ':' in sitename: formatname = sitename[sitename.index(':')+1:] sitename = sitename[:sitename.index(':')] elif sitename in formatsections: formatname = sitename sitename = None elif sitename in othersections: formatname = None sitename = None ## check each keyword in section. Due to precedence ## order of sections, it's possible for bad lines to ## never be used. for keyword,value in self.items(section): try: ## check regex bearing keywords first. Each ## will raise exceptions if flawed. 
if clude_metadata_re.match(keyword): set_in_ex_clude(value) if replace_metadata_re.match(keyword): make_replacements(value) if generate_cover_settings_re.match(keyword): make_generate_cover_settings(value) # if custom_columns_settings_re.match(keyword): #custom_columns_settings: # cliches=>#acolumn # themes=>#bcolumn,a # timeline=>#ccolumn,n # "FanFiction"=>#collection if not allow_all_section: def make_sections(x): return '['+'], ['.join(x)+']' if keyword in valdict: (valsites,valformats,vals)=valdict[keyword] if valsites != None and sitename != None and sitename not in valsites: errors.append((self.get_lineno(section,keyword),"%s not valid in section [%s] -- only valid in %s sections."%(keyword,section,make_sections(valsites)))) if valformats != None and formatname != None and formatname not in valformats: errors.append((self.get_lineno(section,keyword),"%s not valid in section [%s] -- only valid in %s sections."%(keyword,section,make_sections(valformats)))) if vals != None and value not in vals: errors.append((self.get_lineno(section,keyword),"%s not a valid value for %s"%(value,keyword))) ## skipping output_filename_safepattern ## regex--not used with plugin and this isn't ## used with CLI/web yet. except Exception as e: errors.append((self.get_lineno(section,keyword),"Error:%s in (%s:%s)"%(e,keyword,value))) return errors def _read_file_opener(self,fn): ''' For reading urls from _filelist entries. Used to use same fetch routines as for getting stories, but a) those make dependencies a mess and b) that has a lot more complication now with different caching. ''' if not self.opener: from .six.moves.urllib.request import build_opener self.opener = build_opener() # can't use with: structure in Cal v2.85.1 resp = self.opener.open(fn,None) data = resp.read() retval = None for code in self.getConfigList('filelist_encodings', default=["utf8", "Windows-1252", "iso-8859-1"]): try: retval = data.decode(code) break except: logger.debug("failed decode (%s) as (%s)"%(fn,code)) resp.close() return retval #### methods for fetching. Moved here from base_adapter when #### *_filelist feature was added. def get_fetcher(self, make_new = False): cookiejar = None if self.fetcher is not None and make_new: cookiejar = self.get_fetcher().get_cookiejar() # save and re-apply cookiejar when make_new. if not self.fetcher or make_new: if self.getConfig('use_nsapa_proxy',False): logger.debug("use_nsapa_proxy:%s"%self.getConfig('use_nsapa_proxy')) fetchcls = nsapa_proxy.NSAPA_ProxyFetcher elif self.getConfig('use_cloudscraper',False): logger.debug("use_cloudscraper:%s"%self.getConfig('use_cloudscraper')) fetchcls = fetcher.CloudScraperFetcher else: fetchcls = fetcher.RequestsFetcher self.fetcher = fetchcls(self.getConfig, self.getConfigList) ######################################################## ## Adding fetcher decorators. Order matters--last added, ## first called. If ProgressBarDecorator is added before ## Cache, it's never called for cache hits, for example. ## doesn't sleep when fromcache==True ## saved for set_sleep self.sleeper = fetcher.SleepDecorator() self.sleeper.decorate_fetcher(self.fetcher) ## cache decorator terminates the chain when found. logger.debug("use_browser_cache:%s"%self.getConfig('use_browser_cache')) if self.getConfig('use_browser_cache'): logger.debug("browser_cache_path:%s"%self.getConfig('browser_cache_path')) try: ## make a data list of decorators to re-apply if ## there are many more. 
if self.browser_cache is None: self.browser_cache = BrowserCache(self.getConfig("browser_cache_path"), age_limit=self.getConfig("browser_cache_age_limit")) fetcher.BrowserCacheDecorator(self.browser_cache).decorate_fetcher(self.fetcher) except Exception as e: logger.warn("Failed to setup BrowserCache(%s)"%e) raise ## cache decorator terminates the chain when found. logger.debug("use_basic_cache:%s"%self.getConfig('use_basic_cache')) if self.getConfig('use_basic_cache') and self.basic_cache is not None: fetcher.BasicCacheDecorator(self.basic_cache).decorate_fetcher(self.fetcher) if self.getConfig('progressbar'): fetcher.ProgressBarDecorator().decorate_fetcher(self.fetcher) if cookiejar is not None: self.fetcher.set_cookiejar(cookiejar) return self.fetcher ## used by plugin to change time for ffnet. def set_sleep_override(self,val): return self.sleeper.set_sleep_override(val) def get_cookiejar(self,filename=None): return self.get_fetcher().get_cookiejar(filename) def set_cookiejar(self,cookiejar): self.get_fetcher().set_cookiejar(cookiejar) def get_basic_cache(self): return self.basic_cache ## replace cache, then replace fetcher (while keeping cookiejar) ## to replace fetcher decorators. def set_basic_cache(self,cache): self.basic_cache = cache self.get_fetcher(make_new=True) def get_browser_cache(self): logger.debug("1configuration.get_browser_cache:%s"%self.browser_cache) if self.browser_cache is None: # force generation of browser cache if not there self.get_fetcher() logger.debug("2configuration.get_browser_cache:%s"%self.browser_cache) return self.browser_cache ## replace cache, then replace fetcher (while keeping cookiejar) ## to replace fetcher decorators. def set_browser_cache(self,cache): self.browser_cache = cache logger.debug("configuration.set_browser_cache:%s"%self.browser_cache) self.get_fetcher(make_new=True) # extended by adapter, writer and story for ease of calling configuration. class Configurable(object): def __init__(self, configuration): self.configuration = configuration def section_url_names(self,domain,section_url_f): return self.configuration.section_url_names(domain,section_url_f) def get_configuration(self): return self.configuration def is_lightweight(self): return self.configuration.lightweight def addUrlConfigSection(self,url): self.configuration.addUrlConfigSection(url) def isListType(self,key): return self.configuration.isListType(key) def isValidMetaEntry(self, key): return self.configuration.isValidMetaEntry(key) def
(self): return self.configuration.getValidMetaList() def hasConfig(self, key): return self.configuration.hasConfig(key) def has_config(self, sections, key): return self.configuration.has_config(sections, key) def getConfig(self, key, default=""): return self.configuration.getConfig(key,default) def get_config(self, sections, key, default=""): return self.configuration.get_config(sections,key,default) def getConfigList(self, key, default=[]): return self.configuration.getConfigList(key,default) def get_config_list(self, sections, key): return self.configuration.get_config_list(sections,key) def get_label(self, entry): if self.hasConfig(entry+"_label"): label=self.getConfig(entry+"_label") elif entry in titleLabels: label=titleLabels[entry] else: label=entry.title() return label
getValidMetaList
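The Configuration class in this entry resolves every keyword by walking sectionslist from highest to lowest priority ([overrides], then URL and site sections, down to [defaults] and [injected]): the first section that defines the key wins, and any add_to_<key> values are then appended in reverse (low-priority-first) order. A minimal Python sketch of that lookup order follows; the section data and function name are illustrative only, and the real FanFicFare code additionally handles <key>_filelist aggregation.

# Illustrative sketch of section-precedence lookup, not FanFicFare's parser.
def lookup(sections_hi_to_lo, data, key, default=""):
    val = default
    for section in sections_hi_to_lo:            # first (highest-priority) hit wins
        if key in data.get(section, {}):
            val = data[section][key]
            break
    for section in reversed(sections_hi_to_lo):  # add_to_<key>: low priority appended first
        val += data.get(section, {}).get("add_to_" + key, "")
    return val

data = {
    "defaults": {"extratags": "FanFiction", "add_to_extratags": ",Archived"},
    "www.example.com": {"extratags": "FanFiction,Example"},
}
print(lookup(["www.example.com", "defaults"], data, "extratags"))
# -> "FanFiction,Example,Archived"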
cms-address.component.ts
import { ChangeDetectionStrategy, Component, OnInit } from '@angular/core'; import { FormControl, FormGroup } from '@angular/forms'; import { IComponentConfig, IComponent } from './../config'; @Component({ selector: 'cms-address', templateUrl: './cms-address.component.html', styleUrls: ['./cms-address.component.css'], changeDetection: ChangeDetectionStrategy.OnPush }) export class
implements IComponentConfig, OnInit { config?: IComponent; formGroup: FormGroup = new FormGroup({}); controls: string[] = []; ngOnInit(): void { const formConfig = this.config?.address; if (!formConfig) { throw new Error(`An address component must have a defined address configuration`); } this.controls = Object.keys(formConfig).filter(control => control !== 'zip'); const group: { [key: string]: FormControl } = {}; for (const control of this.controls) { group[control] = new FormControl(); } group.zip = new FormControl(); this.formGroup = new FormGroup(group); } saveForm(): void { console.log(this.formGroup.value); } }
CmsAddressComponent
time.rs
use types::{TimestampMillis, TimestampNanos}; const NANOS_PER_MILLISECOND: u64 = 1_000_000; pub fn now_millis() -> TimestampMillis { ic_cdk::api::time() as u64 / NANOS_PER_MILLISECOND } pub fn
() -> TimestampNanos { ic_cdk::api::time() as u64 }
now_nanos
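now_millis simply divides the canister's nanosecond clock by 1_000_000, so millisecond timestamps are truncated rather than rounded. A tiny Python sketch of the same arithmetic (names are illustrative):

NANOS_PER_MILLISECOND = 1_000_000

def to_millis(nanos: int) -> int:
    # integer division truncates, matching the Rust u64 division above
    return nanos // NANOS_PER_MILLISECOND

assert to_millis(1_999_999) == 1   # the sub-millisecond remainder is dropped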
FolderNotchMinus.tsx
/* GENERATED FILE */ import * as React from 'react'; import Svg, { Rect, Path, Line } from 'react-native-svg'; import { IconProps } from '../lib'; function
(props: IconProps) { return ( <Svg id="Raw" viewBox="0 0 256 256" width={props.size} height={props.size} {...props} > <Rect width={256} height={256} fill="none" /> <Path d="M128,80h88a8,8,0,0,1,8,8V200a8,8,0,0,1-8,8H40a8.02352,8.02352,0,0,1-8-8V104" fill="none" stroke={props.color} strokeLinecap="round" strokeLinejoin="round" strokeWidth={16} /> <Path d="M93.33333,104H32V64a8,8,0,0,1,8-8H93.33333a8,8,0,0,1,4.8,1.6L128,80,98.13333,102.4A8,8,0,0,1,93.33333,104Z" fill="none" stroke={props.color} strokeLinecap="round" strokeLinejoin="round" strokeWidth={16} /> <Line x1={103.99707} y1={148} x2={151.99707} y2={148} fill="none" stroke={props.color} strokeLinecap="round" strokeLinejoin="round" strokeWidth={16} /> </Svg> ); } export default FolderNotchMinus;
FolderNotchMinus
BatchChangeListPage.tsx
import classNames from 'classnames' import React, { useEffect, useCallback, useState, useMemo } from 'react' import { RouteComponentProps } from 'react-router' import { Observable, ReplaySubject } from 'rxjs' import { filter, map, tap, withLatestFrom } from 'rxjs/operators' import { TelemetryProps } from '@sourcegraph/shared/src/telemetry/telemetryService' import { useObservable } from '@sourcegraph/shared/src/util/useObservable' import { Page } from '@sourcegraph/web/src/components/Page' import { Container, PageHeader } from '@sourcegraph/wildcard' import { BatchChangesIcon } from '../../../batches/icons' import { FilteredConnection, FilteredConnectionFilter } from '../../../components/FilteredConnection' import { ListBatchChange, Scalars, BatchChangeState, BatchChangesVariables, BatchChangesResult, BatchChangesByNamespaceVariables, } from '../../../graphql-operations' import { areBatchChangesLicensed as _areBatchChangesLicensed, queryBatchChanges as _queryBatchChanges, queryBatchChangesByNamespace, } from './backend' import styles from './BatchChangeListPage.module.scss' import { BatchChangeNode, BatchChangeNodeProps } from './BatchChangeNode' import { BatchChangesListIntro } from './BatchChangesListIntro' import { GettingStarted } from './GettingStarted' import { NewBatchChangeButton } from './NewBatchChangeButton' export interface BatchChangeListPageProps extends TelemetryProps, Pick<RouteComponentProps, 'location'> { headingElement: 'h1' | 'h2' displayNamespace?: boolean /** For testing only. */ queryBatchChanges?: typeof _queryBatchChanges /** For testing only. */ areBatchChangesLicensed?: typeof _areBatchChangesLicensed /** For testing only. */ openTab?: SelectedTab } const FILTERS: FilteredConnectionFilter[] = [ { id: 'status', label: 'Status', type: 'radio', values: [ { label: 'Open', value: 'open', tooltip: 'Show only batch changes that are open', args: { state: BatchChangeState.OPEN }, }, { label: 'Closed', value: 'closed', tooltip: 'Show only batch changes that are closed', args: { state: BatchChangeState.CLOSED }, }, { label: 'All', value: 'all', tooltip: 'Show all batch changes', args: {}, }, ], }, ] type SelectedTab = 'batchChanges' | 'gettingStarted' /** * A list of all batch changes on the Sourcegraph instance. */ export const BatchChangeListPage: React.FunctionComponent<BatchChangeListPageProps> = ({ queryBatchChanges = _queryBatchChanges, areBatchChangesLicensed = _areBatchChangesLicensed, displayNamespace = true, headingElement, location, openTab, ...props }) => { useEffect(() => props.telemetryService.logViewEvent('BatchChangesListPage'), [props.telemetryService]) /* * Tracks whether this is the first fetch since this page has been rendered the first time.
const subject = new ReplaySubject(1) subject.next(true) return subject }, []) const [selectedTab, setSelectedTab] = useState<SelectedTab>(openTab ?? 'batchChanges') const query = useCallback<(args: Partial<BatchChangesVariables>) => Observable<BatchChangesResult['batchChanges']>>( args => queryBatchChanges(args).pipe( withLatestFrom(isFirstFetch), tap(([response, isFirst]) => { if (isFirst) { isFirstFetch.next(false) if (!openTab && response.totalCount === 0) { setSelectedTab('gettingStarted') } } }), // Don't emit when we are switching to the getting started tab right away to prevent a costly render. // Only if: // - We don't fetch for the first time (the user clicked a tab) OR // - There are more than 0 changesets in the namespace OR // - A test forces us to display a specific tab filter(([response, isFirst]) => !isFirst || openTab !== undefined || response.totalCount > 0), map(([response]) => response.batchChanges) ), [queryBatchChanges, isFirstFetch, openTab] ) const licensed: boolean | undefined = useObservable( useMemo(() => areBatchChangesLicensed(), [areBatchChangesLicensed]) ) return ( <Page> <PageHeader path={[{ icon: BatchChangesIcon, text: 'Batch Changes' }]} className="test-batches-list-page mb-3" actions={<NewBatchChangeButton to={`${location.pathname}/create`} />} headingElement={headingElement} description="Run custom code over hundreds of repositories and manage the resulting changesets." /> <BatchChangesListIntro licensed={licensed} /> <BatchChangeListTabHeader selectedTab={selectedTab} setSelectedTab={setSelectedTab} /> {selectedTab === 'gettingStarted' && <GettingStarted className="mb-4" footer={<GettingStartedFooter />} />} {selectedTab === 'batchChanges' && ( <Container className="mb-4"> <FilteredConnection<ListBatchChange, Omit<BatchChangeNodeProps, 'node'>> {...props} location={location} nodeComponent={BatchChangeNode} nodeComponentProps={{ displayNamespace }} queryConnection={query} hideSearch={true} defaultFirst={15} filters={FILTERS} noun="batch change" pluralNoun="batch changes" listComponent="div" listClassName={styles.batchChangeListPageGrid} withCenteredSummary={true} cursorPaging={true} noSummaryIfAllNodesVisible={true} emptyElement={<BatchChangeListEmptyElement location={location} />} /> </Container> )} </Page> ) } export interface NamespaceBatchChangeListPageProps extends BatchChangeListPageProps { namespaceID: Scalars['ID'] } /** * A list of all batch changes in a namespace. */ export const NamespaceBatchChangeListPage: React.FunctionComponent<NamespaceBatchChangeListPageProps> = ({ namespaceID, ...props }) => { const queryConnection = useCallback( (args: Partial<BatchChangesByNamespaceVariables>) => queryBatchChangesByNamespace({ namespaceID, first: args.first ?? null, after: args.after ?? null, // The types for FilteredConnectionQueryArguments don't allow access to the filter arguments. state: (args as { state: BatchChangeState | undefined }).state ?? 
null, viewerCanAdminister: null, }), [namespaceID] ) return <BatchChangeListPage {...props} displayNamespace={false} queryBatchChanges={queryConnection} /> } interface BatchChangeListEmptyElementProps extends Pick<RouteComponentProps, 'location'> {} const BatchChangeListEmptyElement: React.FunctionComponent<BatchChangeListEmptyElementProps> = ({ location }) => ( <div className="w-100 py-5 text-center"> <p> <strong>No batch changes have been created</strong> </p> <NewBatchChangeButton to={`${location.pathname}/create`} /> </div> ) const BatchChangeListTabHeader: React.FunctionComponent<{ selectedTab: SelectedTab setSelectedTab: (selectedTab: SelectedTab) => void }> = ({ selectedTab, setSelectedTab }) => { const onSelectBatchChanges = useCallback<React.MouseEventHandler>( event => { event.preventDefault() setSelectedTab('batchChanges') }, [setSelectedTab] ) const onSelectGettingStarted = useCallback<React.MouseEventHandler>( event => { event.preventDefault() setSelectedTab('gettingStarted') }, [setSelectedTab] ) return ( <div className="overflow-auto mb-2"> <ul className="nav nav-tabs d-inline-flex d-sm-flex flex-nowrap text-nowrap"> <li className="nav-item"> {/* eslint-disable-next-line jsx-a11y/anchor-is-valid */} <a href="" onClick={onSelectBatchChanges} className={classNames('nav-link', selectedTab === 'batchChanges' && 'active')} role="button" > <span className="text-content" data-tab-content="All batch changes"> All batch changes </span> </a> </li> <li className="nav-item"> {/* eslint-disable-next-line jsx-a11y/anchor-is-valid */} <a href="" onClick={onSelectGettingStarted} className={classNames('nav-link', selectedTab === 'gettingStarted' && 'active')} role="button" > <span className="text-content" data-tab-content="Getting started"> Getting started </span> </a> </li> </ul> </div> ) } const GettingStartedFooter: React.FunctionComponent<{}> = () => ( <div className="row"> <div className="col-12 col-sm-8 offset-sm-2 col-md-6 offset-md-3"> <div className="card"> <div className="card-body text-center"> <p>Create your first batch change</p> <h2 className="mb-0"> <a href="https://docs.sourcegraph.com/batch_changes/quickstart" target="_blank" rel="noopener"> Batch Changes quickstart </a> </h2> </div> </div> </div> </div> )
* Used to only switch to the "Getting started" tab if the user didn't select the tab manually. */ const isFirstFetch = useMemo(() => {
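BatchChangeListPage gates its tab behaviour on whether the response is the very first fetch: only the first response may auto-switch to the "Getting started" tab (when totalCount is 0), and that first empty response is filtered out so the empty list never renders. A language-neutral Python sketch of the same gating idea (hypothetical names; the real code expresses this with RxJS withLatestFrom/tap/filter):

def handle_response(state, total_count, open_tab=None):
    """state carries {'first_fetch': bool, 'tab': str}; returns whether to render."""
    if state["first_fetch"]:
        state["first_fetch"] = False
        if open_tab is None and total_count == 0:
            state["tab"] = "gettingStarted"
        # suppress rendering of the first empty page unless a tab was forced
        return open_tab is not None or total_count > 0
    return True   # later fetches (the user clicked a tab) always render

state = {"first_fetch": True, "tab": "batchChanges"}
assert handle_response(state, total_count=0) is False and state["tab"] == "gettingStarted"
assert handle_response(state, total_count=0) is True    # second fetch renders normally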
modelRegistryServer.go
// Copyright 2022 AI Redefined Inc. <[email protected]> // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package grpcservers import ( "context" "io" "strconv" "time" grpcapi "github.com/cogment/cogment/grpcapi/cogment/api" "github.com/cogment/cogment/services/modelRegistry/backend" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" ) func timeFromNsTimestamp(timestamp uint64) time.Time { return time.Unix(0, int64(timestamp)) } func nsTimestampFromTime(timestamp time.Time) uint64 { return uint64(timestamp.UnixNano()) } type ModelRegistryServer struct { grpcapi.UnimplementedModelRegistrySPServer backendPromise BackendPromise sentModelVersionDataChunkSize int } func createPbModelVersionInfo(modelVersionInfo backend.VersionInfo) grpcapi.ModelVersionInfo { return grpcapi.ModelVersionInfo{ ModelId: modelVersionInfo.ModelID, VersionNumber: uint32(modelVersionInfo.VersionNumber), CreationTimestamp: nsTimestampFromTime(modelVersionInfo.CreationTimestamp), Archived: modelVersionInfo.Archived, DataHash: modelVersionInfo.DataHash, DataSize: uint64(modelVersionInfo.DataSize), UserData: modelVersionInfo.UserData, } } func (s *ModelRegistryServer) SetBackend(b backend.Backend) { s.backendPromise.Set(b) } func (s *ModelRegistryServer) CreateOrUpdateModel( ctx context.Context, req *grpcapi.CreateOrUpdateModelRequest, ) (*grpcapi.CreateOrUpdateModelReply, error) { log.Printf("CreateOrUpdateModel(req={ModelId: %q, UserData: %#v})\n", req.ModelInfo.ModelId, req.ModelInfo.UserData) modelInfo := backend.ModelInfo{ ModelID: req.ModelInfo.ModelId, UserData: req.ModelInfo.UserData, } b, err := s.backendPromise.Await(ctx) if err != nil { return nil, err } _, err = b.CreateOrUpdateModel(modelInfo) if err != nil { return nil, status.Errorf(codes.Internal, "unexpected error while creating model %q: %s", modelInfo.ModelID, err) } return &grpcapi.CreateOrUpdateModelReply{}, nil } func (s *ModelRegistryServer) DeleteModel( ctx context.Context, req *grpcapi.DeleteModelRequest, ) (*grpcapi.DeleteModelReply, error) { log.Printf("DeleteModel(req={ModelId: %q})\n", req.ModelId) b, err := s.backendPromise.Await(ctx) if err != nil { return nil, err } err = b.DeleteModel(req.ModelId) if err != nil { if _, ok := err.(*backend.UnknownModelError); ok { return nil, status.Errorf(codes.NotFound, "%s", err) } return nil, status.Errorf(codes.Internal, "unexpected error while deleting model %q: %s", req.ModelId, err) } return &grpcapi.DeleteModelReply{}, nil } func (s *ModelRegistryServer) RetrieveModels( ctx context.Context, req *grpcapi.RetrieveModelsRequest, ) (*grpcapi.RetrieveModelsReply, error) { log.Printf( "RetrieveModels(req={ModelIds: %#v, ModelsCount: %d, ModelHandle: %q})\n", req.ModelIds, req.ModelsCount, req.ModelHandle, ) offset := 0 if req.ModelHandle != "" { var err error offset64, err := strconv.ParseInt(req.ModelHandle, 10, 0) if err != nil { return nil, status.Errorf( codes.InvalidArgument, "Invalid value for `model_handle` (%q) only empty or values provided by a previous call should 
be used", req.ModelHandle, ) } offset = int(offset64) } b, err := s.backendPromise.Await(ctx) if err != nil { return nil, err } pbModelInfos := []*grpcapi.ModelInfo{} if len(req.ModelIds) == 0 { // Retrieve all models modelInfos, err := b.ListModels(offset, int(req.ModelsCount)) if err != nil { return nil, status.Errorf(codes.Internal, "unexpected error while retrieving models: %s", err) } for _, modelInfo := range modelInfos { pbModelInfo := grpcapi.ModelInfo{ModelId: modelInfo.ModelID, UserData: modelInfo.UserData} pbModelInfos = append(pbModelInfos, &pbModelInfo) } } else { modelIDsSlice := req.ModelIds[offset:] if req.ModelsCount > 0 { modelIDsSlice = modelIDsSlice[:req.ModelsCount] } for _, modelID := range modelIDsSlice { modelInfo, err := b.RetrieveModelInfo(modelID) if err != nil { if _, ok := err.(*backend.UnknownModelError); ok { return nil, status.Errorf(codes.NotFound, "%s", err) } return nil, status.Errorf(codes.Internal, `unexpected error while retrieving models: %s`, err) } pbModelInfo := grpcapi.ModelInfo{ModelId: modelInfo.ModelID, UserData: modelInfo.UserData} pbModelInfos = append(pbModelInfos, &pbModelInfo) } } nextOffset := offset + len(pbModelInfos) return &grpcapi.RetrieveModelsReply{ ModelInfos: pbModelInfos, NextModelHandle: strconv.FormatInt(int64(nextOffset), 10), }, nil } func (s *ModelRegistryServer) CreateVersion(inStream grpcapi.ModelRegistrySP_CreateVersionServer) error { log.Printf("CreateVersion(stream=...)\n") firstChunk, err := inStream.Recv() if err == io.EOF { return status.Errorf(codes.InvalidArgument, "empty request") } if err != nil { return err } if firstChunk.GetHeader() == nil { return status.Errorf(codes.InvalidArgument, "first request chunk do not include a Header") } receivedVersionInfo := firstChunk.GetHeader().GetVersionInfo() modelData := []byte{} for { chunk, err := inStream.Recv() if err == io.EOF { receivedDataSize := uint64(len(modelData)) if receivedDataSize == receivedVersionInfo.DataSize && err == io.EOF { break } if err == io.EOF { return status.Errorf( codes.InvalidArgument, "stream ended while having not received the expected data, expected %d bytes, received %d bytes", receivedVersionInfo.DataSize, receivedDataSize, ) } if receivedDataSize > receivedVersionInfo.DataSize { return status.Errorf( codes.InvalidArgument, "received more data than expected, expected %d bytes, received %d bytes", receivedVersionInfo.DataSize, receivedDataSize, ) } } if err != nil { return err } if chunk.GetBody() == nil { return status.Errorf(codes.InvalidArgument, "subsequent request chunk do not include a Body") } modelData = append(modelData, chunk.GetBody().DataChunk...) 
} receivedHash := backend.ComputeSHA256Hash(modelData) if receivedVersionInfo.DataHash != "" && receivedVersionInfo.DataHash != receivedHash { return status.Errorf( codes.InvalidArgument, "received data did not match the expected hash, expected %q, received %q", receivedVersionInfo.DataHash, receivedHash, ) } b, err := s.backendPromise.Await(inStream.Context()) if err != nil { return err } creationTimestamp := time.Now() if receivedVersionInfo.CreationTimestamp > 0 { creationTimestamp = timeFromNsTimestamp(receivedVersionInfo.CreationTimestamp) } versionInfo, err := b.CreateOrUpdateModelVersion(receivedVersionInfo.ModelId, backend.VersionArgs{ CreationTimestamp: creationTimestamp, Archived: receivedVersionInfo.Archived, DataHash: receivedHash, Data: modelData, UserData: receivedVersionInfo.UserData, }) if err != nil { return status.Errorf( codes.Internal, "unexpected error while creating a version for model %q: %s", receivedVersionInfo.ModelId, err, ) } pbVersionInfo := createPbModelVersionInfo(versionInfo) return inStream.SendAndClose(&grpcapi.CreateVersionReply{VersionInfo: &pbVersionInfo}) } func (s *ModelRegistryServer) RetrieveVersionInfos( ctx context.Context, req *grpcapi.RetrieveVersionInfosRequest, ) (*grpcapi.RetrieveVersionInfosReply, error) { log.Printf( "RetrieveVersionInfos(req={ModelId: %q, VersionNumbers: %#v, VersionsCount: %d, VersionHandle: %q})\n", req.ModelId, req.VersionNumbers, req.VersionsCount, req.VersionHandle, ) initialVersionNumber := uint(0) if req.VersionHandle != "" { var err error initialVersionNumber64, err := strconv.ParseUint(req.VersionHandle, 10, 0) if err != nil { return nil, status.Errorf( codes.InvalidArgument, "Invalid value for `version_handle` (%q) only empty or values provided by a previous call should be used", req.VersionHandle, ) } initialVersionNumber = uint(initialVersionNumber64) } b, err := s.backendPromise.Await(ctx) if err != nil { return nil, err } if len(req.VersionNumbers) == 0 { // Retrieve all version infos versionInfos, err := b.ListModelVersionInfos(req.ModelId, initialVersionNumber, int(req.VersionsCount)) if err != nil { if _, ok := err.(*backend.UnknownModelError); ok { return nil, status.Errorf(codes.NotFound, "%s", err) } return nil, status.Errorf(codes.Internal, "unexpected error while deleting model %q: %s", req.ModelId, err) } pbVersionInfos := []*grpcapi.ModelVersionInfo{} nextVersionNumber := initialVersionNumber for _, versionInfo := range versionInfos { pbVersionInfo := createPbModelVersionInfo(versionInfo) pbVersionInfos = append(pbVersionInfos, &pbVersionInfo) nextVersionNumber = versionInfo.VersionNumber + 1 } return &grpcapi.RetrieveVersionInfosReply{ VersionInfos: pbVersionInfos, NextVersionHandle: strconv.FormatUint(uint64(nextVersionNumber), 10), }, nil } pbVersionInfos := []*grpcapi.ModelVersionInfo{} versionNumberSlice := req.VersionNumbers[initialVersionNumber:] if req.VersionsCount > 0 { versionNumberSlice = versionNumberSlice[:req.VersionsCount] } nextVersionNumber := initialVersionNumber for _, versionNumber := range versionNumberSlice { versionInfo, err := b.RetrieveModelVersionInfo(req.ModelId, int(versionNumber)) if err != nil { if _, ok := err.(*backend.UnknownModelError); ok { return nil, status.Errorf(codes.NotFound, "%s", err) } if _, ok := err.(*backend.UnknownModelVersionError); ok { return nil, status.Errorf(codes.NotFound, "%s", err) } return nil, status.Errorf( codes.Internal, `unexpected error while retrieving version "%d" for model %q: %s`, versionNumber, req.ModelId, err, ) } pbVersionInfo 
:= createPbModelVersionInfo(versionInfo) pbVersionInfos = append(pbVersionInfos, &pbVersionInfo) nextVersionNumber = versionInfo.VersionNumber + 1 } return &grpcapi.RetrieveVersionInfosReply{ VersionInfos: pbVersionInfos, NextVersionHandle: strconv.FormatUint(uint64(nextVersionNumber), 10), }, nil } func (s *ModelRegistryServer) RetrieveVersionData( req *grpcapi.RetrieveVersionDataRequest, outStream grpcapi.ModelRegistrySP_RetrieveVersionDataServer, ) error { log.Printf("RetrieveVersionData(req={ModelId: %q, VersionNumber: %d})\n", req.ModelId, req.VersionNumber) b, err := s.backendPromise.Await(outStream.Context()) if err != nil { return err } modelData, err := b.RetrieveModelVersionData(req.ModelId, int(req.VersionNumber)) if err != nil { if _, ok := err.(*backend.UnknownModelError); ok { return status.Errorf(codes.NotFound, "%s", err) } if _, ok := err.(*backend.UnknownModelVersionError); ok { return status.Errorf(codes.NotFound, "%s", err) } return status.Errorf( codes.Internal, `unexpected error while retrieving version "%d" for model %q: %s`, req.VersionNumber, req.ModelId, err, ) } dataLen := len(modelData) if dataLen == 0 { return outStream.Send(&grpcapi.RetrieveVersionDataReplyChunk{}) } for i := 0; i < dataLen; i += s.sentModelVersionDataChunkSize { var replyChunk grpcapi.RetrieveVersionDataReplyChunk if i+s.sentModelVersionDataChunkSize >= dataLen { replyChunk = grpcapi.RetrieveVersionDataReplyChunk{DataChunk: modelData[i:dataLen]} } else { replyChunk = grpcapi.RetrieveVersionDataReplyChunk{DataChunk: modelData[i : i+s.sentModelVersionDataChunkSize]} } err := outStream.Send(&replyChunk) if err != nil { return err } } return nil } func
( grpcServer grpc.ServiceRegistrar, sentModelVersionDataChunkSize int, ) (*ModelRegistryServer, error) { server := &ModelRegistryServer{ sentModelVersionDataChunkSize: sentModelVersionDataChunkSize, } grpcapi.RegisterModelRegistrySPServer(grpcServer, server) return server, nil }
RegisterModelRegistryServer
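CreateVersion in this entry accepts a header chunk followed by body chunks, accumulates the bytes, and rejects the stream if the total size or the SHA-256 hash disagrees with what the header announced. A small Python sketch of that reassemble-and-verify step, with plain functions standing in for the gRPC stream (names are illustrative):

import hashlib

def reassemble(expected_size, expected_hash, chunks):
    """chunks: iterable of byte strings received after the header chunk."""
    data = b"".join(chunks)
    if len(data) != expected_size:
        raise ValueError(f"expected {expected_size} bytes, received {len(data)}")
    digest = hashlib.sha256(data).hexdigest()
    if expected_hash and digest != expected_hash:   # empty hash means "don't check"
        raise ValueError(f"expected hash {expected_hash}, received {digest}")
    return data

payload = b"model-weights"
chunks = [payload[i:i + 4] for i in range(0, len(payload), 4)]   # 4-byte chunks
assert reassemble(len(payload), hashlib.sha256(payload).hexdigest(), chunks) == payload

The download path (RetrieveVersionData) uses the mirror image of this: it slices the stored data into sentModelVersionDataChunkSize pieces and streams them back out.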
manager.go
// Copyright (c) 2016 The btcsuite developers // Use of this source code is governed by an ISC // license that can be found in the LICENSE file. package indexers import ( "bytes" "fmt" "github.com/gcash/bchd/blockchain" "github.com/gcash/bchd/chaincfg/chainhash" "github.com/gcash/bchd/database" "github.com/gcash/bchd/wire" "github.com/gcash/bchutil" ) var ( // indexTipsBucketName is the name of the db bucket used to house the // current tip of each index. indexTipsBucketName = []byte("idxtips") ) // ----------------------------------------------------------------------------- // The index manager tracks the current tip of each index by using a parent // bucket that contains an entry for index. // // The serialized format for an index tip is: // // [<block hash><block height>],... // // Field Type Size // block hash chainhash.Hash chainhash.HashSize // block height uint32 4 bytes // ----------------------------------------------------------------------------- // dbPutIndexerTip uses an existing database transaction to update or add the // current tip for the given index to the provided values. func dbPutIndexerTip(dbTx database.Tx, idxKey []byte, hash *chainhash.Hash, height int32) error
// dbFetchIndexerTip uses an existing database transaction to retrieve the // hash and height of the current tip for the provided index. func dbFetchIndexerTip(dbTx database.Tx, idxKey []byte) (*chainhash.Hash, int32, error) { indexesBucket := dbTx.Metadata().Bucket(indexTipsBucketName) serialized := indexesBucket.Get(idxKey) if len(serialized) < chainhash.HashSize+4 { return nil, 0, database.Error{ ErrorCode: database.ErrCorruption, Description: fmt.Sprintf("unexpected end of data for "+ "index %q tip", string(idxKey)), } } var hash chainhash.Hash copy(hash[:], serialized[:chainhash.HashSize]) height := int32(byteOrder.Uint32(serialized[chainhash.HashSize:])) return &hash, height, nil } // dbIndexConnectBlock adds all of the index entries associated with the // given block using the provided indexer and updates the tip of the indexer // accordingly. An error will be returned if the current tip for the indexer is // not the previous block for the passed block. func dbIndexConnectBlock(dbTx database.Tx, indexer Indexer, block *bchutil.Block, stxo []blockchain.SpentTxOut) error { // Assert that the block being connected properly connects to the // current tip of the index. idxKey := indexer.Key() curTipHash, _, err := dbFetchIndexerTip(dbTx, idxKey) if err != nil { return err } if !curTipHash.IsEqual(&block.MsgBlock().Header.PrevBlock) { return AssertError(fmt.Sprintf("dbIndexConnectBlock must be "+ "called with a block that extends the current index "+ "tip (%s, tip %s, block %s)", indexer.Name(), curTipHash, block.Hash())) } // Notify the indexer with the connected block so it can index it. if err := indexer.ConnectBlock(dbTx, block, stxo); err != nil { return err } // Update the current index tip. return dbPutIndexerTip(dbTx, idxKey, block.Hash(), block.Height()) } // dbIndexDisconnectBlock removes all of the index entries associated with the // given block using the provided indexer and updates the tip of the indexer // accordingly. An error will be returned if the current tip for the indexer is // not the passed block. func dbIndexDisconnectBlock(dbTx database.Tx, indexer Indexer, block *bchutil.Block, stxo []blockchain.SpentTxOut) error { // Assert that the block being disconnected is the current tip of the // index. idxKey := indexer.Key() curTipHash, _, err := dbFetchIndexerTip(dbTx, idxKey) if err != nil { return err } if !curTipHash.IsEqual(block.Hash()) { return AssertError(fmt.Sprintf("dbIndexDisconnectBlock must "+ "be called with the block at the current index tip "+ "(%s, tip %s, block %s)", indexer.Name(), curTipHash, block.Hash())) } // Notify the indexer with the disconnected block so it can remove all // of the appropriate entries. if err := indexer.DisconnectBlock(dbTx, block, stxo); err != nil { return err } // Update the current index tip. prevHash := &block.MsgBlock().Header.PrevBlock return dbPutIndexerTip(dbTx, idxKey, prevHash, block.Height()-1) } // Manager defines an index manager that manages multiple optional indexes and // implements the blockchain.IndexManager interface so it can be seamlessly // plugged into normal chain processing. type Manager struct { db database.DB enabledIndexes []Indexer } // Ensure the Manager type implements the blockchain.IndexManager interface. var _ blockchain.IndexManager = (*Manager)(nil) // indexDropKey returns the key for an index which indicates it is in the // process of being dropped. 
func indexDropKey(idxKey []byte) []byte { dropKey := make([]byte, len(idxKey)+1) dropKey[0] = 'd' copy(dropKey[1:], idxKey) return dropKey } // maybeFinishDrops determines if each of the enabled indexes are in the middle // of being dropped and finishes dropping them when the are. This is necessary // because dropping and index has to be done in several atomic steps rather than // one big atomic step due to the massive number of entries. func (m *Manager) maybeFinishDrops(interrupt <-chan struct{}) error { indexNeedsDrop := make([]bool, len(m.enabledIndexes)) err := m.db.View(func(dbTx database.Tx) error { // None of the indexes needs to be dropped if the index tips // bucket hasn't been created yet. indexesBucket := dbTx.Metadata().Bucket(indexTipsBucketName) if indexesBucket == nil { return nil } // Mark the indexer as requiring a drop if one is already in // progress. for i, indexer := range m.enabledIndexes { dropKey := indexDropKey(indexer.Key()) if indexesBucket.Get(dropKey) != nil { indexNeedsDrop[i] = true } } return nil }) if err != nil { return err } if interruptRequested(interrupt) { return errInterruptRequested } // Finish dropping any of the enabled indexes that are already in the // middle of being dropped. for i, indexer := range m.enabledIndexes { if !indexNeedsDrop[i] { continue } log.Infof("Resuming %s drop", indexer.Name()) err := dropIndex(m.db, indexer.Key(), indexer.Name(), interrupt) if err != nil { return err } } return nil } // maybeCreateIndexes determines if each of the enabled indexes have already // been created and creates them if not. func (m *Manager) maybeCreateIndexes(dbTx database.Tx) error { indexesBucket := dbTx.Metadata().Bucket(indexTipsBucketName) for _, indexer := range m.enabledIndexes { // Nothing to do if the index tip already exists. idxKey := indexer.Key() if indexesBucket.Get(idxKey) != nil { continue } // The tip for the index does not exist, so create it and // invoke the create callback for the index so it can perform // any one-time initialization it requires. if err := indexer.Create(dbTx); err != nil { return err } // Set the tip for the index to values which represent an // uninitialized index. err := dbPutIndexerTip(dbTx, idxKey, &chainhash.Hash{}, -1) if err != nil { return err } } return nil } // Init initializes the enabled indexes. This is called during chain // initialization and primarily consists of catching up all indexes to the // current best chain tip. This is necessary since each index can be disabled // and re-enabled at any time and attempting to catch-up indexes at the same // time new blocks are being downloaded would lead to an overall longer time to // catch up due to the I/O contention. // // This is part of the blockchain.IndexManager interface. func (m *Manager) Init(chain *blockchain.BlockChain, interrupt <-chan struct{}) error { // Nothing to do when no indexes are enabled. if len(m.enabledIndexes) == 0 { return nil } if interruptRequested(interrupt) { return errInterruptRequested } // Finish and drops that were previously interrupted. if err := m.maybeFinishDrops(interrupt); err != nil { return err } // Create the initial state for the indexes as needed. err := m.db.Update(func(dbTx database.Tx) error { // Create the bucket for the current tips as needed. meta := dbTx.Metadata() _, err := meta.CreateBucketIfNotExists(indexTipsBucketName) if err != nil { return err } return m.maybeCreateIndexes(dbTx) }) if err != nil { return err } // Initialize each of the enabled indexes. 
for _, indexer := range m.enabledIndexes { if err := indexer.Init(); err != nil { return err } } // Rollback indexes to the main chain if their tip is an orphaned fork. // This is fairly unlikely, but it can happen if the chain is // reorganized while the index is disabled. This has to be done in // reverse order because later indexes can depend on earlier ones. for i := len(m.enabledIndexes); i > 0; i-- { indexer := m.enabledIndexes[i-1] // Fetch the current tip for the index. var height int32 var hash *chainhash.Hash err := m.db.View(func(dbTx database.Tx) error { idxKey := indexer.Key() hash, height, err = dbFetchIndexerTip(dbTx, idxKey) return err }) if err != nil { return err } // Nothing to do if the index does not have any entries yet. if height == -1 { continue } // Loop until the tip is a block that exists in the main chain. initialHeight := height for !chain.MainChainHasBlock(hash) { // At this point the index tip is orphaned, so load the // orphaned block from the database directly and // disconnect it from the index. The block has to be // loaded directly since it is no longer in the main // chain and thus the chain.BlockByHash function would // error. var block *bchutil.Block err := m.db.View(func(dbTx database.Tx) error { blockBytes, err := dbTx.FetchBlock(hash) if err != nil { return err } block, err = bchutil.NewBlockFromBytes(blockBytes) if err != nil { return err } block.SetHeight(height) return err }) if err != nil { return err } // We'll also grab the set of outputs spent by this // block so we can remove them from the index. spentTxos, err := chain.FetchSpendJournal(block) if err != nil { return err } // With the block and stxo set for that block retrieved, // we can now update the index itself. err = m.db.Update(func(dbTx database.Tx) error { // Remove all of the index entries associated // with the block and update the indexer tip. err = dbIndexDisconnectBlock( dbTx, indexer, block, spentTxos, ) if err != nil { return err } // Update the tip to the previous block. hash = &block.MsgBlock().Header.PrevBlock height-- return nil }) if err != nil { return err } if interruptRequested(interrupt) { return errInterruptRequested } } if initialHeight != height { log.Infof("Removed %d orphaned blocks from %s "+ "(heights %d to %d)", initialHeight-height, indexer.Name(), height+1, initialHeight) } } // Fetch the current tip heights for each index along with tracking the // lowest one so the catchup code only needs to start at the earliest // block and is able to skip connecting the block for the indexes that // don't need it. bestHeight := chain.BestSnapshot().Height lowestHeight := bestHeight indexerHeights := make([]int32, len(m.enabledIndexes)) err = m.db.View(func(dbTx database.Tx) error { for i, indexer := range m.enabledIndexes { idxKey := indexer.Key() hash, height, err := dbFetchIndexerTip(dbTx, idxKey) if err != nil { return err } log.Debugf("Current %s tip (height %d, hash %v)", indexer.Name(), height, hash) indexerHeights[i] = height if height < lowestHeight { lowestHeight = height } } return nil }) if err != nil { return err } // Nothing to index if all of the indexes are caught up. if lowestHeight == bestHeight { return nil } // Create a progress logger for the indexing process below. progressLogger := newBlockProgressLogger("Indexed", log) // At this point, one or more indexes are behind the current best chain // tip and need to be caught up, so log the details and loop through // each block that needs to be indexed. 
log.Infof("Catching up indexes from height %d to %d", lowestHeight, bestHeight) for height := lowestHeight + 1; height <= bestHeight; height++ { // Load the block for the height since it is required to index // it. block, err := chain.BlockByHeight(height) if err != nil { return err } if interruptRequested(interrupt) { return errInterruptRequested } // Connect the block for all indexes that need it. var spentTxos []blockchain.SpentTxOut for i, indexer := range m.enabledIndexes { // Skip indexes that don't need to be updated with this // block. if indexerHeights[i] >= height { continue } // When the index requires all of the referenced txouts // and they haven't been loaded yet, they need to be // retrieved from the spend journal. if spentTxos == nil && indexNeedsInputs(indexer) { spentTxos, err = chain.FetchSpendJournal(block) if err != nil { return err } } err := m.db.Update(func(dbTx database.Tx) error { return dbIndexConnectBlock( dbTx, indexer, block, spentTxos, ) }) if err != nil { return err } indexerHeights[i] = height } // Log indexing progress. progressLogger.LogBlockHeight(block) if interruptRequested(interrupt) { return errInterruptRequested } } log.Infof("Indexes caught up to height %d", bestHeight) return nil } // indexNeedsInputs returns whether or not the index needs access to the txouts // referenced by the transaction inputs being indexed. func indexNeedsInputs(index Indexer) bool { if idx, ok := index.(NeedsInputser); ok { return idx.NeedsInputs() } return false } // dbFetchTx looks up the passed transaction hash in the transaction index and // loads it from the database. func dbFetchTx(dbTx database.Tx, hash *chainhash.Hash) (*wire.MsgTx, error) { // Look up the location of the transaction. blockRegion, err := dbFetchTxIndexEntry(dbTx, hash) if err != nil { return nil, err } if blockRegion == nil { return nil, fmt.Errorf("transaction %v not found", hash) } // Load the raw transaction bytes from the database. txBytes, err := dbTx.FetchBlockRegion(blockRegion) if err != nil { return nil, err } // Deserialize the transaction. var msgTx wire.MsgTx err = msgTx.Deserialize(bytes.NewReader(txBytes)) if err != nil { return nil, err } return &msgTx, nil } // ConnectBlock must be invoked when a block is extending the main chain. It // keeps track of the state of each index it is managing, performs some sanity // checks, and invokes each indexer. // // This is part of the blockchain.IndexManager interface. func (m *Manager) ConnectBlock(dbTx database.Tx, block *bchutil.Block, stxos []blockchain.SpentTxOut) error { // Call each of the currently active optional indexes with the block // being connected so they can update accordingly. for _, index := range m.enabledIndexes { err := dbIndexConnectBlock(dbTx, index, block, stxos) if err != nil { return err } } return nil } // DisconnectBlock must be invoked when a block is being disconnected from the // end of the main chain. It keeps track of the state of each index it is // managing, performs some sanity checks, and invokes each indexer to remove // the index entries associated with the block. // // This is part of the blockchain.IndexManager interface. func (m *Manager) DisconnectBlock(dbTx database.Tx, block *bchutil.Block, stxo []blockchain.SpentTxOut) error { // Call each of the currently active optional indexes with the block // being disconnected so they can update accordingly. 
for _, index := range m.enabledIndexes { err := dbIndexDisconnectBlock(dbTx, index, block, stxo) if err != nil { return err } } return nil } // NewManager returns a new index manager with the provided indexes enabled. // // The manager returned satisfies the blockchain.IndexManager interface and thus // cleanly plugs into the normal blockchain processing path. func NewManager(db database.DB, enabledIndexes []Indexer) *Manager { return &Manager{ db: db, enabledIndexes: enabledIndexes, } } // dropIndex drops the passed index from the database. Since indexes can be // massive, it deletes the index in multiple database transactions in order to // keep memory usage to reasonable levels. It also marks the drop in progress // so the drop can be resumed if it is stopped before it is done before the // index can be used again. func dropIndex(db database.DB, idxKey []byte, idxName string, interrupt <-chan struct{}) error { // Nothing to do if the index doesn't already exist. var needsDelete bool err := db.View(func(dbTx database.Tx) error { indexesBucket := dbTx.Metadata().Bucket(indexTipsBucketName) if indexesBucket != nil && indexesBucket.Get(idxKey) != nil { needsDelete = true } return nil }) if err != nil { return err } if !needsDelete { log.Infof("Not dropping %s because it does not exist", idxName) return nil } // Mark that the index is in the process of being dropped so that it // can be resumed on the next start if interrupted before the process is // complete. log.Infof("Dropping all %s entries. This might take a while...", idxName) err = db.Update(func(dbTx database.Tx) error { indexesBucket := dbTx.Metadata().Bucket(indexTipsBucketName) return indexesBucket.Put(indexDropKey(idxKey), idxKey) }) if err != nil { return err } // Since the indexes can be so large, attempting to simply delete // the bucket in a single database transaction would result in massive // memory usage and likely crash many systems due to ulimits. In order // to avoid this, use a cursor to delete a maximum number of entries out // of the bucket at a time. Recurse buckets depth-first to delete any // sub-buckets. const maxDeletions = 2000000 var totalDeleted uint64 // Recurse through all buckets in the index, cataloging each for // later deletion. var subBuckets [][][]byte var subBucketClosure func(database.Tx, []byte, [][]byte) error subBucketClosure = func(dbTx database.Tx, subBucket []byte, tlBucket [][]byte) error { // Get full bucket name and append to subBuckets for later // deletion. var bucketName [][]byte if (tlBucket == nil) || (len(tlBucket) == 0) { bucketName = append(bucketName, subBucket) } else { bucketName = append(tlBucket, subBucket) } subBuckets = append(subBuckets, bucketName) // Recurse sub-buckets to append to subBuckets slice. bucket := dbTx.Metadata() for _, subBucketName := range bucketName { bucket = bucket.Bucket(subBucketName) } return bucket.ForEachBucket(func(k []byte) error { return subBucketClosure(dbTx, k, bucketName) }) } // Call subBucketClosure with top-level bucket. err = db.View(func(dbTx database.Tx) error { return subBucketClosure(dbTx, idxKey, nil) }) if err != nil { return nil } // Iterate through each sub-bucket in reverse, deepest-first, deleting // all keys inside them and then dropping the buckets themselves. for i := range subBuckets { bucketName := subBuckets[len(subBuckets)-1-i] // Delete maxDeletions key/value pairs at a time. 
for numDeleted := maxDeletions; numDeleted == maxDeletions; { numDeleted = 0 err := db.Update(func(dbTx database.Tx) error { subBucket := dbTx.Metadata() for _, subBucketName := range bucketName { subBucket = subBucket.Bucket(subBucketName) } cursor := subBucket.Cursor() for ok := cursor.First(); ok; ok = cursor.Next() && numDeleted < maxDeletions { if err := cursor.Delete(); err != nil { return err } numDeleted++ } return nil }) if err != nil { return err } if numDeleted > 0 { totalDeleted += uint64(numDeleted) log.Infof("Deleted %d keys (%d total) from %s", numDeleted, totalDeleted, idxName) } } if interruptRequested(interrupt) { return errInterruptRequested } // Drop the bucket itself. err = db.Update(func(dbTx database.Tx) error { bucket := dbTx.Metadata() for j := 0; j < len(bucketName)-1; j++ { bucket = bucket.Bucket(bucketName[j]) } return bucket.DeleteBucket(bucketName[len(bucketName)-1]) }) if err != nil { return err } } // Call extra index specific deinitialization for the transaction index. if idxName == txIndexName { if err := dropBlockIDIndex(db); err != nil { return err } } // Remove the index tip, index bucket, and in-progress drop flag now // that all index entries have been removed. err = db.Update(func(dbTx database.Tx) error { meta := dbTx.Metadata() indexesBucket := meta.Bucket(indexTipsBucketName) if err := indexesBucket.Delete(idxKey); err != nil { return err } return indexesBucket.Delete(indexDropKey(idxKey)) }) if err != nil { return err } log.Infof("Dropped %s", idxName) return nil }
{ serialized := make([]byte, chainhash.HashSize+4) copy(serialized, hash[:]) byteOrder.PutUint32(serialized[chainhash.HashSize:], uint32(height)) indexesBucket := dbTx.Metadata().Bucket(indexTipsBucketName) return indexesBucket.Put(idxKey, serialized) }
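The dropIndex code above deletes index entries in bounded batches so that no single database transaction has to hold the whole bucket's worth of deletions in memory. Below is a minimal, self-contained Go sketch of that batching loop only; the toyStore type and its update method are hypothetical stand-ins for a transactional key/value store and are not part of btcd.

package main

import "fmt"

// toyStore stands in for a transactional key/value bucket; purely illustrative.
type toyStore struct {
	keys map[string]struct{}
}

// update simulates running fn inside one write transaction.
func (s *toyStore) update(fn func(*toyStore) error) error { return fn(s) }

// deleteInBatches removes every key, deleting at most batchSize keys per
// "transaction" so memory stays bounded. The loop condition mirrors the
// `numDeleted == maxDeletions` pattern in dropIndex above.
func deleteInBatches(s *toyStore, batchSize int) (total int, err error) {
	for deleted := batchSize; deleted == batchSize; {
		deleted = 0
		err = s.update(func(tx *toyStore) error {
			for k := range tx.keys {
				if deleted >= batchSize {
					break
				}
				delete(tx.keys, k)
				deleted++
			}
			return nil
		})
		if err != nil {
			return total, err
		}
		total += deleted
	}
	return total, nil
}

func main() {
	s := &toyStore{keys: make(map[string]struct{})}
	for i := 0; i < 10; i++ {
		s.keys[fmt.Sprintf("key-%d", i)] = struct{}{}
	}
	n, _ := deleteInBatches(s, 3)      // at most 3 deletions per transaction
	fmt.Println("deleted", n, "keys") // deleted 10 keys
}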
star_score_controller.py
from pygame.math import Vector2 from Balance.game_objects.mesh_objects.star import Star from pygame import mixer from pygin.time import Time from pygin.game_object import GameObject from random import uniform as randfloat from pygin.basic_objects.text import Text from pygin.material import Material from pygin.color import Color from Balance.scripts.constants import Constants from Balance.animations.text_up_fade_out_animation import TextUpFadeOutAnimation from pygin.components.animator import Animator class StarScoreController(GameObject): def start(self): self.fall_velocity = 150 self.angular_speed = 0 self.game_object_list = [] self.size = Constants.screen_width * 0.025 self.points_per_star = 50 self.sound_collect = mixer.Sound('Balance/assets/soundtrack/star_collect_01.ogg') self.should_delete_plus_score_text = False self.plus_score_text_gen_time = 0.0 def awake(self):
    def update(self):
        # Iterate over a copy so removing finished obstacles does not skip
        # elements while the underlying list is being mutated.
        for obstacle in list(self.game_object_list):
            if obstacle.transform.position.y > Constants.screen_height:
                self.game_object_list.remove(obstacle)
                obstacle.destroy(obstacle)
                GameObject.destroy(obstacle)
            else:
                self.fall(obstacle)
        self.delete_plus_score_text()

    def fall(self, obstacle):
        obstacle.fall(self.fall_velocity * Time.delta_time(), self.angular_speed * Time.delta_time())

    def get_star(self):
        self.sound_collect.play()
        obstacle = self.game_object_list[0]

        # plus score effect
        font_path = "Balance/assets/fonts/neuropolxrg.ttf"
        plus_score = Text(obstacle.transform.position, "+50", Material(Color.white, alpha=255), 15, font_path)
        plus_score.transform.position.x -= plus_score.text_mesh.size
        plus_score.animation = TextUpFadeOutAnimation(plus_score)
        plus_score.animator = Animator(plus_score, [plus_score.animation])
        plus_score.animator.play()
        self.time_of_last_plus_score = Time.now()
        self.plus_score = plus_score
        self.should_delete_plus_score_text = True

        self.score_controller.score += self.points_per_star

    def delete_plus_score_text(self):
        if self.should_delete_plus_score_text:
            if Time.now() - self.time_of_last_plus_score > 1.0:
                self.should_delete_plus_score_text = False
                self.plus_score.destroy_me()

    def generate_obstacle(self):
        random_pos = int(randfloat(self.size / 2 + Constants.circCenter_x - Constants.circRadius,
                                   Constants.screen_width - (self.size / 2 + Constants.circCenter_x - Constants.circRadius)))
        star = Star(Vector2(random_pos, -self.size), self.size, Material(Color.yellow))
        self.game_object_list.append(star)
self.score_controller = GameObject.find_by_type("ScoreController")[0]
radiobutton.stories.js
import React, { Component } from 'react'; import { storiesOf } from '@storybook/react'; import { Box, Button, Grommet, RadioButton } from 'grommet'; import { grommet } from 'grommet/themes'; import { deepMerge } from 'grommet/utils'; class SimpleRadioButton extends Component { constructor(props) { super(props); this.state = { selected: props.selected }; } onChange = event => this.setState({ selected: event.target.value }); render() { const { selected } = this.state; return ( <Grommet theme={grommet}> <Box gap="small"> <RadioButton label="Choice 1" name="radio" value="c1" checked={selected === 'c1'} onChange={this.onChange} {...this.props} /> <RadioButton label="Choice 2" name="radio" value="c2" checked={selected === 'c2'} onChange={this.onChange} {...this.props} /> </Box> </Grommet> ); } } const customTheme = deepMerge(grommet, { radioButton: { gap: 'xsmall', size: '18px', hover: { border: { color: 'dark-4', },
light: 'neutral-1', }, }, icon: { size: '10px', }, }, }); class CustomRadioButton extends Component { state = { selected: undefined }; onChange = event => this.setState({ selected: event.target.value }); render() { const { selected } = this.state; return ( <Grommet theme={customTheme}> <Box gap="xsmall"> <RadioButton label="Choice 1" name="radio" value="c1" checked={selected === 'c1'} onChange={this.onChange} /> <RadioButton label="Choice 2" name="radio" value="c2" checked={selected === 'c2'} onChange={this.onChange} /> </Box> </Grommet> ); } } class CheckBoxInsideButton extends Component { state = { selected: undefined }; render() { const { selected } = this.state; return ( <Grommet theme={grommet}> <Button hoverIndicator="background" onClick={() => { if (selected) { this.setState({ selected: undefined }); } else { this.setState({ selected: 'c1' }); } }} > <RadioButton label="Choice 1" name="radio" value="c1" checked={selected === 'c1'} {...this.props} /> </Button> </Grommet> ); } } storiesOf('RadioButton', module) .add('Simple RadioButton', () => <SimpleRadioButton />) .add('Disabled RadioButton', () => ( <SimpleRadioButton disabled selected="c2" /> )) .add('Custom Theme', () => <CustomRadioButton />) .add('Inside a Button Theme', () => <CheckBoxInsideButton />);
}, check: { color: {
welcome_test.go
/* Copyright 2018 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package welcome import ( "fmt" "io/ioutil" "regexp" "strings" "testing" "github.com/sirupsen/logrus" "sigs.k8s.io/yaml" "k8s.io/apimachinery/pkg/util/sets" "k8s.io/test-infra/prow/config" "k8s.io/test-infra/prow/github" "k8s.io/test-infra/prow/plugins" ) const ( testWelcomeTemplate = "Welcome human! 🤖 {{.AuthorName}} {{.AuthorLogin}} {{.Repo}} {{.Org}}}" ) type fakeClient struct { commentsAdded map[int][]string prs map[string]sets.Int // orgMembers maps org name to a list of member names. orgMembers map[string][]string // collaborators is a list of collaborators names. collaborators []string } func newFakeClient() *fakeClient { return &fakeClient{ commentsAdded: make(map[int][]string), prs: make(map[string]sets.Int), orgMembers: make(map[string][]string), } } func (fc *fakeClient) BotUserChecker() (func(candidate string) bool, error) { return func(_ string) bool { return false }, nil } // CreateComment adds and tracks a comment in the client func (fc *fakeClient) CreateComment(owner, repo string, number int, comment string) error { fc.commentsAdded[number] = append(fc.commentsAdded[number], comment) return nil } // ClearComments removes all comments in the client func (fc *fakeClient) ClearComments() { fc.commentsAdded = map[int][]string{} } // NumComments counts the number of tracked comments func (fc *fakeClient) NumComments() int { n := 0 for _, comments := range fc.commentsAdded { n += len(comments) } return n } // IsMember returns true if user is in org. func (fc *fakeClient) IsMember(org, user string) (bool, error) { for _, m := range fc.orgMembers[org] { if m == user { return true, nil } } return false, nil } // IsCollaborator returns true if the user is a collaborator of the repo. 
func (fc *fakeClient) IsCollaborator(org, repo, login string) (bool, error) { for _, collab := range fc.collaborators { if collab == login { return true, nil } } return false, nil } func (fc *fakeClient) addOrgMember(org, user string) { fc.orgMembers[org] = append(fc.orgMembers[org], user) } func (fc *fakeClient) addCollaborator(user string) { fc.collaborators = append(fc.collaborators, user) } var ( expectedQueryRegex = regexp.MustCompile(`is:pr repo:(.+)/(.+) author:(.+)`) ) // AddPR records an PR in the client func (fc *fakeClient) AddPR(owner, repo string, author github.User, number int) { key := fmt.Sprintf("%s,%s,%s", github.NormLogin(owner), github.NormLogin(repo), github.NormLogin(author.Login)) if _, ok := fc.prs[key]; !ok { fc.prs[key] = sets.Int{} } fc.prs[key].Insert(number) } // ClearPRs removes all PRs from the client func (fc *fakeClient) ClearPRs() { fc.prs = make(map[string]sets.Int) } // FindIssues fails if the query does not match the expected query regex and // looks up issues based on parsing the expected query format func (fc *fakeClient) FindIssues(query, sort string, asc bool) ([]github.Issue, error) { fields := expectedQueryRegex.FindStringSubmatch(query) if fields == nil || len(fields) != 4 { return nil, fmt.Errorf("invalid query: `%s` does not match expected regex `%s`", query, expectedQueryRegex.String()) } // "find" results owner, repo, author := fields[1], fields[2], fields[3] key := fmt.Sprintf("%s,%s,%s", github.NormLogin(owner), github.NormLogin(repo), github.NormLogin(author)) issues := []github.Issue{} for _, number := range fc.prs[key].List() { issues = append(issues, github.Issue{ Number: number, }) } return issues, nil } func makeFakePullRequestEvent(owner, repo string, user github.User, number int, action github.PullRequestEventAction) github.PullRequestEvent { return github.PullRequestEvent{ Action: action, Number: number, PullRequest: github.PullRequest{ Base: github.PullRequestBranch{ Repo: github.Repo{ Owner: github.User{ Login: owner, }, Name: repo, }, }, User: user, }, } } func TestHandlePR(t *testing.T) { fc := newFakeClient() newContributor := github.User{ Login: "newContributor", Name: "newContributor fullname", Type: github.UserTypeUser, } contributorA := github.User{ Login: "contributorA", Name: "contributorA fullname", Type: github.UserTypeUser, } contributorB := github.User{ Login: "contributorB", Name: "contributorB fullname", Type: github.UserTypeUser, } member := github.User{ Login: "member", Name: "Member Member", Type: github.UserTypeUser, } collaborator := github.User{ Login: "collab", Name: "Collab Collab", Type: github.UserTypeUser, } robot := github.User{ Login: "robot", Name: "robot fullname", Type: github.UserTypeBot, } // old PRs fc.AddPR("kubernetes", "test-infra", contributorA, 1) fc.AddPR("kubernetes", "test-infra", contributorB, 2) fc.AddPR("kubernetes", "test-infra", contributorB, 3) // members & collaborators fc.addOrgMember("kubernetes", member.Login) fc.addCollaborator(collaborator.Login) testCases := []struct { name string repoOwner string repoName string author github.User prNumber int prAction github.PullRequestEventAction addPR bool alwaysPost bool expectComment bool }{ { name: "existing contributorA", repoOwner: "kubernetes", repoName: "test-infra", author: contributorA, prNumber: 20, prAction: github.PullRequestActionOpened, alwaysPost: false, expectComment: false, }, { name: "existing contributorB", repoOwner: "kubernetes", repoName: "test-infra", author: contributorB, prNumber: 40, prAction: 
github.PullRequestActionOpened, alwaysPost: false, expectComment: false, }, { name: "existing contributor when it should greet everyone", repoOwner: "kubernetes", repoName: "test-infra", author: contributorB, prNumber: 40, prAction: github.PullRequestActionOpened, alwaysPost: true, expectComment: true, }, { name: "new contributor", repoOwner: "kubernetes", repoName: "test-infra", author: newContributor, prAction: github.PullRequestActionOpened, prNumber: 50, alwaysPost: false, expectComment: true, }, { name: "new contributor when it should greet everyone", repoOwner: "kubernetes", repoName: "test-infra", author: newContributor, prAction: github.PullRequestActionOpened, prNumber: 50, alwaysPost: true, expectComment: true, }, { name: "new contributor and API recorded PR already", repoOwner: "kubernetes", repoName: "test-infra", author: newContributor, prAction: github.PullRequestActionOpened, prNumber: 50, expectComment: true, alwaysPost: false, addPR: true, }, { name: "new contributor, not PR open event", repoOwner: "kubernetes", repoName: "test-infra", author: newContributor, prAction: github.PullRequestActionEdited, prNumber: 50, alwaysPost: false, expectComment: false, }, { name: "new contributor, but is a bot", repoOwner: "kubernetes", repoName: "test-infra", author: robot, prAction: github.PullRequestActionOpened, prNumber: 500, alwaysPost: false, expectComment: false, }, { name: "new contribution from the org member", repoOwner: "kubernetes", repoName: "test-infra", author: member, prNumber: 101, prAction: github.PullRequestActionOpened, alwaysPost: false, expectComment: false, }, { name: "new contribution from collaborator", repoOwner: "kubernetes", repoName: "test-infra", author: collaborator, prNumber: 102, prAction: github.PullRequestActionOpened, alwaysPost: false, expectComment: false, }, } c := client{ GitHubClient: fc, Logger: &logrus.Entry{}, } for _, tc := range testCases { // clear out comments from the last test case fc.ClearComments() event := makeFakePullRequestEvent(tc.repoOwner, tc.repoName, tc.author, tc.prNumber, tc.prAction) if tc.addPR { // make sure the PR in the event is recorded fc.AddPR(tc.repoOwner, tc.repoName, tc.author, tc.prNumber) } tr := plugins.Trigger{ TrustedOrg: "kubernetes", OnlyOrgMembers: false, } // try handling it if err := handlePR(c, tr, event, testWelcomeTemplate, tc.alwaysPost); err != nil {
// verify that comments were made numComments := fc.NumComments() if numComments > 1 { t.Fatalf("did not expect multiple comments for any test case and got %d comments", numComments) } if numComments == 0 && tc.expectComment { t.Fatalf("expected a comment for case '%s' and got none", tc.name) } else if numComments > 0 && !tc.expectComment { t.Fatalf("did not expect comments for case '%s' and got %d comments", tc.name, numComments) } } } func TestWelcomeConfig(t *testing.T) { var ( orgMessage = "defined message for an org" repoMessage = "defined message for a repo" ) config := &plugins.Configuration{ Welcome: []plugins.Welcome{ { Repos: []string{"kubernetes/test-infra"}, MessageTemplate: repoMessage, }, { Repos: []string{"kubernetes"}, MessageTemplate: orgMessage, }, { Repos: []string{"kubernetes/repo-infra"}, MessageTemplate: repoMessage, }, }, } testCases := []struct { name string repo string org string expectedMessage string }{ { name: "default message", org: "kubernetes-sigs", repo: "kind", expectedMessage: defaultWelcomeMessage, }, { name: "org defined message", org: "kubernetes", repo: "community", expectedMessage: orgMessage, }, { name: "repo defined message, before an org", org: "kubernetes", repo: "test-infra", expectedMessage: repoMessage, }, { name: "repo defined message, after an org", org: "kubernetes", repo: "repo-infra", expectedMessage: repoMessage, }, } for _, tc := range testCases { receivedMessage := welcomeMessageForRepo(optionsForRepo(config, tc.org, tc.repo)) if receivedMessage != tc.expectedMessage { t.Fatalf("%s: expected to get '%s' and received '%s'", tc.name, tc.expectedMessage, receivedMessage) } } } // TestPluginConfig validates that there are no duplicate repos in the welcome plugin config. func TestPluginConfig(t *testing.T) { pa := &plugins.ConfigAgent{} b, err := ioutil.ReadFile("../../../config/prow/plugins.yaml") if err != nil { t.Fatalf("Failed to read plugin config: %v.", err) } np := &plugins.Configuration{} if err := yaml.Unmarshal(b, np); err != nil { t.Fatalf("Failed to unmarshal plugin config: %v.", err) } pa.Set(np) orgs := map[string]bool{} repos := map[string]bool{} for _, config := range pa.Config().Welcome { for _, entry := range config.Repos { if strings.Contains(entry, "/") { if repos[entry] { t.Errorf("The repo %q is duplicated in the 'welcome' plugin configuration.", entry) } repos[entry] = true } else { if orgs[entry] { t.Errorf("The org %q is duplicated in the 'welcome' plugin configuration.", entry) } orgs[entry] = true } } } for repo := range repos { org := strings.Split(repo, "/")[0] if orgs[org] { t.Errorf("The repo %q is duplicated with %q in the 'welcome' plugin configuration.", repo, org) } } } func TestHelpProvider(t *testing.T) { enabledRepos := []config.OrgRepo{ {Org: "org1", Repo: "repo"}, {Org: "org2", Repo: "repo"}, } cases := []struct { name string config *plugins.Configuration enabledRepos []config.OrgRepo err bool }{ { name: "Empty config", config: &plugins.Configuration{}, enabledRepos: enabledRepos, }, { name: "All configs enabled", config: &plugins.Configuration{ Welcome: []plugins.Welcome{ { Repos: []string{"org2/repo"}, MessageTemplate: "Hello, welcome!", }, }, }, enabledRepos: enabledRepos, }, } for _, c := range cases { t.Run(c.name, func(t *testing.T) { _, err := helpProvider(c.config, c.enabledRepos) if err != nil && !c.err { t.Fatalf("helpProvider error: %v", err) } }) } }
t.Fatalf("did not expect error handling PR for case '%s': %v", tc.name, err) }
jscontext.go
// Package jscontext contains functionality for information we pass down into // the JS webapp. package jscontext import ( "context" "net" "net/http" "strings" "time" "github.com/gorilla/csrf" "github.com/sourcegraph/sourcegraph/cmd/frontend/auth/providers" "github.com/sourcegraph/sourcegraph/cmd/frontend/enterprise" "github.com/sourcegraph/sourcegraph/cmd/frontend/envvar" "github.com/sourcegraph/sourcegraph/cmd/frontend/globals" "github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend" "github.com/sourcegraph/sourcegraph/cmd/frontend/internal/app/assetsutil" "github.com/sourcegraph/sourcegraph/cmd/frontend/internal/auth/userpasswd" "github.com/sourcegraph/sourcegraph/cmd/frontend/internal/siteid" "github.com/sourcegraph/sourcegraph/cmd/frontend/webhooks" "github.com/sourcegraph/sourcegraph/internal/actor" "github.com/sourcegraph/sourcegraph/internal/conf" "github.com/sourcegraph/sourcegraph/internal/database/dbconn" "github.com/sourcegraph/sourcegraph/internal/database/globalstatedb" "github.com/sourcegraph/sourcegraph/internal/env" "github.com/sourcegraph/sourcegraph/internal/lazyregexp" "github.com/sourcegraph/sourcegraph/internal/version" "github.com/sourcegraph/sourcegraph/schema" ) // BillingPublishableKey is the publishable (non-secret) API key for the billing system, if any. var BillingPublishableKey string type authProviderInfo struct { IsBuiltin bool `json:"isBuiltin"` DisplayName string `json:"displayName"` ServiceType string `json:"serviceType"` AuthenticationURL string `json:"authenticationURL"` } // JSContext is made available to JavaScript code via the // "sourcegraph/app/context" module. // // 🚨 SECURITY: This struct is sent to all users regardless of whether or // not they are logged in, for example on an auth.public=false private // server. Including secret fields here is OK if it is based on the user's // authentication above, but do not include e.g. hard-coded secrets about // the server instance here as they would be sent to anonymous users. 
type JSContext struct { AppRoot string `json:"appRoot,omitempty"` ExternalURL string `json:"externalURL,omitempty"` XHRHeaders map[string]string `json:"xhrHeaders"` CSRFToken string `json:"csrfToken"` UserAgentIsBot bool `json:"userAgentIsBot"` AssetsRoot string `json:"assetsRoot"` Version string `json:"version"` IsAuthenticatedUser bool `json:"isAuthenticatedUser"` SentryDSN *string `json:"sentryDSN"` SiteID string `json:"siteID"` SiteGQLID string `json:"siteGQLID"` Debug bool `json:"debug"` NeedsSiteInit bool `json:"needsSiteInit"` EmailEnabled bool `json:"emailEnabled"` Site schema.SiteConfiguration `json:"site"` // public subset of site configuration LikelyDockerOnMac bool `json:"likelyDockerOnMac"` NeedServerRestart bool `json:"needServerRestart"` DeployType string `json:"deployType"` SourcegraphDotComMode bool `json:"sourcegraphDotComMode"` BillingPublishableKey string `json:"billingPublishableKey,omitempty"` AccessTokensAllow conf.AccessTokenAllow `json:"accessTokensAllow"` AllowSignup bool `json:"allowSignup"` ResetPasswordEnabled bool `json:"resetPasswordEnabled"` ExternalServicesUserMode string `json:"externalServicesUserMode"` AuthProviders []authProviderInfo `json:"authProviders"` Branding *schema.Branding `json:"branding"` BatchChangesEnabled bool `json:"batchChangesEnabled"` BatchChangesDisableWebhooksWarning bool `json:"batchChangesDisableWebhooksWarning"` BatchChangesWebhookLogsEnabled bool `json:"batchChangesWebhookLogsEnabled"` ExecutorsEnabled bool `json:"executorsEnabled"` CodeIntelAutoIndexingEnabled bool `json:"codeIntelAutoIndexingEnabled"` CodeIntelAutoIndexingAllowGlobalPolicies bool `json:"codeIntelAutoIndexingAllowGlobalPolicies"` ProductResearchPageEnabled bool `json:"productResearchPageEnabled"` ExperimentalFeatures schema.ExperimentalFeatures `json:"experimentalFeatures"` } // NewJSContextFromRequest populates a JSContext struct from the HTTP // request. func NewJSContextFromRequest(req *http.Request) JSContext { actor := actor.FromContext(req.Context()) headers := make(map[string]string) headers["x-sourcegraph-client"] = globals.ExternalURL().String() headers["X-Requested-With"] = "Sourcegraph" // required for httpapi to use cookie auth // Propagate Cache-Control no-cache and max-age=0 directives // to the requests made by our client-side JavaScript. This is // not a perfect parser, but it catches the important cases. if cc := req.Header.Get("cache-control"); strings.Contains(cc, "no-cache") || strings.Contains(cc, "max-age=0") { headers["Cache-Control"] = "no-cache" } csrfToken := csrf.Token(req) headers["X-Csrf-Token"] = csrfToken siteID := siteid.Get() // Show the site init screen? globalState, err := globalstatedb.Get(req.Context()) needsSiteInit := err == nil && !globalState.Initialized // Auth providers var authProviders []authProviderInfo for _, p := range providers.Providers() { info := p.CachedInfo() if info != nil { authProviders = append(authProviders, authProviderInfo{ IsBuiltin: p.Config().Builtin != nil, DisplayName: info.DisplayName, ServiceType: p.ConfigID().Type, AuthenticationURL: info.AuthenticationURL, }) } } var sentryDSN *string siteConfig := conf.Get().SiteConfiguration if siteConfig.Log != nil && siteConfig.Log.Sentry != nil && siteConfig.Log.Sentry.Dsn != "" { sentryDSN = &siteConfig.Log.Sentry.Dsn } // 🚨 SECURITY: This struct is sent to all users regardless of whether or // not they are logged in, for example on an auth.public=false private // server. 
Including secret fields here is OK if it is based on the user's // authentication above, but do not include e.g. hard-coded secrets about // the server instance here as they would be sent to anonymous users. return JSContext{ ExternalURL: globals.ExternalURL().String(), XHRHeaders: headers, CSRFToken: csrfToken, UserAgentIsBot: isBot(req.UserAgent()), AssetsRoot: assetsutil.URL("").String(), Version: version.Version(), IsAuthenticatedUser: actor.IsAuthenticated(), SentryDSN: sentryDSN, Debug: env.InsecureDev, SiteID: siteID, SiteGQLID: string(graphqlbackend.SiteGQLID()), NeedsSiteInit: needsSiteInit, EmailEnabled: conf.CanSendEmail(), Site: publicSiteConfiguration(), LikelyDockerOnMac: likelyDockerOnMac(), NeedServerRestart: globals.ConfigurationServerFrontendOnly.NeedServerRestart(), DeployType: conf.DeployType(), SourcegraphDotComMode: envvar.SourcegraphDotComMode(), BillingPublishableKey: BillingPublishableKey, // Experiments. We pass these through explicitly so we can // do the default behavior only in Go land. AccessTokensAllow: conf.AccessTokensAllow(), ResetPasswordEnabled: userpasswd.ResetPasswordEnabled(), ExternalServicesUserMode: conf.ExternalServiceUserMode().String(), AllowSignup: conf.AuthAllowSignup(), AuthProviders: authProviders, Branding: globals.Branding(), BatchChangesEnabled: enterprise.BatchChangesEnabledForUser(req.Context(), dbconn.Global) == nil, BatchChangesDisableWebhooksWarning: conf.Get().BatchChangesDisableWebhooksWarning, BatchChangesWebhookLogsEnabled: webhooks.LoggingEnabled(conf.Get()), ExecutorsEnabled: conf.ExecutorsEnabled(), CodeIntelAutoIndexingEnabled: conf.CodeIntelAutoIndexingEnabled(), CodeIntelAutoIndexingAllowGlobalPolicies: conf.CodeIntelAutoIndexingAllowGlobalPolicies(), ProductResearchPageEnabled: conf.ProductResearchPageEnabled(), ExperimentalFeatures: conf.ExperimentalFeatures(), } } // publicSiteConfiguration is the subset of the site.schema.json site // configuration that is necessary for the web app and is not sensitive/secret. func publicSiteConfiguration() schema.SiteConfiguration { c := conf.Get() updateChannel := c.UpdateChannel if updateChannel == "" { updateChannel = "release" } return schema.SiteConfiguration{ AuthPublic: c.AuthPublic, UpdateChannel: updateChannel, AuthzEnforceForSiteAdmins: c.AuthzEnforceForSiteAdmins, DisableNonCriticalTelemetry: c.DisableNonCriticalTelemetry, } } var isBotPat = lazyregexp.New(`(?i:googlecloudmonitoring|pingdom.com|go .* package http|sourcegraph e2etest|bot|crawl|slurp|spider|feed|rss|camo asset proxy|http-client|sourcegraph-client)`) func isBot(userAgent string) bool { ret
likelyDockerOnMac() bool { r := net.DefaultResolver ctx, cancel := context.WithTimeout(context.Background(), time.Second*10) defer cancel() addrs, err := r.LookupHost(ctx, "host.docker.internal") if err != nil || len(addrs) == 0 { return false // Assume we're not docker for mac. } return true }
urn isBotPat.MatchString(userAgent) } func
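The Cache-Control handling in NewJSContextFromRequest above is deliberately a rough substring check rather than a full header parser. The small sketch below isolates that check; the helper name and the sample header values are made up for illustration and are not part of the Sourcegraph code.

package main

import (
	"fmt"
	"strings"
)

// forwardsNoCache mirrors the substring test used above: a header is treated
// as no-cache if it mentions "no-cache" or "max-age=0" anywhere in the value.
func forwardsNoCache(cacheControl string) bool {
	return strings.Contains(cacheControl, "no-cache") ||
		strings.Contains(cacheControl, "max-age=0")
}

func main() {
	// Hypothetical header values, chosen only to show which ones trigger forwarding.
	for _, cc := range []string{"no-cache", "max-age=0, must-revalidate", "max-age=300", ""} {
		fmt.Printf("%-30q -> %v\n", cc, forwardsNoCache(cc))
	}
}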
main.go
package main import ( "html/template" "net/http" ) var tpl *template.Template func init()
func main() { http.HandleFunc("/", index) http.HandleFunc("/about", about) http.ListenAndServe(":8080", nil) } func index(w http.ResponseWriter, r *http.Request) { tpl.ExecuteTemplate(w, "index.gohtml", 42) //io.WriteString(w, "Hello Kelowna") } func about(w http.ResponseWriter, r *http.Request) { tpl.ExecuteTemplate(w, "about.gohtml", "JAMES BOND") }
{ tpl = template.Must(template.ParseGlob("templates/*.gohtml")) }
listen.go
package cluster import ( "context" "net" "golang.org/x/xerrors" ) func newLocalListener(ctx context.Context) (net.Listener, error) { cfg := net.ListenConfig{} l, err := cfg.Listen(ctx, "tcp4", "127.0.0.1:0") if err != nil
return l, nil }
{ return nil, xerrors.Errorf("listen: %w", err) }
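newLocalListener above only binds an ephemeral TCP4 port on localhost and wraps any error with xerrors. The following is an assumed usage sketch, not taken from this package: it lives in the same cluster package so it can call the unexported helper, serves HTTP on the returned listener, and hands back the address plus a stop function.

// file: listen_example.go (hypothetical)
package cluster

import (
	"context"
	"net/http"
)

// serveLocally is an illustrative caller of newLocalListener.
func serveLocally(ctx context.Context) (addr string, stop func() error, err error) {
	l, err := newLocalListener(ctx)
	if err != nil {
		return "", nil, err
	}
	mux := http.NewServeMux()
	mux.HandleFunc("/healthz", func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK)
	})
	go http.Serve(l, mux) // Serve's error is ignored in this sketch
	return l.Addr().String(), l.Close, nil
}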
by-project-key-product-projections-search-request-builder.ts
/** * * Generated file, please do not change!!! * From http://www.vrap.io/ with love * * ,d88b.d88b, * 88888888888 * `Y8888888Y' * `Y888Y' * `Y' * */ import { ProductProjectionPagedSearchResponse } from 'models/product' import { executeRequest, QueryParam } from 'shared/utils/common-types' import { ApiRequest } from 'shared/utils/requests-utils' export class
{ constructor( protected readonly args: { pathArgs: { projectKey: string } executeRequest: executeRequest baseUri?: string } ) {} /** * Search Product Projection */ public post(methodArgs?: { headers?: { [key: string]: string } }): ApiRequest<void> { return new ApiRequest<void>( { baseUri: this.args.baseUri, method: 'POST', uriTemplate: '/{projectKey}/product-projections/search', pathVariables: this.args.pathArgs, headers: { ...methodArgs?.headers, }, }, this.args.executeRequest ) } /** * Search Product Projection */ public get(methodArgs: { queryArgs: { fuzzy?: boolean | boolean[] fuzzyLevel?: number | number[] markMatchingVariants: boolean | boolean[] staged?: boolean | boolean[] filter?: string | string[] 'filter.facets'?: string | string[] 'filter.query'?: string | string[] facet?: string | string[] sort?: string | string[] limit?: number | number[] offset?: number | number[] withTotal?: boolean | boolean[] priceCurrency?: string | string[] priceCountry?: string | string[] priceCustomerGroup?: string | string[] priceChannel?: string | string[] localeProjection?: string | string[] storeProjection?: string | string[] expand?: string | string[] [key: string]: QueryParam } headers?: { [key: string]: string } }): ApiRequest<ProductProjectionPagedSearchResponse> { return new ApiRequest<ProductProjectionPagedSearchResponse>( { baseUri: this.args.baseUri, method: 'GET', uriTemplate: '/{projectKey}/product-projections/search', pathVariables: this.args.pathArgs, headers: { ...methodArgs?.headers, }, queryParams: methodArgs?.queryArgs, }, this.args.executeRequest ) } }
ByProjectKeyProductProjectionsSearchRequestBuilder
archytas.py
#!/usr/bin/env python3 import sys from PySide2 import QtCore, QtWidgets, QtGui from os.path import exists, join, abspath from os import remove import tweepy from auth.auth import AuthData from tweet.tweet import getTweetsKeyword, tweetRandom, OutputerInterface import types class AuthDataInput(QtWidgets.QWidget): def __init__(self, wrapper: 'AuthDataWidget', authdata_path: str): super().__init__() self.authdata_path = authdata_path self.wrapper = wrapper self.layout_consumer_key = QtWidgets.QHBoxLayout() self.label_consumer_key = QtWidgets.QLabel("Consumer Key", alignment=QtCore.Qt.AlignCenter) self.edit_consumer_key = QtWidgets.QLineEdit() self.layout_consumer_key.addWidget(self.label_consumer_key) self.layout_consumer_key.addWidget(self.edit_consumer_key) self.layout_consumer_secret = QtWidgets.QHBoxLayout() self.label_consumer_secret = QtWidgets.QLabel("Consumer Secret", alignment=QtCore.Qt.AlignCenter) self.edit_consumer_secret = QtWidgets.QLineEdit() self.layout_consumer_secret.addWidget(self.label_consumer_secret) self.layout_consumer_secret.addWidget(self.edit_consumer_secret) self.layout_access_token = QtWidgets.QHBoxLayout() self.label_access_token = QtWidgets.QLabel("Access Token", alignment=QtCore.Qt.AlignCenter) self.edit_access_token = QtWidgets.QLineEdit() self.layout_access_token.addWidget(self.label_access_token) self.layout_access_token.addWidget(self.edit_access_token) self.layout_access_token_secret = QtWidgets.QHBoxLayout() self.label_access_token_secret = QtWidgets.QLabel("Access Token Secret", alignment=QtCore.Qt.AlignCenter) self.edit_access_token_secret = QtWidgets.QLineEdit() self.layout_access_token_secret.addWidget(self.label_access_token_secret) self.layout_access_token_secret.addWidget(self.edit_access_token_secret) self.button_save = QtWidgets.QPushButton("Save") self.layout = QtWidgets.QVBoxLayout(self) self.layout.addLayout(self.layout_consumer_key) self.layout.addLayout(self.layout_consumer_secret) self.layout.addLayout(self.layout_access_token) self.layout.addLayout(self.layout_access_token_secret) self.layout.addWidget(self.button_save) self.button_save.clicked.connect(self.save) @QtCore.Slot() def save(self): # Save in self.authdata_path consumer_key = self.edit_consumer_key.text() consumer_secret = self.edit_consumer_secret.text() access_token = self.edit_access_token.text() access_token_secret = self.edit_access_token_secret.text() ad = AuthData(consumer_key, consumer_secret, access_token, access_token_secret) ad.SaveToJson(self.authdata_path) # Notify parent wrapper self.wrapper.update_storage_status() class AuthDataStored(QtWidgets.QWidget): def __init__(self, wrapper: 'AuthDataWidget', authdata_path: str): super().__init__() self.wrapper = wrapper self.authdata_path = authdata_path # Read auth data self.ad = AuthData.CreateFromJson(authdata_path) self.layout_consumer_key = QtWidgets.QHBoxLayout() self.label_consumer_key = QtWidgets.QLabel("Consumer Key", alignment=QtCore.Qt.AlignLeft) self.label_literal_consumer_key = QtWidgets.QLabel(self.ad.consumer_key, alignment=QtCore.Qt.AlignRight) self.layout_consumer_key.addWidget(self.label_consumer_key) self.layout_consumer_key.addWidget(self.label_literal_consumer_key) self.layout_consumer_secret = QtWidgets.QHBoxLayout() self.label_consumer_secret = QtWidgets.QLabel("Consumer Secret", alignment=QtCore.Qt.AlignLeft) self.label_literal_consumer_secret = QtWidgets.QLabel(self.ad.consumer_secret, alignment=QtCore.Qt.AlignRight) self.layout_consumer_secret.addWidget(self.label_consumer_secret) 
self.layout_consumer_secret.addWidget(self.label_literal_consumer_secret) self.layout_access_token = QtWidgets.QHBoxLayout() self.label_access_token = QtWidgets.QLabel("Access Token", alignment=QtCore.Qt.AlignLeft) self.label_literal_access_token = QtWidgets.QLabel(self.ad.access_token, alignment=QtCore.Qt.AlignRight) self.layout_access_token.addWidget(self.label_access_token) self.layout_access_token.addWidget(self.label_literal_access_token) self.layout_access_token_secret = QtWidgets.QHBoxLayout() self.label_access_token_secret = QtWidgets.QLabel("Access Token Secret", alignment=QtCore.Qt.AlignLeft) self.label_literal_access_token_secret = QtWidgets.QLabel(self.ad.access_token_secret, alignment=QtCore.Qt.AlignRight) self.layout_access_token_secret.addWidget(self.label_access_token_secret) self.layout_access_token_secret.addWidget(self.label_literal_access_token_secret) self.text = QtWidgets.QLabel("", alignment=QtCore.Qt.AlignCenter) self.layout_buttons = QtWidgets.QHBoxLayout() self.button_connect = QtWidgets.QPushButton("Connect") self.button_edit = QtWidgets.QPushButton("Edit") self.button_delete = QtWidgets.QPushButton("Delete") self.layout_buttons.addWidget(self.button_connect) self.layout_buttons.addWidget(self.button_edit) self.layout_buttons.addWidget(self.button_delete) self.layout = QtWidgets.QVBoxLayout(self) self.layout.addWidget(self.text) self.layout.addLayout(self.layout_consumer_key) self.layout.addLayout(self.layout_consumer_secret) self.layout.addLayout(self.layout_access_token) self.layout.addLayout(self.layout_access_token_secret) self.layout.addLayout(self.layout_buttons) self.button_connect.clicked.connect(self.authenticate) self.button_delete.clicked.connect(self.delete_auth) @QtCore.Slot() def authenticate(self): # Authenticate to Twitter auth = tweepy.OAuthHandler(self.ad.consumer_key, self.ad.consumer_secret) auth.set_access_token(self.ad.access_token, self.ad.access_token_secret) api = tweepy.API(auth) auth_success = False try: api.verify_credentials() result_text = "Authentication OK" auth_success = True except: result_text = "Error during authentication" self.text.setText(result_text) if (auth_success): self.wrapper.update_api(api) @QtCore.Slot() def delete_auth(self): # Remove auth file if exists(self.authdata_path): remove(self.authdata_path) # Notify parent wrapper self.wrapper.update_storage_status() class AuthDataWidget(QtWidgets.QWidget): def __init__(self, authdata_path, archytas: 'ArchytasWidget'): super().__init__() self.authdata_path = authdata_path self.archytas = archytas self.calculate_authdata() self.title = QtWidgets.QLabel("Authentication", alignment=QtCore.Qt.AlignCenter) self.title.setFont(QtGui.QFont("Default", 16)) self.layout = QtWidgets.QVBoxLayout(self) self.layout.addWidget(self.title) self.layout.addWidget(self.authdata_inner) def calculate_authdata(self): self.isAuthDataStored = exists(self.authdata_path) ad = AuthData.CreateFromJson(self.authdata_path) if (self.isAuthDataStored and ad is not None): self.authdata_inner = AuthDataStored(self, self.authdata_path) else: self.authdata_inner = AuthDataInput(self, self.authdata_path) def update_storage_status(self): aw = self.layout.takeAt(1) aw.widget().deleteLater() self.calculate_authdata() self.layout.addWidget(self.authdata_inner) def update_api(self, api: tweepy.API): self.api = api if (self.api is not None): self.archytas.update_api(api) class RetweetWidget(QtWidgets.QWidget): def __init__(self, archytas: 'ArchytasWidget'): super().__init__() self.archytas = archytas self.title = 
QtWidgets.QLabel("Auto retweeter", alignment=QtCore.Qt.AlignCenter) self.title.setFont(QtGui.QFont("Default", 16)) self.label_err_message = QtWidgets.QLabel("", alignment=QtCore.Qt.AlignCenter) self.layout_number_retweets = QtWidgets.QHBoxLayout() self.label_number_retweets = QtWidgets.QLabel("Number of retweets", alignment=QtCore.Qt.AlignLeft) self.edit_number_retweets = QtWidgets.QLineEdit() self.layout_number_retweets.addWidget(self.label_number_retweets) self.layout_number_retweets.addWidget(self.edit_number_retweets) self.layout_keyword = QtWidgets.QHBoxLayout() self.label_keyword = QtWidgets.QLabel("Keyword", alignment=QtCore.Qt.AlignLeft) self.edit_keyword = QtWidgets.QLineEdit() self.layout_keyword.addWidget(self.label_keyword) self.layout_keyword.addWidget(self.edit_keyword) self.button_retweet = QtWidgets.QPushButton("Retweet") self.layout = QtWidgets.QVBoxLayout(self) self.layout.addWidget(self.title) self.layout.addWidget(self.label_err_message) self.layout.addLayout(self.layout_number_retweets) self.layout.addLayout(self.layout_keyword) self.layout.addWidget(self.button_retweet) self.button_retweet.clicked.connect(self.retweet) self.set_connected(False) def QtRetweetList(self, api: tweepy.API, tweets: list, index: int, secs: int, finishedAction: types.FunctionType): tweet = tweets[index] try: print('\nRetweet Bot found tweet by @' + tweet.user.screen_name + '. ' + 'Attempting to retweet.') tweet.retweet() print('Retweet published successfully.') index = index + 1 if (index < len(tweets)): QtCore.QTimer.singleShot(secs * 1000, lambda: self.QtRetweetList(api, tweets, index, secs, finishedAction)) else: finishedAction() # Some basic error handling. Will print out why retweet failed, into your terminal. except tweepy.TweepyException as error: print('\nError TweepyException. Retweet not successful. Reason: ') print(error) except tweepy.HTTPException as error: print('\nError HTTPException. Retweet not successful. 
Reason: ') print(error) def QtRetweetKeyword(self, api, keyword, rewteetRange, secs, finishedAction: types.FunctionType): tweets = getTweetsKeyword(api, keyword, rewteetRange) self.QtRetweetList(api, tweets, 0, secs, finishedAction) @QtCore.Slot() def retweet(self): self.label_err_message.setText("") if (not self.archytas.connected): self.label_err_message.setText("Error: The app is not connected to Twitter") return api = self.archytas.api # Retweet some tweets with the hashtag try: retweetRange = int(self.edit_number_retweets.text()) except: self.label_err_message.setText("Error: Number of retweets is not a number") return keyword = self.edit_keyword.text() self.button_retweet.setEnabled(False) self.label_err_message.setText("Retweeting...") finishedAction = self.finishedRetweetingActions seconds_between_retweets = 2 self.QtRetweetKeyword(api, keyword, retweetRange, seconds_between_retweets, finishedAction) def finishedRetweetingActions(self): self.button_retweet.setEnabled(True) self.label_err_message.setText("Successfully retweeted") def set_connected(self, connected: bool): self.button_retweet.setEnabled(connected) class OutputerTweetWidget(OutputerInterface): def __init__(self, TweetWidget): super().__init__() self.tweetw = TweetWidget def print(self, message: str) -> None: self.tweetw.update_message(message) class TweetWidget(QtWidgets.QWidget): def __init__(self, archytas: 'ArchytasWidget'): super().__init__() self.archytas = archytas self.csv_path = "<No file loaded>" self.loaded_csv = False self.connected = False self.title = QtWidgets.QLabel("Random tweet", alignment=QtCore.Qt.AlignCenter) self.title.setFont(QtGui.QFont("Default", 16)) self.label_err_message = QtWidgets.QLabel("", alignment=QtCore.Qt.AlignCenter) self.layout_input_csv = QtWidgets.QHBoxLayout() self.label_input_csv = QtWidgets.QLabel("Random tweet source:", alignment=QtCore.Qt.AlignLeft) self.label_location_csv = QtWidgets.QLabel(self.csv_path, alignment=QtCore.Qt.AlignLeft) self.button_load_csv = QtWidgets.QPushButton("Browse...") self.layout_input_csv.addWidget(self.label_input_csv) self.layout_input_csv.addWidget(self.label_location_csv) self.layout_input_csv.addWidget(self.button_load_csv) self.button_tweet = QtWidgets.QPushButton("Tweet") self.layout = QtWidgets.QVBoxLayout(self) self.layout.addWidget(self.title) self.layout.addWidget(self.label_err_message) self.layout.addLayout(self.layout_input_csv) self.layout.addWidget(self.button_tweet) self.button_tweet.clicked.connect(self.tweet) self.button_load_csv.clicked.connect(self.browse_csv) self.try_enabling_tweet_button() @QtCore.Slot() def tweet(self): self.label_err_message.setText("") if (not self.archytas.connected): self.label_err_message.setText("Error: The app is not connected to Twitter") return if (not self.loaded_csv): self.label_err_message.setText("Error: Invalid tweet source file path") return api = self.archytas.api # Tweet randomly selected tweets number_of_tweets = 1 outputer = OutputerTweetWidget(self) tweetRandom(api, self.csv_path, number_of_tweets, outputer) @QtCore.Slot() def browse_csv(self): file_input_csv = QtWidgets.QFileDialog() path_tuple: tuple = file_input_csv.getOpenFileName() path = path_tuple[0] self.update_csv_path(path) def update_csv_path(self, path): self.loaded_csv = True self.csv_path = path self.label_location_csv.setText(self.csv_path) self.try_enabling_tweet_button() def try_enabling_tweet_button(self):
def set_connected(self, connected: bool): self.connected = connected self.try_enabling_tweet_button() def update_message(self, message: str) -> None: self.label_err_message.setText(message) class ArchytasWidget(QtWidgets.QWidget): def __init__(self, authdata_path): super().__init__() self.connected = False self.authdataw = AuthDataWidget(authdata_path, self) self.retweetw = RetweetWidget(self) self.tweetw = TweetWidget(self) self.line1 = QtWidgets.QFrame() self.line1.setFrameShape(QtWidgets.QFrame.HLine) self.line1.setFrameShadow(QtWidgets.QFrame.Sunken) self.line2 = QtWidgets.QFrame() self.line2.setFrameShape(QtWidgets.QFrame.HLine) self.line2.setFrameShadow(QtWidgets.QFrame.Sunken) self.layout = QtWidgets.QVBoxLayout(self) self.layout.addWidget(self.authdataw) self.layout.addWidget(self.line1) self.layout.addWidget(self.retweetw) self.layout.addWidget(self.line2) self.layout.addWidget(self.tweetw) def update_api(self, api: tweepy.API): self.api = api if (self.api is not None): self.connected = True else: self.connected = False self.retweetw.set_connected(self.connected) self.tweetw.set_connected(self.connected) def resource_path(relative_path): if hasattr(sys, '_MEIPASS'): return join(sys._MEIPASS, relative_path) return join(abspath('.'), relative_path) def main(): app = QtWidgets.QApplication([]) widget = ArchytasWidget("auth_data.json") widget.resize(800, 600) widget.show() widget.setWindowTitle("Archytas") widget.setWindowIcon( QtGui.QIcon(resource_path("./assets/icon.png")) ) sys.exit(app.exec_()) if __name__=="__main__": main()
if (self.connected and self.loaded_csv): self.button_tweet.setEnabled(True) else: self.button_tweet.setEnabled(False)
sumOfNumbers.js
function sumOfNumbers(num1, num2) { let number1 = Number(num1); let number2 = Number(num2); let result = 0;
} console.log(result) } sumOfNumbers('1','5')
  for (let i = number1; i <= number2; i++) {
    result += i;
models.go
// +build go1.9 // Copyright 2018 Microsoft Corporation // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // This code was auto-generated by: // github.com/Azure/azure-sdk-for-go/tools/profileBuilder package job import ( "context" original "github.com/Azure/azure-sdk-for-go/services/datalake/analytics/2016-11-01/job" ) const ( DefaultAdlaJobDNSSuffix = original.DefaultAdlaJobDNSSuffix ) type CompileMode = original.CompileMode const ( Full CompileMode = original.Full Semantic CompileMode = original.Semantic SingleBox CompileMode = original.SingleBox ) type ResourceType = original.ResourceType const ( JobManagerResource ResourceType = original.JobManagerResource JobManagerResourceInUserFolder ResourceType = original.JobManagerResourceInUserFolder StatisticsResource ResourceType = original.StatisticsResource StatisticsResourceInUserFolder ResourceType = original.StatisticsResourceInUserFolder VertexResource ResourceType = original.VertexResource VertexResourceInUserFolder ResourceType = original.VertexResourceInUserFolder ) type Result = original.Result const ( Cancelled Result = original.Cancelled Failed Result = original.Failed None Result = original.None Succeeded Result = original.Succeeded ) type SeverityTypes = original.SeverityTypes const ( Deprecated SeverityTypes = original.Deprecated Error SeverityTypes = original.Error Info SeverityTypes = original.Info SevereWarning SeverityTypes = original.SevereWarning UserWarning SeverityTypes = original.UserWarning Warning SeverityTypes = original.Warning ) type State = original.State const ( StateAccepted State = original.StateAccepted StateCompiling State = original.StateCompiling StateEnded State = original.StateEnded StateNew State = original.StateNew StatePaused State = original.StatePaused StateQueued State = original.StateQueued StateRunning State = original.StateRunning StateScheduling State = original.StateScheduling StateStarting State = original.StateStarting StateWaitingForCapacity State = original.StateWaitingForCapacity ) type Type = original.Type const ( TypeHive Type = original.TypeHive TypeJobProperties Type = original.TypeJobProperties TypeUSQL Type = original.TypeUSQL ) type TypeBasicCreateJobProperties = original.TypeBasicCreateJobProperties const ( TypeBasicCreateJobPropertiesTypeCreateJobProperties TypeBasicCreateJobProperties = original.TypeBasicCreateJobPropertiesTypeCreateJobProperties TypeBasicCreateJobPropertiesTypeUSQL TypeBasicCreateJobProperties = original.TypeBasicCreateJobPropertiesTypeUSQL ) type TypeEnum = original.TypeEnum const ( Hive TypeEnum = original.Hive USQL TypeEnum = original.USQL ) type BaseClient = original.BaseClient type BaseJobParameters = original.BaseJobParameters type BasicCreateJobProperties = original.BasicCreateJobProperties type BasicProperties = original.BasicProperties type BuildJobParameters = original.BuildJobParameters type Client = original.Client type CreateJobParameters = original.CreateJobParameters type CreateJobProperties = original.CreateJobProperties type CreateUSQLJobProperties = 
original.CreateUSQLJobProperties type DataPath = original.DataPath type Diagnostics = original.Diagnostics type ErrorDetails = original.ErrorDetails type HiveJobProperties = original.HiveJobProperties type InfoListResult = original.InfoListResult type InfoListResultIterator = original.InfoListResultIterator type InfoListResultPage = original.InfoListResultPage type Information = original.Information type InformationBasic = original.InformationBasic type InnerError = original.InnerError type PipelineClient = original.PipelineClient type PipelineInformation = original.PipelineInformation type PipelineInformationListResult = original.PipelineInformationListResult type PipelineInformationListResultIterator = original.PipelineInformationListResultIterator type PipelineInformationListResultPage = original.PipelineInformationListResultPage type PipelineRunInformation = original.PipelineRunInformation type Properties = original.Properties type RecurrenceClient = original.RecurrenceClient type RecurrenceInformation = original.RecurrenceInformation type RecurrenceInformationListResult = original.RecurrenceInformationListResult type RecurrenceInformationListResultIterator = original.RecurrenceInformationListResultIterator type RecurrenceInformationListResultPage = original.RecurrenceInformationListResultPage type RelationshipProperties = original.RelationshipProperties type Resource = original.Resource type StateAuditRecord = original.StateAuditRecord type Statistics = original.Statistics type StatisticsVertexStage = original.StatisticsVertexStage type USQLJobProperties = original.USQLJobProperties func New() BaseClient { return original.New() } func NewClient() Client { return original.NewClient() } func NewInfoListResultIterator(page InfoListResultPage) InfoListResultIterator { return original.NewInfoListResultIterator(page) } func NewInfoListResultPage(getNextPage func(context.Context, InfoListResult) (InfoListResult, error)) InfoListResultPage { return original.NewInfoListResultPage(getNextPage) } func NewPipelineClient() PipelineClient { return original.NewPipelineClient() } func NewPipelineInformationListResultIterator(page PipelineInformationListResultPage) PipelineInformationListResultIterator { return original.NewPipelineInformationListResultIterator(page) } func NewPipelineInformationListResultPage(getNextPage func(context.Context, PipelineInformationListResult) (PipelineInformationListResult, error)) PipelineInformationListResultPage { return original.NewPipelineInformationListResultPage(getNextPage) } func NewRecurrenceClient() RecurrenceClient { return original.NewRecurrenceClient() } func NewRecurrenceInformationListResultIterator(page RecurrenceInformationListResultPage) RecurrenceInformationListResultIterator { return original.NewRecurrenceInformationListResultIterator(page) } func NewRecurrenceInformationListResultPage(getNextPage func(context.Context, RecurrenceInformationListResult) (RecurrenceInformationListResult, error)) RecurrenceInformationListResultPage { return original.NewRecurrenceInformationListResultPage(getNextPage) } func NewWithoutDefaults(adlaJobDNSSuffix string) BaseClient { return original.NewWithoutDefaults(adlaJobDNSSuffix) } func PossibleCompileModeValues() []CompileMode { return original.PossibleCompileModeValues() } func PossibleResourceTypeValues() []ResourceType { return original.PossibleResourceTypeValues() } func PossibleResultValues() []Result { return original.PossibleResultValues() } func PossibleSeverityTypesValues() []SeverityTypes { return 
original.PossibleSeverityTypesValues() } func PossibleStateValues() []State { return original.PossibleStateValues() } func PossibleTypeBasicCreateJobPropertiesValues() []TypeBasicCreateJobProperties { return original.PossibleTypeBasicCreateJobPropertiesValues() } func PossibleTypeEnumValues() []TypeEnum { return original.PossibleTypeEnumValues() } func PossibleTypeValues() []Type { return original.PossibleTypeValues() } func UserAgent() string { return original.UserAgent() + " profiles/preview" } func Version() string
{ return original.Version() }
discovery_test.go
package discovery import ( "context" "fmt" "net" "strconv" "strings" "sync" "testing" "time" "github.com/miekg/dns" "github.com/wish/discovery/resolver" ) func RunLocalUDPServer(laddr string) (*dns.Server, string, error) { pc, err := net.ListenPacket("udp", laddr) if err != nil { return nil, "", err } server := &dns.Server{PacketConn: pc, ReadTimeout: time.Hour, WriteTimeout: time.Hour} waitLock := sync.Mutex{} waitLock.Lock() server.NotifyStartedFunc = waitLock.Unlock go func() { server.ActivateAndServe() pc.Close() }() waitLock.Lock() return server, pc.LocalAddr().String(), nil } func TestDiscovery(t *testing.T) { tests := []struct { q string r []ServiceAddress err bool }{ // Query with IP, we expect that same IP back { q: "127.0.0.1", r: []ServiceAddress{ServiceAddress{ Name: "127.0.0.1", IP: net.ParseIP("127.0.0.1"), isStatic: true, }}, }, // Query with IP+port, expect back IP+port { q: "127.0.0.1:1234", r: []ServiceAddress{ServiceAddress{ Name: "127.0.0.1", IP: net.ParseIP("127.0.0.1"), Port: 1234, isStatic: true, }}, }, // Just a name, no port { q: "example.com", r: []ServiceAddress{ServiceAddress{ Name: "1.2.3.4", IP: net.ParseIP("1.2.3.4"), }}, }, // SRV response { q: "srv.com", r: []ServiceAddress{ServiceAddress{ Name: "example.com", IP: net.ParseIP("1.2.3.4"), Port: 5060, }}, }, // SRV with a port-- since SRV exists the port won't match { q: "srv.com:80", r: []ServiceAddress{ServiceAddress{ Name: "example.com", IP: net.ParseIP("1.2.3.4"), Port: 5060, }}, }, // ipv6 destination { q: "v6.com", r: []ServiceAddress{ServiceAddress{ Name: "::102:304", IP: net.ParseIP("::1.2.3.4"), }}, }, // SRV ipv6 response { q: "srvV6.com", r: []ServiceAddress{ServiceAddress{ Name: "v6.com", IP: net.ParseIP("::1.2.3.4"), Port: 5060, }}, }, // SRV ipv6 a port-- since SRV exists the port won't match { q: "srvV6.com:80", r: []ServiceAddress{ServiceAddress{ Name: "v6.com", IP: net.ParseIP("::1.2.3.4"), Port: 5060, }}, }, } zones := map[string]string{ "example.com.": "example.com. 1 IN A 1.2.3.4", "v6.com.": "example.com. 1 IN AAAA ::1.2.3.4", "srv.com.": "srv.com. 1 IN SRV 0 0 5060 example.com.\nsrv.com. 600 IN A 1.2.3.4", "srvV6.com.": "srv.com. 1 IN SRV 0 0 5060 v6.com.\nsrv.com. 600 IN AAAA ::1.2.3.4", } // Start DNS server dns.HandleFunc(".", func(w dns.ResponseWriter, req *dns.Msg) { msg := dns.Msg{} msg.SetReply(req) msg.Authoritative = true domain := msg.Question[0].Name zoneStr, ok := zones[domain] if ok { parser := dns.NewZoneParser(strings.NewReader(zoneStr), domain, "") for { rr, ok := parser.Next() if !ok { break } if rr.Header().Rrtype == req.Question[0].Qtype { msg.Answer = append(msg.Answer, rr) } } } w.WriteMsg(&msg) }) defer dns.HandleRemove(".") s, addrstr, err := RunLocalUDPServer(":0") if err != nil { t.Fatalf("unable to run test server: %v", err) } defer s.Shutdown() _, port, err := net.SplitHostPort(addrstr) if err != nil { t.Fatal(err) } r := resolver.NewResolverFromConfig(&dns.ClientConfig{ Servers: []string{"127.0.0.1"}, Port: port, }) d := discovery{c: DefaultConfig, r: r} ctx := context.TODO() for i, test := range tests { t.Run(strconv.Itoa(i), func(t *testing.T) { addrs, err := d.GetServiceAddresses(ctx, test.q) if (err != nil) != test.err { t.Fatalf("Wrong error, err=%v expectedErr=%v", err, test.err) } // Hack to avoid creating separate comparator for x, a := range addrs { test.r[x].expiresAt = a.expiresAt } if !ServiceAddresses(test.r).Equal(addrs) { t.Fatalf("Mismatch in addrs \nexpected=%v \nactual=%v", test.r, addrs) } }) } } func TestDiscoverySubscribe(t *testing.T)
{ zones := map[string]string{ "example.com.": "example.com. 1 IN A 1.2.3.4", } // Start DNS server dns.HandleFunc(".", func(w dns.ResponseWriter, req *dns.Msg) { msg := dns.Msg{} msg.SetReply(req) msg.Authoritative = true domain := msg.Question[0].Name zoneStr, ok := zones[domain] if ok { parser := dns.NewZoneParser(strings.NewReader(zoneStr), domain, "") for { rr, ok := parser.Next() if !ok { break } if rr.Header().Rrtype == req.Question[0].Qtype { msg.Answer = append(msg.Answer, rr) } } } w.WriteMsg(&msg) }) defer dns.HandleRemove(".") s, addrstr, err := RunLocalUDPServer(":0") if err != nil { t.Fatalf("unable to run test server: %v", err) } defer s.Shutdown() _, port, err := net.SplitHostPort(addrstr) if err != nil { t.Fatal(err) } r := resolver.NewResolverFromConfig(&dns.ClientConfig{ Servers: []string{"127.0.0.1"}, Port: port, }) d := discovery{c: DefaultConfig, r: r} ctx := context.TODO() var cbAddrs ServiceAddresses var cbErr error cbCh := make(chan struct{}) cb := func(_ context.Context, addrs ServiceAddresses) error { select { case cbCh <- struct{}{}: default: } cbAddrs = addrs return cbErr } cbWait := func() { select { case <-cbCh: case <-time.After(time.Second * 2): t.Fatalf("CB failed") } } // Do a subscribe if err := d.SubscribeServiceAddresses(ctx, "example.com", cb); err != nil { t.Fatalf("Error doing initial subscribe: %v", err) } // wait a second, ensure that we got another cbWait() // set an error, ensure that we get some more retires cbErr = fmt.Errorf("some error") prevAddrs := cbAddrs for i := 0; i < 3; i++ { cbWait() } // Clear error ensure that we get an update cbErr = nil cbWait() if cbAddrs.Equal(prevAddrs) { t.Fatalf("no update!") } // test that an update is seen immediately prevAddrs = cbAddrs cbWait() // Update immediately zones["example.com."] = "example.com. 1 IN A 1.2.3.4\nexample.com. 1 IN A 1.2.3.5" // Wait, and see if we get updates for i := 0; i < 3; i++ { cbWait() if len(prevAddrs) == len(cbAddrs) { t.Fatalf("%d callback missing update!", i) } } }
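RunLocalUDPServer and the dns.HandleFunc zone map above are the moving parts these tests use to fake a DNS server. The stripped-down sketch below shows the same setup queried directly with a plain miekg/dns client rather than through the discovery resolver; the test name and the single A record are made up for illustration.

func TestLocalDNSServerSketch(t *testing.T) {
	// Answer every query with a fixed A record; illustration only.
	dns.HandleFunc(".", func(w dns.ResponseWriter, req *dns.Msg) {
		msg := new(dns.Msg)
		msg.SetReply(req)
		rr, err := dns.NewRR(req.Question[0].Name + " 1 IN A 1.2.3.4")
		if err == nil {
			msg.Answer = append(msg.Answer, rr)
		}
		w.WriteMsg(msg)
	})
	defer dns.HandleRemove(".")

	s, addr, err := RunLocalUDPServer(":0")
	if err != nil {
		t.Fatalf("unable to run test server: %v", err)
	}
	defer s.Shutdown()

	// Query the fake server directly with a DNS client.
	m := new(dns.Msg)
	m.SetQuestion("example.com.", dns.TypeA)
	c := new(dns.Client)
	in, _, err := c.Exchange(m, addr)
	if err != nil {
		t.Fatalf("exchange failed: %v", err)
	}
	if len(in.Answer) != 1 {
		t.Fatalf("expected one answer, got %d", len(in.Answer))
	}
}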
styles.js
import { StyleSheet } from 'react-native'; import Constants from 'expo-constants'; const styles = StyleSheet.create({ container: { flex: 1, paddingHorizontal: 24, paddingTop: Constants.statusBarHeight + 20 }, header: { flexDirection: 'row', justifyContent: 'space-between', alignItems: 'center' }, headerText: { fontSize: 15, color: '#737380' }, headerTextBold: { fontWeight: 'bold' }, title: { fontSize: 30, marginBottom: 16, marginTop: 48, color: '#13131a', fontWeight: 'bold' }, description: { fontSize: 16, lineHeight: 24, color: '#737380' }, incidentList: { marginTop: 32 }, incident: { padding: 24, borderRadius: 10, backgroundColor: '#fff', marginBottom: 16 }, incidentProperty: { fontSize: 14, color: '#41414d',
marginTop: 8, fontSize: 15, marginBottom: 24, color: '#737380' }, detailsButton: { flexDirection: 'row', justifyContent: 'space-between', alignItems: 'center', backgroundColor: '#e02041', padding: 10, borderRadius: 5 }, detailsButtonText: { color: '#fff', fontSize: 15, fontWeight: 'bold' } }); export default styles;
fontWeight: 'bold' }, incidentValue: {
bigNumber.ts
export const DECIMAL_UNITS = 3; export const parseToFormattedNumber = ( value: string | BigNumberish, unit: BigNumberish = DECIMAL_UNITS ) => commify(formatUnits(value, unit));
import type { BigNumberish } from '@ethersproject/bignumber'; import { commify, formatUnits, parseUnits } from '@ethersproject/units';
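As a rough illustration of what parseToFormattedNumber does (shift the raw integer value right by `unit` decimal places, then group thousands), here is a hypothetical Python sketch; unlike the BigNumber-based original it uses floats, so it only shows the idea, not a drop-in replacement.

def parse_to_formatted_number(value, unit=3):
    shifted = int(value) / (10 ** unit)  # analogous to formatUnits(value, unit)
    return f"{shifted:,}"                # analogous to commify(...)

# parse_to_formatted_number("1234567") -> '1,234.567'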
api.rs
use std::collections::HashMap; use std::cell::RefCell; use std::default::Default; use std::collections::BTreeMap; use serde_json as json; use std::io; use std::fs; use std::mem; use std::thread::sleep; use crate::client; // ############## // UTILITIES ### // ############ /// Identifies the an OAuth2 authorization scope. /// A scope is needed when requesting an /// [authorization token](https://developers.google.com/youtube/v3/guides/authentication). #[derive(PartialEq, Eq, Hash)] pub enum Scope { /// See, edit, configure, and delete your Google Cloud Platform data CloudPlatform, } impl AsRef<str> for Scope { fn as_ref(&self) -> &str { match *self { Scope::CloudPlatform => "https://www.googleapis.com/auth/cloud-platform", } } } impl Default for Scope { fn default() -> Scope { Scope::CloudPlatform } } // ######## // HUB ### // ###### /// Central instance to access all FirebaseCloudMessaging related resource activities /// /// # Examples /// /// Instantiate a new hub /// /// ```test_harness,no_run /// extern crate hyper; /// extern crate hyper_rustls; /// extern crate yup_oauth2 as oauth2; /// extern crate google_fcm1 as fcm1; /// use fcm1::api::SendMessageRequest; /// use fcm1::{Result, Error}; /// # async fn dox() { /// use std::default::Default; /// use oauth2; /// use fcm1::FirebaseCloudMessaging; /// /// // Get an ApplicationSecret instance by some means. It contains the `client_id` and /// // `client_secret`, among other things. /// let secret: oauth2::ApplicationSecret = Default::default(); /// // Instantiate the authenticator. It will choose a suitable authentication flow for you, /// // unless you replace `None` with the desired Flow. /// // Provide your own `AuthenticatorDelegate` to adjust the way it operates and get feedback about /// // what's going on. You probably want to bring in your own `TokenStorage` to persist tokens and /// // retrieve them from storage. /// let auth = yup_oauth2::InstalledFlowAuthenticator::builder( /// secret, /// yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect, /// ).build().await.unwrap(); /// let mut hub = FirebaseCloudMessaging::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth); /// // As the method needs a request, you would usually fill it with the desired information /// // into the respective structure. Some of the parts shown here might not be applicable ! /// // Values shown here are possibly random and not representative ! /// let mut req = SendMessageRequest::default(); /// /// // You can configure optional parameters by calling the respective setters at will, and /// // execute the final call using `doit()`. /// // Values shown here are possibly random and not representative ! /// let result = hub.projects().messages_send(req, "parent") /// .doit().await; /// /// match result { /// Err(e) => match e { /// // The Error enum provides details about what exactly happened. 
/// // You can also just use its `Debug`, `Display` or `Error` traits /// Error::HttpError(_) /// |Error::Io(_) /// |Error::MissingAPIKey /// |Error::MissingToken(_) /// |Error::Cancelled /// |Error::UploadSizeLimitExceeded(_, _) /// |Error::Failure(_) /// |Error::BadRequest(_) /// |Error::FieldClash(_) /// |Error::JsonDecodeError(_, _) => println!("{}", e), /// }, /// Ok(res) => println!("Success: {:?}", res), /// } /// # } /// ``` #[derive(Clone)] pub struct FirebaseCloudMessaging<> { client: hyper::Client<hyper_rustls::HttpsConnector<hyper::client::connect::HttpConnector>, hyper::body::Body>, auth: oauth2::authenticator::Authenticator<hyper_rustls::HttpsConnector<hyper::client::connect::HttpConnector>>, _user_agent: String, _base_url: String, _root_url: String, } impl<'a, > client::Hub for FirebaseCloudMessaging<> {} impl<'a, > FirebaseCloudMessaging<> { pub fn new(client: hyper::Client<hyper_rustls::HttpsConnector<hyper::client::connect::HttpConnector>, hyper::body::Body>, authenticator: oauth2::authenticator::Authenticator<hyper_rustls::HttpsConnector<hyper::client::connect::HttpConnector>>) -> FirebaseCloudMessaging<> { FirebaseCloudMessaging { client, auth: authenticator, _user_agent: "google-api-rust-client/2.0.8".to_string(), _base_url: "https://fcm.googleapis.com/".to_string(), _root_url: "https://fcm.googleapis.com/".to_string(), } } pub fn projects(&'a self) -> ProjectMethods<'a> { ProjectMethods { hub: &self } } /// Set the user-agent header field to use in all requests to the server. /// It defaults to `google-api-rust-client/2.0.8`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { mem::replace(&mut self._user_agent, agent_name) } /// Set the base url to use in all requests to the server. /// It defaults to `https://fcm.googleapis.com/`. /// /// Returns the previously set base url. pub fn base_url(&mut self, new_base_url: String) -> String { mem::replace(&mut self._base_url, new_base_url) } /// Set the root url to use in all requests to the server. /// It defaults to `https://fcm.googleapis.com/`. /// /// Returns the previously set root url. pub fn root_url(&mut self, new_root_url: String) -> String { mem::replace(&mut self._root_url, new_root_url) } } // ############ // SCHEMAS ### // ########## /// Android specific options for messages sent through [FCM connection server](https://goo.gl/4GLdUl). /// /// This type is not used in any activity, and only used as *part* of another schema. /// #[derive(Default, Clone, Debug, Serialize, Deserialize)] pub struct AndroidConfig { /// An identifier of a group of messages that can be collapsed, so that only the last message gets sent when delivery can be resumed. A maximum of 4 different collapse keys is allowed at any given time. #[serde(rename="collapseKey")] pub collapse_key: Option<String>, /// Arbitrary key/value payload. If present, it will override google.firebase.fcm.v1.Message.data. pub data: Option<HashMap<String, String>>, /// If set to true, messages will be allowed to be delivered to the app while the device is in direct boot mode. See [Support Direct Boot mode](https://developer.android.com/training/articles/direct-boot). #[serde(rename="directBootOk")] pub direct_boot_ok: Option<bool>, /// Options for features provided by the FCM SDK for Android. #[serde(rename="fcmOptions")] pub fcm_options: Option<AndroidFcmOptions>, /// Notification to send to android devices. pub notification: Option<AndroidNotification>, /// Message priority. 
Can take "normal" and "high" values. For more information, see [Setting the priority of a message](https://goo.gl/GjONJv). pub priority: Option<String>, /// Package name of the application where the registration token must match in order to receive the message. #[serde(rename="restrictedPackageName")] pub restricted_package_name: Option<String>, /// How long (in seconds) the message should be kept in FCM storage if the device is offline. The maximum time to live supported is 4 weeks, and the default value is 4 weeks if not set. Set it to 0 if want to send the message immediately. In JSON format, the Duration type is encoded as a string rather than an object, where the string ends in the suffix "s" (indicating seconds) and is preceded by the number of seconds, with nanoseconds expressed as fractional seconds. For example, 3 seconds with 0 nanoseconds should be encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should be expressed in JSON format as "3.000000001s". The ttl will be rounded down to the nearest second. pub ttl: Option<String>, } impl client::Part for AndroidConfig {} /// Options for features provided by the FCM SDK for Android. /// /// This type is not used in any activity, and only used as *part* of another schema. /// #[derive(Default, Clone, Debug, Serialize, Deserialize)] pub struct AndroidFcmOptions { /// Label associated with the message's analytics data. #[serde(rename="analyticsLabel")] pub analytics_label: Option<String>, } impl client::Part for AndroidFcmOptions {} /// Notification to send to android devices. /// /// This type is not used in any activity, and only used as *part* of another schema. /// #[derive(Default, Clone, Debug, Serialize, Deserialize)] pub struct AndroidNotification { /// The notification's body text. If present, it will override google.firebase.fcm.v1.Notification.body. pub body: Option<String>, /// Variable string values to be used in place of the format specifiers in body_loc_key to use to localize the body text to the user's current localization. See [Formatting and Styling](https://goo.gl/MalYE3) for more information. #[serde(rename="bodyLocArgs")] pub body_loc_args: Option<Vec<String>>, /// The key to the body string in the app's string resources to use to localize the body text to the user's current localization. See [String Resources](https://goo.gl/NdFZGI) for more information. #[serde(rename="bodyLocKey")] pub body_loc_key: Option<String>, /// The [notification's channel id](https://developer.android.com/guide/topics/ui/notifiers/notifications#ManageChannels) (new in Android O). The app must create a channel with this channel ID before any notification with this channel ID is received. If you don't send this channel ID in the request, or if the channel ID provided has not yet been created by the app, FCM uses the channel ID specified in the app manifest. #[serde(rename="channelId")] pub channel_id: Option<String>, /// The action associated with a user click on the notification. If specified, an activity with a matching intent filter is launched when a user clicks on the notification. #[serde(rename="clickAction")] pub click_action: Option<String>, /// The notification's icon color, expressed in #rrggbb format. pub color: Option<String>, /// If set to true, use the Android framework's default LED light settings for the notification. Default values are specified in [config.xml](https://android.googlesource.com/platform/frameworks/base/+/master/core/res/res/values/config.xml). 
If `default_light_settings` is set to true and `light_settings` is also set, the user-specified `light_settings` is used instead of the default value. #[serde(rename="defaultLightSettings")] pub default_light_settings: Option<bool>, /// If set to true, use the Android framework's default sound for the notification. Default values are specified in [config.xml](https://android.googlesource.com/platform/frameworks/base/+/master/core/res/res/values/config.xml). #[serde(rename="defaultSound")] pub default_sound: Option<bool>, /// If set to true, use the Android framework's default vibrate pattern for the notification. Default values are specified in [config.xml](https://android.googlesource.com/platform/frameworks/base/+/master/core/res/res/values/config.xml). If `default_vibrate_timings` is set to true and `vibrate_timings` is also set, the default value is used instead of the user-specified `vibrate_timings`. #[serde(rename="defaultVibrateTimings")] pub default_vibrate_timings: Option<bool>, /// Set the time that the event in the notification occurred. Notifications in the panel are sorted by this time. A point in time is represented using [protobuf.Timestamp](https://developers.google.com/protocol-buffers/docs/reference/java/com/google/protobuf/Timestamp). #[serde(rename="eventTime")] pub event_time: Option<String>, /// The notification's icon. Sets the notification icon to myicon for drawable resource myicon. If you don't send this key in the request, FCM displays the launcher icon specified in your app manifest. pub icon: Option<String>, /// Contains the URL of an image that is going to be displayed in a notification. If present, it will override google.firebase.fcm.v1.Notification.image. pub image: Option<String>, /// Settings to control the notification's LED blinking rate and color if LED is available on the device. The total blinking time is controlled by the OS. #[serde(rename="lightSettings")] pub light_settings: Option<LightSettings>, /// Set whether or not this notification is relevant only to the current device. Some notifications can be bridged to other devices for remote display, such as a Wear OS watch. This hint can be set to recommend this notification not be bridged. See [Wear OS guides](https://developer.android.com/training/wearables/notifications/bridger#existing-method-of-preventing-bridging) #[serde(rename="localOnly")] pub local_only: Option<bool>, /// Sets the number of items this notification represents. May be displayed as a badge count for launchers that support badging.See [Notification Badge](https://developer.android.com/training/notify-user/badges). For example, this might be useful if you're using just one notification to represent multiple new messages but you want the count here to represent the number of total new messages. If zero or unspecified, systems that support badging use the default, which is to increment a number displayed on the long-press menu each time a new notification arrives. #[serde(rename="notificationCount")] pub notification_count: Option<i32>, /// Set the relative priority for this notification. Priority is an indication of how much of the user's attention should be consumed by this notification. Low-priority notifications may be hidden from the user in certain situations, while the user might be interrupted for a higher-priority notification. The effect of setting the same priorities may differ slightly on different platforms. Note this priority differs from `AndroidMessagePriority`. 
This priority is processed by the client after the message has been delivered, whereas [AndroidMessagePriority](https://firebase.google.com/docs/reference/fcm/rest/v1/projects.messages#androidmessagepriority) is an FCM concept that controls when the message is delivered. #[serde(rename="notificationPriority")] pub notification_priority: Option<String>, /// The sound to play when the device receives the notification. Supports "default" or the filename of a sound resource bundled in the app. Sound files must reside in /res/raw/. pub sound: Option<String>, /// When set to false or unset, the notification is automatically dismissed when the user clicks it in the panel. When set to true, the notification persists even when the user clicks it. pub sticky: Option<bool>, /// Identifier used to replace existing notifications in the notification drawer. If not specified, each request creates a new notification. If specified and a notification with the same tag is already being shown, the new notification replaces the existing one in the notification drawer. pub tag: Option<String>, /// Sets the "ticker" text, which is sent to accessibility services. Prior to API level 21 (`Lollipop`), sets the text that is displayed in the status bar when the notification first arrives. pub ticker: Option<String>, /// The notification's title. If present, it will override google.firebase.fcm.v1.Notification.title. pub title: Option<String>, /// Variable string values to be used in place of the format specifiers in title_loc_key to use to localize the title text to the user's current localization. See [Formatting and Styling](https://goo.gl/MalYE3) for more information. #[serde(rename="titleLocArgs")] pub title_loc_args: Option<Vec<String>>, /// The key to the title string in the app's string resources to use to localize the title text to the user's current localization. See [String Resources](https://goo.gl/NdFZGI) for more information. #[serde(rename="titleLocKey")] pub title_loc_key: Option<String>, /// Set the vibration pattern to use. Pass in an array of [protobuf.Duration](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.Duration) to turn on or off the vibrator. The first value indicates the `Duration` to wait before turning the vibrator on. The next value indicates the `Duration` to keep the vibrator on. Subsequent values alternate between `Duration` to turn the vibrator off and to turn the vibrator on. If `vibrate_timings` is set and `default_vibrate_timings` is set to `true`, the default value is used instead of the user-specified `vibrate_timings`. #[serde(rename="vibrateTimings")] pub vibrate_timings: Option<Vec<String>>, /// Set the [Notification.visibility](https://developer.android.com/reference/android/app/Notification.html#visibility) of the notification. pub visibility: Option<String>, } impl client::Part for AndroidNotification {} /// [Apple Push Notification Service](https://goo.gl/MXRTPa) specific options. /// /// This type is not used in any activity, and only used as *part* of another schema. /// #[derive(Default, Clone, Debug, Serialize, Deserialize)] pub struct ApnsConfig { /// Options for features provided by the FCM SDK for iOS. #[serde(rename="fcmOptions")] pub fcm_options: Option<ApnsFcmOptions>, /// HTTP request headers defined in Apple Push Notification Service. 
Refer to [APNs request headers](https://developer.apple.com/documentation/usernotifications/setting_up_a_remote_notification_server/sending_notification_requests_to_apns) for supported headers, e.g. "apns-priority": "10". pub headers: Option<HashMap<String, String>>, /// APNs payload as a JSON object, including both `aps` dictionary and custom payload. See [Payload Key Reference](https://developer.apple.com/documentation/usernotifications/setting_up_a_remote_notification_server/generating_a_remote_notification). If present, it overrides google.firebase.fcm.v1.Notification.title and google.firebase.fcm.v1.Notification.body. pub payload: Option<HashMap<String, String>>, } impl client::Part for ApnsConfig {} /// Options for features provided by the FCM SDK for iOS. /// /// This type is not used in any activity, and only used as *part* of another schema. /// #[derive(Default, Clone, Debug, Serialize, Deserialize)] pub struct ApnsFcmOptions { /// Label associated with the message's analytics data. #[serde(rename="analyticsLabel")] pub analytics_label: Option<String>, /// Contains the URL of an image that is going to be displayed in a notification. If present, it will override google.firebase.fcm.v1.Notification.image. pub image: Option<String>, } impl client::Part for ApnsFcmOptions {} /// Represents a color in the RGBA color space. This representation is designed for simplicity of conversion to/from color representations in various languages over compactness. For example, the fields of this representation can be trivially provided to the constructor of `java.awt.Color` in Java; it can also be trivially provided to UIColor's `+colorWithRed:green:blue:alpha` method in iOS; and, with just a little work, it can be easily formatted into a CSS `rgba()` string in JavaScript. This reference page doesn't carry information about the absolute color space that should be used to interpret the RGB value (e.g. sRGB, Adobe RGB, DCI-P3, BT.2020, etc.). By default, applications should assume the sRGB color space. When color equality needs to be decided, implementations, unless documented otherwise, treat two colors as equal if all their red, green, blue, and alpha values each differ by at most 1e-5. Example (Java): import com.google.type.Color; // ... public static java.awt.Color fromProto(Color protocolor) { float alpha = protocolor.hasAlpha() ? protocolor.getAlpha().getValue() : 1.0; return new java.awt.Color( protocolor.getRed(), protocolor.getGreen(), protocolor.getBlue(), alpha); } public static Color toProto(java.awt.Color color) { float red = (float) color.getRed(); float green = (float) color.getGreen(); float blue = (float) color.getBlue(); float denominator = 255.0; Color.Builder resultBuilder = Color .newBuilder() .setRed(red / denominator) .setGreen(green / denominator) .setBlue(blue / denominator); int alpha = color.getAlpha(); if (alpha != 255) { result.setAlpha( FloatValue .newBuilder() .setValue(((float) alpha) / denominator) .build()); } return resultBuilder.build(); } // ... Example (iOS / Obj-C): // ... 
static UIColor* fromProto(Color* protocolor) { float red = [protocolor red]; float green = [protocolor green]; float blue = [protocolor blue]; FloatValue* alpha_wrapper = [protocolor alpha]; float alpha = 1.0; if (alpha_wrapper != nil) { alpha = [alpha_wrapper value]; } return [UIColor colorWithRed:red green:green blue:blue alpha:alpha]; } static Color* toProto(UIColor* color) { CGFloat red, green, blue, alpha; if (![color getRed:&red green:&green blue:&blue alpha:&alpha]) { return nil; } Color* result = [[Color alloc] init]; [result setRed:red]; [result setGreen:green]; [result setBlue:blue]; if (alpha <= 0.9999) { [result setAlpha:floatWrapperWithValue(alpha)]; } [result autorelease]; return result; } // ... Example (JavaScript): // ... var protoToCssColor = function(rgb_color) { var redFrac = rgb_color.red || 0.0; var greenFrac = rgb_color.green || 0.0; var blueFrac = rgb_color.blue || 0.0; var red = Math.floor(redFrac * 255); var green = Math.floor(greenFrac * 255); var blue = Math.floor(blueFrac * 255); if (!('alpha' in rgb_color)) { return rgbToCssColor(red, green, blue); } var alphaFrac = rgb_color.alpha.value || 0.0; var rgbParams = [red, green, blue].join(','); return ['rgba(', rgbParams, ',', alphaFrac, ')'].join(''); }; var rgbToCssColor = function(red, green, blue) { var rgbNumber = new Number((red << 16) | (green << 8) | blue); var hexString = rgbNumber.toString(16); var missingZeros = 6 - hexString.length; var resultBuilder = ['#']; for (var i = 0; i < missingZeros; i++) { resultBuilder.push('0'); } resultBuilder.push(hexString); return resultBuilder.join(''); }; // ... /// /// This type is not used in any activity, and only used as *part* of another schema. /// #[derive(Default, Clone, Debug, Serialize, Deserialize)] pub struct Color { /// The fraction of this color that should be applied to the pixel. That is, the final pixel color is defined by the equation: `pixel color = alpha * (this color) + (1.0 - alpha) * (background color)` This means that a value of 1.0 corresponds to a solid color, whereas a value of 0.0 corresponds to a completely transparent color. This uses a wrapper message rather than a simple float scalar so that it is possible to distinguish between a default value and the value being unset. If omitted, this color object is rendered as a solid color (as if the alpha value had been explicitly given a value of 1.0). pub alpha: Option<f32>, /// The amount of blue in the color as a value in the interval [0, 1]. pub blue: Option<f32>, /// The amount of green in the color as a value in the interval [0, 1]. pub green: Option<f32>, /// The amount of red in the color as a value in the interval [0, 1]. pub red: Option<f32>, } impl client::Part for Color {} /// Platform independent options for features provided by the FCM SDKs. /// /// This type is not used in any activity, and only used as *part* of another schema. /// #[derive(Default, Clone, Debug, Serialize, Deserialize)] pub struct FcmOptions { /// Label associated with the message's analytics data. #[serde(rename="analyticsLabel")] pub analytics_label: Option<String>, } impl client::Part for FcmOptions {} /// Settings to control notification LED. /// /// This type is not used in any activity, and only used as *part* of another schema. /// #[derive(Default, Clone, Debug, Serialize, Deserialize)] pub struct LightSettings { /// Required. Set `color` of the LED with [google.type.Color](https://github.com/googleapis/googleapis/blob/master/google/type/color.proto). pub color: Option<Color>, /// Required. 
Along with `light_on_duration `, define the blink rate of LED flashes. Resolution defined by [proto.Duration](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.Duration) #[serde(rename="lightOffDuration")] pub light_off_duration: Option<String>, /// Required. Along with `light_off_duration`, define the blink rate of LED flashes. Resolution defined by [proto.Duration](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.Duration) #[serde(rename="lightOnDuration")] pub light_on_duration: Option<String>, } impl client::Part for LightSettings {} /// Message to send by Firebase Cloud Messaging Service. /// /// # Activities /// /// This type is used in activities, which are methods you may call on this type or where this type is involved in. /// The list links the activity name, along with information about where it is used (one of *request* and *response*). /// /// * [messages send projects](ProjectMessageSendCall) (response) /// #[derive(Default, Clone, Debug, Serialize, Deserialize)] pub struct Message { /// Input only. Android specific options for messages sent through [FCM connection server](https://goo.gl/4GLdUl). pub android: Option<AndroidConfig>, /// Input only. [Apple Push Notification Service](https://goo.gl/MXRTPa) specific options. pub apns: Option<ApnsConfig>, /// Condition to send a message to, e.g. "'foo' in topics && 'bar' in topics". pub condition: Option<String>, /// Input only. Arbitrary key/value payload. The key should not be a reserved word ("from", "message_type", or any word starting with "google" or "gcm"). pub data: Option<HashMap<String, String>>, /// Input only. Template for FCM SDK feature options to use across all platforms. #[serde(rename="fcmOptions")] pub fcm_options: Option<FcmOptions>, /// Output Only. The identifier of the message sent, in the format of `projects/*/messages/{message_id}`. pub name: Option<String>, /// Input only. Basic notification template to use across all platforms. pub notification: Option<Notification>, /// Registration token to send a message to. pub token: Option<String>, /// Topic name to send a message to, e.g. "weather". Note: "/topics/" prefix should not be provided. pub topic: Option<String>, /// Input only. [Webpush protocol](https://tools.ietf.org/html/rfc8030) options. pub webpush: Option<WebpushConfig>, } impl client::ResponseResult for Message {} /// Basic notification template to use across all platforms. /// /// This type is not used in any activity, and only used as *part* of another schema. /// #[derive(Default, Clone, Debug, Serialize, Deserialize)] pub struct Notification { /// The notification's body text. pub body: Option<String>, /// Contains the URL of an image that is going to be downloaded on the device and displayed in a notification. JPEG, PNG, BMP have full support across platforms. Animated GIF and video only work on iOS. WebP and HEIF have varying levels of support across platforms and platform versions. Android has 1MB image size limit. Quota usage and implications/costs for hosting image on Firebase Storage: https://firebase.google.com/pricing pub image: Option<String>, /// The notification's title. pub title: Option<String>, } impl client::Part for Notification {} /// Request to send a message to specified target. /// /// # Activities /// /// This type is used in activities, which are methods you may call on this type or where this type is involved in. 
/// The list links the activity name, along with information about where it is used (one of *request* and *response*). /// /// * [messages send projects](ProjectMessageSendCall) (request) /// #[derive(Default, Clone, Debug, Serialize, Deserialize)] pub struct SendMessageRequest { /// Required. Message to send. pub message: Option<Message>, /// Flag for testing the request without actually delivering the message. #[serde(rename="validateOnly")] pub validate_only: Option<bool>, } impl client::RequestValue for SendMessageRequest {} /// [Webpush protocol](https://tools.ietf.org/html/rfc8030) options. /// /// This type is not used in any activity, and only used as *part* of another schema. /// #[derive(Default, Clone, Debug, Serialize, Deserialize)] pub struct WebpushConfig { /// Arbitrary key/value payload. If present, it will override google.firebase.fcm.v1.Message.data. pub data: Option<HashMap<String, String>>, /// Options for features provided by the FCM SDK for Web. #[serde(rename="fcmOptions")] pub fcm_options: Option<WebpushFcmOptions>, /// HTTP headers defined in webpush protocol. Refer to [Webpush protocol](https://tools.ietf.org/html/rfc8030#section-5) for supported headers, e.g. "TTL": "15". pub headers: Option<HashMap<String, String>>, /// Web Notification options as a JSON object. Supports Notification instance properties as defined in [Web Notification API](https://developer.mozilla.org/en-US/docs/Web/API/Notification). If present, "title" and "body" fields override [google.firebase.fcm.v1.Notification.title] and [google.firebase.fcm.v1.Notification.body]. pub notification: Option<HashMap<String, String>>, } impl client::Part for WebpushConfig {} /// Options for features provided by the FCM SDK for Web. /// /// This type is not used in any activity, and only used as *part* of another schema. /// #[derive(Default, Clone, Debug, Serialize, Deserialize)] pub struct WebpushFcmOptions { /// Label associated with the message's analytics data. #[serde(rename="analyticsLabel")] pub analytics_label: Option<String>, /// The link to open when the user clicks on the notification. For all URL values, HTTPS is required. pub link: Option<String>, } impl client::Part for WebpushFcmOptions {} // ################### // MethodBuilders ### // ################# /// A builder providing access to all methods supported on *project* resources. /// It is not used directly, but through the `FirebaseCloudMessaging` hub. /// /// # Example /// /// Instantiate a resource builder /// /// ```test_harness,no_run /// extern crate hyper; /// extern crate hyper_rustls; /// extern crate yup_oauth2 as oauth2; /// extern crate google_fcm1 as fcm1; /// /// # async fn dox() { /// use std::default::Default; /// use oauth2; /// use fcm1::FirebaseCloudMessaging; /// /// let secret: oauth2::ApplicationSecret = Default::default(); /// let auth = yup_oauth2::InstalledFlowAuthenticator::builder( /// secret, /// yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect, /// ).build().await.unwrap(); /// let mut hub = FirebaseCloudMessaging::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth); /// // Usually you wouldn't bind this to a variable, but keep calling *CallBuilders* /// // like `messages_send(...)` /// // to build up your call. 
/// let rb = hub.projects(); /// # } /// ``` pub struct ProjectMethods<'a> where { hub: &'a FirebaseCloudMessaging<>, } impl<'a> client::MethodsBuilder for ProjectMethods<'a> {} impl<'a> ProjectMethods<'a> { /// Create a builder to help you perform the following task: /// /// Send a message to specified target (a registration token, topic or condition). /// /// # Arguments /// /// * `request` - No description provided. /// * `parent` - Required. It contains the Firebase project id (i.e. the unique identifier for your Firebase project), in the format of `projects/{project_id}`. For legacy support, the numeric project number with no padding is also supported in the format of `projects/{project_number}`. pub fn
(&self, request: SendMessageRequest, parent: &str) -> ProjectMessageSendCall<'a> { ProjectMessageSendCall { hub: self.hub, _request: request, _parent: parent.to_string(), _delegate: Default::default(), _additional_params: Default::default(), _scopes: Default::default(), } } } // ################### // CallBuilders ### // ################# /// Send a message to specified target (a registration token, topic or condition). /// /// A builder for the *messages.send* method supported by a *project* resource. /// It is not used directly, but through a `ProjectMethods` instance. /// /// # Example /// /// Instantiate a resource method builder /// /// ```test_harness,no_run /// # extern crate hyper; /// # extern crate hyper_rustls; /// # extern crate yup_oauth2 as oauth2; /// # extern crate google_fcm1 as fcm1; /// use fcm1::api::SendMessageRequest; /// # async fn dox() { /// # use std::default::Default; /// # use oauth2; /// # use fcm1::FirebaseCloudMessaging; /// /// # let secret: oauth2::ApplicationSecret = Default::default(); /// # let auth = yup_oauth2::InstalledFlowAuthenticator::builder( /// # secret, /// # yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect, /// # ).build().await.unwrap(); /// # let mut hub = FirebaseCloudMessaging::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth); /// // As the method needs a request, you would usually fill it with the desired information /// // into the respective structure. Some of the parts shown here might not be applicable ! /// // Values shown here are possibly random and not representative ! /// let mut req = SendMessageRequest::default(); /// /// // You can configure optional parameters by calling the respective setters at will, and /// // execute the final call using `doit()`. /// // Values shown here are possibly random and not representative ! /// let result = hub.projects().messages_send(req, "parent") /// .doit().await; /// # } /// ``` pub struct ProjectMessageSendCall<'a> where { hub: &'a FirebaseCloudMessaging<>, _request: SendMessageRequest, _parent: String, _delegate: Option<&'a mut dyn client::Delegate>, _additional_params: HashMap<String, String>, _scopes: BTreeMap<String, ()> } impl<'a> client::CallBuilder for ProjectMessageSendCall<'a> {} impl<'a> ProjectMessageSendCall<'a> { /// Perform the operation you have build so far. 
pub async fn doit(mut self) -> client::Result<(hyper::Response<hyper::body::Body>, Message)> { use url::percent_encoding::{percent_encode, DEFAULT_ENCODE_SET}; use std::io::{Read, Seek}; use hyper::header::{CONTENT_TYPE, CONTENT_LENGTH, AUTHORIZATION, USER_AGENT, LOCATION}; use client::ToParts; let mut dd = client::DefaultDelegate; let mut dlg: &mut dyn client::Delegate = match self._delegate { Some(d) => d, None => &mut dd }; dlg.begin(client::MethodInfo { id: "fcm.projects.messages.send", http_method: hyper::Method::POST }); let mut params: Vec<(&str, String)> = Vec::with_capacity(4 + self._additional_params.len()); params.push(("parent", self._parent.to_string())); for &field in ["alt", "parent"].iter() { if self._additional_params.contains_key(field) { dlg.finished(false); return Err(client::Error::FieldClash(field)); } } for (name, value) in self._additional_params.iter() { params.push((&name, value.clone())); } params.push(("alt", "json".to_string())); let mut url = self.hub._base_url.clone() + "v1/{+parent}/messages:send"; if self._scopes.len() == 0 { self._scopes.insert(Scope::CloudPlatform.as_ref().to_string(), ()); } for &(find_this, param_name) in [("{+parent}", "parent")].iter() { let mut replace_with = String::new(); for &(name, ref value) in params.iter() { if name == param_name { replace_with = value.to_string(); break; } } if find_this.as_bytes()[1] == '+' as u8 { replace_with = percent_encode(replace_with.as_bytes(), DEFAULT_ENCODE_SET).to_string(); } url = url.replace(find_this, &replace_with); } { let mut indices_for_removal: Vec<usize> = Vec::with_capacity(1); for param_name in ["parent"].iter() { if let Some(index) = params.iter().position(|t| &t.0 == param_name) { indices_for_removal.push(index); } } for &index in indices_for_removal.iter() { params.remove(index); } } let url = url::Url::parse_with_params(&url, params).unwrap(); let mut json_mime_type: mime::Mime = "application/json".parse().unwrap(); let mut request_value_reader = { let mut value = json::value::to_value(&self._request).expect("serde to work"); client::remove_json_null_values(&mut value); let mut dst = io::Cursor::new(Vec::with_capacity(128)); json::to_writer(&mut dst, &value).unwrap(); dst }; let request_size = request_value_reader.seek(io::SeekFrom::End(0)).unwrap(); request_value_reader.seek(io::SeekFrom::Start(0)).unwrap(); loop { let token = match self.hub.auth.token(&self._scopes.keys().collect::<Vec<_>>()[..]).await { Ok(token) => token.clone(), Err(err) => { match dlg.token(&err) { Some(token) => token, None => { dlg.finished(false); return Err(client::Error::MissingToken(err)) } } } }; request_value_reader.seek(io::SeekFrom::Start(0)).unwrap(); let mut req_result = { let client = &self.hub.client; dlg.pre_request(); let mut req_builder = hyper::Request::builder().method(hyper::Method::POST).uri(url.clone().into_string()) .header(USER_AGENT, self.hub._user_agent.clone()) .header(AUTHORIZATION, format!("Bearer {}", token.as_str())); let request = req_builder .header(CONTENT_TYPE, format!("{}", json_mime_type.to_string())) .header(CONTENT_LENGTH, request_size as u64) .body(hyper::body::Body::from(request_value_reader.get_ref().clone())); client.request(request.unwrap()).await }; match req_result { Err(err) => { if let client::Retry::After(d) = dlg.http_error(&err) { sleep(d); continue; } dlg.finished(false); return Err(client::Error::HttpError(err)) } Ok(mut res) => { if !res.status().is_success() { let res_body_string = client::get_body_as_string(res.body_mut()).await; let json_server_error = 
json::from_str::<client::JsonServerError>(&res_body_string).ok(); let server_error = json::from_str::<client::ServerError>(&res_body_string) .or_else(|_| json::from_str::<client::ErrorResponse>(&res_body_string).map(|r| r.error)) .ok(); if let client::Retry::After(d) = dlg.http_failure(&res, json_server_error, server_error) { sleep(d); continue; } dlg.finished(false); return match json::from_str::<client::ErrorResponse>(&res_body_string){ Err(_) => Err(client::Error::Failure(res)), Ok(serr) => Err(client::Error::BadRequest(serr)) } } let result_value = { let res_body_string = client::get_body_as_string(res.body_mut()).await; match json::from_str(&res_body_string) { Ok(decoded) => (res, decoded), Err(err) => { dlg.response_json_decode_error(&res_body_string, &err); return Err(client::Error::JsonDecodeError(res_body_string, err)); } } }; dlg.finished(true); return Ok(result_value) } } } } /// /// Sets the *request* property to the given value. /// /// Even though the property as already been set when instantiating this call, /// we provide this method for API completeness. pub fn request(mut self, new_value: SendMessageRequest) -> ProjectMessageSendCall<'a> { self._request = new_value; self } /// Required. It contains the Firebase project id (i.e. the unique identifier for your Firebase project), in the format of `projects/{project_id}`. For legacy support, the numeric project number with no padding is also supported in the format of `projects/{project_number}`. /// /// Sets the *parent* path property to the given value. /// /// Even though the property as already been set when instantiating this call, /// we provide this method for API completeness. pub fn parent(mut self, new_value: &str) -> ProjectMessageSendCall<'a> { self._parent = new_value.to_string(); self } /// The delegate implementation is consulted whenever there is an intermediate result, or if something goes wrong /// while executing the actual API request. /// /// It should be used to handle progress information, and to implement a certain level of resilience. /// /// Sets the *delegate* property to the given value. pub fn delegate(mut self, new_value: &'a mut dyn client::Delegate) -> ProjectMessageSendCall<'a> { self._delegate = Some(new_value); self } /// Set any additional parameter of the query string used in the request. /// It should be used to set parameters which are not yet available through their own /// setters. /// /// Please note that this method must not be used to set any of the known parameters /// which have their own setter method. If done anyway, the request will fail. /// /// # Additional Parameters /// /// * *$.xgafv* (query-string) - V1 error format. /// * *access_token* (query-string) - OAuth access token. /// * *alt* (query-string) - Data format for response. /// * *callback* (query-string) - JSONP /// * *fields* (query-string) - Selector specifying which fields to include in a partial response. /// * *key* (query-string) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. /// * *oauth_token* (query-string) - OAuth 2.0 token for the current user. /// * *prettyPrint* (query-boolean) - Returns response with indentations and line breaks. /// * *quotaUser* (query-string) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. /// * *uploadType* (query-string) - Legacy upload protocol for media (e.g. 
"media", "multipart"). /// * *upload_protocol* (query-string) - Upload protocol for media (e.g. "raw", "multipart"). pub fn param<T>(mut self, name: T, value: T) -> ProjectMessageSendCall<'a> where T: AsRef<str> { self._additional_params.insert(name.as_ref().to_string(), value.as_ref().to_string()); self } /// Identifies the authorization scope for the method you are building. /// /// Use this method to actively specify which scope should be used, instead the default `Scope` variant /// `Scope::CloudPlatform`. /// /// The `scope` will be added to a set of scopes. This is important as one can maintain access /// tokens for more than one scope. /// If `None` is specified, then all scopes will be removed and no default scope will be used either. /// In that case, you have to specify your API-key using the `key` parameter (see the `param()` /// function for details). /// /// Usually there is more than one suitable scope to authorize an operation, some of which may /// encompass more rights than others. For example, for listing resources, a *read-only* scope will be /// sufficient, a read-write scope will do as well. pub fn add_scope<T, S>(mut self, scope: T) -> ProjectMessageSendCall<'a> where T: Into<Option<S>>, S: AsRef<str> { match scope.into() { Some(scope) => self._scopes.insert(scope.as_ref().to_string(), ()), None => None, }; self } }
messages_send
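Stripped of the delegate, retry, and scope machinery, ProjectMessageSendCall::doit() boils down to one authenticated POST against the base URL shown above. A hypothetical Python sketch of that wire call follows; the access-token argument is a placeholder you would obtain from your own OAuth2 flow, and the helper name is illustrative.

import json
import urllib.request

def send_fcm_message(parent, access_token, message, validate_only=False):
    # POST the SendMessageRequest body to v1/{parent}/messages:send.
    body = json.dumps({"message": message, "validateOnly": validate_only}).encode()
    req = urllib.request.Request(
        f"https://fcm.googleapis.com/v1/{parent}/messages:send",
        data=body,
        method="POST",
        headers={
            "Authorization": f"Bearer {access_token}",
            "Content-Type": "application/json",
        },
    )
    with urllib.request.urlopen(req) as resp:
        return json.loads(resp.read())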
1047. Remove All Adjacent Duplicates In String.py
class Solution: def
(self, S: str) -> str: stack = [] for c in S: if stack and stack[-1] == c: stack.pop() else: stack.append(c) return ''.join(stack)
removeDuplicates
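A quick sanity check of the stack-based removal above; the inputs are illustrative examples, not part of the original file.

s = Solution()
assert s.removeDuplicates("abbaca") == "ca"   # "bb" collapses first, then "aa"
assert s.removeDuplicates("azxxzy") == "ay"   # "xx" collapses first, then "zz"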
services.js
angular.module('Scout.services', [])
.factory('BlankFactory', [function(){ }]) .service('BlankService', [function(){ }]);
server.rs
use crate::{CommandRequest, CommandResponse, KvStoreError, KvsEngine, Result}; use slog::{info, Logger}; use std::io::{BufRead, BufReader, BufWriter}; use std::net::TcpListener; pub struct KvsServer { listener: TcpListener, kvs_engine: Box<dyn KvsEngine>, } impl KvsServer { pub fn
(listener: TcpListener, kvs_engine: Box<dyn KvsEngine>) -> Self { Self { listener, kvs_engine, } } pub fn serve(&mut self, log: &Logger) -> Result<()> { for connection in self.listener.incoming() { let mut connection = connection?; info!(log, "new connection"; "peer" => connection.peer_addr()?); let mut reader = BufReader::new(&mut connection); let mut line = String::new(); reader.read_line(&mut line)?; drop(reader); let response = match serde_json::from_str(line.as_str())? { CommandRequest::Get { key } => { info!(log, "client"; "command" => "get" ,"key" => &key); match self.kvs_engine.get(key) { Ok(value) => CommandResponse::Value { value }, Err(e) => CommandResponse::Error { reason: format!("{:?}", e), }, } } CommandRequest::Set { key, value } => { info!(log, "client"; "command" => "set", "key" => &key, "value" => &value); match self.kvs_engine.set(key, value) { Ok(_) => CommandResponse::Success {}, Err(e) => CommandResponse::Error { reason: format!("{:?}", e), }, } } CommandRequest::Remove { key } => { info!(log, "client"; "command" => "rm", "key" => &key); match self.kvs_engine.remove(key) { Ok(_) => CommandResponse::Success {}, Err(e) => { if let KvStoreError::KeyNotFound { .. } = e { CommandResponse::KeyNotFound {} } else { CommandResponse::Error { reason: format!("{:?}", e), } } } } } }; let mut writer = BufWriter::new(connection); serde_json::to_writer(&mut writer, &response)?; } Ok(()) } }
new
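The server reads a single JSON line per connection and writes one JSON response back. Below is a hypothetical Python client sketch for that protocol; it assumes CommandRequest uses serde's default externally tagged enum encoding, which is defined outside this file.

import json
import socket

def kvs_get(host, port, key):
    with socket.create_connection((host, port)) as sock:
        # One request per connection: send a single JSON line, then read the reply.
        sock.sendall((json.dumps({"Get": {"key": key}}) + "\n").encode())
        return json.loads(sock.makefile().read())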
ResearchStudy_Arm.rs
#![allow(unused_imports, non_camel_case_types)] use crate::models::r4b::CodeableConcept::CodeableConcept; use crate::models::r4b::Element::Element; use crate::models::r4b::Extension::Extension; use serde_json::json; use serde_json::value::Value; use std::borrow::Cow; /// A process where a researcher or organization plans and then executes a series /// of steps intended to increase the field of healthcare-related knowledge. This /// includes studies of safety, efficacy, comparative effectiveness and other /// information about medications, devices, therapies and other interventional and /// investigative techniques. A ResearchStudy involves the gathering of information /// about human or animal subjects. #[derive(Debug)] pub struct ResearchStudy_Arm<'a> { pub(crate) value: Cow<'a, Value>, } impl ResearchStudy_Arm<'_> { pub fn new(value: &Value) -> ResearchStudy_Arm { ResearchStudy_Arm { value: Cow::Borrowed(value), } } pub fn to_json(&self) -> Value { (*self.value).clone() } /// Extensions for description
value: Cow::Borrowed(val), }); } return None; } /// Extensions for name pub fn _name(&self) -> Option<Element> { if let Some(val) = self.value.get("_name") { return Some(Element { value: Cow::Borrowed(val), }); } return None; } /// A succinct description of the path through the study that would be followed by a /// subject adhering to this arm. pub fn description(&self) -> Option<&str> { if let Some(Value::String(string)) = self.value.get("description") { return Some(string); } return None; } /// May be used to represent additional information that is not part of the basic /// definition of the element. To make the use of extensions safe and manageable, /// there is a strict set of governance applied to the definition and use of /// extensions. Though any implementer can define an extension, there is a set of /// requirements that SHALL be met as part of the definition of the extension. pub fn extension(&self) -> Option<Vec<Extension>> { if let Some(Value::Array(val)) = self.value.get("extension") { return Some( val.into_iter() .map(|e| Extension { value: Cow::Borrowed(e), }) .collect::<Vec<_>>(), ); } return None; } /// Unique id for the element within a resource (for internal references). This may be /// any string value that does not contain spaces. pub fn id(&self) -> Option<&str> { if let Some(Value::String(string)) = self.value.get("id") { return Some(string); } return None; } /// May be used to represent additional information that is not part of the basic /// definition of the element and that modifies the understanding of the element /// in which it is contained and/or the understanding of the containing element's /// descendants. Usually modifier elements provide negation or qualification. To make /// the use of extensions safe and manageable, there is a strict set of governance /// applied to the definition and use of extensions. Though any implementer can define /// an extension, there is a set of requirements that SHALL be met as part of the /// definition of the extension. Applications processing a resource are required to /// check for modifier extensions. Modifier extensions SHALL NOT change the meaning /// of any elements on Resource or DomainResource (including cannot change the meaning /// of modifierExtension itself). pub fn modifier_extension(&self) -> Option<Vec<Extension>> { if let Some(Value::Array(val)) = self.value.get("modifierExtension") { return Some( val.into_iter() .map(|e| Extension { value: Cow::Borrowed(e), }) .collect::<Vec<_>>(), ); } return None; } /// Unique, human-readable label for this arm of the study. pub fn name(&self) -> Option<&str> { if let Some(Value::String(string)) = self.value.get("name") { return Some(string); } return None; } /// Categorization of study arm, e.g. experimental, active comparator, placebo /// comparater. 
pub fn fhir_type(&self) -> Option<CodeableConcept> { if let Some(val) = self.value.get("type") { return Some(CodeableConcept { value: Cow::Borrowed(val), }); } return None; } pub fn validate(&self) -> bool { if let Some(_val) = self._description() { if !_val.validate() { return false; } } if let Some(_val) = self._name() { if !_val.validate() { return false; } } if let Some(_val) = self.description() {} if let Some(_val) = self.extension() { if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) { return false; } } if let Some(_val) = self.id() {} if let Some(_val) = self.modifier_extension() { if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) { return false; } } if let Some(_val) = self.name() {} if let Some(_val) = self.fhir_type() { if !_val.validate() { return false; } } return true; } } #[derive(Debug)] pub struct ResearchStudy_ArmBuilder { pub(crate) value: Value, } impl ResearchStudy_ArmBuilder { pub fn build(&self) -> ResearchStudy_Arm { ResearchStudy_Arm { value: Cow::Owned(self.value.clone()), } } pub fn with(existing: ResearchStudy_Arm) -> ResearchStudy_ArmBuilder { ResearchStudy_ArmBuilder { value: (*existing.value).clone(), } } pub fn new() -> ResearchStudy_ArmBuilder { let mut __value: Value = json!({}); return ResearchStudy_ArmBuilder { value: __value }; } pub fn _description<'a>(&'a mut self, val: Element) -> &'a mut ResearchStudy_ArmBuilder { self.value["_description"] = json!(val.value); return self; } pub fn _name<'a>(&'a mut self, val: Element) -> &'a mut ResearchStudy_ArmBuilder { self.value["_name"] = json!(val.value); return self; } pub fn description<'a>(&'a mut self, val: &str) -> &'a mut ResearchStudy_ArmBuilder { self.value["description"] = json!(val); return self; } pub fn extension<'a>(&'a mut self, val: Vec<Extension>) -> &'a mut ResearchStudy_ArmBuilder { self.value["extension"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>()); return self; } pub fn id<'a>(&'a mut self, val: &str) -> &'a mut ResearchStudy_ArmBuilder { self.value["id"] = json!(val); return self; } pub fn modifier_extension<'a>( &'a mut self, val: Vec<Extension>, ) -> &'a mut ResearchStudy_ArmBuilder { self.value["modifierExtension"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>()); return self; } pub fn name<'a>(&'a mut self, val: &str) -> &'a mut ResearchStudy_ArmBuilder { self.value["name"] = json!(val); return self; } pub fn fhir_type<'a>(&'a mut self, val: CodeableConcept) -> &'a mut ResearchStudy_ArmBuilder { self.value["type"] = json!(val.value); return self; } }
pub fn _description(&self) -> Option<Element> { if let Some(val) = self.value.get("_description") { return Some(Element {
validate-input.py
import quo session = quo.Prompt() class NumberValidator(quo.types.Validator):
number = int(session.prompt('Give a number: ', validator=NumberValidator())) quo.echo('You said: %i' % number)
def validate(self, document): text = document.text if text and not text.isdigit(): i = 0 # Get index of first non numeric character. # We want to move the cursor here. for i, cursor in enumerate(text): if not cursor.isdigit(): break raise quo.errors.ValidationError(message='This input contains non-numeric characters', line=i)
company.service.ts
import { ConsoleLogger, Injectable, NotFoundException } from '@nestjs/common'; import { InjectModel } from '@nestjs/mongoose'; import { FilterQuery, Model } from 'mongoose'; import * as mongoose from "mongoose"; import { Company, CompanyDocument } from './schema/company.schema' import { MatchListUpdateDto } from './dto/matchList-update.dto'; import { JobSeeker, JobSeekerDocument } from 'src/job-seeker/schema/jobSeeker.schema'; import { UserService } from '../user/user.service'; import { match } from 'assert/strict'; import { JobSeekerService } from 'src/job-seeker/job-seeker.service'; @Injectable() export class
{ constructor(@InjectModel(Company.name) private companyModel: Model<CompanyDocument>, @InjectModel(JobSeeker.name) private jobseekerModel: Model<JobSeekerDocument>, private readonly userService: UserService){} async findAll(): Promise<CompanyDocument[]> { return await this.companyModel.find().exec(); } async find(email: any): Promise<Company | undefined> { const user = await this.userService.find(email); return await this.companyModel.findOne({ user: user }).exec(); } async findById(id: string): Promise<Company | undefined> { return await this.companyModel.findById(id).exec() } async create(email: any, company: any) { const user = await this.userService.find(email); if (!user) { throw new NotFoundException(); } if (await this.companyModel.findOne({ name: company.name })) { throw new NotFoundException(); } else { const companyObject = { user: user, ...company }; await this.companyModel.create(companyObject); return await this.companyModel.findOne({ name: company.name }); } } async update(email: any, updateCompany: any) { const user = await this.userService.find(email); if (!user) { throw new NotFoundException(); } const company = await this.companyModel.findOne({ user }); if (!company) { throw new NotFoundException(); } await company.updateOne(updateCompany); return await this.companyModel.findOne({ user }); } async addFavorite(email:any,jobSeekerId:any): Promise<Company|undefined>{ const company= await this.find(email) const matchId= new mongoose.Types.ObjectId(jobSeekerId) const jobSeeker_= await this.jobseekerModel.findById(jobSeekerId).exec(); if(company && jobSeeker_){ // add jobSeekerId to company's waiting list await this.companyModel.updateOne( {user: company.user}, {$addToSet: {waitingList: matchId}} ); //add companyId to jobSeeker's linkedList await this.jobseekerModel.updateOne( {_id: matchId}, {$addToSet: {linkedList:company["_id"]}} ) // check if a company and job seeker are matched if(jobSeeker_.waitingList.indexOf(company["_id"])>-1){ // add to match list await this.companyModel.updateOne( {user: company.user}, {$addToSet: {matchedList: matchId}} ) await this.jobseekerModel.updateOne( {_id: matchId}, {$addToSet: {matchedList:company["_id"]}} ) } else{ console.log("There is no user"); } } return await company; } //ADD ITEM TO MATCH LIST async addMatchList(matchListFilterQuery: FilterQuery<CompanyDocument>, matchListUpdateDto: MatchListUpdateDto): Promise<void> { const matchId = new mongoose.Types.ObjectId(matchListUpdateDto.matchId); //CHECK IF COMPANY AND JOB SEEKER ACTUALLY EXISTS if (await this.companyModel.exists(matchListFilterQuery) && await this.jobseekerModel.exists({_id: matchId})){ await this.companyModel.updateOne(matchListFilterQuery, { $addToSet:{ matchedList: matchId } }); } else { throw new NotFoundException(); } } //REMOVE ITEM FROM MATCH LIST async removeMatchList(matchListFilterQuery: FilterQuery<CompanyDocument>, matchListUpdateDto: MatchListUpdateDto): Promise<void> { const matchId = new mongoose.Types.ObjectId(matchListUpdateDto.matchId); //CHECK IF COMPANY AND JOB SEEKER ACTUALLY EXISTS if (await this.companyModel.exists(matchListFilterQuery) && await this.jobseekerModel.exists({_id: matchId})){ await this.companyModel.updateOne(matchListFilterQuery, { $pull:{ matchedList: matchId } }); } else { throw new NotFoundException(); } } }
CompanyService
set.test.ts
import {expect} from 'chai';
import {RefSet, Subset} from '../src/gc-container';

describe('Testing Subsets', function () {
  beforeEach(function () {
    class Ref extends RefSet<number, string> {
      constructor (iterable: ReadonlyArray<string>) {
        super(iterable, 0);
      }

      uid (): number {
        return ++(this._uid as number);
      }
    }
    const a = ['a', 'b', 'c'];
    const ref = new Ref(['a', 'b', 'c']);
    const sub = new Subset(ref);
    // eslint-disable-next-line no-invalid-this
    Object.assign(this, {ref, sub, a});
  });

  it('Adding known elements', function () {
    // eslint-disable-next-line no-invalid-this
    const {ref, sub, a} = this;
    sub.add('a').add('b');
    expect(Array.from(ref)).to.eql(a);
    expect(Array.from(sub)).to.eql(['a', 'b']);
  });

  it('Adding new elements', function () {
    // eslint-disable-next-line no-invalid-this
    const {ref, sub, a} = this;
    sub.add('d').add('e');
    expect(Array.from(ref)).to.eql(a.concat(['d', 'e']));
    expect(Array.from(sub)).to.eql(['d', 'e']);
  });

  it('Deleting known elements', function () {
    // eslint-disable-next-line no-invalid-this
    const {ref, sub} = this;
    sub.add('a').add('b');
    sub.delete('a');
    ref.delete('b');
    expect(Array.from(ref)).to.eql(['a', 'c']);
    expect(Array.from(sub)).to.eql([]);
  });

  it('Deleting unknown elements', function () {
    // eslint-disable-next-line no-invalid-this
    const {ref, sub, a} = this;
    sub.add('a').add('b');
    sub.delete('d');
    ref.delete('e');
    expect(Array.from(ref)).to.eql(a);
    expect(Array.from(sub)).to.eql(['a', 'b']);
  });

  it('Clearing', function () {
    // eslint-disable-next-line no-invalid-this
    const {ref, sub, a} = this;
    sub.add('a').add('b');
    sub.clear();
    expect(Array.from(ref)).to.eql(a);
    expect(Array.from(sub)).to.eql([]);
    sub.add('a').add('b');
    expect(Array.from(ref)).to.eql(a);
    expect(Array.from(sub)).to.eql(['a', 'b']);
    ref.clear();
    expect(Array.from(ref)).to.eql([]);
    expect(Array.from(sub)).to.eql([]);
  });

  it('Checking existence of elements', function () {
    // eslint-disable-next-line no-invalid-this
    const {ref, sub} = this;
    sub.add('a').add('b');
    expect(ref.has('a')).to.be.true;
    expect(ref.has('b')).to.be.true;
    expect(ref.has('c')).to.be.true;
    expect(sub.has('a')).to.be.true;
    expect(sub.has('b')).to.be.true;
    expect(sub.has('c')).to.be.false;
    sub.delete('a');
    expect(ref.has('a')).to.be.true;
    expect(ref.has('b')).to.be.true;
    expect(ref.has('c')).to.be.true;
    expect(sub.has('a')).to.be.false;
    expect(sub.has('b')).to.be.true;
    expect(sub.has('c')).to.be.false;
    ref.delete('b');
    expect(ref.has('a')).to.be.true;
    expect(ref.has('b')).to.be.false;
    expect(ref.has('c')).to.be.true;
    expect(sub.has('a')).to.be.false;
    expect(sub.has('b')).to.be.false;
    expect(sub.has('c')).to.be.false;
  });

  it('Checking size', function () {
    // eslint-disable-next-line no-invalid-this
    const {ref, sub} = this;
    sub.add('a').add('b');
    expect(ref.size).to.eql(3);
    expect(sub.size).to.eql(2);
    sub.delete('a');
    expect(ref.size).to.eql(3);
    expect(sub.size).to.eql(1);
    ref.delete('b');
    expect(ref.size).to.eql(2);
    expect(sub.size).to.eql(0);
  });

  it('Looping', function () {
    // eslint-disable-next-line no-invalid-this
    const {ref, sub, a} = this;
    let i: number;
    sub.add('a').add('b');
    i = 0;
    ref.forEach((value: string) => {
      expect(value).to.equal(a[i]);
      i++;
    });
    i = 0;
    sub.forEach((value: string) => {
      expect(value).to.equal(['a', 'b'][i]);
      i++;
    });
    sub.delete('a');
    i = 0;
    ref.forEach((value: string) => {
      expect(value).to.equal(a[i]);
      i++;
    });
    i = 0;
    sub.forEach((value: string) => {
      expect(value).to.equal(['b'][i]);
      i++;
    });
    ref.delete('b');
    i = 0;
    ref.forEach((value: string) => {
      expect(value).to.equal(['a', 'c'][i]);
      i++;
    });
    i = 0;
    sub.forEach((value: string) => {
      expect(value).to.equal([][i]);
      i++;
    });
  });

  it('Iterators', function () {
    // eslint-disable-next-line no-invalid-this
    const {ref, sub} = this;
    function init (s: Set<string>) {
      return {
        i1: s[Symbol.iterator](),
        i2: s.keys(),
        i3: s.values(),
        i4: s.entries(),
      };
    }
    function run (s: Set<string>) {
      const {i1, i2, i3, i4} = init(s);
      let size = s.size;
      expect(size).to.be.above(0);
      let stop = false;
      do {
        size--;
        const {value, done} = i1.next();
        stop = done;
        expect(value).to.equal(i2.next().value);
        expect(value).to.equal(i3.next().value);
        expect([value, value]).to.eql(i4.next().value);
      } while (!stop);
      expect(size).to.equal(-1);
    }
    sub.add('a').add('b').add('c');
    run(ref);
    run(sub);
    sub.delete('a');
    run(ref);
    run(sub);
    ref.delete('b');
    run(ref);
    run(sub);
  });
});
pongcontroller.py
from pyglet.window import key
import random

from pygletplus.controller import Controller


class PongController(Controller):
    def __init__(self, scene):
        super().__init__(scene)
        self.keys = scene.keys
        self.player = scene.player
        self.cpu = scene.cpu
        self.ball = scene.ball
        self.close = scene.close

    def update(self, dt):
        if self.scene.paused:
            return
        self.player.update(dt)
        self.cpu.follow(self.ball.sprite.x, self.ball.sprite.y)
        self.cpu.update(dt)
        self.ball.update(dt)
        self.window_bound()
        self.bounce_ball()

    def on_key_press(self, symbol, _):
        if symbol == key.ESCAPE:
            self.close()
        if symbol == key.SPACE:
            self.scene.paused = not self.scene.paused
        # player movement (decouple from player class):
        if symbol == key.UP:
            self.player.vy += self.player.speed
        elif symbol == key.DOWN:
            self.player.vy -= self.player.speed

    def on_key_release(self, symbol, _):
        if symbol == key.UP:
            self.player.vy -= self.player.speed
        elif symbol == key.DOWN:
            self.player.vy += self.player.speed

    @staticmethod
    def bound_x(e, mini, maxi):
        mini += e.sprite.width / 2
        maxi -= e.sprite.width / 2
        if e.sprite.x < mini:
            e.sprite.x = mini
        elif e.sprite.x > maxi:
            e.sprite.x = maxi

    @staticmethod
    def bound_y(e, mini, maxi):
        mini += e.sprite.height / 2
        maxi -= e.sprite.height / 2
        if e.sprite.y < mini:
            e.sprite.y = mini
        elif e.sprite.y > maxi:
            e.sprite.y = maxi

    def window_bound(self):
        self.bound_x(self.player, 0, self.scene.width)
        self.bound_y(self.player, 0, self.scene.height)
        self.bound_x(self.cpu, 0, self.scene.width)
        self.bound_y(self.cpu, 0, self.scene.height)

    def bounce_ball(self):
        x_min = self.scene.ball_img.anchor_x
        x_max = self.scene.width - self.scene.ball_img.anchor_x
        y_min = self.scene.ball_img.anchor_y
        y_max = self.scene.height - self.scene.ball_img.anchor_y
        # bounce off top and bottom walls of window
        if self.ball.sprite.y < y_min:
            self.ball.sprite.y = y_min
            self.ball.vy *= -1
            self.scene.bounce_sound.play()
        elif self.ball.sprite.y > y_max:
            self.ball.sprite.y = y_max
            self.ball.vy *= -1
            self.scene.bounce_sound.play()
        # score a point if touch left or right walls of window
        if self.ball.sprite.x < x_min:
            self.ball.sprite.x = self.scene.width / 2 - 200
            self.ball.sprite.y = self.scene.height / 2
            self.ball.vx = random.randint(300, 350)
            self.ball.vy = random.randint(300, 350) * (-1 if random.randint(0, 1) == 0 else 1)
            self.scene.cpu_score += 1
            self.scene.cpu_label.text = str(self.scene.cpu_score)
            self.scene.point_sound.play()
        elif self.ball.sprite.x > x_max:
            self.ball.sprite.x = self.scene.width / 2 + 200
            self.ball.sprite.y = self.scene.height / 2
            self.ball.vx = -random.randint(300, 350)
            self.ball.vy = -random.randint(300, 350) * (-1 if random.randint(0, 1) == 0 else 1)
            self.scene.player_score += 1
            self.scene.player_label.text = str(self.scene.player_score)
            self.scene.point_sound.play()
        if (self.player.sprite.x < self.ball.sprite.x < self.player.sprite.x + self.scene.paddle_img.anchor_x
                and self.player.sprite.y - self.scene.paddle_img.anchor_y < self.ball.sprite.y
                < self.player.sprite.y + self.scene.paddle_img.anchor_y):
            self.ball.sprite.x = self.player.sprite.x + self.scene.paddle_img.anchor_x
            self.ball.vx *= -1
            self.scene.bounce_sound.play()
        elif (self.cpu.sprite.x > self.ball.sprite.x > self.cpu.sprite.x - self.scene.paddle_img.anchor_x
                and self.cpu.sprite.y - self.scene.paddle_img.anchor_y < self.ball.sprite.y
                < self.cpu.sprite.y + self.scene.paddle_img.anchor_y):
            self.ball.sprite.x = self.cpu.sprite.x - self.scene.ball_img.anchor_x
            self.ball.vx *= -1
            self.scene.bounce_sound.play()
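# --- Hedged sketch, not part of pongcontroller.py ---------------------------
# A minimal illustration of how a controller like PongController is typically
# driven: pyglet's clock calls update(dt) once per frame, and the window pushes
# keyboard events to the controller's on_key_press/on_key_release handlers.
# The actual Scene/Controller wiring inside pygletplus may differ; everything
# named here except the pyglet APIs is a hypothetical stand-in.
import pyglet


class DummyController:
    """Hypothetical stand-in exposing the same surface as PongController."""

    def update(self, dt):
        # per-frame game logic would go here
        print(f"tick: {dt:.3f}s")

    def on_key_press(self, symbol, modifiers):
        pass

    def on_key_release(self, symbol, modifiers):
        pass


window = pyglet.window.Window(800, 600)
controller = DummyController()

# route keyboard events to the controller's handlers
window.push_handlers(controller)

# call controller.update(dt) roughly 60 times per second
pyglet.clock.schedule_interval(controller.update, 1 / 60)

if __name__ == '__main__':
    pyglet.app.run()
# --- end of sketch -----------------------------------------------------------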
archiver.py
import argparse
import dateutil.tz
import errno
import io
import json
import logging
import os
import pstats
import random
import re
import shutil
import socket
import stat
import subprocess
import sys
import tempfile
import time
import unittest
from binascii import unhexlify, b2a_base64
from configparser import ConfigParser
from datetime import datetime
from datetime import timezone
from datetime import timedelta
from hashlib import sha256
from io import BytesIO, StringIO
from unittest.mock import patch

import pytest

import borg
from .. import xattr, helpers, platform
from ..archive import Archive, ChunkBuffer
from ..archiver import Archiver, parse_storage_quota, PURE_PYTHON_MSGPACK_WARNING
from ..cache import Cache, LocalCache
from ..chunker import has_seek_hole
from ..constants import *  # NOQA
from ..crypto.low_level import bytes_to_long, num_cipher_blocks
from ..crypto.key import KeyfileKeyBase, RepoKey, KeyfileKey, Passphrase, TAMRequiredError
from ..crypto.keymanager import RepoIdMismatch, NotABorgKeyFile
from ..crypto.file_integrity import FileIntegrityError
from ..helpers import Location, get_security_dir
from ..helpers import Manifest, MandatoryFeatureUnsupported
from ..helpers import EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR
from ..helpers import bin_to_hex
from ..helpers import MAX_S
from ..helpers import msgpack
from ..helpers import flags_noatime, flags_normal
from ..nanorst import RstToTextLazy, rst_to_terminal
from ..patterns import IECommand, PatternMatcher, parse_pattern
from ..item import Item, ItemDiff
from ..locking import LockFailed
from ..logger import setup_logging
from ..remote import RemoteRepository, PathNotAllowed
from ..repository import Repository
from . import has_lchflags, llfuse
from . import BaseTestCase, changedir, environment_variable, no_selinux
from . import are_symlinks_supported, are_hardlinks_supported, are_fifos_supported, is_utime_fully_supported, is_birthtime_fully_supported
from .platform import fakeroot_detected
from .upgrader import make_attic_repo
from . import key

src_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))


def exec_cmd(*args, archiver=None, fork=False, exe=None, input=b'', binary_output=False, **kw):
    if fork:
        try:
            if exe is None:
                borg = (sys.executable, '-m', 'borg.archiver')
            elif isinstance(exe, str):
                borg = (exe, )
            elif not isinstance(exe, tuple):
                raise ValueError('exe must be None, a tuple or a str')
            output = subprocess.check_output(borg + args, stderr=subprocess.STDOUT, input=input)
            ret = 0
        except subprocess.CalledProcessError as e:
            output = e.output
            ret = e.returncode
        except SystemExit as e:  # possibly raised by argparse
            output = ''
            ret = e.code
        if binary_output:
            return ret, output
        else:
            return ret, os.fsdecode(output)
    else:
        stdin, stdout, stderr = sys.stdin, sys.stdout, sys.stderr
        try:
            sys.stdin = StringIO(input.decode())
            sys.stdin.buffer = BytesIO(input)
            output = BytesIO()
            # Always use utf-8 here, to simply .decode() below
            output_text = sys.stdout = sys.stderr = io.TextIOWrapper(output, encoding='utf-8')
            if archiver is None:
                archiver = Archiver()
            archiver.prerun_checks = lambda *args: None
            archiver.exit_code = EXIT_SUCCESS
            helpers.exit_code = EXIT_SUCCESS
            try:
                args = archiver.parse_args(list(args))
                # argparse parsing may raise SystemExit when the command line is bad or
                # actions that abort early (eg. --help) were given. Catch this and return
                # the error code as-if we invoked a Borg binary.
            except SystemExit as e:
                output_text.flush()
                return e.code, output.getvalue() if binary_output else output.getvalue().decode()
            ret = archiver.run(args)
            output_text.flush()
            return ret, output.getvalue() if binary_output else output.getvalue().decode()
        finally:
            sys.stdin, sys.stdout, sys.stderr = stdin, stdout, stderr


def have_gnutar():
    if not shutil.which('tar'):
        return False
    popen = subprocess.Popen(['tar', '--version'], stdout=subprocess.PIPE)
    stdout, stderr = popen.communicate()
    return b'GNU tar' in stdout


# check if the binary "borg.exe" is available (for local testing a symlink to virtualenv/bin/borg should do)
try:
    exec_cmd('help', exe='borg.exe', fork=True)
    BORG_EXES = ['python', 'binary', ]
except FileNotFoundError:
    BORG_EXES = ['python', ]


@pytest.fixture(params=BORG_EXES)
def cmd(request):
    if request.param == 'python':
        exe = None
    elif request.param == 'binary':
        exe = 'borg.exe'
    else:
        raise ValueError("param must be 'python' or 'binary'")

    def exec_fn(*args, **kw):
        return exec_cmd(*args, exe=exe, fork=True, **kw)

    return exec_fn


def test_return_codes(cmd, tmpdir):
    repo = tmpdir.mkdir('repo')
    input = tmpdir.mkdir('input')
    output = tmpdir.mkdir('output')
    input.join('test_file').write('content')
    rc, out = cmd('init', '--encryption=none', '%s' % str(repo))
    assert rc == EXIT_SUCCESS
    rc, out = cmd('create', '%s::archive' % repo, str(input))
    assert rc == EXIT_SUCCESS
    with changedir(str(output)):
        rc, out = cmd('extract', '%s::archive' % repo)
        assert rc == EXIT_SUCCESS
    rc, out = cmd('extract', '%s::archive' % repo, 'does/not/match')
    assert rc == EXIT_WARNING  # pattern did not match
    rc, out = cmd('create', '%s::archive' % repo, str(input))
    assert rc == EXIT_ERROR  # duplicate archive name


"""
test_disk_full is very slow and not recommended to be included in daily testing.
for this test, an empty, writable 16MB filesystem mounted on DF_MOUNT is required.
for speed and other reasons, it is recommended that the underlying block device is
in RAM, not a magnetic or flash disk.

assuming /tmp is a tmpfs (in memory filesystem), one can use this:
dd if=/dev/zero of=/tmp/borg-disk bs=16M count=1
mkfs.ext4 /tmp/borg-disk
mkdir /tmp/borg-mount
sudo mount /tmp/borg-disk /tmp/borg-mount

if the directory does not exist, the test will be skipped.
""" DF_MOUNT = '/tmp/borg-mount' @pytest.mark.skipif(not os.path.exists(DF_MOUNT), reason="needs a 16MB fs mounted on %s" % DF_MOUNT) def test_disk_full(cmd): def make_files(dir, count, size, rnd=True): shutil.rmtree(dir, ignore_errors=True) os.mkdir(dir) if rnd: count = random.randint(1, count) if size > 1: size = random.randint(1, size) for i in range(count): fn = os.path.join(dir, "file%03d" % i) with open(fn, 'wb') as f: data = os.urandom(size) f.write(data) with environment_variable(BORG_CHECK_I_KNOW_WHAT_I_AM_DOING='YES'): mount = DF_MOUNT assert os.path.exists(mount) repo = os.path.join(mount, 'repo') input = os.path.join(mount, 'input') reserve = os.path.join(mount, 'reserve') for j in range(100): shutil.rmtree(repo, ignore_errors=True) shutil.rmtree(input, ignore_errors=True) # keep some space and some inodes in reserve that we can free up later: make_files(reserve, 80, 100000, rnd=False) rc, out = cmd('init', repo) if rc != EXIT_SUCCESS: print('init', rc, out) assert rc == EXIT_SUCCESS try: success, i = True, 0 while success: i += 1 try: make_files(input, 20, 200000) except OSError as err: if err.errno == errno.ENOSPC: # already out of space break raise try: rc, out = cmd('create', '%s::test%03d' % (repo, i), input) success = rc == EXIT_SUCCESS if not success: print('create', rc, out) finally: # make sure repo is not locked shutil.rmtree(os.path.join(repo, 'lock.exclusive'), ignore_errors=True) os.remove(os.path.join(repo, 'lock.roster')) finally: # now some error happened, likely we are out of disk space. # free some space so we can expect borg to be able to work normally: shutil.rmtree(reserve, ignore_errors=True) rc, out = cmd('list', repo) if rc != EXIT_SUCCESS: print('list', rc, out) rc, out = cmd('check', '--repair', repo) if rc != EXIT_SUCCESS: print('check', rc, out) assert rc == EXIT_SUCCESS class ArchiverTestCaseBase(BaseTestCase): EXE = None # python source based FORK_DEFAULT = False prefix = '' def setUp(self): os.environ['BORG_CHECK_I_KNOW_WHAT_I_AM_DOING'] = 'YES' os.environ['BORG_DELETE_I_KNOW_WHAT_I_AM_DOING'] = 'YES' os.environ['BORG_PASSPHRASE'] = 'waytooeasyonlyfortests' self.archiver = not self.FORK_DEFAULT and Archiver() or None self.tmpdir = tempfile.mkdtemp() self.repository_path = os.path.join(self.tmpdir, 'repository') self.repository_location = self.prefix + self.repository_path self.input_path = os.path.join(self.tmpdir, 'input') self.output_path = os.path.join(self.tmpdir, 'output') self.keys_path = os.path.join(self.tmpdir, 'keys') self.cache_path = os.path.join(self.tmpdir, 'cache') self.exclude_file_path = os.path.join(self.tmpdir, 'excludes') self.patterns_file_path = os.path.join(self.tmpdir, 'patterns') os.environ['BORG_KEYS_DIR'] = self.keys_path os.environ['BORG_CACHE_DIR'] = self.cache_path os.mkdir(self.input_path) os.chmod(self.input_path, 0o777) # avoid troubles with fakeroot / FUSE os.mkdir(self.output_path) os.mkdir(self.keys_path) os.mkdir(self.cache_path) with open(self.exclude_file_path, 'wb') as fd: fd.write(b'input/file2\n# A comment line, then a blank line\n\n') with open(self.patterns_file_path, 'wb') as fd: fd.write(b'+input/file_important\n- input/file*\n# A comment line, then a blank line\n\n') self._old_wd = os.getcwd() os.chdir(self.tmpdir) def tearDown(self): os.chdir(self._old_wd) # note: ignore_errors=True as workaround for issue #862 shutil.rmtree(self.tmpdir, ignore_errors=True) setup_logging() def cmd(self, *args, **kw): exit_code = kw.pop('exit_code', 0) fork = kw.pop('fork', None) binary_output = 
kw.get('binary_output', False) if fork is None: fork = self.FORK_DEFAULT ret, output = exec_cmd(*args, fork=fork, exe=self.EXE, archiver=self.archiver, **kw) if ret != exit_code: print(output) self.assert_equal(ret, exit_code) # if tests are run with the pure-python msgpack, there will be warnings about # this in the output, which would make a lot of tests fail. pp_msg = PURE_PYTHON_MSGPACK_WARNING.encode() if binary_output else PURE_PYTHON_MSGPACK_WARNING empty = b'' if binary_output else '' output = empty.join(line for line in output.splitlines(keepends=True) if pp_msg not in line) return output def create_src_archive(self, name): self.cmd('create', '--compression=lz4', self.repository_location + '::' + name, src_dir) def open_archive(self, name): repository = Repository(self.repository_path, exclusive=True) with repository: manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK) archive = Archive(repository, key, manifest, name) return archive, repository def open_repository(self): return Repository(self.repository_path, exclusive=True) def create_regular_file(self, name, size=0, contents=None): assert not (size != 0 and contents and len(contents) != size), 'size and contents do not match' filename = os.path.join(self.input_path, name) if not os.path.exists(os.path.dirname(filename)): os.makedirs(os.path.dirname(filename)) with open(filename, 'wb') as fd: if contents is None: contents = b'X' * size fd.write(contents) def create_test_files(self): """Create a minimal test case including all supported file types """ # File self.create_regular_file('file1', size=1024 * 80) self.create_regular_file('flagfile', size=1024) # Directory self.create_regular_file('dir2/file2', size=1024 * 80) # File mode os.chmod('input/file1', 0o4755) # Hard link if are_hardlinks_supported(): os.link(os.path.join(self.input_path, 'file1'), os.path.join(self.input_path, 'hardlink')) # Symlink if are_symlinks_supported(): os.symlink('somewhere', os.path.join(self.input_path, 'link1')) self.create_regular_file('fusexattr', size=1) if not xattr.XATTR_FAKEROOT and xattr.is_enabled(self.input_path): fn = os.fsencode(os.path.join(self.input_path, 'fusexattr')) # ironically, due to the way how fakeroot works, comparing FUSE file xattrs to orig file xattrs # will FAIL if fakeroot supports xattrs, thus we only set the xattr if XATTR_FAKEROOT is False. # This is because fakeroot with xattr-support does not propagate xattrs of the underlying file # into "fakeroot space". Because the xattrs exposed by borgfs are these of an underlying file # (from fakeroots point of view) they are invisible to the test process inside the fakeroot. xattr.setxattr(fn, b'user.foo', b'bar') xattr.setxattr(fn, b'user.empty', b'') # XXX this always fails for me # ubuntu 14.04, on a TMP dir filesystem with user_xattr, using fakeroot # same for newer ubuntu and centos. # if this is supported just on specific platform, platform should be checked first, # so that the test setup for all tests using it does not fail here always for others. 
# xattr.setxattr(os.path.join(self.input_path, 'link1'), b'user.foo_symlink', b'bar_symlink', follow_symlinks=False) # FIFO node if are_fifos_supported(): os.mkfifo(os.path.join(self.input_path, 'fifo1')) if has_lchflags: platform.set_flags(os.path.join(self.input_path, 'flagfile'), stat.UF_NODUMP) try: # Block device os.mknod('input/bdev', 0o600 | stat.S_IFBLK, os.makedev(10, 20)) # Char device os.mknod('input/cdev', 0o600 | stat.S_IFCHR, os.makedev(30, 40)) # File mode os.chmod('input/dir2', 0o555) # if we take away write perms, we need root to remove contents # File owner os.chown('input/file1', 100, 200) # raises OSError invalid argument on cygwin have_root = True # we have (fake)root except PermissionError: have_root = False except OSError as e: # Note: ENOSYS "Function not implemented" happens as non-root on Win 10 Linux Subsystem. if e.errno not in (errno.EINVAL, errno.ENOSYS): raise have_root = False time.sleep(1) # "empty" must have newer timestamp than other files self.create_regular_file('empty', size=0) return have_root class ArchiverTestCase(ArchiverTestCaseBase): requires_hardlinks = pytest.mark.skipif(not are_hardlinks_supported(), reason='hardlinks not supported') def test_basic_functionality(self): have_root = self.create_test_files() # fork required to test show-rc output output = self.cmd('init', '--encryption=repokey', '--show-version', '--show-rc', self.repository_location, fork=True) self.assert_in('borgbackup version', output) self.assert_in('terminating with success status, rc 0', output) self.cmd('create', '--exclude-nodump', self.repository_location + '::test', 'input') output = self.cmd('create', '--exclude-nodump', '--stats', self.repository_location + '::test.2', 'input') self.assert_in('Archive name: test.2', output) self.assert_in('This archive: ', output) with changedir('output'): self.cmd('extract', self.repository_location + '::test') list_output = self.cmd('list', '--short', self.repository_location) self.assert_in('test', list_output) self.assert_in('test.2', list_output) expected = [ 'input', 'input/bdev', 'input/cdev', 'input/dir2', 'input/dir2/file2', 'input/empty', 'input/file1', 'input/flagfile', ] if are_fifos_supported(): expected.append('input/fifo1') if are_symlinks_supported(): expected.append('input/link1') if are_hardlinks_supported(): expected.append('input/hardlink') if not have_root: # we could not create these device files without (fake)root expected.remove('input/bdev') expected.remove('input/cdev') if has_lchflags: # remove the file we did not backup, so input and output become equal expected.remove('input/flagfile') # this file is UF_NODUMP os.remove(os.path.join('input', 'flagfile')) list_output = self.cmd('list', '--short', self.repository_location + '::test') for name in expected: self.assert_in(name, list_output) self.assert_dirs_equal('input', 'output/input') info_output = self.cmd('info', self.repository_location + '::test') item_count = 4 if has_lchflags else 5 # one file is UF_NODUMP self.assert_in('Number of files: %d' % item_count, info_output) shutil.rmtree(self.cache_path) info_output2 = self.cmd('info', self.repository_location + '::test') def filter(output): # filter for interesting "info" output, ignore cache rebuilding related stuff prefixes = ['Name:', 'Fingerprint:', 'Number of files:', 'This archive:', 'All archives:', 'Chunk index:', ] result = [] for line in output.splitlines(): for prefix in prefixes: if line.startswith(prefix): result.append(line) return '\n'.join(result) # the interesting parts of info_output2 
and info_output should be same self.assert_equal(filter(info_output), filter(info_output2)) @requires_hardlinks def test_create_duplicate_root(self): # setup for #5603 path_a = os.path.join(self.input_path, 'a') path_b = os.path.join(self.input_path, 'b') os.mkdir(path_a) os.mkdir(path_b) hl_a = os.path.join(path_a, 'hardlink') hl_b = os.path.join(path_b, 'hardlink') self.create_regular_file(hl_a, contents=b'123456') os.link(hl_a, hl_b) self.cmd('init', '--encryption=none', self.repository_location) self.cmd('create', self.repository_location + '::test', 'input', 'input') # give input twice! # test if created archive has 'input' contents twice: archive_list = self.cmd('list', '--json-lines', self.repository_location + '::test') paths = [json.loads(line)['path'] for line in archive_list.split('\n') if line] # we have all fs items exactly once! assert sorted(paths) == ['input', 'input/a', 'input/a/hardlink', 'input/b', 'input/b/hardlink'] def test_init_parent_dirs(self): parent_path = os.path.join(self.tmpdir, 'parent1', 'parent2') repository_path = os.path.join(parent_path, 'repository') repository_location = self.prefix + repository_path with pytest.raises(Repository.ParentPathDoesNotExist): # normal borg init does NOT create missing parent dirs self.cmd('init', '--encryption=none', repository_location) # but if told so, it does: self.cmd('init', '--encryption=none', '--make-parent-dirs', repository_location) assert os.path.exists(parent_path) def test_unix_socket(self): self.cmd('init', '--encryption=repokey', self.repository_location) try: sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) sock.bind(os.path.join(self.input_path, 'unix-socket')) except PermissionError as err: if err.errno == errno.EPERM: pytest.skip('unix sockets disabled or not supported') elif err.errno == errno.EACCES: pytest.skip('permission denied to create unix sockets') self.cmd('create', self.repository_location + '::test', 'input') sock.close() with changedir('output'): self.cmd('extract', self.repository_location + '::test') assert not os.path.exists('input/unix-socket') @pytest.mark.skipif(not are_symlinks_supported(), reason='symlinks not supported') def test_symlink_extract(self): self.create_test_files() self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test', 'input') with changedir('output'): self.cmd('extract', self.repository_location + '::test') assert os.readlink('input/link1') == 'somewhere' @pytest.mark.skipif(not is_utime_fully_supported(), reason='cannot properly setup and execute test without utime') def test_atime(self): def has_noatime(some_file): atime_before = os.stat(some_file).st_atime_ns try: with open(os.open(some_file, flags_noatime)) as file: file.read() except PermissionError: return False else: atime_after = os.stat(some_file).st_atime_ns noatime_used = flags_noatime != flags_normal return noatime_used and atime_before == atime_after self.create_test_files() atime, mtime = 123456780, 234567890 have_noatime = has_noatime('input/file1') os.utime('input/file1', (atime, mtime)) self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', '--atime', self.repository_location + '::test', 'input') with changedir('output'): self.cmd('extract', self.repository_location + '::test') sti = os.stat('input/file1') sto = os.stat('output/input/file1') assert sti.st_mtime_ns == sto.st_mtime_ns == mtime * 1e9 if have_noatime: assert sti.st_atime_ns == sto.st_atime_ns == atime * 1e9 else: # it touched the 
input file's atime while backing it up assert sto.st_atime_ns == atime * 1e9 @pytest.mark.skipif(not is_utime_fully_supported(), reason='cannot properly setup and execute test without utime') @pytest.mark.skipif(not is_birthtime_fully_supported(), reason='cannot properly setup and execute test without birthtime') def test_birthtime(self): self.create_test_files() birthtime, mtime, atime = 946598400, 946684800, 946771200 os.utime('input/file1', (atime, birthtime)) os.utime('input/file1', (atime, mtime)) self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test', 'input') with changedir('output'): self.cmd('extract', self.repository_location + '::test') sti = os.stat('input/file1') sto = os.stat('output/input/file1') assert int(sti.st_birthtime * 1e9) == int(sto.st_birthtime * 1e9) == birthtime * 1e9 assert sti.st_mtime_ns == sto.st_mtime_ns == mtime * 1e9 @pytest.mark.skipif(not is_utime_fully_supported(), reason='cannot properly setup and execute test without utime') @pytest.mark.skipif(not is_birthtime_fully_supported(), reason='cannot properly setup and execute test without birthtime') def test_nobirthtime(self): self.create_test_files() birthtime, mtime, atime = 946598400, 946684800, 946771200 os.utime('input/file1', (atime, birthtime)) os.utime('input/file1', (atime, mtime)) self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', '--nobirthtime', self.repository_location + '::test', 'input') with changedir('output'): self.cmd('extract', self.repository_location + '::test') sti = os.stat('input/file1') sto = os.stat('output/input/file1') assert int(sti.st_birthtime * 1e9) == birthtime * 1e9 assert int(sto.st_birthtime * 1e9) == mtime * 1e9 assert sti.st_mtime_ns == sto.st_mtime_ns == mtime * 1e9 def _extract_repository_id(self, path): with Repository(self.repository_path) as repository: return repository.id def _set_repository_id(self, path, id): config = ConfigParser(interpolation=None) config.read(os.path.join(path, 'config')) config.set('repository', 'id', bin_to_hex(id)) with open(os.path.join(path, 'config'), 'w') as fd: config.write(fd) with Repository(self.repository_path) as repository: return repository.id def test_sparse_file(self): def is_sparse(fn, total_size, hole_size): st = os.stat(fn) assert st.st_size == total_size sparse = True if sparse and hasattr(st, 'st_blocks') and st.st_blocks * 512 >= st.st_size: sparse = False if sparse and has_seek_hole: with open(fn, 'rb') as fd: # only check if the first hole is as expected, because the 2nd hole check # is problematic on xfs due to its "dynamic speculative EOF preallocation try: if fd.seek(0, os.SEEK_HOLE) != 0: sparse = False if fd.seek(0, os.SEEK_DATA) != hole_size: sparse = False except OSError: # OS/FS does not really support SEEK_HOLE/SEEK_DATA sparse = False return sparse filename = os.path.join(self.input_path, 'sparse') content = b'foobar' hole_size = 5 * (1 << CHUNK_MAX_EXP) # 5 full chunker buffers total_size = hole_size + len(content) + hole_size with open(filename, 'wb') as fd: # create a file that has a hole at the beginning and end (if the # OS and filesystem supports sparse files) fd.seek(hole_size, 1) fd.write(content) fd.seek(hole_size, 1) pos = fd.tell() fd.truncate(pos) # we first check if we could create a sparse input file: sparse_support = is_sparse(filename, total_size, hole_size) if sparse_support: # we could create a sparse input file, so creating a backup of it and # extracting it again (as sparse) should 
also work: self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test', 'input') with changedir(self.output_path): self.cmd('extract', '--sparse', self.repository_location + '::test') self.assert_dirs_equal('input', 'output/input') filename = os.path.join(self.output_path, 'input', 'sparse') with open(filename, 'rb') as fd: # check if file contents are as expected self.assert_equal(fd.read(hole_size), b'\0' * hole_size) self.assert_equal(fd.read(len(content)), content) self.assert_equal(fd.read(hole_size), b'\0' * hole_size) self.assert_true(is_sparse(filename, total_size, hole_size)) def test_unusual_filenames(self): filenames = ['normal', 'with some blanks', '(with_parens)', ] for filename in filenames: filename = os.path.join(self.input_path, filename) with open(filename, 'wb'): pass self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test', 'input') for filename in filenames: with changedir('output'): self.cmd('extract', self.repository_location + '::test', os.path.join('input', filename)) assert os.path.exists(os.path.join('output', 'input', filename)) def test_repository_swap_detection(self): self.create_test_files() os.environ['BORG_PASSPHRASE'] = 'passphrase' self.cmd('init', '--encryption=repokey', self.repository_location) repository_id = self._extract_repository_id(self.repository_path) self.cmd('create', self.repository_location + '::test', 'input') shutil.rmtree(self.repository_path) self.cmd('init', '--encryption=none', self.repository_location) self._set_repository_id(self.repository_path, repository_id) self.assert_equal(repository_id, self._extract_repository_id(self.repository_path)) if self.FORK_DEFAULT: self.cmd('create', self.repository_location + '::test.2', 'input', exit_code=EXIT_ERROR) else: with pytest.raises(Cache.EncryptionMethodMismatch): self.cmd('create', self.repository_location + '::test.2', 'input') def test_repository_swap_detection2(self): self.create_test_files() self.cmd('init', '--encryption=none', self.repository_location + '_unencrypted') os.environ['BORG_PASSPHRASE'] = 'passphrase' self.cmd('init', '--encryption=repokey', self.repository_location + '_encrypted') self.cmd('create', self.repository_location + '_encrypted::test', 'input') shutil.rmtree(self.repository_path + '_encrypted') os.rename(self.repository_path + '_unencrypted', self.repository_path + '_encrypted') if self.FORK_DEFAULT: self.cmd('create', self.repository_location + '_encrypted::test.2', 'input', exit_code=EXIT_ERROR) else: with pytest.raises(Cache.RepositoryAccessAborted): self.cmd('create', self.repository_location + '_encrypted::test.2', 'input') def test_repository_swap_detection_no_cache(self): self.create_test_files() os.environ['BORG_PASSPHRASE'] = 'passphrase' self.cmd('init', '--encryption=repokey', self.repository_location) repository_id = self._extract_repository_id(self.repository_path) self.cmd('create', self.repository_location + '::test', 'input') shutil.rmtree(self.repository_path) self.cmd('init', '--encryption=none', self.repository_location) self._set_repository_id(self.repository_path, repository_id) self.assert_equal(repository_id, self._extract_repository_id(self.repository_path)) self.cmd('delete', '--cache-only', self.repository_location) if self.FORK_DEFAULT: self.cmd('create', self.repository_location + '::test.2', 'input', exit_code=EXIT_ERROR) else: with pytest.raises(Cache.EncryptionMethodMismatch): self.cmd('create', 
self.repository_location + '::test.2', 'input') def test_repository_swap_detection2_no_cache(self): self.create_test_files() self.cmd('init', '--encryption=none', self.repository_location + '_unencrypted') os.environ['BORG_PASSPHRASE'] = 'passphrase' self.cmd('init', '--encryption=repokey', self.repository_location + '_encrypted') self.cmd('create', self.repository_location + '_encrypted::test', 'input') self.cmd('delete', '--cache-only', self.repository_location + '_unencrypted') self.cmd('delete', '--cache-only', self.repository_location + '_encrypted') shutil.rmtree(self.repository_path + '_encrypted') os.rename(self.repository_path + '_unencrypted', self.repository_path + '_encrypted') if self.FORK_DEFAULT: self.cmd('create', self.repository_location + '_encrypted::test.2', 'input', exit_code=EXIT_ERROR) else: with pytest.raises(Cache.RepositoryAccessAborted): self.cmd('create', self.repository_location + '_encrypted::test.2', 'input') def test_repository_swap_detection_repokey_blank_passphrase(self): # Check that a repokey repo with a blank passphrase is considered like a plaintext repo. self.create_test_files() # User initializes her repository with her passphrase self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test', 'input') # Attacker replaces it with her own repository, which is encrypted but has no passphrase set shutil.rmtree(self.repository_path) with environment_variable(BORG_PASSPHRASE=''): self.cmd('init', '--encryption=repokey', self.repository_location) # Delete cache & security database, AKA switch to user perspective self.cmd('delete', '--cache-only', self.repository_location) repository_id = bin_to_hex(self._extract_repository_id(self.repository_path)) shutil.rmtree(get_security_dir(repository_id)) with environment_variable(BORG_PASSPHRASE=None): # This is the part were the user would be tricked, e.g. she assumes that BORG_PASSPHRASE # is set, while it isn't. Previously this raised no warning, # since the repository is, technically, encrypted. 
if self.FORK_DEFAULT: self.cmd('create', self.repository_location + '::test.2', 'input', exit_code=EXIT_ERROR) else: with pytest.raises(Cache.CacheInitAbortedError): self.cmd('create', self.repository_location + '::test.2', 'input') def test_repository_move(self): self.cmd('init', '--encryption=repokey', self.repository_location) repository_id = bin_to_hex(self._extract_repository_id(self.repository_path)) os.rename(self.repository_path, self.repository_path + '_new') with environment_variable(BORG_RELOCATED_REPO_ACCESS_IS_OK='yes'): self.cmd('info', self.repository_location + '_new') security_dir = get_security_dir(repository_id) with open(os.path.join(security_dir, 'location')) as fd: location = fd.read() assert location == Location(self.repository_location + '_new').canonical_path() # Needs no confirmation anymore self.cmd('info', self.repository_location + '_new') shutil.rmtree(self.cache_path) self.cmd('info', self.repository_location + '_new') shutil.rmtree(security_dir) self.cmd('info', self.repository_location + '_new') for file in ('location', 'key-type', 'manifest-timestamp'): assert os.path.exists(os.path.join(security_dir, file)) def test_security_dir_compat(self): self.cmd('init', '--encryption=repokey', self.repository_location) repository_id = bin_to_hex(self._extract_repository_id(self.repository_path)) security_dir = get_security_dir(repository_id) with open(os.path.join(security_dir, 'location'), 'w') as fd: fd.write('something outdated') # This is fine, because the cache still has the correct information. security_dir and cache can disagree # if older versions are used to confirm a renamed repository. self.cmd('info', self.repository_location) def test_unknown_unencrypted(self): self.cmd('init', '--encryption=none', self.repository_location) repository_id = bin_to_hex(self._extract_repository_id(self.repository_path)) security_dir = get_security_dir(repository_id) # Ok: repository is known self.cmd('info', self.repository_location) # Ok: repository is still known (through security_dir) shutil.rmtree(self.cache_path) self.cmd('info', self.repository_location) # Needs confirmation: cache and security dir both gone (eg. 
another host or rm -rf ~) shutil.rmtree(self.cache_path) shutil.rmtree(security_dir) if self.FORK_DEFAULT: self.cmd('info', self.repository_location, exit_code=EXIT_ERROR) else: with pytest.raises(Cache.CacheInitAbortedError): self.cmd('info', self.repository_location) with environment_variable(BORG_UNKNOWN_UNENCRYPTED_REPO_ACCESS_IS_OK='yes'): self.cmd('info', self.repository_location) def test_strip_components(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_regular_file('dir/file') self.cmd('create', self.repository_location + '::test', 'input') with changedir('output'): self.cmd('extract', self.repository_location + '::test', '--strip-components', '3') self.assert_true(not os.path.exists('file')) with self.assert_creates_file('file'): self.cmd('extract', self.repository_location + '::test', '--strip-components', '2') with self.assert_creates_file('dir/file'): self.cmd('extract', self.repository_location + '::test', '--strip-components', '1') with self.assert_creates_file('input/dir/file'): self.cmd('extract', self.repository_location + '::test', '--strip-components', '0') def _extract_hardlinks_setup(self): os.mkdir(os.path.join(self.input_path, 'dir1')) os.mkdir(os.path.join(self.input_path, 'dir1/subdir')) self.create_regular_file('source', contents=b'123456') os.link(os.path.join(self.input_path, 'source'), os.path.join(self.input_path, 'abba')) os.link(os.path.join(self.input_path, 'source'), os.path.join(self.input_path, 'dir1/hardlink')) os.link(os.path.join(self.input_path, 'source'), os.path.join(self.input_path, 'dir1/subdir/hardlink')) self.create_regular_file('dir1/source2') os.link(os.path.join(self.input_path, 'dir1/source2'), os.path.join(self.input_path, 'dir1/aaaa')) self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test', 'input') @requires_hardlinks @unittest.skipUnless(llfuse, 'llfuse not installed') def test_fuse_mount_hardlinks(self): self._extract_hardlinks_setup() mountpoint = os.path.join(self.tmpdir, 'mountpoint') # we need to get rid of permissions checking because fakeroot causes issues with it. # On all platforms, borg defaults to "default_permissions" and we need to get rid of it via "ignore_permissions". # On macOS (darwin), we additionally need "defer_permissions" to switch off the checks in osxfuse. 
if sys.platform == 'darwin': ignore_perms = ['-o', 'ignore_permissions,defer_permissions'] else: ignore_perms = ['-o', 'ignore_permissions'] with self.fuse_mount(self.repository_location + '::test', mountpoint, '--strip-components=2', *ignore_perms), \ changedir(mountpoint): assert os.stat('hardlink').st_nlink == 2 assert os.stat('subdir/hardlink').st_nlink == 2 assert open('subdir/hardlink', 'rb').read() == b'123456' assert os.stat('aaaa').st_nlink == 2 assert os.stat('source2').st_nlink == 2 with self.fuse_mount(self.repository_location + '::test', mountpoint, 'input/dir1', *ignore_perms), \ changedir(mountpoint): assert os.stat('input/dir1/hardlink').st_nlink == 2 assert os.stat('input/dir1/subdir/hardlink').st_nlink == 2 assert open('input/dir1/subdir/hardlink', 'rb').read() == b'123456' assert os.stat('input/dir1/aaaa').st_nlink == 2 assert os.stat('input/dir1/source2').st_nlink == 2 with self.fuse_mount(self.repository_location + '::test', mountpoint, *ignore_perms), \ changedir(mountpoint): assert os.stat('input/source').st_nlink == 4 assert os.stat('input/abba').st_nlink == 4 assert os.stat('input/dir1/hardlink').st_nlink == 4 assert os.stat('input/dir1/subdir/hardlink').st_nlink == 4 assert open('input/dir1/subdir/hardlink', 'rb').read() == b'123456' @requires_hardlinks def test_extract_hardlinks1(self): self._extract_hardlinks_setup() with changedir('output'): self.cmd('extract', self.repository_location + '::test') assert os.stat('input/source').st_nlink == 4 assert os.stat('input/abba').st_nlink == 4 assert os.stat('input/dir1/hardlink').st_nlink == 4 assert os.stat('input/dir1/subdir/hardlink').st_nlink == 4 assert open('input/dir1/subdir/hardlink', 'rb').read() == b'123456' @requires_hardlinks def test_extract_hardlinks2(self): self._extract_hardlinks_setup() with changedir('output'): self.cmd('extract', self.repository_location + '::test', '--strip-components', '2') assert os.stat('hardlink').st_nlink == 2 assert os.stat('subdir/hardlink').st_nlink == 2 assert open('subdir/hardlink', 'rb').read() == b'123456' assert os.stat('aaaa').st_nlink == 2 assert os.stat('source2').st_nlink == 2 with changedir('output'): self.cmd('extract', self.repository_location + '::test', 'input/dir1') assert os.stat('input/dir1/hardlink').st_nlink == 2 assert os.stat('input/dir1/subdir/hardlink').st_nlink == 2 assert open('input/dir1/subdir/hardlink', 'rb').read() == b'123456' assert os.stat('input/dir1/aaaa').st_nlink == 2 assert os.stat('input/dir1/source2').st_nlink == 2 @requires_hardlinks def test_extract_hardlinks_twice(self): # setup for #5603 path_a = os.path.join(self.input_path, 'a') path_b = os.path.join(self.input_path, 'b') os.mkdir(path_a) os.mkdir(path_b) hl_a = os.path.join(path_a, 'hardlink') hl_b = os.path.join(path_b, 'hardlink') self.create_regular_file(hl_a, contents=b'123456') os.link(hl_a, hl_b) self.cmd('init', '--encryption=none', self.repository_location) self.cmd('create', self.repository_location + '::test', 'input', 'input') # give input twice! 
# now test extraction with changedir('output'): self.cmd('extract', self.repository_location + '::test') # if issue #5603 happens, extraction gives rc == 1 (triggering AssertionError) and warnings like: # input/a/hardlink: link: [Errno 2] No such file or directory: 'input/a/hardlink' -> 'input/a/hardlink' # input/b/hardlink: link: [Errno 2] No such file or directory: 'input/a/hardlink' -> 'input/b/hardlink' # otherwise, when fixed, the hardlinks should be there and have a link count of 2 assert os.stat('input/a/hardlink').st_nlink == 2 assert os.stat('input/b/hardlink').st_nlink == 2 def test_extract_include_exclude(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_regular_file('file1', size=1024 * 80) self.create_regular_file('file2', size=1024 * 80) self.create_regular_file('file3', size=1024 * 80) self.create_regular_file('file4', size=1024 * 80) self.cmd('create', '--exclude=input/file4', self.repository_location + '::test', 'input') with changedir('output'): self.cmd('extract', self.repository_location + '::test', 'input/file1', ) self.assert_equal(sorted(os.listdir('output/input')), ['file1']) with changedir('output'): self.cmd('extract', '--exclude=input/file2', self.repository_location + '::test') self.assert_equal(sorted(os.listdir('output/input')), ['file1', 'file3']) with changedir('output'): self.cmd('extract', '--exclude-from=' + self.exclude_file_path, self.repository_location + '::test') self.assert_equal(sorted(os.listdir('output/input')), ['file1', 'file3']) def test_extract_include_exclude_regex(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_regular_file('file1', size=1024 * 80) self.create_regular_file('file2', size=1024 * 80) self.create_regular_file('file3', size=1024 * 80) self.create_regular_file('file4', size=1024 * 80) self.create_regular_file('file333', size=1024 * 80) # Create with regular expression exclusion for file4 self.cmd('create', '--exclude=re:input/file4$', self.repository_location + '::test', 'input') with changedir('output'): self.cmd('extract', self.repository_location + '::test') self.assert_equal(sorted(os.listdir('output/input')), ['file1', 'file2', 'file3', 'file333']) shutil.rmtree('output/input') # Extract with regular expression exclusion with changedir('output'): self.cmd('extract', '--exclude=re:file3+', self.repository_location + '::test') self.assert_equal(sorted(os.listdir('output/input')), ['file1', 'file2']) shutil.rmtree('output/input') # Combine --exclude with fnmatch and regular expression with changedir('output'): self.cmd('extract', '--exclude=input/file2', '--exclude=re:file[01]', self.repository_location + '::test') self.assert_equal(sorted(os.listdir('output/input')), ['file3', 'file333']) shutil.rmtree('output/input') # Combine --exclude-from and regular expression exclusion with changedir('output'): self.cmd('extract', '--exclude-from=' + self.exclude_file_path, '--exclude=re:file1', '--exclude=re:file(\\d)\\1\\1$', self.repository_location + '::test') self.assert_equal(sorted(os.listdir('output/input')), ['file3']) def test_extract_include_exclude_regex_from_file(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_regular_file('file1', size=1024 * 80) self.create_regular_file('file2', size=1024 * 80) self.create_regular_file('file3', size=1024 * 80) self.create_regular_file('file4', size=1024 * 80) self.create_regular_file('file333', size=1024 * 80) self.create_regular_file('aa:something', size=1024 * 80) # Create 
while excluding using mixed pattern styles with open(self.exclude_file_path, 'wb') as fd: fd.write(b're:input/file4$\n') fd.write(b'fm:*aa:*thing\n') self.cmd('create', '--exclude-from=' + self.exclude_file_path, self.repository_location + '::test', 'input') with changedir('output'): self.cmd('extract', self.repository_location + '::test') self.assert_equal(sorted(os.listdir('output/input')), ['file1', 'file2', 'file3', 'file333']) shutil.rmtree('output/input') # Exclude using regular expression with open(self.exclude_file_path, 'wb') as fd: fd.write(b're:file3+\n') with changedir('output'): self.cmd('extract', '--exclude-from=' + self.exclude_file_path, self.repository_location + '::test') self.assert_equal(sorted(os.listdir('output/input')), ['file1', 'file2']) shutil.rmtree('output/input') # Mixed exclude pattern styles with open(self.exclude_file_path, 'wb') as fd: fd.write(b're:file(\\d)\\1\\1$\n') fd.write(b'fm:nothingwillmatchthis\n') fd.write(b'*/file1\n') fd.write(b're:file2$\n') with changedir('output'): self.cmd('extract', '--exclude-from=' + self.exclude_file_path, self.repository_location + '::test') self.assert_equal(sorted(os.listdir('output/input')), ['file3']) def test_extract_with_pattern(self): self.cmd("init", '--encryption=repokey', self.repository_location) self.create_regular_file("file1", size=1024 * 80) self.create_regular_file("file2", size=1024 * 80) self.create_regular_file("file3", size=1024 * 80) self.create_regular_file("file4", size=1024 * 80) self.create_regular_file("file333", size=1024 * 80) self.cmd("create", self.repository_location + "::test", "input") # Extract everything with regular expression with changedir("output"): self.cmd("extract", self.repository_location + "::test", "re:.*") self.assert_equal(sorted(os.listdir("output/input")), ["file1", "file2", "file3", "file333", "file4"]) shutil.rmtree("output/input") # Extract with pattern while also excluding files with changedir("output"): self.cmd("extract", "--exclude=re:file[34]$", self.repository_location + "::test", r"re:file\d$") self.assert_equal(sorted(os.listdir("output/input")), ["file1", "file2"]) shutil.rmtree("output/input") # Combine --exclude with pattern for extraction with changedir("output"): self.cmd("extract", "--exclude=input/file1", self.repository_location + "::test", "re:file[12]$") self.assert_equal(sorted(os.listdir("output/input")), ["file2"]) shutil.rmtree("output/input") # Multiple pattern with changedir("output"): self.cmd("extract", self.repository_location + "::test", "fm:input/file1", "fm:*file33*", "input/file2") self.assert_equal(sorted(os.listdir("output/input")), ["file1", "file2", "file333"]) def test_extract_list_output(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_regular_file('file', size=1024 * 80) self.cmd('create', self.repository_location + '::test', 'input') with changedir('output'): output = self.cmd('extract', self.repository_location + '::test') self.assert_not_in("input/file", output) shutil.rmtree('output/input') with changedir('output'): output = self.cmd('extract', '--info', self.repository_location + '::test') self.assert_not_in("input/file", output) shutil.rmtree('output/input') with changedir('output'): output = self.cmd('extract', '--list', self.repository_location + '::test') self.assert_in("input/file", output) shutil.rmtree('output/input') with changedir('output'): output = self.cmd('extract', '--list', '--info', self.repository_location + '::test') self.assert_in("input/file", output) def 
test_extract_progress(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_regular_file('file', size=1024 * 80) self.cmd('create', self.repository_location + '::test', 'input') with changedir('output'): output = self.cmd('extract', self.repository_location + '::test', '--progress') assert 'Extracting:' in output def _create_test_caches(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_regular_file('file1', size=1024 * 80) self.create_regular_file('cache1/%s' % CACHE_TAG_NAME, contents=CACHE_TAG_CONTENTS + b' extra stuff') self.create_regular_file('cache2/%s' % CACHE_TAG_NAME, contents=b'invalid signature') os.mkdir('input/cache3') if are_hardlinks_supported(): os.link('input/cache1/%s' % CACHE_TAG_NAME, 'input/cache3/%s' % CACHE_TAG_NAME) else: self.create_regular_file('cache3/%s' % CACHE_TAG_NAME, contents=CACHE_TAG_CONTENTS + b' extra stuff') def test_create_stdin(self): self.cmd('init', '--encryption=repokey', self.repository_location) input_data = b'\x00foo\n\nbar\n \n' self.cmd('create', self.repository_location + '::test', '-', input=input_data) item = json.loads(self.cmd('list', '--json-lines', self.repository_location + '::test')) assert item['uid'] == 0 assert item['gid'] == 0 assert item['size'] == len(input_data) assert item['path'] == 'stdin' extracted_data = self.cmd('extract', '--stdout', self.repository_location + '::test', binary_output=True) assert extracted_data == input_data def test_create_content_from_command(self): self.cmd('init', '--encryption=repokey', self.repository_location) input_data = 'some test content' name = 'a/b/c' self.cmd('create', '--stdin-name', name, '--content-from-command', self.repository_location + '::test', '--', 'echo', input_data) item = json.loads(self.cmd('list', '--json-lines', self.repository_location + '::test')) assert item['uid'] == 0 assert item['gid'] == 0 assert item['size'] == len(input_data) + 1 # `echo` adds newline assert item['path'] == name extracted_data = self.cmd('extract', '--stdout', self.repository_location + '::test') assert extracted_data == input_data + '\n' def test_create_content_from_command_with_failed_command(self): self.cmd('init', '--encryption=repokey', self.repository_location) output = self.cmd('create', '--content-from-command', self.repository_location + '::test', '--', 'sh', '-c', 'exit 73;', exit_code=2) assert output.endswith("Command 'sh' exited with status 73\n") archive_list = json.loads(self.cmd('list', '--json', self.repository_location)) assert archive_list['archives'] == [] def test_create_content_from_command_missing_command(self): self.cmd('init', '--encryption=repokey', self.repository_location) output = self.cmd('create', '--content-from-command', self.repository_location + '::test', exit_code=2) assert output.endswith('No command given.\n') def test_create_paths_from_stdin(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_regular_file("file1", size=1024 * 80) self.create_regular_file("dir1/file2", size=1024 * 80) self.create_regular_file("dir1/file3", size=1024 * 80) self.create_regular_file("file4", size=1024 * 80) input_data = b'input/file1\0input/dir1\0input/file4' self.cmd('create', '--paths-from-stdin', '--paths-delimiter', '\\0', self.repository_location + '::test', input=input_data) archive_list = self.cmd('list', '--json-lines', self.repository_location + '::test') paths = [json.loads(line)['path'] for line in archive_list.split('\n') if line] assert paths == ['input/file1', 
'input/dir1', 'input/file4'] def test_create_paths_from_command(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_regular_file("file1", size=1024 * 80) self.create_regular_file("file2", size=1024 * 80) self.create_regular_file("file3", size=1024 * 80) self.create_regular_file("file4", size=1024 * 80) input_data = 'input/file1\ninput/file2\ninput/file3' self.cmd('create', '--paths-from-command', self.repository_location + '::test', '--', 'echo', input_data) archive_list = self.cmd('list', '--json-lines', self.repository_location + '::test') paths = [json.loads(line)['path'] for line in archive_list.split('\n') if line] assert paths == ['input/file1', 'input/file2', 'input/file3'] def test_create_paths_from_command_with_failed_command(self): self.cmd('init', '--encryption=repokey', self.repository_location) output = self.cmd('create', '--paths-from-command', self.repository_location + '::test', '--', 'sh', '-c', 'exit 73;', exit_code=2) assert output.endswith("Command 'sh' exited with status 73\n") archive_list = json.loads(self.cmd('list', '--json', self.repository_location)) assert archive_list['archives'] == [] def test_create_paths_from_command_missing_command(self): self.cmd('init', '--encryption=repokey', self.repository_location) output = self.cmd('create', '--paths-from-command', self.repository_location + '::test', exit_code=2) assert output.endswith('No command given.\n') def test_create_without_root(self): """test create without a root""" self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test', exit_code=2) def test_create_pattern_root(self): """test create with only a root pattern""" self.cmd('init', '--encryption=repokey', self.repository_location) self.create_regular_file('file1', size=1024 * 80) self.create_regular_file('file2', size=1024 * 80) output = self.cmd('create', '-v', '--list', '--pattern=R input', self.repository_location + '::test') self.assert_in("A input/file1", output) self.assert_in("A input/file2", output) def test_create_pattern(self): """test file patterns during create""" self.cmd('init', '--encryption=repokey', self.repository_location) self.create_regular_file('file1', size=1024 * 80) self.create_regular_file('file2', size=1024 * 80) self.create_regular_file('file_important', size=1024 * 80) output = self.cmd('create', '-v', '--list', '--pattern=+input/file_important', '--pattern=-input/file*', self.repository_location + '::test', 'input') self.assert_in("A input/file_important", output) self.assert_in('x input/file1', output) self.assert_in('x input/file2', output) def test_create_pattern_file(self): """test file patterns during create""" self.cmd('init', '--encryption=repokey', self.repository_location) self.create_regular_file('file1', size=1024 * 80) self.create_regular_file('file2', size=1024 * 80) self.create_regular_file('otherfile', size=1024 * 80) self.create_regular_file('file_important', size=1024 * 80) output = self.cmd('create', '-v', '--list', '--pattern=-input/otherfile', '--patterns-from=' + self.patterns_file_path, self.repository_location + '::test', 'input') self.assert_in("A input/file_important", output) self.assert_in('x input/file1', output) self.assert_in('x input/file2', output) self.assert_in('x input/otherfile', output) def test_create_pattern_exclude_folder_but_recurse(self): """test when patterns exclude a parent folder, but include a child""" self.patterns_file_path2 = os.path.join(self.tmpdir, 'patterns2') with 
open(self.patterns_file_path2, 'wb') as fd: fd.write(b'+ input/x/b\n- input/x*\n') self.cmd('init', '--encryption=repokey', self.repository_location) self.create_regular_file('x/a/foo_a', size=1024 * 80) self.create_regular_file('x/b/foo_b', size=1024 * 80) self.create_regular_file('y/foo_y', size=1024 * 80) output = self.cmd('create', '-v', '--list', '--patterns-from=' + self.patterns_file_path2, self.repository_location + '::test', 'input') self.assert_in('x input/x/a/foo_a', output) self.assert_in("A input/x/b/foo_b", output) self.assert_in('A input/y/foo_y', output) def test_create_pattern_exclude_folder_no_recurse(self): """test when patterns exclude a parent folder and, but include a child""" self.patterns_file_path2 = os.path.join(self.tmpdir, 'patterns2') with open(self.patterns_file_path2, 'wb') as fd: fd.write(b'+ input/x/b\n! input/x*\n') self.cmd('init', '--encryption=repokey', self.repository_location) self.create_regular_file('x/a/foo_a', size=1024 * 80) self.create_regular_file('x/b/foo_b', size=1024 * 80) self.create_regular_file('y/foo_y', size=1024 * 80) output = self.cmd('create', '-v', '--list', '--patterns-from=' + self.patterns_file_path2, self.repository_location + '::test', 'input') self.assert_not_in('input/x/a/foo_a', output) self.assert_not_in('input/x/a', output) self.assert_in('A input/y/foo_y', output) def test_create_pattern_intermediate_folders_first(self): """test that intermediate folders appear first when patterns exclude a parent folder but include a child""" self.patterns_file_path2 = os.path.join(self.tmpdir, 'patterns2') with open(self.patterns_file_path2, 'wb') as fd: fd.write(b'+ input/x/a\n+ input/x/b\n- input/x*\n') self.cmd('init', '--encryption=repokey', self.repository_location) self.create_regular_file('x/a/foo_a', size=1024 * 80) self.create_regular_file('x/b/foo_b', size=1024 * 80) with changedir('input'): self.cmd('create', '--patterns-from=' + self.patterns_file_path2, self.repository_location + '::test', '.') # list the archive and verify that the "intermediate" folders appear before # their contents out = self.cmd('list', '--format', '{type} {path}{NL}', self.repository_location + '::test') out_list = out.splitlines() self.assert_in('d x/a', out_list) self.assert_in('d x/b', out_list) assert out_list.index('d x/a') < out_list.index('- x/a/foo_a') assert out_list.index('d x/b') < out_list.index('- x/b/foo_b') def test_create_no_cache_sync(self): self.create_test_files() self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('delete', '--cache-only', self.repository_location) create_json = json.loads(self.cmd('create', '--no-cache-sync', self.repository_location + '::test', 'input', '--json', '--error')) # ignore experimental warning info_json = json.loads(self.cmd('info', self.repository_location + '::test', '--json')) create_stats = create_json['cache']['stats'] info_stats = info_json['cache']['stats'] assert create_stats == info_stats self.cmd('delete', '--cache-only', self.repository_location) self.cmd('create', '--no-cache-sync', self.repository_location + '::test2', 'input') self.cmd('info', self.repository_location) self.cmd('check', self.repository_location) def test_extract_pattern_opt(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_regular_file('file1', size=1024 * 80) self.create_regular_file('file2', size=1024 * 80) self.create_regular_file('file_important', size=1024 * 80) self.cmd('create', self.repository_location + '::test', 'input') with changedir('output'): 
self.cmd('extract', '--pattern=+input/file_important', '--pattern=-input/file*', self.repository_location + '::test') self.assert_equal(sorted(os.listdir('output/input')), ['file_important']) def _assert_test_caches(self): with changedir('output'): self.cmd('extract', self.repository_location + '::test') self.assert_equal(sorted(os.listdir('output/input')), ['cache2', 'file1']) self.assert_equal(sorted(os.listdir('output/input/cache2')), [CACHE_TAG_NAME]) def test_exclude_caches(self): self._create_test_caches() self.cmd('create', '--exclude-caches', self.repository_location + '::test', 'input') self._assert_test_caches() def test_recreate_exclude_caches(self): self._create_test_caches() self.cmd('create', self.repository_location + '::test', 'input') self.cmd('recreate', '--exclude-caches', self.repository_location + '::test') self._assert_test_caches() def _create_test_tagged(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_regular_file('file1', size=1024 * 80) self.create_regular_file('tagged1/.NOBACKUP') self.create_regular_file('tagged2/00-NOBACKUP') self.create_regular_file('tagged3/.NOBACKUP/file2', size=1024) def _assert_test_tagged(self): with changedir('output'): self.cmd('extract', self.repository_location + '::test') self.assert_equal(sorted(os.listdir('output/input')), ['file1']) def test_exclude_tagged(self): self._create_test_tagged() self.cmd('create', '--exclude-if-present', '.NOBACKUP', '--exclude-if-present', '00-NOBACKUP', self.repository_location + '::test', 'input') self._assert_test_tagged() def test_recreate_exclude_tagged(self): self._create_test_tagged() self.cmd('create', self.repository_location + '::test', 'input') self.cmd('recreate', '--exclude-if-present', '.NOBACKUP', '--exclude-if-present', '00-NOBACKUP', self.repository_location + '::test') self._assert_test_tagged() def _create_test_keep_tagged(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_regular_file('file0', size=1024) self.create_regular_file('tagged1/.NOBACKUP1') self.create_regular_file('tagged1/file1', size=1024) self.create_regular_file('tagged2/.NOBACKUP2/subfile1', size=1024) self.create_regular_file('tagged2/file2', size=1024) self.create_regular_file('tagged3/%s' % CACHE_TAG_NAME, contents=CACHE_TAG_CONTENTS + b' extra stuff') self.create_regular_file('tagged3/file3', size=1024) self.create_regular_file('taggedall/.NOBACKUP1') self.create_regular_file('taggedall/.NOBACKUP2/subfile1', size=1024) self.create_regular_file('taggedall/%s' % CACHE_TAG_NAME, contents=CACHE_TAG_CONTENTS + b' extra stuff') self.create_regular_file('taggedall/file4', size=1024) def _assert_test_keep_tagged(self): with changedir('output'): self.cmd('extract', self.repository_location + '::test') self.assert_equal(sorted(os.listdir('output/input')), ['file0', 'tagged1', 'tagged2', 'tagged3', 'taggedall']) self.assert_equal(os.listdir('output/input/tagged1'), ['.NOBACKUP1']) self.assert_equal(os.listdir('output/input/tagged2'), ['.NOBACKUP2']) self.assert_equal(os.listdir('output/input/tagged3'), [CACHE_TAG_NAME]) self.assert_equal(sorted(os.listdir('output/input/taggedall')), ['.NOBACKUP1', '.NOBACKUP2', CACHE_TAG_NAME, ]) def test_exclude_keep_tagged(self): self._create_test_keep_tagged() self.cmd('create', '--exclude-if-present', '.NOBACKUP1', '--exclude-if-present', '.NOBACKUP2', '--exclude-caches', '--keep-exclude-tags', self.repository_location + '::test', 'input') self._assert_test_keep_tagged() def test_recreate_exclude_keep_tagged(self): 
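# (editorial note, hedged) same expectation as test_exclude_keep_tagged above, but the
# exclusion is applied by 'recreate' to an already existing archive; with
# --keep-exclude-tags only the tag files themselves (.NOBACKUP1, .NOBACKUP2 and the
# CACHE_TAG_NAME file) should remain inside the tagged directories, roughly equivalent to
# (hypothetical repository path):
#   borg recreate --exclude-if-present .NOBACKUP1 --exclude-if-present .NOBACKUP2 \
#       --exclude-caches --keep-exclude-tags /path/to/repo::test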
self._create_test_keep_tagged() self.cmd('create', self.repository_location + '::test', 'input') self.cmd('recreate', '--exclude-if-present', '.NOBACKUP1', '--exclude-if-present', '.NOBACKUP2', '--exclude-caches', '--keep-exclude-tags', self.repository_location + '::test') self._assert_test_keep_tagged() @pytest.mark.skipif(not are_hardlinks_supported(), reason='hardlinks not supported') def test_recreate_hardlinked_tags(self): # test for issue #4911 self.cmd('init', '--encryption=none', self.repository_location) self.create_regular_file('file1', contents=CACHE_TAG_CONTENTS) # "wrong" filename, but correct tag contents os.mkdir(os.path.join(self.input_path, 'subdir')) # to make sure the tag is encountered *after* file1 os.link(os.path.join(self.input_path, 'file1'), os.path.join(self.input_path, 'subdir', CACHE_TAG_NAME)) # correct tag name, hardlink to file1 self.cmd('create', self.repository_location + '::test', 'input') # in the "test" archive, we now have, in this order: # - a regular file item for "file1" # - a hardlink item for "CACHEDIR.TAG" referring back to file1 for its contents self.cmd('recreate', '--exclude-caches', '--keep-exclude-tags', self.repository_location + '::test') # if issue #4911 is present, the recreate will crash with a KeyError for "input/file1" @pytest.mark.skipif(not xattr.XATTR_FAKEROOT, reason='Linux capabilities test, requires fakeroot >= 1.20.2') def test_extract_capabilities(self): fchown = os.fchown # We need to manually patch chown to get the behaviour Linux has, since fakeroot does not # accurately model the interaction of chown(2) and Linux capabilities, i.e. it does not remove them. def patched_fchown(fd, uid, gid): xattr.setxattr(fd, b'security.capability', b'', follow_symlinks=False) fchown(fd, uid, gid) # The capability descriptor used here is valid and taken from a /usr/bin/ping capabilities = b'\x01\x00\x00\x02\x00 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' self.create_regular_file('file') xattr.setxattr(b'input/file', b'security.capability', capabilities) self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test', 'input') with changedir('output'): with patch.object(os, 'fchown', patched_fchown): self.cmd('extract', self.repository_location + '::test') assert xattr.getxattr(b'input/file', b'security.capability') == capabilities @pytest.mark.skipif(not xattr.XATTR_FAKEROOT, reason='xattr not supported on this system or on this version of' 'fakeroot') def test_extract_xattrs_errors(self): def patched_setxattr_E2BIG(*args, **kwargs): raise OSError(errno.E2BIG, 'E2BIG') def patched_setxattr_ENOTSUP(*args, **kwargs): raise OSError(errno.ENOTSUP, 'ENOTSUP') def patched_setxattr_EACCES(*args, **kwargs): raise OSError(errno.EACCES, 'EACCES') self.create_regular_file('file') xattr.setxattr(b'input/file', b'user.attribute', b'value') self.cmd('init', self.repository_location, '-e' 'none') self.cmd('create', self.repository_location + '::test', 'input') with changedir('output'): input_abspath = os.path.abspath('input/file') with patch.object(xattr, 'setxattr', patched_setxattr_E2BIG): out = self.cmd('extract', self.repository_location + '::test', exit_code=EXIT_WARNING) assert ': when setting extended attribute user.attribute: too big for this filesystem\n' in out os.remove(input_abspath) with patch.object(xattr, 'setxattr', patched_setxattr_ENOTSUP): out = self.cmd('extract', self.repository_location + '::test', exit_code=EXIT_WARNING) assert ': when setting extended attribute 
user.attribute: xattrs not supported on this filesystem\n' in out os.remove(input_abspath) with patch.object(xattr, 'setxattr', patched_setxattr_EACCES): out = self.cmd('extract', self.repository_location + '::test', exit_code=EXIT_WARNING) assert ': when setting extended attribute user.attribute: Permission denied\n' in out assert os.path.isfile(input_abspath) def test_path_normalization(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_regular_file('dir1/dir2/file', size=1024 * 80) with changedir('input/dir1/dir2'): self.cmd('create', self.repository_location + '::test', '../../../input/dir1/../dir1/dir2/..') output = self.cmd('list', self.repository_location + '::test') self.assert_not_in('..', output) self.assert_in(' input/dir1/dir2/file', output) def test_exclude_normalization(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_regular_file('file1', size=1024 * 80) self.create_regular_file('file2', size=1024 * 80) with changedir('input'): self.cmd('create', '--exclude=file1', self.repository_location + '::test1', '.') with changedir('output'): self.cmd('extract', self.repository_location + '::test1') self.assert_equal(sorted(os.listdir('output')), ['file2']) with changedir('input'): self.cmd('create', '--exclude=./file1', self.repository_location + '::test2', '.') with changedir('output'): self.cmd('extract', self.repository_location + '::test2') self.assert_equal(sorted(os.listdir('output')), ['file2']) self.cmd('create', '--exclude=input/./file1', self.repository_location + '::test3', 'input') with changedir('output'): self.cmd('extract', self.repository_location + '::test3') self.assert_equal(sorted(os.listdir('output/input')), ['file2']) def test_repeated_files(self): self.create_regular_file('file1', size=1024 * 80) self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test', 'input', 'input') def test_overwrite(self): self.create_regular_file('file1', size=1024 * 80) self.create_regular_file('dir2/file2', size=1024 * 80) self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test', 'input') # Overwriting regular files and directories should be supported os.mkdir('output/input') os.mkdir('output/input/file1') os.mkdir('output/input/dir2') with changedir('output'): self.cmd('extract', self.repository_location + '::test') self.assert_dirs_equal('input', 'output/input') # But non-empty dirs should fail os.unlink('output/input/file1') os.mkdir('output/input/file1') os.mkdir('output/input/file1/dir') with changedir('output'): self.cmd('extract', self.repository_location + '::test', exit_code=1) def test_rename(self): self.create_regular_file('file1', size=1024 * 80) self.create_regular_file('dir2/file2', size=1024 * 80) self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test', 'input') self.cmd('create', self.repository_location + '::test.2', 'input') self.cmd('extract', '--dry-run', self.repository_location + '::test') self.cmd('extract', '--dry-run', self.repository_location + '::test.2') self.cmd('rename', self.repository_location + '::test', 'test.3') self.cmd('extract', '--dry-run', self.repository_location + '::test.2') self.cmd('rename', self.repository_location + '::test.2', 'test.4') self.cmd('extract', '--dry-run', self.repository_location + '::test.3') self.cmd('extract', '--dry-run', self.repository_location + 
'::test.4') # Make sure both archives have been renamed with Repository(self.repository_path) as repository: manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK) self.assert_equal(len(manifest.archives), 2) self.assert_in('test.3', manifest.archives) self.assert_in('test.4', manifest.archives) def test_info(self): self.create_regular_file('file1', size=1024 * 80) self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test', 'input') info_repo = self.cmd('info', self.repository_location) assert 'All archives:' in info_repo info_archive = self.cmd('info', self.repository_location + '::test') assert 'Archive name: test\n' in info_archive info_archive = self.cmd('info', '--first', '1', self.repository_location) assert 'Archive name: test\n' in info_archive def test_info_json(self): self.create_regular_file('file1', size=1024 * 80) self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test', 'input') info_repo = json.loads(self.cmd('info', '--json', self.repository_location)) repository = info_repo['repository'] assert len(repository['id']) == 64 assert 'last_modified' in repository assert datetime.strptime(repository['last_modified'], ISO_FORMAT) # must not raise assert info_repo['encryption']['mode'] == 'repokey' assert 'keyfile' not in info_repo['encryption'] cache = info_repo['cache'] stats = cache['stats'] assert all(isinstance(o, int) for o in stats.values()) assert all(key in stats for key in ('total_chunks', 'total_csize', 'total_size', 'total_unique_chunks', 'unique_csize', 'unique_size')) info_archive = json.loads(self.cmd('info', '--json', self.repository_location + '::test')) assert info_repo['repository'] == info_archive['repository'] assert info_repo['cache'] == info_archive['cache'] archives = info_archive['archives'] assert len(archives) == 1 archive = archives[0] assert archive['name'] == 'test' assert isinstance(archive['command_line'], list) assert isinstance(archive['duration'], float) assert len(archive['id']) == 64 assert 'stats' in archive assert datetime.strptime(archive['start'], ISO_FORMAT) assert datetime.strptime(archive['end'], ISO_FORMAT) def test_comment(self): self.create_regular_file('file1', size=1024 * 80) self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test1', 'input') self.cmd('create', '--comment', 'this is the comment', self.repository_location + '::test2', 'input') self.cmd('create', '--comment', '"deleted" comment', self.repository_location + '::test3', 'input') self.cmd('create', '--comment', 'preserved comment', self.repository_location + '::test4', 'input') assert 'Comment: \n' in self.cmd('info', self.repository_location + '::test1') assert 'Comment: this is the comment' in self.cmd('info', self.repository_location + '::test2') self.cmd('recreate', self.repository_location + '::test1', '--comment', 'added comment') self.cmd('recreate', self.repository_location + '::test2', '--comment', 'modified comment') self.cmd('recreate', self.repository_location + '::test3', '--comment', '') self.cmd('recreate', self.repository_location + '::test4', '12345') assert 'Comment: added comment' in self.cmd('info', self.repository_location + '::test1') assert 'Comment: modified comment' in self.cmd('info', self.repository_location + '::test2') assert 'Comment: \n' in self.cmd('info', self.repository_location + '::test3') assert 'Comment: preserved comment' in 
self.cmd('info', self.repository_location + '::test4') def test_delete(self): self.create_regular_file('file1', size=1024 * 80) self.create_regular_file('dir2/file2', size=1024 * 80) self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test', 'input') self.cmd('create', self.repository_location + '::test.2', 'input') self.cmd('create', self.repository_location + '::test.3', 'input') self.cmd('create', self.repository_location + '::another_test.1', 'input') self.cmd('create', self.repository_location + '::another_test.2', 'input') self.cmd('extract', '--dry-run', self.repository_location + '::test') self.cmd('extract', '--dry-run', self.repository_location + '::test.2') self.cmd('delete', '--prefix', 'another_', self.repository_location) self.cmd('delete', '--last', '1', self.repository_location) self.cmd('delete', self.repository_location + '::test') self.cmd('extract', '--dry-run', self.repository_location + '::test.2') output = self.cmd('delete', '--stats', self.repository_location + '::test.2') self.assert_in('Deleted data:', output) # Make sure all data except the manifest has been deleted with Repository(self.repository_path) as repository: self.assert_equal(len(repository), 1) def test_delete_multiple(self): self.create_regular_file('file1', size=1024 * 80) self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test1', 'input') self.cmd('create', self.repository_location + '::test2', 'input') self.cmd('create', self.repository_location + '::test3', 'input') self.cmd('delete', self.repository_location + '::test1', 'test2') self.cmd('extract', '--dry-run', self.repository_location + '::test3') self.cmd('delete', self.repository_location, 'test3') assert not self.cmd('list', self.repository_location) def test_delete_repo(self): self.create_regular_file('file1', size=1024 * 80) self.create_regular_file('dir2/file2', size=1024 * 80) self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test', 'input') self.cmd('create', self.repository_location + '::test.2', 'input') os.environ['BORG_DELETE_I_KNOW_WHAT_I_AM_DOING'] = 'no' self.cmd('delete', self.repository_location, exit_code=2) assert os.path.exists(self.repository_path) os.environ['BORG_DELETE_I_KNOW_WHAT_I_AM_DOING'] = 'YES' self.cmd('delete', self.repository_location) # Make sure the repo is gone self.assertFalse(os.path.exists(self.repository_path)) def test_delete_force(self): self.cmd('init', '--encryption=none', self.repository_location) self.create_src_archive('test') with Repository(self.repository_path, exclusive=True) as repository: manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK) archive = Archive(repository, key, manifest, 'test') for item in archive.iter_items(): if item.path.endswith('testsuite/archiver.py'): repository.delete(item.chunks[-1].id) break else: assert False # missed the file repository.commit(compact=False) output = self.cmd('delete', '--force', self.repository_location + '::test') self.assert_in('deleted archive was corrupted', output) self.cmd('check', '--repair', self.repository_location) output = self.cmd('list', self.repository_location) self.assert_not_in('test', output) def test_delete_double_force(self): self.cmd('init', '--encryption=none', self.repository_location) self.create_src_archive('test') with Repository(self.repository_path, exclusive=True) as repository: manifest, key = 
Manifest.load(repository, Manifest.NO_OPERATION_CHECK) archive = Archive(repository, key, manifest, 'test') id = archive.metadata.items[0] repository.put(id, b'corrupted items metadata stream chunk') repository.commit(compact=False) self.cmd('delete', '--force', '--force', self.repository_location + '::test') self.cmd('check', '--repair', self.repository_location) output = self.cmd('list', self.repository_location) self.assert_not_in('test', output) def test_corrupted_repository(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_src_archive('test') self.cmd('extract', '--dry-run', self.repository_location + '::test') output = self.cmd('check', '--show-version', self.repository_location) self.assert_in('borgbackup version', output) # implied output even without --info given self.assert_not_in('Starting repository check', output) # --info not given for root logger name = sorted(os.listdir(os.path.join(self.tmpdir, 'repository', 'data', '0')), reverse=True)[1] with open(os.path.join(self.tmpdir, 'repository', 'data', '0', name), 'r+b') as fd: fd.seek(100) fd.write(b'XXXX') output = self.cmd('check', '--info', self.repository_location, exit_code=1) self.assert_in('Starting repository check', output) # --info given for root logger def test_readonly_check(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_src_archive('test') with self.read_only(self.repository_path): # verify that command normally doesn't work with read-only repo if self.FORK_DEFAULT: self.cmd('check', '--verify-data', self.repository_location, exit_code=EXIT_ERROR) else: with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo: self.cmd('check', '--verify-data', self.repository_location) if isinstance(excinfo.value, RemoteRepository.RPCError): assert excinfo.value.exception_class == 'LockFailed' # verify that command works with read-only repo when using --bypass-lock self.cmd('check', '--verify-data', self.repository_location, '--bypass-lock') def test_readonly_diff(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_src_archive('a') self.create_src_archive('b') with self.read_only(self.repository_path): # verify that command normally doesn't work with read-only repo if self.FORK_DEFAULT: self.cmd('diff', '%s::a' % self.repository_location, 'b', exit_code=EXIT_ERROR) else: with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo: self.cmd('diff', '%s::a' % self.repository_location, 'b') if isinstance(excinfo.value, RemoteRepository.RPCError): assert excinfo.value.exception_class == 'LockFailed' # verify that command works with read-only repo when using --bypass-lock self.cmd('diff', '%s::a' % self.repository_location, 'b', '--bypass-lock') def test_readonly_export_tar(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_src_archive('test') with self.read_only(self.repository_path): # verify that command normally doesn't work with read-only repo if self.FORK_DEFAULT: self.cmd('export-tar', '%s::test' % self.repository_location, 'test.tar', exit_code=EXIT_ERROR) else: with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo: self.cmd('export-tar', '%s::test' % self.repository_location, 'test.tar') if isinstance(excinfo.value, RemoteRepository.RPCError): assert excinfo.value.exception_class == 'LockFailed' # verify that command works with read-only repo when using --bypass-lock self.cmd('export-tar', '%s::test' % self.repository_location, 'test.tar', 
'--bypass-lock') def test_readonly_extract(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_src_archive('test') with self.read_only(self.repository_path): # verify that command normally doesn't work with read-only repo if self.FORK_DEFAULT: self.cmd('extract', '%s::test' % self.repository_location, exit_code=EXIT_ERROR) else: with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo: self.cmd('extract', '%s::test' % self.repository_location) if isinstance(excinfo.value, RemoteRepository.RPCError): assert excinfo.value.exception_class == 'LockFailed' # verify that command works with read-only repo when using --bypass-lock self.cmd('extract', '%s::test' % self.repository_location, '--bypass-lock') def test_readonly_info(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_src_archive('test') with self.read_only(self.repository_path): # verify that command normally doesn't work with read-only repo if self.FORK_DEFAULT: self.cmd('info', self.repository_location, exit_code=EXIT_ERROR) else: with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo: self.cmd('info', self.repository_location) if isinstance(excinfo.value, RemoteRepository.RPCError): assert excinfo.value.exception_class == 'LockFailed' # verify that command works with read-only repo when using --bypass-lock self.cmd('info', self.repository_location, '--bypass-lock') def test_readonly_list(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_src_archive('test') with self.read_only(self.repository_path): # verify that command normally doesn't work with read-only repo if self.FORK_DEFAULT: self.cmd('list', self.repository_location, exit_code=EXIT_ERROR) else: with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo: self.cmd('list', self.repository_location) if isinstance(excinfo.value, RemoteRepository.RPCError): assert excinfo.value.exception_class == 'LockFailed' # verify that command works with read-only repo when using --bypass-lock self.cmd('list', self.repository_location, '--bypass-lock') @unittest.skipUnless(llfuse, 'llfuse not installed') def test_readonly_mount(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_src_archive('test') with self.read_only(self.repository_path): # verify that command normally doesn't work with read-only repo if self.FORK_DEFAULT: with self.fuse_mount(self.repository_location, exit_code=EXIT_ERROR): pass else: with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo: # self.fuse_mount always assumes fork=True, so for this test we have to manually set fork=False with self.fuse_mount(self.repository_location, fork=False): pass if isinstance(excinfo.value, RemoteRepository.RPCError): assert excinfo.value.exception_class == 'LockFailed' # verify that command works with read-only repo when using --bypass-lock with self.fuse_mount(self.repository_location, None, '--bypass-lock'): pass @pytest.mark.skipif('BORG_TESTS_IGNORE_MODES' in os.environ, reason='modes unreliable') def test_umask(self): self.create_regular_file('file1', size=1024 * 80) self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test', 'input') mode = os.stat(self.repository_path).st_mode self.assertEqual(stat.S_IMODE(mode), 0o700) def test_create_dry_run(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', '--dry-run', self.repository_location + 
'::test', 'input') # Make sure no archive has been created with Repository(self.repository_path) as repository: manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK) self.assert_equal(len(manifest.archives), 0) def add_unknown_feature(self, operation): with Repository(self.repository_path, exclusive=True) as repository: manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK) manifest.config[b'feature_flags'] = {operation.value.encode(): {b'mandatory': [b'unknown-feature']}} manifest.write() repository.commit(compact=False) def cmd_raises_unknown_feature(self, args): if self.FORK_DEFAULT: self.cmd(*args, exit_code=EXIT_ERROR) else: with pytest.raises(MandatoryFeatureUnsupported) as excinfo: self.cmd(*args) assert excinfo.value.args == (['unknown-feature'],) def test_unknown_feature_on_create(self): print(self.cmd('init', '--encryption=repokey', self.repository_location)) self.add_unknown_feature(Manifest.Operation.WRITE) self.cmd_raises_unknown_feature(['create', self.repository_location + '::test', 'input']) def test_unknown_feature_on_cache_sync(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('delete', '--cache-only', self.repository_location) self.add_unknown_feature(Manifest.Operation.READ) self.cmd_raises_unknown_feature(['create', self.repository_location + '::test', 'input']) def test_unknown_feature_on_change_passphrase(self): print(self.cmd('init', '--encryption=repokey', self.repository_location)) self.add_unknown_feature(Manifest.Operation.CHECK) self.cmd_raises_unknown_feature(['key', 'change-passphrase', self.repository_location]) def test_unknown_feature_on_read(self): print(self.cmd('init', '--encryption=repokey', self.repository_location)) self.cmd('create', self.repository_location + '::test', 'input') self.add_unknown_feature(Manifest.Operation.READ) with changedir('output'): self.cmd_raises_unknown_feature(['extract', self.repository_location + '::test']) self.cmd_raises_unknown_feature(['list', self.repository_location]) self.cmd_raises_unknown_feature(['info', self.repository_location + '::test']) def test_unknown_feature_on_rename(self): print(self.cmd('init', '--encryption=repokey', self.repository_location)) self.cmd('create', self.repository_location + '::test', 'input') self.add_unknown_feature(Manifest.Operation.CHECK) self.cmd_raises_unknown_feature(['rename', self.repository_location + '::test', 'other']) def test_unknown_feature_on_delete(self): print(self.cmd('init', '--encryption=repokey', self.repository_location)) self.cmd('create', self.repository_location + '::test', 'input') self.add_unknown_feature(Manifest.Operation.DELETE) # delete of an archive raises self.cmd_raises_unknown_feature(['delete', self.repository_location + '::test']) self.cmd_raises_unknown_feature(['prune', '--keep-daily=3', self.repository_location]) # delete of the whole repository ignores features self.cmd('delete', self.repository_location) @unittest.skipUnless(llfuse, 'llfuse not installed') def test_unknown_feature_on_mount(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test', 'input') self.add_unknown_feature(Manifest.Operation.READ) mountpoint = os.path.join(self.tmpdir, 'mountpoint') os.mkdir(mountpoint) # XXX this might hang if it doesn't raise an error self.cmd_raises_unknown_feature(['mount', self.repository_location + '::test', mountpoint]) @pytest.mark.allow_cache_wipe def test_unknown_mandatory_feature_in_cache(self): if 
self.prefix: path_prefix = 'ssh://__testsuite__' else: path_prefix = '' print(self.cmd('init', '--encryption=repokey', self.repository_location)) with Repository(self.repository_path, exclusive=True) as repository: if path_prefix: repository._location = Location(self.repository_location) manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK) with Cache(repository, key, manifest) as cache: cache.begin_txn() cache.cache_config.mandatory_features = set(['unknown-feature']) cache.commit() if self.FORK_DEFAULT: self.cmd('create', self.repository_location + '::test', 'input') else: called = False wipe_cache_safe = LocalCache.wipe_cache def wipe_wrapper(*args): nonlocal called called = True wipe_cache_safe(*args) with patch.object(LocalCache, 'wipe_cache', wipe_wrapper): self.cmd('create', self.repository_location + '::test', 'input') assert called with Repository(self.repository_path, exclusive=True) as repository: if path_prefix: repository._location = Location(self.repository_location) manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK) with Cache(repository, key, manifest) as cache: assert cache.cache_config.mandatory_features == set([]) def test_progress_on(self): self.create_regular_file('file1', size=1024 * 80) self.cmd('init', '--encryption=repokey', self.repository_location) output = self.cmd('create', '--progress', self.repository_location + '::test4', 'input') self.assert_in("\r", output) def test_progress_off(self): self.create_regular_file('file1', size=1024 * 80) self.cmd('init', '--encryption=repokey', self.repository_location) output = self.cmd('create', self.repository_location + '::test5', 'input') self.assert_not_in("\r", output) def test_file_status(self): """test that various file status show expected results clearly incomplete: only tests for the weird "unchanged" status for now""" self.create_regular_file('file1', size=1024 * 80) time.sleep(1) # file2 must have newer timestamps than file1 self.create_regular_file('file2', size=1024 * 80) self.cmd('init', '--encryption=repokey', self.repository_location) output = self.cmd('create', '--list', self.repository_location + '::test', 'input') self.assert_in("A input/file1", output) self.assert_in("A input/file2", output) # should find first file as unmodified output = self.cmd('create', '--list', self.repository_location + '::test1', 'input') self.assert_in("U input/file1", output) # this is expected, although surprising, for why, see: # https://borgbackup.readthedocs.org/en/latest/faq.html#i-am-seeing-a-added-status-for-a-unchanged-file self.assert_in("A input/file2", output) def test_file_status_cs_cache_mode(self): """test that a changed file with faked "previous" mtime still gets backed up in ctime,size cache_mode""" self.create_regular_file('file1', contents=b'123') time.sleep(1) # file2 must have newer timestamps than file1 self.create_regular_file('file2', size=10) self.cmd('init', '--encryption=repokey', self.repository_location) output = self.cmd('create', '--list', '--files-cache=ctime,size', self.repository_location + '::test1', 'input') # modify file1, but cheat with the mtime (and atime) and also keep same size: st = os.stat('input/file1') self.create_regular_file('file1', contents=b'321') os.utime('input/file1', ns=(st.st_atime_ns, st.st_mtime_ns)) # this mode uses ctime for change detection, so it should find file1 as modified output = self.cmd('create', '--list', '--files-cache=ctime,size', self.repository_location + '::test2', 'input') self.assert_in("M input/file1", output) def 
test_file_status_ms_cache_mode(self): """test that a chmod'ed file with no content changes does not get chunked again in mtime,size cache_mode""" self.create_regular_file('file1', size=10) time.sleep(1) # file2 must have newer timestamps than file1 self.create_regular_file('file2', size=10) self.cmd('init', '--encryption=repokey', self.repository_location) output = self.cmd('create', '--list', '--files-cache=mtime,size', self.repository_location + '::test1', 'input') # change mode of file1, no content change: st = os.stat('input/file1') os.chmod('input/file1', st.st_mode ^ stat.S_IRWXO) # this triggers a ctime change, but mtime is unchanged # this mode uses mtime for change detection, so it should find file1 as unmodified output = self.cmd('create', '--list', '--files-cache=mtime,size', self.repository_location + '::test2', 'input') self.assert_in("U input/file1", output) def test_file_status_rc_cache_mode(self): """test that files get rechunked unconditionally in rechunk,ctime cache mode""" self.create_regular_file('file1', size=10) time.sleep(1) # file2 must have newer timestamps than file1 self.create_regular_file('file2', size=10) self.cmd('init', '--encryption=repokey', self.repository_location) output = self.cmd('create', '--list', '--files-cache=rechunk,ctime', self.repository_location + '::test1', 'input') # no changes here, but this mode rechunks unconditionally output = self.cmd('create', '--list', '--files-cache=rechunk,ctime', self.repository_location + '::test2', 'input') self.assert_in("A input/file1", output) def test_file_status_excluded(self): """test that excluded paths are listed""" self.create_regular_file('file1', size=1024 * 80) time.sleep(1) # file2 must have newer timestamps than file1 self.create_regular_file('file2', size=1024 * 80) if has_lchflags: self.create_regular_file('file3', size=1024 * 80) platform.set_flags(os.path.join(self.input_path, 'file3'), stat.UF_NODUMP) self.cmd('init', '--encryption=repokey', self.repository_location) output = self.cmd('create', '--list', '--exclude-nodump', self.repository_location + '::test', 'input') self.assert_in("A input/file1", output) self.assert_in("A input/file2", output) if has_lchflags: self.assert_in("x input/file3", output) # should find second file as excluded output = self.cmd('create', '--list', '--exclude-nodump', self.repository_location + '::test1', 'input', '--exclude', '*/file2') self.assert_in("U input/file1", output) self.assert_in("x input/file2", output) if has_lchflags: self.assert_in("x input/file3", output) def test_create_json(self): self.create_regular_file('file1', size=1024 * 80) self.cmd('init', '--encryption=repokey', self.repository_location) create_info = json.loads(self.cmd('create', '--json', self.repository_location + '::test', 'input')) # The usual keys assert 'encryption' in create_info assert 'repository' in create_info assert 'cache' in create_info assert 'last_modified' in create_info['repository'] archive = create_info['archive'] assert archive['name'] == 'test' assert isinstance(archive['command_line'], list) assert isinstance(archive['duration'], float) assert len(archive['id']) == 64 assert 'stats' in archive def test_create_topical(self): self.create_regular_file('file1', size=1024 * 80) time.sleep(1) # file2 must have newer timestamps than file1 self.create_regular_file('file2', size=1024 * 80) self.cmd('init', '--encryption=repokey', self.repository_location) # no listing by default output = self.cmd('create', self.repository_location + '::test', 'input') 
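# (editorial note, hedged) without --list, 'create' is not expected to report individual
# files at all; the --filter values used further down restrict --list output to the given
# status letters (A added, M modified, U unchanged, x excluded), which is what this test
# verifies.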
self.assert_not_in('file1', output) # shouldn't be listed even if unchanged output = self.cmd('create', self.repository_location + '::test0', 'input') self.assert_not_in('file1', output) # should list the file as unchanged output = self.cmd('create', '--list', '--filter=U', self.repository_location + '::test1', 'input') self.assert_in('file1', output) # should *not* list the file as changed output = self.cmd('create', '--list', '--filter=AM', self.repository_location + '::test2', 'input') self.assert_not_in('file1', output) # change the file self.create_regular_file('file1', size=1024 * 100) # should list the file as changed output = self.cmd('create', '--list', '--filter=AM', self.repository_location + '::test3', 'input') self.assert_in('file1', output) @pytest.mark.skipif(not are_fifos_supported(), reason='FIFOs not supported') def test_create_read_special_symlink(self): from threading import Thread def fifo_feeder(fifo_fn, data): fd = os.open(fifo_fn, os.O_WRONLY) try: os.write(fd, data) finally: os.close(fd) self.cmd('init', '--encryption=repokey', self.repository_location) archive = self.repository_location + '::test' data = b'foobar' * 1000 fifo_fn = os.path.join(self.input_path, 'fifo') link_fn = os.path.join(self.input_path, 'link_fifo') os.mkfifo(fifo_fn) os.symlink(fifo_fn, link_fn) t = Thread(target=fifo_feeder, args=(fifo_fn, data)) t.start() try: self.cmd('create', '--read-special', archive, 'input/link_fifo') finally: t.join() with changedir('output'): self.cmd('extract', archive) fifo_fn = 'input/link_fifo' with open(fifo_fn, 'rb') as f: extracted_data = f.read() assert extracted_data == data def test_create_read_special_broken_symlink(self): os.symlink('somewhere does not exist', os.path.join(self.input_path, 'link')) self.cmd('init', '--encryption=repokey', self.repository_location) archive = self.repository_location + '::test' self.cmd('create', '--read-special', archive, 'input') output = self.cmd('list', archive) assert 'input/link -> somewhere does not exist' in output # def test_cmdline_compatibility(self): # self.create_regular_file('file1', size=1024 * 80) # self.cmd('init', '--encryption=repokey', self.repository_location) # self.cmd('create', self.repository_location + '::test', 'input') # output = self.cmd('foo', self.repository_location, '--old') # self.assert_in('"--old" has been deprecated. 
Use "--new" instead', output) def test_prune_repository(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test1', src_dir) self.cmd('create', self.repository_location + '::test2', src_dir) # these are not really a checkpoints, but they look like some: self.cmd('create', self.repository_location + '::test3.checkpoint', src_dir) self.cmd('create', self.repository_location + '::test3.checkpoint.1', src_dir) self.cmd('create', self.repository_location + '::test4.checkpoint', src_dir) output = self.cmd('prune', '--list', '--dry-run', self.repository_location, '--keep-daily=1') assert re.search(r'Would prune:\s+test1', output) # must keep the latest non-checkpoint archive: assert re.search(r'Keeping archive \(rule: daily #1\):\s+test2', output) # must keep the latest checkpoint archive: assert re.search(r'Keeping checkpoint archive:\s+test4.checkpoint', output) output = self.cmd('list', '--consider-checkpoints', self.repository_location) self.assert_in('test1', output) self.assert_in('test2', output) self.assert_in('test3.checkpoint', output) self.assert_in('test3.checkpoint.1', output) self.assert_in('test4.checkpoint', output) self.cmd('prune', self.repository_location, '--keep-daily=1') output = self.cmd('list', '--consider-checkpoints', self.repository_location) self.assert_not_in('test1', output) # the latest non-checkpoint archive must be still there: self.assert_in('test2', output) # only the latest checkpoint archive must still be there: self.assert_not_in('test3.checkpoint', output) self.assert_not_in('test3.checkpoint.1', output) self.assert_in('test4.checkpoint', output) # now we supercede the latest checkpoint by a successful backup: self.cmd('create', self.repository_location + '::test5', src_dir) self.cmd('prune', self.repository_location, '--keep-daily=2') output = self.cmd('list', '--consider-checkpoints', self.repository_location) # all checkpoints should be gone now: self.assert_not_in('checkpoint', output) # the latest archive must be still there self.assert_in('test5', output) # Given a date and time in local tz, create a UTC timestamp string suitable # for create --timestamp command line option def _to_utc_timestamp(self, year, month, day, hour, minute, second): dtime = datetime(year, month, day, hour, minute, second, 0, dateutil.tz.gettz()) return dtime.astimezone(dateutil.tz.UTC).strftime("%Y-%m-%dT%H:%M:%S") def _create_archive_ts(self, name, y, m, d, H=0, M=0, S=0): loc = self.repository_location + '::' + name self.cmd('create', '--timestamp', self._to_utc_timestamp(y, m, d, H, M, S), loc, src_dir) # This test must match docs/misc/prune-example.txt def test_prune_repository_example(self): self.cmd('init', '--encryption=repokey', self.repository_location) # Archives that will be kept, per the example # Oldest archive self._create_archive_ts('test01', 2015, 1, 1) # 6 monthly archives self._create_archive_ts('test02', 2015, 6, 30) self._create_archive_ts('test03', 2015, 7, 31) self._create_archive_ts('test04', 2015, 8, 31) self._create_archive_ts('test05', 2015, 9, 30) self._create_archive_ts('test06', 2015, 10, 31) self._create_archive_ts('test07', 2015, 11, 30) # 14 daily archives self._create_archive_ts('test08', 2015, 12, 17) self._create_archive_ts('test09', 2015, 12, 18) self._create_archive_ts('test10', 2015, 12, 20) self._create_archive_ts('test11', 2015, 12, 21) self._create_archive_ts('test12', 2015, 12, 22) self._create_archive_ts('test13', 2015, 12, 23) self._create_archive_ts('test14', 
2015, 12, 24) self._create_archive_ts('test15', 2015, 12, 25) self._create_archive_ts('test16', 2015, 12, 26) self._create_archive_ts('test17', 2015, 12, 27) self._create_archive_ts('test18', 2015, 12, 28) self._create_archive_ts('test19', 2015, 12, 29) self._create_archive_ts('test20', 2015, 12, 30) self._create_archive_ts('test21', 2015, 12, 31) # Additional archives that would be pruned # The second backup of the year self._create_archive_ts('test22', 2015, 1, 2) # The next older monthly backup self._create_archive_ts('test23', 2015, 5, 31) # The next older daily backup self._create_archive_ts('test24', 2015, 12, 16) output = self.cmd('prune', '--list', '--dry-run', self.repository_location, '--keep-daily=14', '--keep-monthly=6', '--keep-yearly=1') # Prune second backup of the year assert re.search(r'Would prune:\s+test22', output) # Prune next older monthly and daily backups assert re.search(r'Would prune:\s+test23', output) assert re.search(r'Would prune:\s+test24', output) # Must keep the other 21 backups # Yearly is kept as oldest archive assert re.search(r'Keeping archive \(rule: yearly\[oldest\] #1\):\s+test01', output) for i in range(1, 7): assert re.search(r'Keeping archive \(rule: monthly #' + str(i) + r'\):\s+test' + ("%02d" % (8-i)), output) for i in range(1, 15): assert re.search(r'Keeping archive \(rule: daily #' + str(i) + r'\):\s+test' + ("%02d" % (22-i)), output) output = self.cmd('list', self.repository_location) # Nothing pruned after dry run for i in range(1, 25): self.assert_in('test%02d' % i, output) self.cmd('prune', self.repository_location, '--keep-daily=14', '--keep-monthly=6', '--keep-yearly=1') output = self.cmd('list', self.repository_location) # All matching backups plus oldest kept for i in range(1, 22): self.assert_in('test%02d' % i, output) # Other backups have been pruned for i in range(22, 25): self.assert_not_in('test%02d' % i, output) # With an initial and daily backup, prune daily until oldest is replaced by a monthly backup def test_prune_retain_and_expire_oldest(self): self.cmd('init', '--encryption=repokey', self.repository_location) # Initial backup self._create_archive_ts('original_archive', 2020, 9, 1, 11, 15) # Archive and prune daily for 30 days for i in range(1, 31): self._create_archive_ts('september%02d' % i, 2020, 9, i, 12) self.cmd('prune', self.repository_location, '--keep-daily=7', '--keep-monthly=1') # Archive and prune 6 days into the next month for i in range(1, 7): self._create_archive_ts('october%02d' % i, 2020, 10, i, 12) self.cmd('prune', self.repository_location, '--keep-daily=7', '--keep-monthly=1') # Oldest backup is still retained output = self.cmd('prune', '--list', '--dry-run', self.repository_location, '--keep-daily=7', '--keep-monthly=1') assert re.search(r'Keeping archive \(rule: monthly\[oldest\] #1' + r'\):\s+original_archive', output) # Archive one more day and prune. self._create_archive_ts('october07', 2020, 10, 7, 12) self.cmd('prune', self.repository_location, '--keep-daily=7', '--keep-monthly=1') # Last day of previous month is retained as monthly, and oldest is expired. 
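# (editorial note, hedged) once september30 qualifies as a regular monthly archive, the
# oldest-archive fallback that previously kept original_archive (the "monthly[oldest]"
# rule asserted above) no longer applies, so original_archive becomes prunable.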
output = self.cmd('prune', '--list', '--dry-run', self.repository_location, '--keep-daily=7', '--keep-monthly=1') assert re.search(r'Keeping archive \(rule: monthly #1\):\s+september30', output) self.assert_not_in('original_archive', output) def test_prune_repository_save_space(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test1', src_dir) self.cmd('create', self.repository_location + '::test2', src_dir) output = self.cmd('prune', '--list', '--dry-run', self.repository_location, '--keep-daily=1') assert re.search(r'Keeping archive \(rule: daily #1\):\s+test2', output) assert re.search(r'Would prune:\s+test1', output) output = self.cmd('list', self.repository_location) self.assert_in('test1', output) self.assert_in('test2', output) self.cmd('prune', '--save-space', self.repository_location, '--keep-daily=1') output = self.cmd('list', self.repository_location) self.assert_not_in('test1', output) self.assert_in('test2', output) def test_prune_repository_prefix(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::foo-2015-08-12-10:00', src_dir) self.cmd('create', self.repository_location + '::foo-2015-08-12-20:00', src_dir) self.cmd('create', self.repository_location + '::bar-2015-08-12-10:00', src_dir) self.cmd('create', self.repository_location + '::bar-2015-08-12-20:00', src_dir) output = self.cmd('prune', '--list', '--dry-run', self.repository_location, '--keep-daily=1', '--prefix=foo-') assert re.search(r'Keeping archive \(rule: daily #1\):\s+foo-2015-08-12-20:00', output) assert re.search(r'Would prune:\s+foo-2015-08-12-10:00', output) output = self.cmd('list', self.repository_location) self.assert_in('foo-2015-08-12-10:00', output) self.assert_in('foo-2015-08-12-20:00', output) self.assert_in('bar-2015-08-12-10:00', output) self.assert_in('bar-2015-08-12-20:00', output) self.cmd('prune', self.repository_location, '--keep-daily=1', '--prefix=foo-') output = self.cmd('list', self.repository_location) self.assert_not_in('foo-2015-08-12-10:00', output) self.assert_in('foo-2015-08-12-20:00', output) self.assert_in('bar-2015-08-12-10:00', output) self.assert_in('bar-2015-08-12-20:00', output) def test_prune_repository_glob(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::2015-08-12-10:00-foo', src_dir) self.cmd('create', self.repository_location + '::2015-08-12-20:00-foo', src_dir) self.cmd('create', self.repository_location + '::2015-08-12-10:00-bar', src_dir) self.cmd('create', self.repository_location + '::2015-08-12-20:00-bar', src_dir) output = self.cmd('prune', '--list', '--dry-run', self.repository_location, '--keep-daily=1', '--glob-archives=2015-*-foo') assert re.search(r'Keeping archive \(rule: daily #1\):\s+2015-08-12-20:00-foo', output) assert re.search(r'Would prune:\s+2015-08-12-10:00-foo', output) output = self.cmd('list', self.repository_location) self.assert_in('2015-08-12-10:00-foo', output) self.assert_in('2015-08-12-20:00-foo', output) self.assert_in('2015-08-12-10:00-bar', output) self.assert_in('2015-08-12-20:00-bar', output) self.cmd('prune', self.repository_location, '--keep-daily=1', '--glob-archives=2015-*-foo') output = self.cmd('list', self.repository_location) self.assert_not_in('2015-08-12-10:00-foo', output) self.assert_in('2015-08-12-20:00-foo', output) self.assert_in('2015-08-12-10:00-bar', output) self.assert_in('2015-08-12-20:00-bar', 
output) def test_list_prefix(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test-1', src_dir) self.cmd('create', self.repository_location + '::something-else-than-test-1', src_dir) self.cmd('create', self.repository_location + '::test-2', src_dir) output = self.cmd('list', '--prefix=test-', self.repository_location) self.assert_in('test-1', output) self.assert_in('test-2', output) self.assert_not_in('something-else', output) def test_list_format(self): self.cmd('init', '--encryption=repokey', self.repository_location) test_archive = self.repository_location + '::test' self.cmd('create', test_archive, src_dir) output_1 = self.cmd('list', test_archive) output_2 = self.cmd('list', '--format', '{mode} {user:6} {group:6} {size:8d} {mtime} {path}{extra}{NEWLINE}', test_archive) output_3 = self.cmd('list', '--format', '{mtime:%s} {path}{NL}', test_archive) self.assertEqual(output_1, output_2) self.assertNotEqual(output_1, output_3) def test_list_repository_format(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', '--comment', 'comment 1', self.repository_location + '::test-1', src_dir) self.cmd('create', '--comment', 'comment 2', self.repository_location + '::test-2', src_dir) output_1 = self.cmd('list', self.repository_location) output_2 = self.cmd('list', '--format', '{archive:<36} {time} [{id}]{NL}', self.repository_location) self.assertEqual(output_1, output_2) output_1 = self.cmd('list', '--short', self.repository_location) self.assertEqual(output_1, 'test-1\ntest-2\n') output_1 = self.cmd('list', '--format', '{barchive}/', self.repository_location) self.assertEqual(output_1, 'test-1/test-2/') output_3 = self.cmd('list', '--format', '{name} {comment}{NL}', self.repository_location) self.assert_in('test-1 comment 1\n', output_3) self.assert_in('test-2 comment 2\n', output_3) def test_list_hash(self): self.create_regular_file('empty_file', size=0) self.create_regular_file('amb', contents=b'a' * 1000000) self.cmd('init', '--encryption=repokey', self.repository_location) test_archive = self.repository_location + '::test' self.cmd('create', test_archive, 'input') output = self.cmd('list', '--format', '{sha256} {path}{NL}', test_archive) assert "cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0 input/amb" in output assert "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 input/empty_file" in output def test_list_consider_checkpoints(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test1', src_dir) # these are not really checkpoints, but they look like some: self.cmd('create', self.repository_location + '::test2.checkpoint', src_dir) self.cmd('create', self.repository_location + '::test3.checkpoint.1', src_dir) output = self.cmd('list', self.repository_location) assert "test1" in output assert "test2.checkpoint" not in output assert "test3.checkpoint.1" not in output output = self.cmd('list', '--consider-checkpoints', self.repository_location) assert "test1" in output assert "test2.checkpoint" in output assert "test3.checkpoint.1" in output def test_list_chunk_counts(self): self.create_regular_file('empty_file', size=0) self.create_regular_file('two_chunks') with open(os.path.join(self.input_path, 'two_chunks'), 'wb') as fd: fd.write(b'abba' * 2000000) fd.write(b'baab' * 2000000) self.cmd('init', '--encryption=repokey', self.repository_location) test_archive = 
self.repository_location + '::test' self.cmd('create', test_archive, 'input') output = self.cmd('list', '--format', '{num_chunks} {unique_chunks} {path}{NL}', test_archive) assert "0 0 input/empty_file" in output assert "2 2 input/two_chunks" in output def test_list_size(self): self.create_regular_file('compressible_file', size=10000) self.cmd('init', '--encryption=repokey', self.repository_location) test_archive = self.repository_location + '::test' self.cmd('create', '-C', 'lz4', test_archive, 'input') output = self.cmd('list', '--format', '{size} {csize} {dsize} {dcsize} {path}{NL}', test_archive) size, csize, dsize, dcsize, path = output.split("\n")[1].split(" ") assert int(csize) < int(size) assert int(dcsize) < int(dsize) assert int(dsize) <= int(size) assert int(dcsize) <= int(csize) def test_list_json(self): self.create_regular_file('file1', size=1024 * 80) self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test', 'input') list_repo = json.loads(self.cmd('list', '--json', self.repository_location)) repository = list_repo['repository'] assert len(repository['id']) == 64 assert datetime.strptime(repository['last_modified'], ISO_FORMAT) # must not raise assert list_repo['encryption']['mode'] == 'repokey' assert 'keyfile' not in list_repo['encryption'] archive0 = list_repo['archives'][0] assert datetime.strptime(archive0['time'], ISO_FORMAT) # must not raise list_archive = self.cmd('list', '--json-lines', self.repository_location + '::test') items = [json.loads(s) for s in list_archive.splitlines()] assert len(items) == 2 file1 = items[1] assert file1['path'] == 'input/file1' assert file1['size'] == 81920 assert datetime.strptime(file1['mtime'], ISO_FORMAT) # must not raise list_archive = self.cmd('list', '--json-lines', '--format={sha256}', self.repository_location + '::test') items = [json.loads(s) for s in list_archive.splitlines()] assert len(items) == 2 file1 = items[1] assert file1['path'] == 'input/file1' assert file1['sha256'] == 'b2915eb69f260d8d3c25249195f2c8f4f716ea82ec760ae929732c0262442b2b' def test_list_json_args(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('list', '--json-lines', self.repository_location, exit_code=2) self.cmd('list', '--json', self.repository_location + '::archive', exit_code=2) def test_log_json(self): self.create_test_files() self.cmd('init', '--encryption=repokey', self.repository_location) log = self.cmd('create', '--log-json', self.repository_location + '::test', 'input', '--list', '--debug') messages = {} # type -> message, one of each kind for line in log.splitlines(): msg = json.loads(line) messages[msg['type']] = msg file_status = messages['file_status'] assert 'status' in file_status assert file_status['path'].startswith('input') log_message = messages['log_message'] assert isinstance(log_message['time'], float) assert log_message['levelname'] == 'DEBUG' # there should only be DEBUG messages assert isinstance(log_message['message'], str) def test_debug_profile(self): self.create_test_files() self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test', 'input', '--debug-profile=create.prof') self.cmd('debug', 'convert-profile', 'create.prof', 'create.pyprof') stats = pstats.Stats('create.pyprof') stats.strip_dirs() stats.sort_stats('cumtime') self.cmd('create', self.repository_location + '::test2', 'input', '--debug-profile=create.pyprof') stats = pstats.Stats('create.pyprof') # 
Only do this on trusted data! stats.strip_dirs() stats.sort_stats('cumtime') def test_common_options(self): self.create_test_files() self.cmd('init', '--encryption=repokey', self.repository_location) log = self.cmd('--debug', 'create', self.repository_location + '::test', 'input') assert 'security: read previous location' in log def _get_sizes(self, compression, compressible, size=10000): if compressible: contents = b'X' * size else: contents = os.urandom(size) self.create_regular_file('file', contents=contents) self.cmd('init', '--encryption=none', self.repository_location) archive = self.repository_location + '::test' self.cmd('create', '-C', compression, archive, 'input') output = self.cmd('list', '--format', '{size} {csize} {path}{NL}', archive) size, csize, path = output.split("\n")[1].split(" ") return int(size), int(csize) def test_compression_none_compressible(self): size, csize = self._get_sizes('none', compressible=True) assert csize == size + 3 def test_compression_none_uncompressible(self): size, csize = self._get_sizes('none', compressible=False) assert csize == size + 3 def test_compression_zlib_compressible(self): size, csize = self._get_sizes('zlib', compressible=True) assert csize < size * 0.1 assert csize == 35 def test_compression_zlib_uncompressible(self): size, csize = self._get_sizes('zlib', compressible=False) assert csize >= size def test_compression_auto_compressible(self): size, csize = self._get_sizes('auto,zlib', compressible=True) assert csize < size * 0.1 assert csize == 35 # same as compression 'zlib' def test_compression_auto_uncompressible(self): size, csize = self._get_sizes('auto,zlib', compressible=False) assert csize == size + 3 # same as compression 'none' def test_compression_lz4_compressible(self): size, csize = self._get_sizes('lz4', compressible=True) assert csize < size * 0.1 def test_compression_lz4_uncompressible(self): size, csize = self._get_sizes('lz4', compressible=False) assert csize == size + 3 # same as compression 'none' def test_compression_lzma_compressible(self): size, csize = self._get_sizes('lzma', compressible=True) assert csize < size * 0.1 def test_compression_lzma_uncompressible(self): size, csize = self._get_sizes('lzma', compressible=False) assert csize == size + 3 # same as compression 'none' def test_compression_zstd_compressible(self): size, csize = self._get_sizes('zstd', compressible=True) assert csize < size * 0.1 def test_compression_zstd_uncompressible(self): size, csize = self._get_sizes('zstd', compressible=False) assert csize == size + 3 # same as compression 'none' def test_change_passphrase(self): self.cmd('init', '--encryption=repokey', self.repository_location) os.environ['BORG_NEW_PASSPHRASE'] = 'newpassphrase' # here we have both BORG_PASSPHRASE and BORG_NEW_PASSPHRASE set: self.cmd('key', 'change-passphrase', self.repository_location) os.environ['BORG_PASSPHRASE'] = 'newpassphrase' self.cmd('list', self.repository_location) def test_break_lock(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('break-lock', self.repository_location) def test_usage(self): self.cmd() self.cmd('-h') def test_help(self): assert 'Borg' in self.cmd('help') assert 'patterns' in self.cmd('help', 'patterns') assert 'Initialize' in self.cmd('help', 'init') assert 'positional arguments' not in self.cmd('help', 'init', '--epilog-only') assert 'This command initializes' not in self.cmd('help', 'init', '--usage-only') @unittest.skipUnless(llfuse, 'llfuse not installed') def test_fuse(self): def 
has_noatime(some_file): atime_before = os.stat(some_file).st_atime_ns try: os.close(os.open(some_file, flags_noatime)) except PermissionError: return False else: atime_after = os.stat(some_file).st_atime_ns noatime_used = flags_noatime != flags_normal return noatime_used and atime_before == atime_after self.cmd('init', '--encryption=repokey', self.repository_location) self.create_test_files() have_noatime = has_noatime('input/file1') self.cmd('create', '--exclude-nodump', '--atime', self.repository_location + '::archive', 'input') self.cmd('create', '--exclude-nodump', '--atime', self.repository_location + '::archive2', 'input') if has_lchflags: # remove the file we did not backup, so input and output become equal os.remove(os.path.join('input', 'flagfile')) mountpoint = os.path.join(self.tmpdir, 'mountpoint') # mount the whole repository, archive contents shall show up in archivename subdirs of mountpoint: with self.fuse_mount(self.repository_location, mountpoint): # flags are not supported by the FUSE mount # we also ignore xattrs here, they are tested separately self.assert_dirs_equal(self.input_path, os.path.join(mountpoint, 'archive', 'input'), ignore_flags=True, ignore_xattrs=True) self.assert_dirs_equal(self.input_path, os.path.join(mountpoint, 'archive2', 'input'), ignore_flags=True, ignore_xattrs=True) # mount only 1 archive, its contents shall show up directly in mountpoint: with self.fuse_mount(self.repository_location + '::archive', mountpoint): self.assert_dirs_equal(self.input_path, os.path.join(mountpoint, 'input'), ignore_flags=True, ignore_xattrs=True) # regular file in_fn = 'input/file1' out_fn = os.path.join(mountpoint, 'input', 'file1') # stat sti1 = os.stat(in_fn) sto1 = os.stat(out_fn) assert sti1.st_mode == sto1.st_mode assert sti1.st_uid == sto1.st_uid assert sti1.st_gid == sto1.st_gid assert sti1.st_size == sto1.st_size if have_noatime: assert sti1.st_atime == sto1.st_atime assert sti1.st_ctime == sto1.st_ctime assert sti1.st_mtime == sto1.st_mtime if are_hardlinks_supported(): # note: there is another hardlink to this, see below assert sti1.st_nlink == sto1.st_nlink == 2 # read with open(in_fn, 'rb') as in_f, open(out_fn, 'rb') as out_f: assert in_f.read() == out_f.read() # hardlink (to 'input/file1') if are_hardlinks_supported(): in_fn = 'input/hardlink' out_fn = os.path.join(mountpoint, 'input', 'hardlink') sti2 = os.stat(in_fn) sto2 = os.stat(out_fn) assert sti2.st_nlink == sto2.st_nlink == 2 assert sto1.st_ino == sto2.st_ino # symlink if are_symlinks_supported(): in_fn = 'input/link1' out_fn = os.path.join(mountpoint, 'input', 'link1') sti = os.stat(in_fn, follow_symlinks=False) sto = os.stat(out_fn, follow_symlinks=False) assert sti.st_size == len('somewhere') assert sto.st_size == len('somewhere') assert stat.S_ISLNK(sti.st_mode) assert stat.S_ISLNK(sto.st_mode) assert os.readlink(in_fn) == os.readlink(out_fn) # FIFO if are_fifos_supported(): out_fn = os.path.join(mountpoint, 'input', 'fifo1') sto = os.stat(out_fn) assert stat.S_ISFIFO(sto.st_mode) # list/read xattrs try: in_fn = 'input/fusexattr' out_fn = os.fsencode(os.path.join(mountpoint, 'input', 'fusexattr')) if not xattr.XATTR_FAKEROOT and xattr.is_enabled(self.input_path): assert sorted(no_selinux(xattr.listxattr(out_fn))) == [b'user.empty', b'user.foo', ] assert xattr.getxattr(out_fn, b'user.foo') == b'bar' assert xattr.getxattr(out_fn, b'user.empty') == b'' else: assert no_selinux(xattr.listxattr(out_fn)) == [] try: xattr.getxattr(out_fn, b'user.foo') except OSError as e: assert e.errno == 
llfuse.ENOATTR else: assert False, "expected OSError(ENOATTR), but no error was raised" except OSError as err: if sys.platform.startswith(('nothing_here_now', )) and err.errno == errno.ENOTSUP: # some systems have no xattr support on FUSE pass else: raise @unittest.skipUnless(llfuse, 'llfuse not installed') def test_fuse_versions_view(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_regular_file('test', contents=b'first') if are_hardlinks_supported(): self.create_regular_file('hardlink1', contents=b'123456') os.link('input/hardlink1', 'input/hardlink2') os.link('input/hardlink1', 'input/hardlink3') self.cmd('create', self.repository_location + '::archive1', 'input') self.create_regular_file('test', contents=b'second') self.cmd('create', self.repository_location + '::archive2', 'input') mountpoint = os.path.join(self.tmpdir, 'mountpoint') # mount the whole repository, archive contents shall show up in versioned view: with self.fuse_mount(self.repository_location, mountpoint, '-o', 'versions'): path = os.path.join(mountpoint, 'input', 'test') # filename shows up as directory ... files = os.listdir(path) assert all(f.startswith('test.') for f in files) # ... with files test.xxxxx in there assert {b'first', b'second'} == {open(os.path.join(path, f), 'rb').read() for f in files} if are_hardlinks_supported(): hl1 = os.path.join(mountpoint, 'input', 'hardlink1', 'hardlink1.00001') hl2 = os.path.join(mountpoint, 'input', 'hardlink2', 'hardlink2.00001') hl3 = os.path.join(mountpoint, 'input', 'hardlink3', 'hardlink3.00001') assert os.stat(hl1).st_ino == os.stat(hl2).st_ino == os.stat(hl3).st_ino assert open(hl3, 'rb').read() == b'123456' # similar again, but exclude the hardlink master: with self.fuse_mount(self.repository_location, mountpoint, '-o', 'versions', '-e', 'input/hardlink1'): if are_hardlinks_supported(): hl2 = os.path.join(mountpoint, 'input', 'hardlink2', 'hardlink2.00001') hl3 = os.path.join(mountpoint, 'input', 'hardlink3', 'hardlink3.00001') assert os.stat(hl2).st_ino == os.stat(hl3).st_ino assert open(hl3, 'rb').read() == b'123456' @unittest.skipUnless(llfuse, 'llfuse not installed') def test_fuse_allow_damaged_files(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_src_archive('archive') # Get rid of a chunk and repair it archive, repository = self.open_archive('archive') with repository: for item in archive.iter_items(): if item.path.endswith('testsuite/archiver.py'): repository.delete(item.chunks[-1].id) path = item.path # store full path for later break else: assert False # missed the file repository.commit(compact=False) self.cmd('check', '--repair', self.repository_location, exit_code=0) mountpoint = os.path.join(self.tmpdir, 'mountpoint') with self.fuse_mount(self.repository_location + '::archive', mountpoint): with pytest.raises(OSError) as excinfo: open(os.path.join(mountpoint, path)) assert excinfo.value.errno == errno.EIO with self.fuse_mount(self.repository_location + '::archive', mountpoint, '-o', 'allow_damaged_files'): open(os.path.join(mountpoint, path)).close() @unittest.skipUnless(llfuse, 'llfuse not installed') def test_fuse_mount_options(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_src_archive('arch11') self.create_src_archive('arch12') self.create_src_archive('arch21') self.create_src_archive('arch22') mountpoint = os.path.join(self.tmpdir, 'mountpoint') with self.fuse_mount(self.repository_location, mountpoint, '--first=2', '--sort=name'): assert 
sorted(os.listdir(os.path.join(mountpoint))) == ['arch11', 'arch12'] with self.fuse_mount(self.repository_location, mountpoint, '--last=2', '--sort=name'): assert sorted(os.listdir(os.path.join(mountpoint))) == ['arch21', 'arch22'] with self.fuse_mount(self.repository_location, mountpoint, '--prefix=arch1'): assert sorted(os.listdir(os.path.join(mountpoint))) == ['arch11', 'arch12'] with self.fuse_mount(self.repository_location, mountpoint, '--prefix=arch2'): assert sorted(os.listdir(os.path.join(mountpoint))) == ['arch21', 'arch22'] with self.fuse_mount(self.repository_location, mountpoint, '--prefix=arch'): assert sorted(os.listdir(os.path.join(mountpoint))) == ['arch11', 'arch12', 'arch21', 'arch22'] with self.fuse_mount(self.repository_location, mountpoint, '--prefix=nope'): assert sorted(os.listdir(os.path.join(mountpoint))) == [] @unittest.skipUnless(llfuse, 'llfuse not installed') def test_migrate_lock_alive(self): """Both old_id and new_id must not be stale during lock migration / daemonization.""" from functools import wraps import pickle import traceback # Check results are communicated from the borg mount background process # to the pytest process by means of a serialized dict object stored in this file. assert_data_file = os.path.join(self.tmpdir, 'migrate_lock_assert_data.pickle') # Decorates Lock.migrate_lock() with process_alive() checks before and after. # (We don't want to mix testing code into runtime.) def write_assert_data(migrate_lock): @wraps(migrate_lock) def wrapper(self, old_id, new_id): wrapper.num_calls += 1 assert_data = { 'num_calls': wrapper.num_calls, 'old_id': old_id, 'new_id': new_id, 'before': { 'old_id_alive': platform.process_alive(*old_id), 'new_id_alive': platform.process_alive(*new_id)}, 'exception': None, 'exception.extr_tb': None, 'after': { 'old_id_alive': None, 'new_id_alive': None}} try: with open(assert_data_file, 'wb') as _out: pickle.dump(assert_data, _out) except: pass try: return migrate_lock(self, old_id, new_id) except BaseException as e: assert_data['exception'] = e assert_data['exception.extr_tb'] = traceback.extract_tb(e.__traceback__) finally: assert_data['after'].update({ 'old_id_alive': platform.process_alive(*old_id), 'new_id_alive': platform.process_alive(*new_id)}) try: with open(assert_data_file, 'wb') as _out: pickle.dump(assert_data, _out) except: pass wrapper.num_calls = 0 return wrapper # Decorate borg.locking.Lock.migrate_lock = write_assert_data(borg.locking.Lock.migrate_lock) try: self.cmd('init', '--encryption=none', self.repository_location) self.create_src_archive('arch') mountpoint = os.path.join(self.tmpdir, 'mountpoint') # In order that the decoration is kept for the borg mount process, we must not spawn, but actually fork; # not to be confused with the forking in borg.helpers.daemonize() which is done as well. with self.fuse_mount(self.repository_location, mountpoint, os_fork=True): pass with open(assert_data_file, 'rb') as _in: assert_data = pickle.load(_in) print('\nLock.migrate_lock(): assert_data = %r.' % (assert_data, ), file=sys.stderr, flush=True) exception = assert_data['exception'] if exception is not None: extracted_tb = assert_data['exception.extr_tb'] print( 'Lock.migrate_lock() raised an exception:\n', 'Traceback (most recent call last):\n', *traceback.format_list(extracted_tb), *traceback.format_exception(exception.__class__, exception, None), sep='', end='', file=sys.stderr, flush=True) assert assert_data['num_calls'] == 1, "Lock.migrate_lock() must be called exactly once." 
            assert exception is None, "Lock.migrate_lock() may not raise an exception."
            assert_data_before = assert_data['before']
            assert assert_data_before['old_id_alive'], "old_id must be alive (=must not be stale) when calling Lock.migrate_lock()."
            assert assert_data_before['new_id_alive'], "new_id must be alive (=must not be stale) when calling Lock.migrate_lock()."
            assert_data_after = assert_data['after']
            assert assert_data_after['old_id_alive'], "old_id must be alive (=must not be stale) when Lock.migrate_lock() has returned."
            assert assert_data_after['new_id_alive'], "new_id must be alive (=must not be stale) when Lock.migrate_lock() has returned."
        finally:
            # Undecorate
            borg.locking.Lock.migrate_lock = borg.locking.Lock.migrate_lock.__wrapped__

    def verify_aes_counter_uniqueness(self, method):
        seen = set()  # Chunks already seen
        used = set()  # counter values already used

        def verify_uniqueness():
            with Repository(self.repository_path) as repository:
                for id, _ in repository.open_index(repository.get_transaction_id()).iteritems():
                    data = repository.get(id)
                    hash = sha256(data).digest()
                    if hash not in seen:
                        seen.add(hash)
                        # layout assumed by this check: 1 type byte + 32 byte MAC + 8 byte nonce = 41 header bytes
                        num_blocks = num_cipher_blocks(len(data) - 41)
                        nonce = bytes_to_long(data[33:41])
                        for counter in range(nonce, nonce + num_blocks):
                            self.assert_not_in(counter, used)
                            used.add(counter)

        self.create_test_files()
        os.environ['BORG_PASSPHRASE'] = 'passphrase'
        self.cmd('init', '--encryption=' + method, self.repository_location)
        verify_uniqueness()
        self.cmd('create', self.repository_location + '::test', 'input')
        verify_uniqueness()
        self.cmd('create', self.repository_location + '::test.2', 'input')
        verify_uniqueness()
        self.cmd('delete', self.repository_location + '::test.2')
        verify_uniqueness()

    def test_aes_counter_uniqueness_keyfile(self):
        self.verify_aes_counter_uniqueness('keyfile')

    def test_aes_counter_uniqueness_repokey(self):
        self.verify_aes_counter_uniqueness('repokey')

    def test_debug_dump_archive_items(self):
        self.create_test_files()
        self.cmd('init', '--encryption=repokey', self.repository_location)
        self.cmd('create', self.repository_location + '::test', 'input')
        with changedir('output'):
            output = self.cmd('debug', 'dump-archive-items', self.repository_location + '::test')
        output_dir = sorted(os.listdir('output'))
        assert len(output_dir) > 0 and output_dir[0].startswith('000000_')
        assert 'Done.' in output

    def test_debug_dump_repo_objs(self):
        self.create_test_files()
        self.cmd('init', '--encryption=repokey', self.repository_location)
        self.cmd('create', self.repository_location + '::test', 'input')
        with changedir('output'):
            output = self.cmd('debug', 'dump-repo-objs', self.repository_location)
        output_dir = sorted(os.listdir('output'))
        assert len(output_dir) > 0 and output_dir[0].startswith('00000000_')
        assert 'Done.' in output

    def test_debug_put_get_delete_obj(self):
        self.cmd('init', '--encryption=repokey', self.repository_location)
        data = b'some data'
        hexkey = sha256(data).hexdigest()
        self.create_regular_file('file', contents=data)
        output = self.cmd('debug', 'put-obj', self.repository_location, 'input/file')
        assert hexkey in output
        output = self.cmd('debug', 'get-obj', self.repository_location, hexkey, 'output/file')
        assert hexkey in output
        with open('output/file', 'rb') as f:
            data_read = f.read()
        assert data == data_read
        output = self.cmd('debug', 'delete-obj', self.repository_location, hexkey)
        assert "deleted" in output
        output = self.cmd('debug', 'delete-obj', self.repository_location, hexkey)
        assert "not found" in output
        output = self.cmd('debug', 'delete-obj', self.repository_location, 'invalid')
        assert "is invalid" in output

    def test_init_interrupt(self):
        def raise_eof(*args):
            raise EOFError
        with patch.object(KeyfileKeyBase, 'create', raise_eof):
            self.cmd('init', '--encryption=repokey', self.repository_location, exit_code=1)
        assert not os.path.exists(self.repository_location)

    def test_init_requires_encryption_option(self):
        self.cmd('init', self.repository_location, exit_code=2)

    def test_init_nested_repositories(self):
        self.cmd('init', '--encryption=repokey', self.repository_location)
        if self.FORK_DEFAULT:
            self.cmd('init', '--encryption=repokey', self.repository_location + '/nested', exit_code=2)
        else:
            with pytest.raises(Repository.AlreadyExists):
                self.cmd('init', '--encryption=repokey', self.repository_location + '/nested')

    def check_cache(self):
        # First run a regular borg check
        self.cmd('check', self.repository_location)
        # Then check that the cache on disk matches exactly what's in the repo.
        with self.open_repository() as repository:
            manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
            with Cache(repository, key, manifest, sync=False) as cache:
                original_chunks = cache.chunks
            Cache.destroy(repository)
            with Cache(repository, key, manifest) as cache:
                correct_chunks = cache.chunks
        assert original_chunks is not correct_chunks
        seen = set()
        for id, (refcount, size, csize) in correct_chunks.iteritems():
            o_refcount, o_size, o_csize = original_chunks[id]
            assert refcount == o_refcount
            assert size == o_size
            assert csize == o_csize
            seen.add(id)
        for id, (refcount, size, csize) in original_chunks.iteritems():
            assert id in seen

    def test_check_cache(self):
        self.cmd('init', '--encryption=repokey', self.repository_location)
        self.cmd('create', self.repository_location + '::test', 'input')
        with self.open_repository() as repository:
            manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
            with Cache(repository, key, manifest, sync=False) as cache:
                cache.begin_txn()
                cache.chunks.incref(list(cache.chunks.iteritems())[0][0])
                cache.commit()
        with pytest.raises(AssertionError):
            self.check_cache()

    def test_recreate_target_rc(self):
        self.cmd('init', '--encryption=repokey', self.repository_location)
        output = self.cmd('recreate', self.repository_location, '--target=asdf', exit_code=2)
        assert 'Need to specify single archive' in output

    def test_recreate_target(self):
        self.create_test_files()
        self.cmd('init', '--encryption=repokey', self.repository_location)
        self.check_cache()
        archive = self.repository_location + '::test0'
        self.cmd('create', archive, 'input')
        self.check_cache()
        original_archive = self.cmd('list', self.repository_location)
        self.cmd('recreate', archive, 'input/dir2', '-e', 'input/dir2/file3', '--target=new-archive')
        self.check_cache()
        archives = self.cmd('list', self.repository_location)
        assert original_archive in archives
        assert 'new-archive' in archives
        archive = self.repository_location + '::new-archive'
        listing = self.cmd('list', '--short', archive)
        assert 'file1' not in listing
        assert 'dir2/file2' in listing
        assert 'dir2/file3' not in listing

    def test_recreate_basic(self):
        self.create_test_files()
        self.create_regular_file('dir2/file3', size=1024 * 80)
        self.cmd('init', '--encryption=repokey', self.repository_location)
        archive = self.repository_location + '::test0'
        self.cmd('create', archive, 'input')
        self.cmd('recreate', archive, 'input/dir2', '-e', 'input/dir2/file3')
        self.check_cache()
        listing = self.cmd('list', '--short', archive)
        assert 'file1' not in listing
        assert 'dir2/file2' in listing
        assert 'dir2/file3' not in listing

    @pytest.mark.skipif(not are_hardlinks_supported(), reason='hardlinks not supported')
    def test_recreate_subtree_hardlinks(self):
        # This is essentially the same problem set as in test_extract_hardlinks
        self._extract_hardlinks_setup()
        self.cmd('create', self.repository_location + '::test2', 'input')
        self.cmd('recreate', self.repository_location + '::test', 'input/dir1')
        self.check_cache()
        with changedir('output'):
            self.cmd('extract', self.repository_location + '::test')
            assert os.stat('input/dir1/hardlink').st_nlink == 2
            assert os.stat('input/dir1/subdir/hardlink').st_nlink == 2
            assert os.stat('input/dir1/aaaa').st_nlink == 2
            assert os.stat('input/dir1/source2').st_nlink == 2
        with changedir('output'):
            self.cmd('extract', self.repository_location + '::test2')
            assert os.stat('input/dir1/hardlink').st_nlink == 4

    def test_recreate_rechunkify(self):
        with open(os.path.join(self.input_path, 'large_file'), 'wb') as fd:
            fd.write(b'a' * 280)
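            # The '--chunker-params 7,9,8,128' used below follow borg's
            # CHUNK_MIN_EXP,CHUNK_MAX_EXP,HASH_MASK_BITS,HASH_WINDOW_SIZE convention,
            # i.e. chunks of 2**7..2**9 bytes, so this ~560 byte file ends up as several
            # chunks that only deduplicate again after re-chunking with default params.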
fd.write(b'b' * 280) self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', '--chunker-params', '7,9,8,128', self.repository_location + '::test1', 'input') self.cmd('create', self.repository_location + '::test2', 'input', '--files-cache=disabled') list = self.cmd('list', self.repository_location + '::test1', 'input/large_file', '--format', '{num_chunks} {unique_chunks}') num_chunks, unique_chunks = map(int, list.split(' ')) # test1 and test2 do not deduplicate assert num_chunks == unique_chunks self.cmd('recreate', self.repository_location, '--chunker-params', 'default') self.check_cache() # test1 and test2 do deduplicate after recreate assert int(self.cmd('list', self.repository_location + '::test1', 'input/large_file', '--format={size}')) assert not int(self.cmd('list', self.repository_location + '::test1', 'input/large_file', '--format', '{unique_chunks}')) def test_recreate_recompress(self): self.create_regular_file('compressible', size=10000) self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test', 'input', '-C', 'none') file_list = self.cmd('list', self.repository_location + '::test', 'input/compressible', '--format', '{size} {csize} {sha256}') size, csize, sha256_before = file_list.split(' ') assert int(csize) >= int(size) # >= due to metadata overhead self.cmd('recreate', self.repository_location, '-C', 'lz4', '--recompress') self.check_cache() file_list = self.cmd('list', self.repository_location + '::test', 'input/compressible', '--format', '{size} {csize} {sha256}') size, csize, sha256_after = file_list.split(' ') assert int(csize) < int(size) assert sha256_before == sha256_after def test_recreate_timestamp(self): local_timezone = datetime.now(timezone(timedelta(0))).astimezone().tzinfo self.create_test_files() self.cmd('init', '--encryption=repokey', self.repository_location) archive = self.repository_location + '::test0' self.cmd('create', archive, 'input') self.cmd('recreate', '--timestamp', "1970-01-02T00:00:00", '--comment', 'test', archive) info = self.cmd('info', archive).splitlines() dtime = datetime(1970, 1, 2) + local_timezone.utcoffset(None) s_time = dtime.strftime("%Y-%m-%d") assert any([re.search(r'Time \(start\).+ %s' % s_time, item) for item in info]) assert any([re.search(r'Time \(end\).+ %s' % s_time, item) for item in info]) def test_recreate_dry_run(self): self.create_regular_file('compressible', size=10000) self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test', 'input') archives_before = self.cmd('list', self.repository_location + '::test') self.cmd('recreate', self.repository_location, '-n', '-e', 'input/compressible') self.check_cache() archives_after = self.cmd('list', self.repository_location + '::test') assert archives_after == archives_before def test_recreate_skips_nothing_to_do(self): self.create_regular_file('file1', size=1024 * 80) self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test', 'input') info_before = self.cmd('info', self.repository_location + '::test') self.cmd('recreate', self.repository_location, '--chunker-params', 'default') self.check_cache() info_after = self.cmd('info', self.repository_location + '::test') assert info_before == info_after # includes archive ID def test_with_lock(self): self.cmd('init', '--encryption=repokey', self.repository_location) lock_path = os.path.join(self.repository_path, 
'lock.exclusive') cmd = 'python3', '-c', 'import os, sys; sys.exit(42 if os.path.exists("%s") else 23)' % lock_path self.cmd('with-lock', self.repository_location, *cmd, fork=True, exit_code=42) def test_recreate_list_output(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_regular_file('file1', size=0) self.create_regular_file('file2', size=0) self.create_regular_file('file3', size=0) self.create_regular_file('file4', size=0) self.create_regular_file('file5', size=0) self.cmd('create', self.repository_location + '::test', 'input') output = self.cmd('recreate', '--list', '--info', self.repository_location + '::test', '-e', 'input/file2') self.check_cache() self.assert_in("input/file1", output) self.assert_in("x input/file2", output) output = self.cmd('recreate', '--list', self.repository_location + '::test', '-e', 'input/file3') self.check_cache() self.assert_in("input/file1", output) self.assert_in("x input/file3", output) output = self.cmd('recreate', self.repository_location + '::test', '-e', 'input/file4') self.check_cache() self.assert_not_in("input/file1", output) self.assert_not_in("x input/file4", output) output = self.cmd('recreate', '--info', self.repository_location + '::test', '-e', 'input/file5') self.check_cache() self.assert_not_in("input/file1", output) self.assert_not_in("x input/file5", output) def test_bad_filters(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test', 'input') self.cmd('delete', '--first', '1', '--last', '1', self.repository_location, fork=True, exit_code=2) def test_key_export_keyfile(self): export_file = self.output_path + '/exported' self.cmd('init', self.repository_location, '--encryption', 'keyfile') repo_id = self._extract_repository_id(self.repository_path) self.cmd('key', 'export', self.repository_location, export_file) with open(export_file, 'r') as fd: export_contents = fd.read() assert export_contents.startswith('BORG_KEY ' + bin_to_hex(repo_id) + '\n') key_file = self.keys_path + '/' + os.listdir(self.keys_path)[0] with open(key_file, 'r') as fd: key_contents = fd.read() assert key_contents == export_contents os.unlink(key_file) self.cmd('key', 'import', self.repository_location, export_file) with open(key_file, 'r') as fd: key_contents2 = fd.read() assert key_contents2 == key_contents def test_key_import_keyfile_with_borg_key_file(self): self.cmd('init', self.repository_location, '--encryption', 'keyfile') exported_key_file = os.path.join(self.output_path, 'exported') self.cmd('key', 'export', self.repository_location, exported_key_file) key_file = os.path.join(self.keys_path, os.listdir(self.keys_path)[0]) with open(key_file, 'r') as fd: key_contents = fd.read() os.unlink(key_file) imported_key_file = os.path.join(self.output_path, 'imported') with environment_variable(BORG_KEY_FILE=imported_key_file): self.cmd('key', 'import', self.repository_location, exported_key_file) assert not os.path.isfile(key_file), '"borg key import" should respect BORG_KEY_FILE' with open(imported_key_file, 'r') as fd: imported_key_contents = fd.read() assert imported_key_contents == key_contents def test_key_export_repokey(self): export_file = self.output_path + '/exported' self.cmd('init', self.repository_location, '--encryption', 'repokey') repo_id = self._extract_repository_id(self.repository_path) self.cmd('key', 'export', self.repository_location, export_file) with open(export_file, 'r') as fd: export_contents = fd.read() assert 
export_contents.startswith('BORG_KEY ' + bin_to_hex(repo_id) + '\n') with Repository(self.repository_path) as repository: repo_key = RepoKey(repository) repo_key.load(None, Passphrase.env_passphrase()) backup_key = KeyfileKey(key.TestKey.MockRepository()) backup_key.load(export_file, Passphrase.env_passphrase()) assert repo_key.enc_key == backup_key.enc_key with Repository(self.repository_path) as repository: repository.save_key(b'') self.cmd('key', 'import', self.repository_location, export_file) with Repository(self.repository_path) as repository: repo_key2 = RepoKey(repository) repo_key2.load(None, Passphrase.env_passphrase()) assert repo_key2.enc_key == repo_key2.enc_key def test_key_export_qr(self): export_file = self.output_path + '/exported.html' self.cmd('init', self.repository_location, '--encryption', 'repokey') repo_id = self._extract_repository_id(self.repository_path) self.cmd('key', 'export', '--qr-html', self.repository_location, export_file) with open(export_file, 'r', encoding='utf-8') as fd: export_contents = fd.read() assert bin_to_hex(repo_id) in export_contents assert export_contents.startswith('<!doctype html>') assert export_contents.endswith('</html>\n') def test_key_export_directory(self): export_directory = self.output_path + '/exported' os.mkdir(export_directory) self.cmd('init', self.repository_location, '--encryption', 'repokey') self.cmd('key', 'export', self.repository_location, export_directory, exit_code=EXIT_ERROR) def test_key_import_errors(self): export_file = self.output_path + '/exported' self.cmd('init', self.repository_location, '--encryption', 'keyfile') self.cmd('key', 'import', self.repository_location, export_file, exit_code=EXIT_ERROR) with open(export_file, 'w') as fd: fd.write('something not a key\n') if self.FORK_DEFAULT: self.cmd('key', 'import', self.repository_location, export_file, exit_code=2) else: with pytest.raises(NotABorgKeyFile): self.cmd('key', 'import', self.repository_location, export_file) with open(export_file, 'w') as fd: fd.write('BORG_KEY a0a0a0\n') if self.FORK_DEFAULT: self.cmd('key', 'import', self.repository_location, export_file, exit_code=2) else: with pytest.raises(RepoIdMismatch): self.cmd('key', 'import', self.repository_location, export_file) def test_key_export_paperkey(self): repo_id = 'e294423506da4e1ea76e8dcdf1a3919624ae3ae496fddf905610c351d3f09239' export_file = self.output_path + '/exported' self.cmd('init', self.repository_location, '--encryption', 'keyfile') self._set_repository_id(self.repository_path, unhexlify(repo_id)) key_file = self.keys_path + '/' + os.listdir(self.keys_path)[0] with open(key_file, 'w') as fd: fd.write(KeyfileKey.FILE_ID + ' ' + repo_id + '\n') fd.write(b2a_base64(b'abcdefghijklmnopqrstu').decode()) self.cmd('key', 'export', '--paper', self.repository_location, export_file) with open(export_file, 'r') as fd: export_contents = fd.read() assert export_contents == """To restore key use borg key import --paper /path/to/repo BORG PAPER KEY v1 id: 2 / e29442 3506da 4e1ea7 / 25f62a 5a3d41 - 02 1: 616263 646566 676869 6a6b6c 6d6e6f 707172 - 6d 2: 737475 - 88 """ def test_key_import_paperkey(self): repo_id = 'e294423506da4e1ea76e8dcdf1a3919624ae3ae496fddf905610c351d3f09239' self.cmd('init', self.repository_location, '--encryption', 'keyfile') self._set_repository_id(self.repository_path, unhexlify(repo_id)) key_file = self.keys_path + '/' + os.listdir(self.keys_path)[0] with open(key_file, 'w') as fd: fd.write(KeyfileKey.FILE_ID + ' ' + repo_id + '\n') 
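            # The 21 dummy key bytes b'abcdefghijklmnopqrstu' written below are what the
            # typed paper-key lines further down spell out in hex: 616263 64... is 'abcd...',
            # 737475 is 'stu'.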
fd.write(b2a_base64(b'abcdefghijklmnopqrstu').decode()) typed_input = ( b'2 / e29442 3506da 4e1ea7 / 25f62a 5a3d41 02\n' # Forgot to type "-" b'2 / e29442 3506da 4e1ea7 25f62a 5a3d41 - 02\n' # Forgot to type second "/" b'2 / e29442 3506da 4e1ea7 / 25f62a 5a3d42 - 02\n' # Typo (..42 not ..41) b'2 / e29442 3506da 4e1ea7 / 25f62a 5a3d41 - 02\n' # Correct! Congratulations b'616263 646566 676869 6a6b6c 6d6e6f 707172 - 6d\n' b'\n\n' # Abort [yN] => N b'737475 88\n' # missing "-" b'73747i - 88\n' # typo b'73747 - 88\n' # missing nibble b'73 74 75 - 89\n' # line checksum mismatch b'00a1 - 88\n' # line hash collision - overall hash mismatch, have to start over b'2 / e29442 3506da 4e1ea7 / 25f62a 5a3d41 - 02\n' b'616263 646566 676869 6a6b6c 6d6e6f 707172 - 6d\n' b'73 74 75 - 88\n' ) # In case that this has to change, here is a quick way to find a colliding line hash: # # from hashlib import sha256 # hash_fn = lambda x: sha256(b'\x00\x02' + x).hexdigest()[:2] # for i in range(1000): # if hash_fn(i.to_bytes(2, byteorder='big')) == '88': # 88 = line hash # print(i.to_bytes(2, 'big')) # break self.cmd('key', 'import', '--paper', self.repository_location, input=typed_input) # Test abort paths typed_input = b'\ny\n' self.cmd('key', 'import', '--paper', self.repository_location, input=typed_input) typed_input = b'2 / e29442 3506da 4e1ea7 / 25f62a 5a3d41 - 02\n\ny\n' self.cmd('key', 'import', '--paper', self.repository_location, input=typed_input) def test_debug_dump_manifest(self): self.create_regular_file('file1', size=1024 * 80) self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test', 'input') dump_file = self.output_path + '/dump' output = self.cmd('debug', 'dump-manifest', self.repository_location, dump_file) assert output == "" with open(dump_file, "r") as f: result = json.load(f) assert 'archives' in result assert 'config' in result assert 'item_keys' in result assert 'timestamp' in result assert 'version' in result def test_debug_dump_archive(self): self.create_regular_file('file1', size=1024 * 80) self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test', 'input') dump_file = self.output_path + '/dump' output = self.cmd('debug', 'dump-archive', self.repository_location + "::test", dump_file) assert output == "" with open(dump_file, "r") as f: result = json.load(f) assert '_name' in result assert '_manifest_entry' in result assert '_meta' in result assert '_items' in result def test_debug_refcount_obj(self): self.cmd('init', '--encryption=repokey', self.repository_location) output = self.cmd('debug', 'refcount-obj', self.repository_location, '0' * 64).strip() assert output == 'object 0000000000000000000000000000000000000000000000000000000000000000 not found [info from chunks cache].' create_json = json.loads(self.cmd('create', '--json', self.repository_location + '::test', 'input')) archive_id = create_json['archive']['id'] output = self.cmd('debug', 'refcount-obj', self.repository_location, archive_id).strip() assert output == 'object ' + archive_id + ' has 1 referrers [info from chunks cache].' # Invalid IDs do not abort or return an error output = self.cmd('debug', 'refcount-obj', self.repository_location, '124', 'xyza').strip() assert output == 'object id 124 is invalid.\nobject id xyza is invalid.' 
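    # A minimal sketch of how the JSON written by 'borg debug dump-manifest' /
    # 'dump-archive' above could be consumed outside the test suite. Only the keys
    # asserted above ('version', 'timestamp', 'archives', ... / '_name', '_items', ...)
    # are known here; any further structure is an assumption.
    #
    #   import json
    #   with open('dump') as fd:
    #       manifest = json.load(fd)
    #   print(manifest['version'], manifest['timestamp'])
    #   print(sorted(manifest['archives']))  # assumes a name-keyed mapping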
    def test_debug_info(self):
        output = self.cmd('debug', 'info')
        assert 'CRC implementation' in output
        assert 'Python' in output

    def test_benchmark_crud(self):
        self.cmd('init', '--encryption=repokey', self.repository_location)
        with environment_variable(_BORG_BENCHMARK_CRUD_TEST='YES'):
            self.cmd('benchmark', 'crud', self.repository_location, self.input_path)

    def test_config(self):
        self.create_test_files()
        os.unlink('input/flagfile')
        self.cmd('init', '--encryption=repokey', self.repository_location)
        output = self.cmd('config', '--list', self.repository_location)
        self.assert_in('[repository]', output)
        self.assert_in('version', output)
        self.assert_in('segments_per_dir', output)
        self.assert_in('storage_quota', output)
        self.assert_in('append_only', output)
        self.assert_in('additional_free_space', output)
        self.assert_in('id', output)
        self.assert_not_in('last_segment_checked', output)
        output = self.cmd('config', self.repository_location, 'last_segment_checked', exit_code=1)
        self.assert_in('No option ', output)
        self.cmd('config', self.repository_location, 'last_segment_checked', '123')
        output = self.cmd('config', self.repository_location, 'last_segment_checked')
        assert output == '123' + '\n'
        output = self.cmd('config', '--list', self.repository_location)
        self.assert_in('last_segment_checked', output)
        self.cmd('config', '--delete', self.repository_location, 'last_segment_checked')
        for cfg_key, cfg_value in [
            ('additional_free_space', '2G'),
            ('repository.append_only', '1'),
        ]:
            output = self.cmd('config', self.repository_location, cfg_key)
            assert output == '0' + '\n'
            self.cmd('config', self.repository_location, cfg_key, cfg_value)
            output = self.cmd('config', self.repository_location, cfg_key)
            assert output == cfg_value + '\n'
            self.cmd('config', '--delete', self.repository_location, cfg_key)
            self.cmd('config', self.repository_location, cfg_key, exit_code=1)
        self.cmd('config', '--list', '--delete', self.repository_location, exit_code=2)
        self.cmd('config', self.repository_location, exit_code=2)
        self.cmd('config', self.repository_location, 'invalid-option', exit_code=1)

    requires_gnutar = pytest.mark.skipif(not have_gnutar(), reason='GNU tar must be installed for this test.')

    requires_gzip = pytest.mark.skipif(not shutil.which('gzip'), reason='gzip must be installed for this test.')

    @requires_gnutar
    def test_export_tar(self):
        self.create_test_files()
        os.unlink('input/flagfile')
        self.cmd('init', '--encryption=repokey', self.repository_location)
        self.cmd('create', self.repository_location + '::test', 'input')
        self.cmd('export-tar', self.repository_location + '::test', 'simple.tar', '--progress')
        with changedir('output'):
            # This probably assumes GNU tar. Note -p switch to extract permissions regardless of umask.
subprocess.check_call(['tar', 'xpf', '../simple.tar', '--warning=no-timestamp']) self.assert_dirs_equal('input', 'output/input', ignore_flags=True, ignore_xattrs=True, ignore_ns=True) @requires_gnutar @requires_gzip def test_export_tar_gz(self): if not shutil.which('gzip'): pytest.skip('gzip is not installed') self.create_test_files() os.unlink('input/flagfile') self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test', 'input') list = self.cmd('export-tar', self.repository_location + '::test', 'simple.tar.gz', '--list') assert 'input/file1\n' in list assert 'input/dir2\n' in list with changedir('output'): subprocess.check_call(['tar', 'xpf', '../simple.tar.gz', '--warning=no-timestamp']) self.assert_dirs_equal('input', 'output/input', ignore_flags=True, ignore_xattrs=True, ignore_ns=True) @requires_gnutar def test_export_tar_strip_components(self): if not shutil.which('gzip'): pytest.skip('gzip is not installed') self.create_test_files() os.unlink('input/flagfile') self.cmd('init', '--encryption=repokey', self.repository_location) self.cmd('create', self.repository_location + '::test', 'input') list = self.cmd('export-tar', self.repository_location + '::test', 'simple.tar', '--strip-components=1', '--list') # --list's path are those before processing with --strip-components assert 'input/file1\n' in list assert 'input/dir2\n' in list with changedir('output'): subprocess.check_call(['tar', 'xpf', '../simple.tar', '--warning=no-timestamp']) self.assert_dirs_equal('input', 'output/', ignore_flags=True, ignore_xattrs=True, ignore_ns=True) @requires_hardlinks @requires_gnutar def test_export_tar_strip_components_links(self): self._extract_hardlinks_setup() self.cmd('export-tar', self.repository_location + '::test', 'output.tar', '--strip-components=2') with changedir('output'): subprocess.check_call(['tar', 'xpf', '../output.tar', '--warning=no-timestamp']) assert os.stat('hardlink').st_nlink == 2 assert os.stat('subdir/hardlink').st_nlink == 2 assert os.stat('aaaa').st_nlink == 2 assert os.stat('source2').st_nlink == 2 @requires_hardlinks @requires_gnutar def test_extract_hardlinks_tar(self): self._extract_hardlinks_setup() self.cmd('export-tar', self.repository_location + '::test', 'output.tar', 'input/dir1') with changedir('output'): subprocess.check_call(['tar', 'xpf', '../output.tar', '--warning=no-timestamp']) assert os.stat('input/dir1/hardlink').st_nlink == 2 assert os.stat('input/dir1/subdir/hardlink').st_nlink == 2 assert os.stat('input/dir1/aaaa').st_nlink == 2 assert os.stat('input/dir1/source2').st_nlink == 2 def test_detect_attic_repo(self): path = make_attic_repo(self.repository_path) cmds = [ ['create', path + '::test', self.tmpdir], ['extract', path + '::test'], ['check', path], ['rename', path + '::test', 'newname'], ['list', path], ['delete', path], ['prune', path], ['info', path + '::test'], ['key', 'export', path, 'exported'], ['key', 'import', path, 'import'], ['key', 'change-passphrase', path], ['break-lock', path], ] for args in cmds: output = self.cmd(*args, fork=True, exit_code=2) assert 'Attic repository detected.' 
in output @unittest.skipUnless('binary' in BORG_EXES, 'no borg.exe available') class ArchiverTestCaseBinary(ArchiverTestCase): EXE = 'borg.exe' FORK_DEFAULT = True @unittest.skip('does not raise Exception, but sets rc==2') def test_init_parent_dirs(self): pass @unittest.skip('patches objects') def test_init_interrupt(self): pass @unittest.skip('patches objects') def test_extract_capabilities(self): pass @unittest.skip('patches objects') def test_extract_xattrs_errors(self): pass @unittest.skip('test_basic_functionality seems incompatible with fakeroot and/or the binary.') def test_basic_functionality(self): pass @unittest.skip('test_overwrite seems incompatible with fakeroot and/or the binary.') def test_overwrite(self): pass def test_fuse(self): if fakeroot_detected(): unittest.skip('test_fuse with the binary is not compatible with fakeroot') else: super().test_fuse() class ArchiverCheckTestCase(ArchiverTestCaseBase): def setUp(self): super().setUp() with patch.object(ChunkBuffer, 'BUFFER_SIZE', 10): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_src_archive('archive1') self.create_src_archive('archive2') def test_check_usage(self): output = self.cmd('check', '-v', '--progress', self.repository_location, exit_code=0) self.assert_in('Starting repository check', output) self.assert_in('Starting archive consistency check', output) self.assert_in('Checking segments', output) # reset logging to new process default to avoid need for fork=True on next check logging.getLogger('borg.output.progress').setLevel(logging.NOTSET) output = self.cmd('check', '-v', '--repository-only', self.repository_location, exit_code=0) self.assert_in('Starting repository check', output) self.assert_not_in('Starting archive consistency check', output) self.assert_not_in('Checking segments', output) output = self.cmd('check', '-v', '--archives-only', self.repository_location, exit_code=0) self.assert_not_in('Starting repository check', output) self.assert_in('Starting archive consistency check', output) output = self.cmd('check', '-v', '--archives-only', '--prefix=archive2', self.repository_location, exit_code=0) self.assert_not_in('archive1', output) output = self.cmd('check', '-v', '--archives-only', '--first=1', self.repository_location, exit_code=0) self.assert_in('archive1', output) self.assert_not_in('archive2', output) output = self.cmd('check', '-v', '--archives-only', '--last=1', self.repository_location, exit_code=0) self.assert_not_in('archive1', output) self.assert_in('archive2', output) def test_missing_file_chunk(self): archive, repository = self.open_archive('archive1') with repository: for item in archive.iter_items(): if item.path.endswith('testsuite/archiver.py'): valid_chunks = item.chunks killed_chunk = valid_chunks[-1] repository.delete(killed_chunk.id) break else: self.fail('should not happen') repository.commit(compact=False) self.cmd('check', self.repository_location, exit_code=1) output = self.cmd('check', '--repair', self.repository_location, exit_code=0) self.assert_in('New missing file chunk detected', output) self.cmd('check', self.repository_location, exit_code=0) output = self.cmd('list', '--format={health}#{path}{LF}', self.repository_location + '::archive1', exit_code=0) self.assert_in('broken#', output) # check that the file in the old archives has now a different chunk list without the killed chunk for archive_name in ('archive1', 'archive2'): archive, repository = self.open_archive(archive_name) with repository: for item in archive.iter_items(): if 
item.path.endswith('testsuite/archiver.py'): self.assert_not_equal(valid_chunks, item.chunks) self.assert_not_in(killed_chunk, item.chunks) break else: self.fail('should not happen') # do a fresh backup (that will include the killed chunk) with patch.object(ChunkBuffer, 'BUFFER_SIZE', 10): self.create_src_archive('archive3') # check should be able to heal the file now: output = self.cmd('check', '-v', '--repair', self.repository_location, exit_code=0) self.assert_in('Healed previously missing file chunk', output) self.assert_in('testsuite/archiver.py: Completely healed previously damaged file!', output) # check that the file in the old archives has the correct chunks again for archive_name in ('archive1', 'archive2'): archive, repository = self.open_archive(archive_name) with repository: for item in archive.iter_items(): if item.path.endswith('testsuite/archiver.py'): self.assert_equal(valid_chunks, item.chunks) break else: self.fail('should not happen') # list is also all-healthy again output = self.cmd('list', '--format={health}#{path}{LF}', self.repository_location + '::archive1', exit_code=0) self.assert_not_in('broken#', output) def test_missing_archive_item_chunk(self): archive, repository = self.open_archive('archive1') with repository: repository.delete(archive.metadata.items[0]) repository.commit(compact=False) self.cmd('check', self.repository_location, exit_code=1) self.cmd('check', '--repair', self.repository_location, exit_code=0) self.cmd('check', self.repository_location, exit_code=0) def test_missing_archive_metadata(self): archive, repository = self.open_archive('archive1') with repository: repository.delete(archive.id) repository.commit(compact=False) self.cmd('check', self.repository_location, exit_code=1) self.cmd('check', '--repair', self.repository_location, exit_code=0) self.cmd('check', self.repository_location, exit_code=0) def test_missing_manifest(self): archive, repository = self.open_archive('archive1') with repository: repository.delete(Manifest.MANIFEST_ID) repository.commit(compact=False) self.cmd('check', self.repository_location, exit_code=1) output = self.cmd('check', '-v', '--repair', self.repository_location, exit_code=0) self.assert_in('archive1', output) self.assert_in('archive2', output) self.cmd('check', self.repository_location, exit_code=0) def test_corrupted_manifest(self): archive, repository = self.open_archive('archive1') with repository: manifest = repository.get(Manifest.MANIFEST_ID) corrupted_manifest = manifest + b'corrupted!' repository.put(Manifest.MANIFEST_ID, corrupted_manifest) repository.commit(compact=False) self.cmd('check', self.repository_location, exit_code=1) output = self.cmd('check', '-v', '--repair', self.repository_location, exit_code=0) self.assert_in('archive1', output) self.assert_in('archive2', output) self.cmd('check', self.repository_location, exit_code=0) def test_manifest_rebuild_corrupted_chunk(self): archive, repository = self.open_archive('archive1') with repository: manifest = repository.get(Manifest.MANIFEST_ID) corrupted_manifest = manifest + b'corrupted!' repository.put(Manifest.MANIFEST_ID, corrupted_manifest) chunk = repository.get(archive.id) corrupted_chunk = chunk + b'corrupted!' 
repository.put(archive.id, corrupted_chunk) repository.commit(compact=False) self.cmd('check', self.repository_location, exit_code=1) output = self.cmd('check', '-v', '--repair', self.repository_location, exit_code=0) self.assert_in('archive2', output) self.cmd('check', self.repository_location, exit_code=0) def test_manifest_rebuild_duplicate_archive(self): archive, repository = self.open_archive('archive1') key = archive.key with repository: manifest = repository.get(Manifest.MANIFEST_ID) corrupted_manifest = manifest + b'corrupted!' repository.put(Manifest.MANIFEST_ID, corrupted_manifest) archive = msgpack.packb({ 'cmdline': [], 'items': [], 'hostname': 'foo', 'username': 'bar', 'name': 'archive1', 'time': '2016-12-15T18:49:51.849711', 'version': 1, }) archive_id = key.id_hash(archive) repository.put(archive_id, key.encrypt(archive)) repository.commit(compact=False) self.cmd('check', self.repository_location, exit_code=1) self.cmd('check', '--repair', self.repository_location, exit_code=0) output = self.cmd('list', self.repository_location) self.assert_in('archive1', output) self.assert_in('archive1.1', output) self.assert_in('archive2', output) def test_extra_chunks(self): self.cmd('check', self.repository_location, exit_code=0) with Repository(self.repository_location, exclusive=True) as repository: repository.put(b'01234567890123456789012345678901', b'xxxx') repository.commit(compact=False) self.cmd('check', self.repository_location, exit_code=1) self.cmd('check', self.repository_location, exit_code=1) self.cmd('check', '--repair', self.repository_location, exit_code=0) self.cmd('check', self.repository_location, exit_code=0) self.cmd('extract', '--dry-run', self.repository_location + '::archive1', exit_code=0) def _test_verify_data(self, *init_args): shutil.rmtree(self.repository_path) self.cmd('init', self.repository_location, *init_args) self.create_src_archive('archive1') archive, repository = self.open_archive('archive1') with repository: for item in archive.iter_items(): if item.path.endswith('testsuite/archiver.py'): chunk = item.chunks[-1] data = repository.get(chunk.id) + b'1234' repository.put(chunk.id, data) break repository.commit(compact=False) self.cmd('check', self.repository_location, exit_code=0) output = self.cmd('check', '--verify-data', self.repository_location, exit_code=1) assert bin_to_hex(chunk.id) + ', integrity error' in output # repair (heal is tested in another test) output = self.cmd('check', '--repair', '--verify-data', self.repository_location, exit_code=0) assert bin_to_hex(chunk.id) + ', integrity error' in output assert 'testsuite/archiver.py: New missing file chunk detected' in output def test_verify_data(self): self._test_verify_data('--encryption', 'repokey') def test_verify_data_unencrypted(self): self._test_verify_data('--encryption', 'none') def test_empty_repository(self): with Repository(self.repository_location, exclusive=True) as repository: for id_ in repository.list(): repository.delete(id_) repository.commit(compact=False) self.cmd('check', self.repository_location, exit_code=1) def test_attic013_acl_bug(self): # Attic up to release 0.13 contained a bug where every item unintentionally received # a b'acl'=None key-value pair. # This bug can still live on in Borg repositories (through borg upgrade). class Attic013Item: def as_dict(self): return { # These are required b'path': '1234', b'mtime': 0, b'mode': 0, b'user': b'0', b'group': b'0', b'uid': 0, b'gid': 0, # acl is the offending key. 
b'acl': None, } archive, repository = self.open_archive('archive1') with repository: manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK) with Cache(repository, key, manifest) as cache: archive = Archive(repository, key, manifest, '0.13', cache=cache, create=True) archive.items_buffer.add(Attic013Item()) archive.save() self.cmd('check', self.repository_location, exit_code=0) self.cmd('list', self.repository_location + '::0.13', exit_code=0) class ManifestAuthenticationTest(ArchiverTestCaseBase): def spoof_manifest(self, repository): with repository: _, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK) repository.put(Manifest.MANIFEST_ID, key.encrypt(msgpack.packb({ 'version': 1, 'archives': {}, 'config': {}, 'timestamp': (datetime.utcnow() + timedelta(days=1)).strftime(ISO_FORMAT), }))) repository.commit(compact=False) def test_fresh_init_tam_required(self): self.cmd('init', '--encryption=repokey', self.repository_location) repository = Repository(self.repository_path, exclusive=True) with repository: manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK) repository.put(Manifest.MANIFEST_ID, key.encrypt(msgpack.packb({ 'version': 1, 'archives': {}, 'timestamp': (datetime.utcnow() + timedelta(days=1)).strftime(ISO_FORMAT), }))) repository.commit(compact=False) with pytest.raises(TAMRequiredError): self.cmd('list', self.repository_location) def test_not_required(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_src_archive('archive1234') repository = Repository(self.repository_path, exclusive=True) with repository: shutil.rmtree(get_security_dir(bin_to_hex(repository.id))) _, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK) key.tam_required = False key.change_passphrase(key._passphrase) manifest = msgpack.unpackb(key.decrypt(None, repository.get(Manifest.MANIFEST_ID))) del manifest[b'tam'] repository.put(Manifest.MANIFEST_ID, key.encrypt(msgpack.packb(manifest))) repository.commit(compact=False) output = self.cmd('list', '--debug', self.repository_location) assert 'archive1234' in output assert 'TAM not found and not required' in output # Run upgrade self.cmd('upgrade', '--tam', self.repository_location) # Manifest must be authenticated now output = self.cmd('list', '--debug', self.repository_location) assert 'archive1234' in output assert 'TAM-verified manifest' in output # Try to spoof / modify pre-1.0.9 self.spoof_manifest(repository) # Fails with pytest.raises(TAMRequiredError): self.cmd('list', self.repository_location) # Force upgrade self.cmd('upgrade', '--tam', '--force', self.repository_location) self.cmd('list', self.repository_location) def test_disable(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_src_archive('archive1234') self.cmd('upgrade', '--disable-tam', self.repository_location) repository = Repository(self.repository_path, exclusive=True) self.spoof_manifest(repository) assert not self.cmd('list', self.repository_location) def test_disable2(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_src_archive('archive1234') repository = Repository(self.repository_path, exclusive=True) self.spoof_manifest(repository) self.cmd('upgrade', '--disable-tam', self.repository_location) assert not self.cmd('list', self.repository_location) class RemoteArchiverTestCase(ArchiverTestCase): prefix = '__testsuite__:' def open_repository(self): return RemoteRepository(Location(self.repository_location)) def 
test_remote_repo_restrict_to_path(self): # restricted to repo directory itself: with patch.object(RemoteRepository, 'extra_test_args', ['--restrict-to-path', self.repository_path]): self.cmd('init', '--encryption=repokey', self.repository_location) # restricted to repo directory itself, fail for other directories with same prefix: with patch.object(RemoteRepository, 'extra_test_args', ['--restrict-to-path', self.repository_path]): with pytest.raises(PathNotAllowed): self.cmd('init', '--encryption=repokey', self.repository_location + '_0') # restricted to a completely different path: with patch.object(RemoteRepository, 'extra_test_args', ['--restrict-to-path', '/foo']): with pytest.raises(PathNotAllowed): self.cmd('init', '--encryption=repokey', self.repository_location + '_1') path_prefix = os.path.dirname(self.repository_path) # restrict to repo directory's parent directory: with patch.object(RemoteRepository, 'extra_test_args', ['--restrict-to-path', path_prefix]): self.cmd('init', '--encryption=repokey', self.repository_location + '_2') # restrict to repo directory's parent directory and another directory: with patch.object(RemoteRepository, 'extra_test_args', ['--restrict-to-path', '/foo', '--restrict-to-path', path_prefix]): self.cmd('init', '--encryption=repokey', self.repository_location + '_3') def test_remote_repo_restrict_to_repository(self): # restricted to repo directory itself: with patch.object(RemoteRepository, 'extra_test_args', ['--restrict-to-repository', self.repository_path]): self.cmd('init', '--encryption=repokey', self.repository_location) parent_path = os.path.join(self.repository_path, '..') with patch.object(RemoteRepository, 'extra_test_args', ['--restrict-to-repository', parent_path]): with pytest.raises(PathNotAllowed): self.cmd('init', '--encryption=repokey', self.repository_location) @unittest.skip('only works locally') def test_debug_put_get_delete_obj(self): pass @unittest.skip('only works locally') def test_config(self): pass @unittest.skip('only works locally') def test_migrate_lock_alive(self): pass def test_strip_components_doesnt_leak(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_regular_file('dir/file', contents=b"test file contents 1") self.create_regular_file('dir/file2', contents=b"test file contents 2") self.create_regular_file('skipped-file1', contents=b"test file contents 3") self.create_regular_file('skipped-file2', contents=b"test file contents 4") self.create_regular_file('skipped-file3', contents=b"test file contents 5") self.cmd('create', self.repository_location + '::test', 'input') marker = 'cached responses left in RemoteRepository' with changedir('output'): res = self.cmd('extract', "--debug", self.repository_location + '::test', '--strip-components', '3') self.assert_true(marker not in res) with self.assert_creates_file('file'): res = self.cmd('extract', "--debug", self.repository_location + '::test', '--strip-components', '2') self.assert_true(marker not in res) with self.assert_creates_file('dir/file'): res = self.cmd('extract', "--debug", self.repository_location + '::test', '--strip-components', '1') self.assert_true(marker not in res) with self.assert_creates_file('input/dir/file'): res = self.cmd('extract', "--debug", self.repository_location + '::test', '--strip-components', '0') self.assert_true(marker not in res) class ArchiverCorruptionTestCase(ArchiverTestCaseBase): def setUp(self): super().setUp() self.create_test_files() self.cmd('init', '--encryption=repokey', 
self.repository_location) self.cache_path = json.loads(self.cmd('info', self.repository_location, '--json'))['cache']['path'] def corrupt(self, file, amount=1): with open(file, 'r+b') as fd: fd.seek(-amount, io.SEEK_END) corrupted = bytes(255-c for c in fd.read(amount)) fd.seek(-amount, io.SEEK_END) fd.write(corrupted) def test_cache_chunks(self): self.corrupt(os.path.join(self.cache_path, 'chunks')) if self.FORK_DEFAULT: out = self.cmd('info', self.repository_location, exit_code=2) assert 'failed integrity check' in out else: with pytest.raises(FileIntegrityError): self.cmd('info', self.repository_location) def test_cache_files(self): self.cmd('create', self.repository_location + '::test', 'input') self.corrupt(os.path.join(self.cache_path, 'files')) out = self.cmd('create', self.repository_location + '::test1', 'input') # borg warns about the corrupt files cache, but then continues without files cache. assert 'files cache is corrupted' in out def test_chunks_archive(self): self.cmd('create', self.repository_location + '::test1', 'input') # Find ID of test1 so we can corrupt it later :) target_id = self.cmd('list', self.repository_location, '--format={id}{LF}').strip() self.cmd('create', self.repository_location + '::test2', 'input') # Force cache sync, creating archive chunks of test1 and test2 in chunks.archive.d self.cmd('delete', '--cache-only', self.repository_location) self.cmd('info', self.repository_location, '--json') chunks_archive = os.path.join(self.cache_path, 'chunks.archive.d') assert len(os.listdir(chunks_archive)) == 4 # two archives, one chunks cache and one .integrity file each self.corrupt(os.path.join(chunks_archive, target_id + '.compact')) # Trigger cache sync by changing the manifest ID in the cache config config_path = os.path.join(self.cache_path, 'config') config = ConfigParser(interpolation=None) config.read(config_path) config.set('cache', 'manifest', bin_to_hex(bytes(32))) with open(config_path, 'w') as fd: config.write(fd) # Cache sync notices corrupted archive chunks, but automatically recovers. out = self.cmd('create', '-v', self.repository_location + '::test3', 'input', exit_code=1) assert 'Reading cached archive chunk index for test1' in out assert 'Cached archive chunk index of test1 is corrupted' in out assert 'Fetching and building archive index for test1' in out def test_old_version_interfered(self): # Modify the main manifest ID without touching the manifest ID in the integrity section. # This happens if a version without integrity checking modifies the cache. config_path = os.path.join(self.cache_path, 'config') config = ConfigParser(interpolation=None) config.read(config_path) config.set('cache', 'manifest', bin_to_hex(bytes(32))) with open(config_path, 'w') as fd: config.write(fd) out = self.cmd('info', self.repository_location) assert 'Cache integrity data not available: old Borg version modified the cache.' 
in out class DiffArchiverTestCase(ArchiverTestCaseBase): def test_basic_functionality(self): # Setup files for the first snapshot self.create_regular_file('empty', size=0) self.create_regular_file('file_unchanged', size=128) self.create_regular_file('file_removed', size=256) self.create_regular_file('file_removed2', size=512) self.create_regular_file('file_replaced', size=1024) os.mkdir('input/dir_replaced_with_file') os.chmod('input/dir_replaced_with_file', stat.S_IFDIR | 0o755) os.mkdir('input/dir_removed') if are_symlinks_supported(): os.mkdir('input/dir_replaced_with_link') os.symlink('input/dir_replaced_with_file', 'input/link_changed') os.symlink('input/file_unchanged', 'input/link_removed') os.symlink('input/file_removed2', 'input/link_target_removed') os.symlink('input/empty', 'input/link_target_contents_changed') os.symlink('input/empty', 'input/link_replaced_by_file') if are_hardlinks_supported(): os.link('input/file_replaced', 'input/hardlink_target_replaced') os.link('input/empty', 'input/hardlink_contents_changed') os.link('input/file_removed', 'input/hardlink_removed') os.link('input/file_removed2', 'input/hardlink_target_removed') self.cmd('init', '--encryption=repokey', self.repository_location) # Create the first snapshot self.cmd('create', self.repository_location + '::test0', 'input') # Setup files for the second snapshot self.create_regular_file('file_added', size=2048) self.create_regular_file('file_empty_added', size=0) os.unlink('input/file_replaced') self.create_regular_file('file_replaced', contents=b'0' * 4096) os.unlink('input/file_removed') os.unlink('input/file_removed2') os.rmdir('input/dir_replaced_with_file') self.create_regular_file('dir_replaced_with_file', size=8192) os.chmod('input/dir_replaced_with_file', stat.S_IFREG | 0o755) os.mkdir('input/dir_added') os.rmdir('input/dir_removed') if are_symlinks_supported(): os.rmdir('input/dir_replaced_with_link') os.symlink('input/dir_added', 'input/dir_replaced_with_link') os.unlink('input/link_changed') os.symlink('input/dir_added', 'input/link_changed') os.symlink('input/dir_added', 'input/link_added') os.unlink('input/link_replaced_by_file') self.create_regular_file('link_replaced_by_file', size=16384) os.unlink('input/link_removed') if are_hardlinks_supported(): os.unlink('input/hardlink_removed') os.link('input/file_added', 'input/hardlink_added') with open('input/empty', 'ab') as fd: fd.write(b'appended_data') # Create the second snapshot self.cmd('create', self.repository_location + '::test1a', 'input') self.cmd('create', '--chunker-params', '16,18,17,4095', self.repository_location + '::test1b', 'input') def do_asserts(output, can_compare_ids): # File contents changed (deleted and replaced with a new file) change = 'B' if can_compare_ids else '{:<19}'.format('modified') assert 'file_replaced' in output # added to debug #3494 assert '{} input/file_replaced'.format(change) in output # File unchanged assert 'input/file_unchanged' not in output # Directory replaced with a regular file if 'BORG_TESTS_IGNORE_MODES' not in os.environ: assert '[drwxr-xr-x -> -rwxr-xr-x] input/dir_replaced_with_file' in output # Basic directory cases assert 'added directory input/dir_added' in output assert 'removed directory input/dir_removed' in output if are_symlinks_supported(): # Basic symlink cases assert 'changed link input/link_changed' in output assert 'added link input/link_added' in output assert 'removed link input/link_removed' in output # Symlink replacing or being replaced assert '] input/dir_replaced_with_link' in 
output assert '] input/link_replaced_by_file' in output # Symlink target removed. Should not affect the symlink at all. assert 'input/link_target_removed' not in output # The inode has two links and the file contents changed. Borg # should notice the changes in both links. However, the symlink # pointing to the file is not changed. change = '0 B' if can_compare_ids else '{:<19}'.format('modified') assert '{} input/empty'.format(change) in output if are_hardlinks_supported(): assert '{} input/hardlink_contents_changed'.format(change) in output if are_symlinks_supported(): assert 'input/link_target_contents_changed' not in output # Added a new file and a hard link to it. Both links to the same # inode should appear as separate files. assert 'added 2.05 kB input/file_added' in output if are_hardlinks_supported(): assert 'added 2.05 kB input/hardlink_added' in output # check if a diff between non-existent and empty new file is found assert 'added 0 B input/file_empty_added' in output # The inode has two links and both of them are deleted. They should # appear as two deleted files. assert 'removed 256 B input/file_removed' in output if are_hardlinks_supported(): assert 'removed 256 B input/hardlink_removed' in output # Another link (marked previously as the source in borg) to the # same inode was removed. This should not change this link at all. if are_hardlinks_supported(): assert 'input/hardlink_target_removed' not in output # Another link (marked previously as the source in borg) to the # same inode was replaced with a new regular file. This should not # change this link at all. if are_hardlinks_supported(): assert 'input/hardlink_target_replaced' not in output def do_json_asserts(output, can_compare_ids): def get_changes(filename, data): chgsets = [j['changes'] for j in data if j['path'] == filename] assert len(chgsets) < 2 # return a flattened list of changes for given filename return [chg for chgset in chgsets for chg in chgset] # convert output to list of dicts joutput = [json.loads(line) for line in output.split('\n') if line] # File contents changed (deleted and replaced with a new file) expected = {'type': 'modified', 'added': 4096, 'removed': 1024} if can_compare_ids else {'type': 'modified'} assert expected in get_changes('input/file_replaced', joutput) # File unchanged assert not any(get_changes('input/file_unchanged', joutput)) # Directory replaced with a regular file if 'BORG_TESTS_IGNORE_MODES' not in os.environ: assert {'type': 'mode', 'old_mode': 'drwxr-xr-x', 'new_mode': '-rwxr-xr-x'} in \ get_changes('input/dir_replaced_with_file', joutput) # Basic directory cases assert {'type': 'added directory'} in get_changes('input/dir_added', joutput) assert {'type': 'removed directory'} in get_changes('input/dir_removed', joutput) if are_symlinks_supported(): # Basic symlink cases assert {'type': 'changed link'} in get_changes('input/link_changed', joutput) assert {'type': 'added link'} in get_changes('input/link_added', joutput) assert {'type': 'removed link'} in get_changes('input/link_removed', joutput) # Symlink replacing or being replaced assert any(chg['type'] == 'mode' and chg['new_mode'].startswith('l') for chg in get_changes('input/dir_replaced_with_link', joutput)) assert any(chg['type'] == 'mode' and chg['old_mode'].startswith('l') for chg in get_changes('input/link_replaced_by_file', joutput)) # Symlink target removed. Should not affect the symlink at all. 
assert not any(get_changes('input/link_target_removed', joutput)) # The inode has two links and the file contents changed. Borg # should notice the changes in both links. However, the symlink # pointing to the file is not changed. expected = {'type': 'modified', 'added': 13, 'removed': 0} if can_compare_ids else {'type': 'modified'} assert expected in get_changes('input/empty', joutput) if are_hardlinks_supported(): assert expected in get_changes('input/hardlink_contents_changed', joutput) if are_symlinks_supported(): assert not any(get_changes('input/link_target_contents_changed', joutput)) # Added a new file and a hard link to it. Both links to the same # inode should appear as separate files. assert {'type': 'added', 'size': 2048} in get_changes('input/file_added', joutput) if are_hardlinks_supported(): assert {'type': 'added', 'size': 2048} in get_changes('input/hardlink_added', joutput) # check if a diff between non-existent and empty new file is found assert {'type': 'added', 'size': 0} in get_changes('input/file_empty_added', joutput) # The inode has two links and both of them are deleted. They should # appear as two deleted files. assert {'type': 'removed', 'size': 256} in get_changes('input/file_removed', joutput) if are_hardlinks_supported(): assert {'type': 'removed', 'size': 256} in get_changes('input/hardlink_removed', joutput) # Another link (marked previously as the source in borg) to the # same inode was removed. This should not change this link at all. if are_hardlinks_supported(): assert not any(get_changes('input/hardlink_target_removed', joutput)) # Another link (marked previously as the source in borg) to the # same inode was replaced with a new regular file. This should not # change this link at all. if are_hardlinks_supported(): assert not any(get_changes('input/hardlink_target_replaced', joutput)) do_asserts(self.cmd('diff', self.repository_location + '::test0', 'test1a'), True) # We expect exit_code=1 due to the chunker params warning do_asserts(self.cmd('diff', self.repository_location + '::test0', 'test1b', exit_code=1), False) do_json_asserts(self.cmd('diff', self.repository_location + '::test0', 'test1a', '--json-lines'), True) def test_sort_option(self): self.cmd('init', '--encryption=repokey', self.repository_location) self.create_regular_file('a_file_removed', size=8) self.create_regular_file('f_file_removed', size=16) self.create_regular_file('c_file_changed', size=32) self.create_regular_file('e_file_changed', size=64) self.cmd('create', self.repository_location + '::test0', 'input') os.unlink('input/a_file_removed') os.unlink('input/f_file_removed') os.unlink('input/c_file_changed') os.unlink('input/e_file_changed') self.create_regular_file('c_file_changed', size=512) self.create_regular_file('e_file_changed', size=1024) self.create_regular_file('b_file_added', size=128) self.create_regular_file('d_file_added', size=256) self.cmd('create', self.repository_location + '::test1', 'input') output = self.cmd('diff', '--sort', self.repository_location + '::test0', 'test1') expected = [ 'a_file_removed', 'b_file_added', 'c_file_changed', 'd_file_added', 'e_file_changed', 'f_file_removed', ] assert all(x in line for x, line in zip(expected, output.splitlines())) def test_get_args(): archiver = Archiver() # everything normal: # first param is argv as produced by ssh forced command, # second param is like from SSH_ORIGINAL_COMMAND env variable args = archiver.get_args(['borg', 'serve', '--umask=0027', '--restrict-to-path=/p1', '--restrict-to-path=/p2', ], 'borg 
serve --info') assert args.func == archiver.do_serve assert args.restrict_to_paths == ['/p1', '/p2'] assert args.umask == 0o027 assert args.log_level == 'info' # similar, but with --restrict-to-repository args = archiver.get_args(['borg', 'serve', '--restrict-to-repository=/r1', '--restrict-to-repository=/r2', ], 'borg serve --info --umask=0027') assert args.restrict_to_repositories == ['/r1', '/r2'] # trying to cheat - break out of path restriction args = archiver.get_args(['borg', 'serve', '--restrict-to-path=/p1', '--restrict-to-path=/p2', ], 'borg serve --restrict-to-path=/') assert args.restrict_to_paths == ['/p1', '/p2'] # trying to cheat - break out of repository restriction args = archiver.get_args(['borg', 'serve', '--restrict-to-repository=/r1', '--restrict-to-repository=/r2', ], 'borg serve --restrict-to-repository=/') assert args.restrict_to_repositories == ['/r1', '/r2'] # trying to cheat - break below repository restriction args = archiver.get_args(['borg', 'serve', '--restrict-to-repository=/r1', '--restrict-to-repository=/r2', ], 'borg serve --restrict-to-repository=/r1/below') assert args.restrict_to_repositories == ['/r1', '/r2'] # trying to cheat - try to execute different subcommand args = archiver.get_args(['borg', 'serve', '--restrict-to-path=/p1', '--restrict-to-path=/p2', ], 'borg init --encryption=repokey /') assert args.func == archiver.do_serve # Check that environment variables in the forced command don't cause issues. If the command # were not forced, environment variables would be interpreted by the shell, but this does not # happen for forced commands - we get the verbatim command line and need to deal with env vars. args = archiver.get_args(['borg', 'serve', ], 'BORG_FOO=bar borg serve --info') assert args.func == archiver.do_serve def test_chunk_content_equal(): def ccc(a, b): chunks_a = [data for data in a] chunks_b = [data for data in b] compare1 = ItemDiff._chunk_content_equal(iter(chunks_a), iter(chunks_b)) compare2 = ItemDiff._chunk_content_equal(iter(chunks_b), iter(chunks_a)) assert compare1 == compare2 return compare1 assert ccc([ b'1234', b'567A', b'bC' ], [ b'1', b'23', b'4567A', b'b', b'C' ]) # one iterator exhausted before the other assert not ccc([ b'12345', ], [ b'1234', b'56' ]) # content mismatch assert not ccc([ b'1234', b'65' ], [ b'1234', b'56' ]) # first is the prefix of second assert not ccc([ b'1234', b'56' ], [ b'1234', b'565' ]) class TestBuildFilter: @staticmethod def peek_and_store_hardlink_masters(item, matched): pass def test_basic(self): matcher = PatternMatcher() matcher.add([parse_pattern('included')], IECommand.Include) filter = Archiver.build_filter(matcher, self.peek_and_store_hardlink_masters, 0) assert filter(Item(path='included')) assert filter(Item(path='included/file')) assert not filter(Item(path='something else')) def test_empty(self): matcher = PatternMatcher(fallback=True) filter = Archiver.build_filter(matcher, self.peek_and_store_hardlink_masters, 0) assert filter(Item(path='anything')) def test_strip_components(self): matcher = PatternMatcher(fallback=True) filter = Archiver.build_filter(matcher, self.peek_and_store_hardlink_masters, strip_components=1) assert not filter(Item(path='shallow')) assert not filter(Item(path='shallow/')) # can this even happen? paths are normalized... 
assert filter(Item(path='deep enough/file')) assert filter(Item(path='something/dir/file')) class TestCommonOptions: @staticmethod def define_common_options(add_common_option): add_common_option('-h', '--help', action='help', help='show this help message and exit') add_common_option('--critical', dest='log_level', help='foo', action='store_const', const='critical', default='warning') add_common_option('--error', dest='log_level', help='foo', action='store_const', const='error', default='warning') add_common_option('--append', dest='append', help='foo', action='append', metavar='TOPIC', default=[]) add_common_option('-p', '--progress', dest='progress', action='store_true', help='foo') add_common_option('--lock-wait', dest='lock_wait', type=int, metavar='N', default=1, help='(default: %(default)d).') @pytest.fixture def basic_parser(self): parser = argparse.ArgumentParser(prog='test', description='test parser', add_help=False) parser.common_options = Archiver.CommonOptions(self.define_common_options, suffix_precedence=('_level0', '_level1')) return parser @pytest.fixture def subparsers(self, basic_parser): if sys.version_info >= (3, 7): # py37 pre-release defaults to unwanted required=True, in 3.7.0+ it was fixed to =False return basic_parser.add_subparsers(title='required arguments', metavar='<command>', required=False) else: # py36 does not support required=... argument (but behaves like required=False). # note: use below call for 3.6 and 3.7 when there are no alphas/betas/RCs of 3.7.0 around any more. return basic_parser.add_subparsers(title='required arguments', metavar='<command>') @pytest.fixture def parser(self, basic_parser): basic_parser.common_options.add_common_group(basic_parser, '_level0', provide_defaults=True) return basic_parser @pytest.fixture def common_parser(self, parser): common_parser = argparse.ArgumentParser(add_help=False, prog='test') parser.common_options.add_common_group(common_parser, '_level1') return common_parser @pytest.fixture def parse_vars_from_line(self, parser, subparsers, common_parser): subparser = subparsers.add_parser('subcommand', parents=[common_parser], add_help=False, description='foo', epilog='bar', help='baz', formatter_class=argparse.RawDescriptionHelpFormatter) subparser.set_defaults(func=1234) subparser.add_argument('--append-only', dest='append_only', action='store_true') def parse_vars_from_line(*line): print(line) args = parser.parse_args(line) parser.common_options.resolve(args) return vars(args) return parse_vars_from_line def test_simple(self, parse_vars_from_line): assert parse_vars_from_line('--error') == { 'append': [], 'lock_wait': 1, 'log_level': 'error', 'progress': False } assert parse_vars_from_line('--error', 'subcommand', '--critical') == { 'append': [], 'lock_wait': 1, 'log_level': 'critical', 'progress': False, 'append_only': False, 'func': 1234, } with pytest.raises(SystemExit): parse_vars_from_line('--append-only', 'subcommand') assert parse_vars_from_line('--append=foo', '--append', 'bar', 'subcommand', '--append', 'baz') == { 'append': ['foo', 'bar', 'baz'], 'lock_wait': 1, 'log_level': 'warning', 'progress': False, 'append_only': False, 'func': 1234, } @pytest.mark.parametrize('position', ('before', 'after', 'both')) @pytest.mark.parametrize('flag,args_key,args_value', ( ('-p', 'progress', True), ('--lock-wait=3', 'lock_wait', 3), )) def test_flag_position_independence(self, parse_vars_from_line, position, flag, args_key, args_value): line = [] if position in ('before', 'both'): line.append(flag) 
line.append('subcommand') if position in ('after', 'both'): line.append(flag) result = { 'append': [], 'lock_wait': 1, 'log_level': 'warning', 'progress': False, 'append_only': False, 'func': 1234, } result[args_key] = args_value assert parse_vars_from_line(*line) == result def test_parse_storage_quota(): assert parse_storage_quota('50M') == 50 * 1000**2 with pytest.raises(argparse.ArgumentTypeError): parse_storage_quota('5M') def get_all_parsers(): """ Return dict mapping command to parser. """ parser = Archiver(prog='borg').build_parser() borgfs_parser = Archiver(prog='borgfs').build_parser() parsers = {} def discover_level(prefix, parser, Archiver, extra_choices=None): choices = {} for action in parser._actions: if action.choices is not None and 'SubParsersAction' in str(action.__class__): for cmd, parser in action.choices.items(): choices[prefix + cmd] = parser if extra_choices is not None: choices.update(extra_choices) if prefix and not choices: return for command, parser in sorted(choices.items()): discover_level(command + " ", parser, Archiver) parsers[command] = parser discover_level("", parser, Archiver, {'borgfs': borgfs_parser}) return parsers @pytest.mark.parametrize('command, parser', list(get_all_parsers().items())) def test_help_formatting(command, parser): if isinstance(parser.epilog, RstToTextLazy): assert parser.epilog.rst @pytest.mark.parametrize('topic, helptext', list(Archiver.helptext.items())) def test_help_formatting_helptexts(topic, helptext): assert str(rst_to_terminal(helptext))
test_aes_counter_uniqueness_passphrase
worker_local.rs
use crate::registry::{Registry, WorkerThread}; use std::fmt; use std::ops::Deref; use std::sync::Arc; #[repr(align(64))] #[derive(Debug)] struct CacheAligned<T>(T); /// Holds worker-local values for each thread in a thread pool. /// You can only access the worker local value through the Deref impl /// on the thread pool it was constructed on. It will panic otherwise. pub struct WorkerLocal<T> { locals: Vec<CacheAligned<T>>, registry: Arc<Registry>, } /// We prevent concurrent access to the underlying value in the /// Deref impl, thus any values safe to send across threads can /// be used with WorkerLocal. unsafe impl<T: Send> Sync for WorkerLocal<T> {} impl<T> WorkerLocal<T> { /// Creates a new worker local where the `initial` closure computes the /// value this worker local should take for each thread in the thread pool. #[inline] pub fn
<F: FnMut(usize) -> T>(mut initial: F) -> WorkerLocal<T> { let registry = Registry::current(); WorkerLocal { locals: (0..registry.num_threads()) .map(|i| CacheAligned(initial(i))) .collect(), registry, } } /// Returns the worker-local value for each thread #[inline] pub fn into_inner(self) -> Vec<T> { self.locals.into_iter().map(|c| c.0).collect() } fn current(&self) -> &T { unsafe { let worker_thread = WorkerThread::current(); if worker_thread.is_null() || &*(*worker_thread).registry as *const _ != &*self.registry as *const _ { panic!("WorkerLocal can only be used on the thread pool it was created on") } &self.locals[(*worker_thread).index].0 } } } impl<T> WorkerLocal<Vec<T>> { /// Joins the elements of all the worker locals into one Vec pub fn join(self) -> Vec<T> { self.into_inner().into_iter().flat_map(|v| v).collect() } } impl<T: fmt::Debug> fmt::Debug for WorkerLocal<T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("WorkerLocal") .field("registry", &self.registry.id()) .finish() } } impl<T> Deref for WorkerLocal<T> { type Target = T; #[inline(always)] fn deref(&self) -> &T { self.current() } }
new
vue-select.js
!function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&&define.amd?define([],e):"object"==typeof exports?exports.VueSelect=e():t.VueSelect=e()}("undefined"!=typeof self?self:this,function(){return function(t){var e={};function n(o){if(e[o])return e[o].exports;var i=e[o]={i:o,l:!1,exports:{}};return t[o].call(i.exports,i,i.exports,n),i.l=!0,i.exports}return n.m=t,n.c=e,n.d=function(t,e,o){n.o(t,e)||Object.defineProperty(t,e,{enumerable:!0,get:o})},n.r=function(t){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(t,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(t,"__esModule",{value:!0})},n.t=function(t,e){if(1&e&&(t=n(t)),8&e)return t;if(4&e&&"object"==typeof t&&t&&t.__esModule)return t;var o=Object.create(null);if(n.r(o),Object.defineProperty(o,"default",{enumerable:!0,value:t}),2&e&&"string"!=typeof t)for(var i in t)n.d(o,i,function(e){return t[e]}.bind(null,i));return o},n.n=function(t){var e=t&&t.__esModule?function(){return t.default}:function(){return t};return n.d(e,"a",e),e},n.o=function(t,e){return Object.prototype.hasOwnProperty.call(t,e)},n.p="/",n(n.s=9)}([function(t,e){function n(t){return(n="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t})(t)}function o(e){return"function"==typeof Symbol&&"symbol"===n(Symbol.iterator)?t.exports=o=function(t){return n(t)}:t.exports=o=function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":n(t)},o(e)}t.exports=o},function(t,e,n){},function(t,e){t.exports=function(t,e,n){return e in t?Object.defineProperty(t,e,{value:n,enumerable:!0,configurable:!0,writable:!0}):t[e]=n,t}},function(t,e,n){var o=n(5),i=n(6),s=n(7);t.exports=function(t){return o(t)||i(t)||s()}},function(t,e,n){var o=n(2);t.exports=function(t){for(var e=1;e<arguments.length;e++){var n=null!=arguments[e]?arguments[e]:{},i=Object.keys(n);"function"==typeof Object.getOwnPropertySymbols&&(i=i.concat(Object.getOwnPropertySymbols(n).filter(function(t){return Object.getOwnPropertyDescriptor(n,t).enumerable}))),i.forEach(function(e){o(t,e,n[e])})}return t}},function(t,e){t.exports=function(t){if(Array.isArray(t)){for(var e=0,n=new Array(t.length);e<t.length;e++)n[e]=t[e];return n}}},function(t,e){t.exports=function(t){if(Symbol.iterator in Object(t)||"[object Arguments]"===Object.prototype.toString.call(t))return Array.from(t)}},function(t,e){t.exports=function(){throw new TypeError("Invalid attempt to spread non-iterable instance")}},function(t,e,n){"use strict";var o=n(1);n.n(o).a},function(t,e,n){"use strict";n.r(e);var o=n(3),i=n.n(o),s=n(2),r=n.n(s),a=n(0),l=n.n(a),c=n(4),u=n.n(c),h={watch:{typeAheadPointer:function(){this.maybeAdjustScroll()}},methods:{maybeAdjustScroll:function(){var t=this.pixelsToPointerTop(),e=this.pixelsToPointerBottom();return t<=this.viewport().top?this.scrollTo(t):e>=this.viewport().bottom?this.scrollTo(this.viewport().top+this.pointerHeight()):void 0},pixelsToPointerTop:function(){var t=0;if(this.$refs.dropdownMenu)for(var e=0;e<this.typeAheadPointer;e++)t+=this.$refs.dropdownMenu.children[e].offsetHeight;return t},pixelsToPointerBottom:function(){return this.pixelsToPointerTop()+this.pointerHeight()},pointerHeight:function(){var t=!!this.$refs.dropdownMenu&&this.$refs.dropdownMenu.children[this.typeAheadPointer];return 
t?t.offsetHeight:0},viewport:function(){return{top:this.$refs.dropdownMenu?this.$refs.dropdownMenu.scrollTop:0,bottom:this.$refs.dropdownMenu?this.$refs.dropdownMenu.offsetHeight+this.$refs.dropdownMenu.scrollTop:0}},scrollTo:function(t){return this.$refs.dropdownMenu?this.$refs.dropdownMenu.scrollTop=t:null}}},p={data:function(){return{typeAheadPointer:-1}},watch:{filteredOptions:function(){this.typeAheadPointer=0}},methods:{typeAheadUp:function(){this.typeAheadPointer>0&&(this.typeAheadPointer--,this.maybeAdjustScroll&&this.maybeAdjustScroll())},typeAheadDown:function(){this.typeAheadPointer<this.filteredOptions.length-1&&(this.typeAheadPointer++,this.maybeAdjustScroll&&this.maybeAdjustScroll())},typeAheadSelect:function(){this.filteredOptions[this.typeAheadPointer]?this.select(this.filteredOptions[this.typeAheadPointer]):this.taggable&&this.search.length&&this.select(this.search),this.clearSearchOnSelect&&(this.search="")}}},d={props:{loading:{type:Boolean,default:!1}},data:function(){return{mutableLoading:!1}},watch:{search:function(){this.$emit("search",this.search,this.toggleLoading)},loading:function(t){this.mutableLoading=t}},methods:{toggleLoading:function(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:null;return this.mutableLoading=null==t?!this.mutableLoading:t}}};function f(t,e,n,o,i,s,r,a){var l,c="function"==typeof t?t.options:t;if(e&&(c.render=e,c.staticRenderFns=n,c._compiled=!0),o&&(c.functional=!0),s&&(c._scopeId="data-v-"+s),r?(l=function(t){(t=t||this.$vnode&&this.$vnode.ssrContext||this.parent&&this.parent.$vnode&&this.parent.$vnode.ssrContext)||"undefined"==typeof __VUE_SSR_CONTEXT__||(t=__VUE_SSR_CONTEXT__),i&&i.call(this,t),t&&t._registeredComponents&&t._registeredComponents.add(r)},c._ssrRegister=l):i&&(l=a?function(){i.call(this,this.$root.$options.shadowRoot)}:i),l)if(c.functional){c._injectStyles=l;var u=c.render;c.render=function(t,e){return l.call(e),u(t,e)}}else{var h=c.beforeCreate;c.beforeCreate=h?[].concat(h,l):[l]}return{exports:t,options:c}}var y={Deselect:f({},function(){var t=this.$createElement,e=this._self._c||t;return e("svg",{attrs:{xmlns:"http://www.w3.org/2000/svg",width:"10",height:"10"}},[e("path",{attrs:{d:"M6.895455 5l2.842897-2.842898c.348864-.348863.348864-.914488 0-1.263636L9.106534.261648c-.348864-.348864-.914489-.348864-1.263636 0L5 3.104545 2.157102.261648c-.348863-.348864-.914488-.348864-1.263636 0L.261648.893466c-.348864.348864-.348864.914489 0 1.263636L3.104545 5 .261648 7.842898c-.348864.348863-.348864.914488 0 1.263636l.631818.631818c.348864.348864.914773.348864 1.263636 0L5 6.895455l2.842898 2.842897c.348863.348864.914772.348864 1.263636 0l.631818-.631818c.348864-.348864.348864-.914489 0-1.263636L6.895455 5z"}})])},[],!1,null,null,null).exports,OpenIndicator:f({},function(){var t=this.$createElement,e=this._self._c||t;return e("svg",{attrs:{xmlns:"http://www.w3.org/2000/svg",width:"14",height:"10"}},[e("path",{attrs:{d:"M9.211364 7.59931l4.48338-4.867229c.407008-.441854.407008-1.158247 0-1.60046l-.73712-.80023c-.407008-.441854-1.066904-.441854-1.474243 0L7 5.198617 2.51662.33139c-.407008-.441853-1.066904-.441853-1.474243 0l-.737121.80023c-.407008.441854-.407008 1.158248 0 1.600461l4.48338 4.867228L7 
10l2.211364-2.40069z"}})])},[],!1,null,null,null).exports},b={components:u()({},y),mixins:[h,p,d],props:{value:{},components:{type:Object,default:function(){return{}}},options:{type:Array,default:function(){return[]}},disabled:{type:Boolean,default:!1},clearable:{type:Boolean,default:!0},searchable:{type:Boolean,default:!0},multiple:{type:Boolean,default:!1},placeholder:{type:String,default:""},transition:{type:String,default:"vs__fade"},clearSearchOnSelect:{type:Boolean,default:!0},deselectable:{type:Boolean,default:!1},closeOnSelect:{type:Boolean,default:!0},label:{type:String,default:"label"},autocomplete:{type:String,default:"off"},reduce:{type:Function,default:function(t){return t}},getOptionLabel:{type:Function,default:function(t){if("object"===l()(t)){if(!t.hasOwnProperty(this.label))return;return t[this.label]}return t}},onTab:{type:Function,default:function(){this.selectOnTab&&this.typeAheadSelect()}},taggable:{type:Boolean,default:!1},tabindex:{type:Number,default:null},pushTags:{type:Boolean,default:!1},filterable:{type:Boolean,default:!0},filterBy:{type:Function,default:function(t,e,n){return(e||"").toLowerCase().indexOf(n.toLowerCase())>-1}},filter:{type:Function,default:function(t,e){var n=this;return t.filter(function(t){var o=n.getOptionLabel(t);return"number"==typeof o&&(o=o.toString()),n.filterBy(t,o,e)})}},createOption:{type:Function,default:function(t){return"object"===l()(this.optionList[0])&&(t=r()({},this.label,t)),this.$emit("option:created",t),t}},resetOnOptionsChange:{type:Boolean,default:!1},noDrop:{type:Boolean,default:!1},inputId:{type:String},dir:{type:String,default:"auto"},selectOnTab:{type:Boolean,default:!1},searchInputQuerySelector:{type:String,default:"[type=search]"}},data:function(){return{search:"",open:!1,pushedTags:[],_value:[]}},watch:{options:function(t){!this.taggable&&this.resetOnOptionsChange&&this.clearSelection()},multiple:function(){this.clearSelection()}},created:function(){var t=this;this.mutableLoading=this.loading,this.$options.propsData.hasOwnProperty("reduce")&&this.value&&(Array.isArray(this.value)?this.$data._value=this.value.map(function(e){return t.findOptionFromReducedValue(e)}):this.$data._value=this.findOptionFromReducedValue(this.value)),this.$on("option:created",this.maybePushTag)},methods:{select:function(t){this.isOptionSelected(t)&&this.deselectable?this.deselect(t):(this.taggable&&!this.optionExists(t)&&(t=this.createOption(t)),this.multiple&&(t=this.selectedValue.concat(t)),this.updateValue(t)),this.onAfterSelect(t)},deselect:function(t){var e=this;this.updateValue(this.selectedValue.filter(function(n){return!e.optionComparator(n,t)}))},clearSelection:function(){this.updateValue(this.multiple?[]:null)},onAfterSelect:function(t){this.closeOnSelect&&(this.open=!this.open,this.searchEl.blur()),this.clearSearchOnSelect&&(this.search="")},updateValue:function(t){var e=this;this.isTrackingValues&&(this.$data._value=t),null!==t&&(t=Array.isArray(t)?t.map(function(t){return e.reduce(t)}):this.reduce(t)),this.$emit("input",t)},toggleDropdown:function(t){var e=t.target,n=[this.$el,this.searchEl,this.$refs.toggle.$el];void 0!==this.$refs.openIndicator&&n.push.apply(n,[this.$refs.openIndicator.$el].concat(i()(Array.prototype.slice.call(this.$refs.openIndicator.$el.childNodes)))),(n.indexOf(e)>-1||e.classList.contains("vs__selected"))&&(this.open?this.searchEl.blur():this.disabled||(this.open=!0,this.searchEl.focus()))},isOptionSelected:function(t){var e=this;return this.selectedValue.some(function(n){return 
e.optionComparator(n,t)})},optionComparator:function(t,e){if("object"!==l()(t)&&"object"!==l()(e)){if(t===e)return!0}else{if(t===this.reduce(e))return!0;if(this.getOptionLabel(t)===this.getOptionLabel(e)||this.getOptionLabel(t)===e)return!0;if(this.reduce(t)===this.reduce(e))return!0}return!1},findOptionFromReducedValue:function(t){var e=this;return this.options.find(function(n){return JSON.stringify(e.reduce(n))===JSON.stringify(t)})||t},closeSearchOptions:function(){this.open=!1,this.$emit("search:blur")},maybeDeleteValue:function(){if(!this.searchEl.value.length&&this.selectedValue&&this.clearable){var t=null;this.multiple&&(t=i()(this.selectedValue.slice(0,this.selectedValue.length-1))),this.updateValue(t)}},optionExists:function(t){var e=this;return this.optionList.some(function(n){return"object"===l()(n)&&e.getOptionLabel(n)===t||n===t})},normalizeOptionForSlot:function(t){return"object"===l()(t)?t:r()({},this.label,t)},maybePushTag:function(t){this.pushTags&&this.pushedTags.push(t)},onEscape:function(){this.search.length?this.search="":this.searchEl.blur()},onSearchBlur:function(){if(!this.mousedown||this.searching)return this.clearSearchOnBlur&&(this.search=""),void this.closeSearchOptions();this.mousedown=!1,0!==this.search.length||0!==this.options.length||this.closeSearchOptions()},onSearchFocus:function(){this.open=!0,this.$emit("search:focus")},onMousedown:function(){this.mousedown=!0},onMouseUp:function(){this.mousedown=!1},onSearchKeyDown:function(t){switch(t.keyCode){case 8:return this.maybeDeleteValue();case 9:return this.onTab()}},onSearchKeyUp:function(t){switch(t.keyCode){case 27:return this.onEscape();case 38:return t.preventDefault(),this.typeAheadUp();case 40:return t.preventDefault(),this.typeAheadDown();case 13:return t.preventDefault(),this.typeAheadSelect()}}},computed:{isTrackingValues:function(){return void 0===this.value||this.$options.propsData.hasOwnProperty("reduce")},selectedValue:function(){var t=this.value;return this.isTrackingValues&&(t=this.$data._value),t?[].concat(t):[]},optionList:function(){return this.options.concat(this.pushedTags)},searchEl:function(){return this.$scopedSlots.search?this.$refs.selectedOptions.querySelector(this.searchInputQuerySelector):this.$refs.search},scope:function(){var t=this;return{search:{attributes:{disabled:this.disabled,placeholder:this.searchPlaceholder,tabindex:this.tabindex,readonly:!this.searchable,id:this.inputId,"aria-expanded":this.dropdownOpen,"aria-label":"Search for option",ref:"search",role:"combobox",type:"search",autocomplete:"off",value:this.search},events:{keydown:this.onSearchKeyDown,keyup:this.onSearchKeyUp,blur:this.onSearchBlur,focus:this.onSearchFocus,input:function(e){return t.search=e.target.value}}},spinner:{loading:this.mutableLoading},openIndicator:{attributes:{ref:"openIndicator",role:"presentation",class:"vs__open-indicator"}}}},childComponents:function(){return u()({},y,this.components)},stateClasses:function(){return{"vs--open":this.dropdownOpen,"vs--single":!this.multiple,"vs--searching":this.searching&&!this.noDrop,"vs--searchable":this.searchable&&!this.noDrop,"vs--unsearchable":!this.searchable,"vs--loading":this.mutableLoading,"vs--disabled":this.disabled}},clearSearchOnBlur:function(){return this.clearSearchOnSelect&&!this.multiple},searching:function(){return!!this.search},dropdownOpen:function(){return!this.noDrop&&(this.open&&!this.mutableLoading)},searchPlaceholder:function(){if(this.isValueEmpty&&this.placeholder)return this.placeholder},filteredOptions:function(){var 
t=[].concat(this.optionList);if(!this.filterable&&!this.taggable)return t;var e=this.search.length?this.filter(t,this.search,this):t;return this.taggable&&this.search.length&&!this.optionExists(this.search)&&e.unshift(this.search),e},isValueEmpty:function(){return 0===this.selectedValue.length},showClearButton:function(){return!this.multiple&&this.clearable&&!this.open&&!this.isValueEmpty}}},m=(n(8),f(b,function(){var t=this,e=t.$createElement,n=t._self._c||e;return n("div",{staticClass:"v-select",class:t.stateClasses,attrs:{dir:t.dir}},[n("div",{ref:"toggle",staticClass:"vs__dropdown-toggle",on:{mousedown:function(e){return e.preventDefault(),t.toggleDropdown(e)}}},[n("div",{ref:"selectedOptions",staticClass:"vs__selected-options"},[t._l(t.selectedValue,function(e){return t._t("selected-option-container",[n("span",{key:e.index,staticClass:"vs__selected"},[t._t("selected-option",[t._v("\n "+t._s(t.getOptionLabel(e))+"\n ")],null,t.normalizeOptionForSlot(e)),t._v(" "),t.multiple?n("button",{staticClass:"vs__deselect",attrs:{disabled:t.disabled,type:"button","aria-label":"Deselect option"},on:{click:function(n){return t.deselect(e)}}},[n(t.childComponents.Deselect,{tag:"component"})],1):t._e()],2)],{option:t.normalizeOptionForSlot(e),deselect:t.deselect,multiple:t.multiple,disabled:t.disabled})}),t._v(" "),t._t("search",[n("input",t._g(t._b({staticClass:"vs__search"},"input",t.scope.search.attributes,!1),t.scope.search.events))],null,t.scope.search)],2),t._v(" "),n("div",{staticClass:"vs__actions"},[n("button",{directives:[{name:"show",rawName:"v-show",value:t.showClearButton,expression:"showClearButton"}],staticClass:"vs__clear",attrs:{disabled:t.disabled,type:"button",title:"Clear selection"},on:{click:t.clearSelection}},[n(t.childComponents.Deselect,{tag:"component"})],1),t._v(" "),t._t("open-indicator",[t.noDrop?t._e():n(t.childComponents.OpenIndicator,t._b({tag:"component"},"component",t.scope.openIndicator.attributes,!1))],null,t.scope.openIndicator),t._v(" "),t._t("spinner",[n("div",{directives:[{name:"show",rawName:"v-show",value:t.mutableLoading,expression:"mutableLoading"}],staticClass:"vs__spinner"},[t._v("Loading...")])],null,t.scope.spinner)],2)]),t._v(" "),n("transition",{attrs:{name:t.transition}},[t.dropdownOpen?n("ul",{ref:"dropdownMenu",staticClass:"vs__dropdown-menu",attrs:{role:"listbox"},on:{mousedown:t.onMousedown,mouseup:t.onMouseUp}},[t._l(t.filteredOptions,function(e,o){return n("li",{key:o,staticClass:"vs__dropdown-option",class:{"vs__dropdown-option--selected":t.isOptionSelected(e),"vs__dropdown-option--highlight":o===t.typeAheadPointer},attrs:{role:"option"},on:{mouseover:function(e){t.typeAheadPointer=o},mousedown:function(n){return n.preventDefault(),n.stopPropagation(),t.select(e)}}},[t._t("option",[t._v("\n "+t._s(t.getOptionLabel(e))+"\n ")],null,t.normalizeOptionForSlot(e))],2)}),t._v(" "),t.filteredOptions.length?t._e():n("li",{staticClass:"vs__no-options",on:{mousedown:function(t){t.stopPropagation()}}},[t._t("no-options",[t._v("Sorry, no matching options.")])],2)],2):t._e()])],1)},[],!1,null,null,null).exports),g={ajax:d,pointer:p,pointerScroll:h};n.d(e,"VueSelect",function(){return m}),n.d(e,"mixins",function(){return g});e.default=m}])}); //# sourceMappingURL=vue-select.js.map
zz_eventsubscription_types.go
/* Copyright 2021 The Crossplane Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // Code generated by terrajet. DO NOT EDIT. package v1alpha2 import ( metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime/schema" v1 "github.com/crossplane/crossplane-runtime/apis/common/v1" ) type EventSubscriptionObservation struct { Arn *string `json:"arn,omitempty" tf:"arn,omitempty"` CustomerAwsID *string `json:"customerAwsId,omitempty" tf:"customer_aws_id,omitempty"` ID *string `json:"id,omitempty" tf:"id,omitempty"` TagsAll map[string]*string `json:"tagsAll,omitempty" tf:"tags_all,omitempty"` } type EventSubscriptionParameters struct { // +kubebuilder:validation:Optional Enabled *bool `json:"enabled,omitempty" tf:"enabled,omitempty"` // +kubebuilder:validation:Optional EventCategories []*string `json:"eventCategories,omitempty" tf:"event_categories,omitempty"` // Region is the region you'd like your resource to be created in. // +terrajet:crd:field:TFTag=- // +kubebuilder:validation:Required Region *string `json:"region" tf:"-"` // +kubebuilder:validation:Required SnsTopicArn *string `json:"snsTopicArn" tf:"sns_topic_arn,omitempty"` // +kubebuilder:validation:Optional SourceIds []*string `json:"sourceIds,omitempty" tf:"source_ids,omitempty"` // +kubebuilder:validation:Optional SourceType *string `json:"sourceType,omitempty" tf:"source_type,omitempty"` // +kubebuilder:validation:Optional Tags map[string]*string `json:"tags,omitempty" tf:"tags,omitempty"` } // EventSubscriptionSpec defines the desired state of EventSubscription type EventSubscriptionSpec struct { v1.ResourceSpec `json:",inline"` ForProvider EventSubscriptionParameters `json:"forProvider"` } // EventSubscriptionStatus defines the observed state of EventSubscription. type EventSubscriptionStatus struct { v1.ResourceStatus `json:",inline"` AtProvider EventSubscriptionObservation `json:"atProvider,omitempty"` } // +kubebuilder:object:root=true // EventSubscription is the Schema for the EventSubscriptions API // +kubebuilder:printcolumn:name="READY",type="string",JSONPath=".status.conditions[?(@.type=='Ready')].status" // +kubebuilder:printcolumn:name="SYNCED",type="string",JSONPath=".status.conditions[?(@.type=='Synced')].status" // +kubebuilder:printcolumn:name="EXTERNAL-NAME",type="string",JSONPath=".metadata.annotations.crossplane\\.io/external-name" // +kubebuilder:printcolumn:name="AGE",type="date",JSONPath=".metadata.creationTimestamp" // +kubebuilder:subresource:status // +kubebuilder:resource:scope=Cluster,categories={crossplane,managed,awsjet} type EventSubscription struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata,omitempty"` Spec EventSubscriptionSpec `json:"spec"` Status EventSubscriptionStatus `json:"status,omitempty"` } // +kubebuilder:object:root=true // EventSubscriptionList contains a list of EventSubscriptions type EventSubscriptionList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata,omitempty"` Items []EventSubscription `json:"items"` } // Repository type metadata.
var ( EventSubscription_Kind = "EventSubscription" EventSubscription_GroupKind = schema.GroupKind{Group: CRDGroup, Kind: EventSubscription_Kind}.String() EventSubscription_KindAPIVersion = EventSubscription_Kind + "." + CRDGroupVersion.String() EventSubscription_GroupVersionKind = CRDGroupVersion.WithKind(EventSubscription_Kind) ) func init() { SchemeBuilder.Register(&EventSubscription{}, &EventSubscriptionList{}) }
config-empty.js
// Rename this file to config.js // Add your keys // Add config.js to your .gitignore // Load with: // var config = require('./config.js'); module.exports = { client_id: '',
}
client_secret: '', access_token: '', api_url: 'https://botsin.space/api/v1/' // optional, defaults to https://mastodon.social/api/v1/
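A minimal usage sketch for the config above (an illustration, not part of the original file): it assumes the file has been renamed to config.js with real keys filled in, and that Node 18+ is used so the global fetch API is available; the verify_credentials call is just one example of a standard Mastodon API request.

// Illustrative only: assumes ./config.js exists and access_token is set.
var config = require('./config.js');

fetch(config.api_url + 'accounts/verify_credentials', {
    headers: { 'Authorization': 'Bearer ' + config.access_token }
})
    .then(function (res) { return res.json(); })
    .then(function (account) { console.log('Authenticated as', account.username); })
    .catch(function (err) { console.error('Request failed:', err); });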
setup.js
/* * Copyright 2018 Coveros, Inc. * * This file is part of Gherkin Builder. * * Gherkin Builder is licensed under the Apache License, Version * 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy * of the License at *
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ if (typeof String.prototype.startsWith !== 'function') { String.prototype.startsWith = function(str) { return this.slice(0, str.length) === str; }; } if (typeof String.prototype.endsWith !== 'function') { String.prototype.endsWith = function(str) { return this.slice(-str.length) === str; }; } if (typeof String.prototype.stripTags !== 'function') { String.prototype.stripTags = function() { var tmp = document.createElement("DIV"); tmp.innerHTML = this; return tmp.textContent || tmp.innerText || ""; } } function rand(length) { var text = ""; var possible = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; for (var i = 0; i < length; i++) text += possible.charAt(Math.floor(Math.random() * possible.length)); return text; } function resizeInput() { // will resize our inputs to the input value size $(this).attr('size', $(this).val().length ? $(this).val().length : $(this).attr('placeholder') ? $(this).attr('placeholder').length : '20'); } $(document).ready(function() { // setup the text areas to make easily readable $('textarea').attr('rows', '1'); makeDynamic(); fillTag($('#featTag')); // setup our buttons $('#addBackgroundStep').click(function() { addTestStep(this); }).button(); $('#addScenario').click(function() { addScenario(); $('.required').each(function() { checkRequired($(this)); }); }).button(); $('#exportFile').click(function() { download(); }).button().button("disable"); $('#exportJIRA').click(function() { getJIRACreds(); }).button().button("disable"); $('button[name=linkButton]').button().button("disable"); });
* http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on
expand.rs
// Copyright (c) The Diem Core Contributors // Copyright (c) The Move Contributors // SPDX-License-Identifier: Apache-2.0 use super::core::{self, Context}; use crate::{ diag, expansion::ast::Value_, naming::ast::{BuiltinTypeName_, FunctionSignature, Type, TypeName_, Type_}, parser::ast::Ability_, typing::ast as T, }; use move_ir_types::location::*; use std::convert::TryInto; //************************************************************************************************** // Functions //************************************************************************************************** pub fn function_body_(context: &mut Context, b_: &mut T::FunctionBody_) { match b_ { T::FunctionBody_::Native => (), T::FunctionBody_::Defined(es) => sequence(context, es), } } pub fn function_signature(context: &mut Context, sig: &mut FunctionSignature) { for (_, st) in &mut sig.parameters { type_(context, st); } type_(context, &mut sig.return_type); } //************************************************************************************************** // Types //************************************************************************************************** fn expected_types(context: &mut Context, ss: &mut [Option<Type>]) { for st_opt in ss.iter_mut().flatten() { type_(context, st_opt); } } fn types(context: &mut Context, ss: &mut Vec<Type>) { for st in ss { type_(context, st); } } pub fn type_(context: &mut Context, ty: &mut Type) { use Type_::*; match &mut ty.value { Anything | UnresolvedError | Param(_) | Unit => (), Ref(_, b) => type_(context, b), Var(tvar) => { let ty_tvar = sp(ty.loc, Var(*tvar)); let replacement = core::unfold_type(&context.subst, ty_tvar); let replacement = match replacement { sp!(_, Var(_)) => panic!("ICE unfold_type_base failed to expand"), sp!(loc, Anything) => { let msg = "Could not infer this type. Try adding an annotation"; context .env .add_diag(diag!(TypeSafety::UninferredType, (ty.loc, msg))); sp(loc, UnresolvedError) } t => t, }; *ty = replacement; type_(context, ty); } Apply(Some(_), sp!(_, TypeName_::Builtin(_)), tys) => types(context, tys), Apply(Some(_), _, _) => panic!("ICE expanding pre expanded type"), Apply(None, _, _) => { let abilities = core::infer_abilities(context, &context.subst, ty.clone()); match &mut ty.value { Apply(abilities_opt, _, tys) => { *abilities_opt = Some(abilities); types(context, tys); } _ => panic!("ICE impossible. 
tapply switched to nontapply"), } } } } //************************************************************************************************** // Expressions //************************************************************************************************** fn sequence(context: &mut Context, seq: &mut T::Sequence) { for item in seq { sequence_item(context, item) } } fn sequence_item(context: &mut Context, item: &mut T::SequenceItem) { use T::SequenceItem_ as S; match &mut item.value { S::Seq(te) => exp(context, te), S::Declare(tbind) => lvalues(context, tbind), S::Bind(tbind, tys, te) => { lvalues(context, tbind); expected_types(context, tys); exp(context, te) } } } pub fn exp(context: &mut Context, e: &mut T::Exp) { use T::UnannotatedExp_ as E; match &e.exp.value { // dont expand the type for return, abort, break, or continue E::Break | E::Continue | E::Return(_) | E::Abort(_) => { let t = e.ty.clone(); match core::unfold_type(&context.subst, t) { sp!(_, Type_::Anything) => (), mut t => { // report errors if there is an uninferred type argument somewhere type_(context, &mut t); } } e.ty = sp(e.ty.loc, Type_::Anything) } // Loop's default type is ()
has_break: false, .. } => { let t = e.ty.clone(); match core::unfold_type(&context.subst, t) { sp!(_, Type_::Anything) => (), mut t => { // report errors if there is an uninferred type argument somewhere type_(context, &mut t); } } e.ty = sp(e.ty.loc, Type_::Anything) } _ => type_(context, &mut e.ty), } match &mut e.exp.value { E::Use(v) => { let from_user = false; let var = *v; let abs = core::infer_abilities(context, &context.subst, e.ty.clone()); e.exp.value = if abs.has_ability_(Ability_::Copy) { E::Copy { from_user, var } } else { E::Move { from_user, var } } } E::Value(sp!(vloc, Value_::InferredNum(v))) => { use BuiltinTypeName_ as BT; let bt = match e.ty.value.builtin_name() { Some(sp!(_, bt)) if bt.is_numeric() => bt, _ => panic!("ICE inferred num failed {:?}", &e.ty.value), }; let v = *v; let u8_max = std::u8::MAX as u128; let u64_max = std::u64::MAX as u128; let u128_max = std::u128::MAX; let max = match bt { BT::U8 => u8_max, BT::U64 => u64_max, BT::U128 => u128_max, _ => unreachable!(), }; let new_exp = if v > max { let msg = format!( "Expected a literal of type '{}', but the value is too large.", bt ); let fix_bt = if v > u64_max { BT::U128 } else { assert!(v > u8_max); BT::U64 }; let fix = format!( "Annotating the literal might help inference: '{value}{type}'", value=v, type=fix_bt, ); context.env.add_diag(diag!( TypeSafety::InvalidNum, (e.exp.loc, "Invalid numerical literal"), (e.ty.loc, msg), (e.exp.loc, fix), )); E::UnresolvedError } else { let value_ = match bt { BT::U8 => Value_::U8(v.try_into().unwrap()), BT::U64 => Value_::U64(v.try_into().unwrap()), BT::U128 => Value_::U128(v), _ => unreachable!(), }; E::Value(sp(*vloc, value_)) }; e.exp.value = new_exp; } E::Spec(_, used_locals) => used_locals.values_mut().for_each(|ty| type_(context, ty)), E::Unit { .. } | E::Value(_) | E::Constant(_, _) | E::Move { .. } | E::Copy { .. } | E::BorrowLocal(_, _) | E::Break | E::Continue | E::UnresolvedError => (), E::ModuleCall(call) => module_call(context, call), E::Builtin(b, args) => { builtin_function(context, b); exp(context, args); } E::Vector(_vec_loc, _n, ty_arg, args) => { type_(context, ty_arg); exp(context, args); } E::IfElse(eb, et, ef) => { exp(context, eb); exp(context, et); exp(context, ef); } E::While(eb, eloop) => { exp(context, eb); exp(context, eloop); } E::Loop { body: eloop, .. 
} => exp(context, eloop), E::Block(seq) => sequence(context, seq), E::Assign(assigns, tys, er) => { lvalues(context, assigns); expected_types(context, tys); exp(context, er); } E::Return(er) | E::Abort(er) | E::Dereference(er) | E::UnaryExp(_, er) | E::Borrow(_, er, _) | E::TempBorrow(_, er) => exp(context, er), E::Mutate(el, er) => { exp(context, el); exp(context, er) } E::BinopExp(el, _, operand_ty, er) => { exp(context, el); exp(context, er); type_(context, operand_ty); } E::Pack(_, _, bs, fields) => { types(context, bs); for (_, _, (_, (bt, fe))) in fields.iter_mut() { type_(context, bt); exp(context, fe) } } E::ExpList(el) => exp_list(context, el), E::Cast(el, rhs_ty) | E::Annotate(el, rhs_ty) => { exp(context, el); type_(context, rhs_ty); } } } fn lvalues(context: &mut Context, binds: &mut T::LValueList) { for b in &mut binds.value { lvalue(context, b) } } fn lvalue(context: &mut Context, b: &mut T::LValue) { use T::LValue_ as L; match &mut b.value { L::Ignore => (), L::Var(_, ty) => { type_(context, ty); } L::BorrowUnpack(_, _, _, bts, fields) | L::Unpack(_, _, bts, fields) => { types(context, bts); for (_, _, (_, (bt, innerb))) in fields.iter_mut() { type_(context, bt); lvalue(context, innerb) } } } } fn module_call(context: &mut Context, call: &mut T::ModuleCall) { types(context, &mut call.type_arguments); exp(context, &mut call.arguments); types(context, &mut call.parameter_types) } fn builtin_function(context: &mut Context, b: &mut T::BuiltinFunction) { use T::BuiltinFunction_ as B; match &mut b.value { B::MoveTo(bt) | B::MoveFrom(bt) | B::BorrowGlobal(_, bt) | B::Exists(bt) | B::Freeze(bt) => { type_(context, bt); } B::Assert(_) => (), } } fn exp_list(context: &mut Context, items: &mut Vec<T::ExpListItem>) { for item in items { exp_list_item(context, item) } } fn exp_list_item(context: &mut Context, item: &mut T::ExpListItem) { use T::ExpListItem as I; match item { I::Single(e, st) => { exp(context, e); type_(context, st); } I::Splat(_, e, ss) => { exp(context, e); types(context, ss); } } }
E::Loop {
derive-debug-function-pointer.rs
/* automatically generated by rust-bindgen */ #![allow( dead_code, non_snake_case, non_camel_case_types, non_upper_case_globals )] #[repr(C)] #[derive(Copy, Clone)] pub struct Nice { pub pointer: Nice_Function, pub large_array: [::std::os::raw::c_int; 34usize], } pub type Nice_Function = ::std::option::Option<unsafe extern "C" fn(data: ::std::os::raw::c_int)>; #[test] fn bindgen_test_layout_Nice() { assert_eq!( ::std::mem::size_of::<Nice>(), 144usize, concat!("Size of: ", stringify!(Nice)) ); assert_eq!( ::std::mem::align_of::<Nice>(), 8usize, concat!("Alignment of ", stringify!(Nice)) ); assert_eq!( unsafe { &(*(::std::ptr::null::<Nice>())).pointer as *const _ as usize }, 0usize, concat!( "Offset of field: ", stringify!(Nice), "::", stringify!(pointer) ) ); assert_eq!( unsafe { &(*(::std::ptr::null::<Nice>())).large_array as *const _ as usize }, 8usize, concat!( "Offset of field: ", stringify!(Nice), "::", stringify!(large_array) ) ); } impl Default for Nice { fn default() -> Self { unsafe { ::std::mem::zeroed() } } } impl ::std::fmt::Debug for Nice { fn
(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result { write!( f, "Nice {{ pointer: {:?}, large_array: [{}] }}", self.pointer, self.large_array .iter() .enumerate() .map(|(i, v)| format!( "{}{:?}", if i > 0 { ", " } else { "" }, v )) .collect::<String>() ) } }
fmt
jquery-2.2.3.min.js
/*! jQuery V2.2.3 | (c) jQuery Foundation | jquery.org/license */ !function(a,b){"object"==typeof module&&"object"==typeof module.exports?module.exports=a.document?b(a,!0):function(a){if(!a.document)throw new Error("jQuery requires a window with a document");return b(a)}:b(a)}("undefined"!=typeof window?window:this,function(a,b){var c=[],d=a.document,e=c.slice,f=c.concat,g=c.push,h=c.indexOf,i={},j=i.toString,k=i.hasOwnProperty,l={},m="2.2.3",n=function(a,b){return new n.fn.init(a,b)},o=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,p=/^-ms-/,q=/-([\da-z])/gi,r=function(a,b){return b.toUpperCase()};n.fn=n.prototype={jquery:m,constructor:n,selector:"",length:0,toArray:function(){return e.call(this)},get:function(a){return null!=a?0>a?this[a+this.length]:this[a]:e.call(this)},pushStack:function(a){var b=n.merge(this.constructor(),a);return b.prevObject=this,b.context=this.context,b},each:function(a){return n.each(this,a)},map:function(a){return this.pushStack(n.map(this,function(b,c){return a.call(b,c,b)}))},slice:function(){return this.pushStack(e.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},eq:function(a){var b=this.length,c=+a+(0>a?b:0);return this.pushStack(c>=0&&b>c?[this[c]]:[])},end:function(){return this.prevObject||this.constructor()},push:g,sort:c.sort,splice:c.splice},n.extend=n.fn.extend=function(){var a,b,c,d,e,f,g=arguments[0]||{},h=1,i=arguments.length,j=!1;for("boolean"==typeof g&&(j=g,g=arguments[h]||{},h++),"object"==typeof g||n.isFunction(g)||(g={}),h===i&&(g=this,h--);i>h;h++)if(null!=(a=arguments[h]))for(b in a)c=g[b],d=a[b],g!==d&&(j&&d&&(n.isPlainObject(d)||(e=n.isArray(d)))?(e?(e=!1,f=c&&n.isArray(c)?c:[]):f=c&&n.isPlainObject(c)?c:{},g[b]=n.extend(j,f,d)):void 0!==d&&(g[b]=d));return g},n.extend({expando:"jQuery"+(m+Math.random()).replace(/\D/g,""),isReady:!0,error:function(a){throw new Error(a)},noop:function(){},isFunction:function(a){return"function"===n.type(a)},isArray:Array.isArray,isWindow:function(a){return null!=a&&a===a.window},isNumeric:function(a){var b=a&&a.toString();return!n.isArray(a)&&b-parseFloat(b)+1>=0},isPlainObject:function(a){var b;if("object"!==n.type(a)||a.nodeType||n.isWindow(a))return!1;if(a.constructor&&!k.call(a,"constructor")&&!k.call(a.constructor.prototype||{},"isPrototypeOf"))return!1;for(b in a);return void 0===b||k.call(a,b)},isEmptyObject:function(a){var b;for(b in a)return!1;return!0},type:function(a){return null==a?a+"":"object"==typeof a||"function"==typeof a?i[j.call(a)]||"object":typeof a},globalEval:function(a){var b,c=eval;a=n.trim(a),a&&(1===a.indexOf("use strict")?(b=d.createElement("script"),b.text=a,d.head.appendChild(b).parentNode.removeChild(b)):c(a))},camelCase:function(a){return a.replace(p,"ms-").replace(q,r)},nodeName:function(a,b){return a.nodeName&&a.nodeName.toLowerCase()===b.toLowerCase()},each:function(a,b){var c,d=0;if(s(a)){for(c=a.length;c>d;d++)if(b.call(a[d],d,a[d])===!1)break}else for(d in a)if(b.call(a[d],d,a[d])===!1)break;return a},trim:function(a){return null==a?"":(a+"").replace(o,"")},makeArray:function(a,b){var c=b||[];return null!=a&&(s(Object(a))?n.merge(c,"string"==typeof a?[a]:a):g.call(c,a)),c},inArray:function(a,b,c){return null==b?-1:h.call(b,a,c)},merge:function(a,b){for(var c=+b.length,d=0,e=a.length;c>d;d++)a[e++]=b[d];return a.length=e,a},grep:function(a,b,c){for(var d,e=[],f=0,g=a.length,h=!c;g>f;f++)d=!b(a[f],f),d!==h&&e.push(a[f]);return e},map:function(a,b,c){var 
d,e,g=0,h=[];if(s(a))for(d=a.length;d>g;g++)e=b(a[g],g,c),null!=e&&h.push(e);else for(g in a)e=b(a[g],g,c),null!=e&&h.push(e);return f.apply([],h)},guid:1,proxy:function(a,b){var c,d,f;return"string"==typeof b&&(c=a[b],b=a,a=c),n.isFunction(a)?(d=e.call(arguments,2),f=function(){return a.apply(b||this,d.concat(e.call(arguments)))},f.guid=a.guid=a.guid||n.guid++,f):void 0},now:Date.now,support:l}),"function"==typeof Symbol&&(n.fn[Symbol.iterator]=c[Symbol.iterator]),n.each("Boolean Number String Function Array Date RegExp Object Error Symbol".split(" "),function(a,b){i["[object "+b+"]"]=b.toLowerCase()});function s(a){var b=!!a&&"length"in a&&a.length,c=n.type(a);return"function"===c||n.isWindow(a)?!1:"array"===c||0===b||"number"==typeof b&&b>0&&b-1 in a}var t=function(a){var b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u="sizzle"+1*new Date,v=a.document,w=0,x=0,y=ga(),z=ga(),A=ga(),B=function(a,b){return a===b&&(l=!0),0},C=1<<31,D={}.hasOwnProperty,E=[],F=E.pop,G=E.push,H=E.push,I=E.slice,J=function(a,b){for(var c=0,d=a.length;d>c;c++)if(a[c]===b)return c;return-1},K="checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",L="[\\x20\\t\\r\\n\\f]",M="(?:\\\\.|[\\w-]|[^\\x00-\\xa0])+",N="\\["+L+"*("+M+")(?:"+L+"*([*^$|!~]?=)"+L+"*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|("+M+"))|)"+L+"*\\]",O=":("+M+")(?:\\((('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|((?:\\\\.|[^\\\\()[\\]]|"+N+")*)|.*)\\)|)",P=new RegExp(L+"+","g"),Q=new RegExp("^"+L+"+|((?:^|[^\\\\])(?:\\\\.)*)"+L+"+$","g"),R=new RegExp("^"+L+"*,"+L+"*"),S=new RegExp("^"+L+"*([>+~]|"+L+")"+L+"*"),T=new RegExp("="+L+"*([^\\]'\"]*?)"+L+"*\\]","g"),U=new RegExp(O),V=new RegExp("^"+M+"$"),W={ID:new RegExp("^#("+M+")"),CLASS:new RegExp("^\\.("+M+")"),TAG:new RegExp("^("+M+"|[*])"),ATTR:new RegExp("^"+N),PSEUDO:new RegExp("^"+O),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+L+"*(even|odd|(([+-]|)(\\d*)n|)"+L+"*(?:([+-]|)"+L+"*(\\d+)|))"+L+"*\\)|)","i"),bool:new RegExp("^(?:"+K+")$","i"),needsContext:new RegExp("^"+L+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+L+"*((?:-\\d)?\\d*)"+L+"*\\)|)(?=[^-]|$)","i")},X=/^(?:input|select|textarea|button)$/i,Y=/^h\d$/i,Z=/^[^{]+\{\s*\[native \w/,$=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,_=/[+~]/,aa=/'|\\/g,ba=new RegExp("\\\\([\\da-f]{1,6}"+L+"?|("+L+")|.)","ig"),ca=function(a,b,c){var d="0x"+b-65536;return d!==d||c?b:0>d?String.fromCharCode(d+65536):String.fromCharCode(d>>10|55296,1023&d|56320)},da=function(){m()};try{H.apply(E=I.call(v.childNodes),v.childNodes),E[v.childNodes.length].nodeType}catch(ea){H={apply:E.length?function(a,b){G.apply(a,I.call(b))}:function(a,b){var c=a.length,d=0;while(a[c++]=b[d++]);a.length=c-1}}}function fa(a,b,d,e){var f,h,j,k,l,o,r,s,w=b&&b.ownerDocument,x=b?b.nodeType:9;if(d=d||[],"string"!=typeof a||!a||1!==x&&9!==x&&11!==x)return d;if(!e&&((b?b.ownerDocument||b:v)!==n&&m(b),b=b||n,p)){if(11!==x&&(o=$.exec(a)))if(f=o[1]){if(9===x){if(!(j=b.getElementById(f)))return d;if(j.id===f)return d.push(j),d}else if(w&&(j=w.getElementById(f))&&t(b,j)&&j.id===f)return d.push(j),d}else{if(o[2])return H.apply(d,b.getElementsByTagName(a)),d;if((f=o[3])&&c.getElementsByClassName&&b.getElementsByClassName)return H.apply(d,b.getElementsByClassName(f)),d}if(c.qsa&&!A[a+" "]&&(!q||!q.test(a))){if(1!==x)w=b,s=a;else 
if("object"!==b.nodeName.toLowerCase()){(k=b.getAttribute("id"))?k=k.replace(aa,"\\$&"):b.setAttribute("id",k=u),r=g(a),h=r.length,l=V.test(k)?"#"+k:"[id='"+k+"']";while(h--)r[h]=l+" "+qa(r[h]);s=r.join(","),w=_.test(a)&&oa(b.parentNode)||b}if(s)try{return H.apply(d,w.querySelectorAll(s)),d}catch(y){}finally{k===u&&b.removeAttribute("id")}}}return i(a.replace(Q,"$1"),b,d,e)}function ga(){var a=[];function b(c,e){return a.push(c+" ")>d.cacheLength&&delete b[a.shift()],b[c+" "]=e}return b}function ha(a){return a[u]=!0,a}function ia(a){var b=n.createElement("div");try{return!!a(b)}catch(c){return!1}finally{b.parentNode&&b.parentNode.removeChild(b),b=null}}function ja(a,b){var c=a.split("|"),e=c.length;while(e--)d.attrHandle[c[e]]=b}function ka(a,b){var c=b&&a,d=c&&1===a.nodeType&&1===b.nodeType&&(~b.sourceIndex||C)-(~a.sourceIndex||C);if(d)return d;if(c)while(c=c.nextSibling)if(c===b)return-1;return a?1:-1}function la(a){return function(b){var c=b.nodeName.toLowerCase();return"input"===c&&b.type===a}}function ma(a){return function(b){var c=b.nodeName.toLowerCase();return("input"===c||"button"===c)&&b.type===a}}function na(a){return ha(function(b){return b=+b,ha(function(c,d){var e,f=a([],c.length,b),g=f.length;while(g--)c[e=f[g]]&&(c[e]=!(d[e]=c[e]))})})}function oa(a){return a&&"undefined"!=typeof a.getElementsByTagName&&a}c=fa.support={},f=fa.isXML=function(a){var b=a&&(a.ownerDocument||a).documentElement;return b?"HTML"!==b.nodeName:!1},m=fa.setDocument=function(a){var b,e,g=a?a.ownerDocument||a:v;return g!==n&&9===g.nodeType&&g.documentElement?(n=g,o=n.documentElement,p=!f(n),(e=n.defaultView)&&e.top!==e&&(e.addEventListener?e.addEventListener("unload",da,!1):e.attachEvent&&e.attachEvent("onunload",da)),c.attributes=ia(function(a){return a.className="i",!a.getAttribute("className")}),c.getElementsByTagName=ia(function(a){return a.appendChild(n.createComment("")),!a.getElementsByTagName("*").length}),c.getElementsByClassName=Z.test(n.getElementsByClassName),c.getById=ia(function(a){return o.appendChild(a).id=u,!n.getElementsByName||!n.getElementsByName(u).length}),c.getById?(d.find.ID=function(a,b){if("undefined"!=typeof b.getElementById&&p){var c=b.getElementById(a);return c?[c]:[]}},d.filter.ID=function(a){var b=a.replace(ba,ca);return function(a){return a.getAttribute("id")===b}}):(delete d.find.ID,d.filter.ID=function(a){var b=a.replace(ba,ca);return function(a){var c="undefined"!=typeof a.getAttributeNode&&a.getAttributeNode("id");return c&&c.value===b}}),d.find.TAG=c.getElementsByTagName?function(a,b){return"undefined"!=typeof b.getElementsByTagName?b.getElementsByTagName(a):c.qsa?b.querySelectorAll(a):void 0}:function(a,b){var c,d=[],e=0,f=b.getElementsByTagName(a);if("*"===a){while(c=f[e++])1===c.nodeType&&d.push(c);return d}return f},d.find.CLASS=c.getElementsByClassName&&function(a,b){return"undefined"!=typeof b.getElementsByClassName&&p?b.getElementsByClassName(a):void 0},r=[],q=[],(c.qsa=Z.test(n.querySelectorAll))&&(ia(function(a){o.appendChild(a).innerHTML="<a id='"+u+"'></a><select id='"+u+"-\r\\' msallowcapture=''><option selected=''></option></select>",a.querySelectorAll("[msallowcapture^='']").length&&q.push("[*^$]="+L+"*(?:''|\"\")"),a.querySelectorAll("[selected]").length||q.push("\\["+L+"*(?:value|"+K+")"),a.querySelectorAll("[id~="+u+"-]").length||q.push("~="),a.querySelectorAll(":checked").length||q.push(":checked"),a.querySelectorAll("a#"+u+"+*").length||q.push(".#.+[+~]")}),ia(function(a){var 
b=n.createElement("input");b.setAttribute("type","hidden"),a.appendChild(b).setAttribute("name","D"),a.querySelectorAll("[name=d]").length&&q.push("name"+L+"*[*^$|!~]?="),a.querySelectorAll(":enabled").length||q.push(":enabled",":disabled"),a.querySelectorAll("*,:x"),q.push(",.*:")})),(c.matchesSelector=Z.test(s=o.matches||o.webkitMatchesSelector||o.mozMatchesSelector||o.oMatchesSelector||o.msMatchesSelector))&&ia(function(a){c.disconnectedMatch=s.call(a,"div"),s.call(a,"[s!='']:x"),r.push("!=",O)}),q=q.length&&new RegExp(q.join("|")),r=r.length&&new RegExp(r.join("|")),b=Z.test(o.compareDocumentPosition),t=b||Z.test(o.contains)?function(a,b){var c=9===a.nodeType?a.documentElement:a,d=b&&b.parentNode;return a===d||!(!d||1!==d.nodeType||!(c.contains?c.contains(d):a.compareDocumentPosition&&16&a.compareDocumentPosition(d)))}:function(a,b){if(b)while(b=b.parentNode)if(b===a)return!0;return!1},B=b?function(a,b){if(a===b)return l=!0,0;var d=!a.compareDocumentPosition-!b.compareDocumentPosition;return d?d:(d=(a.ownerDocument||a)===(b.ownerDocument||b)?a.compareDocumentPosition(b):1,1&d||!c.sortDetached&&b.compareDocumentPosition(a)===d?a===n||a.ownerDocument===v&&t(v,a)?-1:b===n||b.ownerDocument===v&&t(v,b)?1:k?J(k,a)-J(k,b):0:4&d?-1:1)}:function(a,b){if(a===b)return l=!0,0;var c,d=0,e=a.parentNode,f=b.parentNode,g=[a],h=[b];if(!e||!f)return a===n?-1:b===n?1:e?-1:f?1:k?J(k,a)-J(k,b):0;if(e===f)return ka(a,b);c=a;while(c=c.parentNode)g.unshift(c);c=b;while(c=c.parentNode)h.unshift(c);while(g[d]===h[d])d++;return d?ka(g[d],h[d]):g[d]===v?-1:h[d]===v?1:0},n):n},fa.matches=function(a,b){return fa(a,null,null,b)},fa.matchesSelector=function(a,b){if((a.ownerDocument||a)!==n&&m(a),b=b.replace(T,"='$1']"),c.matchesSelector&&p&&!A[b+" "]&&(!r||!r.test(b))&&(!q||!q.test(b)))try{var d=s.call(a,b);if(d||c.disconnectedMatch||a.document&&11!==a.document.nodeType)return d}catch(e){}return fa(b,n,null,[a]).length>0},fa.contains=function(a,b){return(a.ownerDocument||a)!==n&&m(a),t(a,b)},fa.attr=function(a,b){(a.ownerDocument||a)!==n&&m(a);var e=d.attrHandle[b.toLowerCase()],f=e&&D.call(d.attrHandle,b.toLowerCase())?e(a,b,!p):void 0;return void 0!==f?f:c.attributes||!p?a.getAttribute(b):(f=a.getAttributeNode(b))&&f.specified?f.value:null},fa.error=function(a){throw new Error("Syntax error, unrecognized expression: "+a)},fa.uniqueSort=function(a){var b,d=[],e=0,f=0;if(l=!c.detectDuplicates,k=!c.sortStable&&a.slice(0),a.sort(B),l){while(b=a[f++])b===a[f]&&(e=d.push(f));while(e--)a.splice(d[e],1)}return k=null,a},e=fa.getText=function(a){var b,c="",d=0,f=a.nodeType;if(f){if(1===f||9===f||11===f){if("string"==typeof a.textContent)return a.textContent;for(a=a.firstChild;a;a=a.nextSibling)c+=e(a)}else if(3===f||4===f)return a.nodeValue}else while(b=a[d++])c+=e(b);return c},d=fa.selectors={cacheLength:50,createPseudo:ha,match:W,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(a){return a[1]=a[1].replace(ba,ca),a[3]=(a[3]||a[4]||a[5]||"").replace(ba,ca),"~="===a[2]&&(a[3]=" "+a[3]+" "),a.slice(0,4)},CHILD:function(a){return a[1]=a[1].toLowerCase(),"nth"===a[1].slice(0,3)?(a[3]||fa.error(a[0]),a[4]=+(a[4]?a[5]+(a[6]||1):2*("even"===a[3]||"odd"===a[3])),a[5]=+(a[7]+a[8]||"odd"===a[3])):a[3]&&fa.error(a[0]),a},PSEUDO:function(a){var b,c=!a[6]&&a[2];return 
W.CHILD.test(a[0])?null:(a[3]?a[2]=a[4]||a[5]||"":c&&U.test(c)&&(b=g(c,!0))&&(b=c.indexOf(")",c.length-b)-c.length)&&(a[0]=a[0].slice(0,b),a[2]=c.slice(0,b)),a.slice(0,3))}},filter:{TAG:function(a){var b=a.replace(ba,ca).toLowerCase();return"*"===a?function(){return!0}:function(a){return a.nodeName&&a.nodeName.toLowerCase()===b}},CLASS:function(a){var b=y[a+" "];return b||(b=new RegExp("(^|"+L+")"+a+"("+L+"|$)"))&&y(a,function(a){return b.test("string"==typeof a.className&&a.className||"undefined"!=typeof a.getAttribute&&a.getAttribute("class")||"")})},ATTR:function(a,b,c){return function(d){var e=fa.attr(d,a);return null==e?"!="===b:b?(e+="","="===b?e===c:"!="===b?e!==c:"^="===b?c&&0===e.indexOf(c):"*="===b?c&&e.indexOf(c)>-1:"$="===b?c&&e.slice(-c.length)===c:"~="===b?(" "+e.replace(P," ")+" ").indexOf(c)>-1:"|="===b?e===c||e.slice(0,c.length+1)===c+"-":!1):!0}},CHILD:function(a,b,c,d,e){var f="nth"!==a.slice(0,3),g="last"!==a.slice(-4),h="of-type"===b;return 1===d&&0===e?function(a){return!!a.parentNode}:function(b,c,i){var j,k,l,m,n,o,p=f!==g?"nextSibling":"previousSibling",q=b.parentNode,r=h&&b.nodeName.toLowerCase(),s=!i&&!h,t=!1;if(q){if(f){while(p){m=b;while(m=m[p])if(h?m.nodeName.toLowerCase()===r:1===m.nodeType)return!1;o=p="only"===a&&!o&&"nextSibling"}return!0}if(o=[g?q.firstChild:q.lastChild],g&&s){m=q,l=m[u]||(m[u]={}),k=l[m.uniqueID]||(l[m.uniqueID]={}),j=k[a]||[],n=j[0]===w&&j[1],t=n&&j[2],m=n&&q.childNodes[n];while(m=++n&&m&&m[p]||(t=n=0)||o.pop())if(1===m.nodeType&&++t&&m===b){k[a]=[w,n,t];break}}else if(s&&(m=b,l=m[u]||(m[u]={}),k=l[m.uniqueID]||(l[m.uniqueID]={}),j=k[a]||[],n=j[0]===w&&j[1],t=n),t===!1)while(m=++n&&m&&m[p]||(t=n=0)||o.pop())if((h?m.nodeName.toLowerCase()===r:1===m.nodeType)&&++t&&(s&&(l=m[u]||(m[u]={}),k=l[m.uniqueID]||(l[m.uniqueID]={}),k[a]=[w,t]),m===b))break;return t-=e,t===d||t%d===0&&t/d>=0}}},PSEUDO:function(a,b){var c,e=d.pseudos[a]||d.setFilters[a.toLowerCase()]||fa.error("unsupported pseudo: "+a);return e[u]?e(b):e.length>1?(c=[a,a,"",b],d.setFilters.hasOwnProperty(a.toLowerCase())?ha(function(a,c){var d,f=e(a,b),g=f.length;while(g--)d=J(a,f[g]),a[d]=!(c[d]=f[g])}):function(a){return e(a,0,c)}):e}},pseudos:{not:ha(function(a){var b=[],c=[],d=h(a.replace(Q,"$1"));return d[u]?ha(function(a,b,c,e){var f,g=d(a,null,e,[]),h=a.length;while(h--)(f=g[h])&&(a[h]=!(b[h]=f))}):function(a,e,f){return b[0]=a,d(b,null,f,c),b[0]=null,!c.pop()}}),has:ha(function(a){return function(b){return fa(a,b).length>0}}),contains:ha(function(a){return a=a.replace(ba,ca),function(b){return(b.textContent||b.innerText||e(b)).indexOf(a)>-1}}),lang:ha(function(a){return V.test(a||"")||fa.error("unsupported lang: "+a),a=a.replace(ba,ca).toLowerCase(),function(b){var c;do if(c=p?b.lang:b.getAttribute("xml:lang")||b.getAttribute("lang"))return c=c.toLowerCase(),c===a||0===c.indexOf(a+"-");while((b=b.parentNode)&&1===b.nodeType);return!1}}),target:function(b){var c=a.location&&a.location.hash;return c&&c.slice(1)===b.id},root:function(a){return a===o},focus:function(a){return a===n.activeElement&&(!n.hasFocus||n.hasFocus())&&!!(a.type||a.href||~a.tabIndex)},enabled:function(a){return a.disabled===!1},disabled:function(a){return a.disabled===!0},checked:function(a){var b=a.nodeName.toLowerCase();return"input"===b&&!!a.checked||"option"===b&&!!a.selected},selected:function(a){return 
a.parentNode&&a.parentNode.selectedIndex,a.selected===!0},empty:function(a){for(a=a.firstChild;a;a=a.nextSibling)if(a.nodeType<6)return!1;return!0},parent:function(a){return!d.pseudos.empty(a)},header:function(a){return Y.test(a.nodeName)},input:function(a){return X.test(a.nodeName)},button:function(a){var b=a.nodeName.toLowerCase();return"input"===b&&"button"===a.type||"button"===b},text:function(a){var b;return"input"===a.nodeName.toLowerCase()&&"text"===a.type&&(null==(b=a.getAttribute("type"))||"text"===b.toLowerCase())},first:na(function(){return[0]}),last:na(function(a,b){return[b-1]}),eq:na(function(a,b,c){return[0>c?c+b:c]}),even:na(function(a,b){for(var c=0;b>c;c+=2)a.push(c);return a}),odd:na(function(a,b){for(var c=1;b>c;c+=2)a.push(c);return a}),lt:na(function(a,b,c){for(var d=0>c?c+b:c;--d>=0;)a.push(d);return a}),gt:na(function(a,b,c){for(var d=0>c?c+b:c;++d<b;)a.push(d);return a})}},d.pseudos.nth=d.pseudos.eq;for(b in{radio:!0,checkbox:!0,file:!0,password:!0,image:!0})d.pseudos[b]=la(b);for(b in{submit:!0,reset:!0})d.pseudos[b]=ma(b);function pa(){}pa.prototype=d.filters=d.pseudos,d.setFilters=new pa,g=fa.tokenize=function(a,b){var c,e,f,g,h,i,j,k=z[a+" "];if(k)return b?0:k.slice(0);h=a,i=[],j=d.preFilter;while(h){c&&!(e=R.exec(h))||(e&&(h=h.slice(e[0].length)||h),i.push(f=[])),c=!1,(e=S.exec(h))&&(c=e.shift(),f.push({value:c,type:e[0].replace(Q," ")}),h=h.slice(c.length));for(g in d.filter)!(e=W[g].exec(h))||j[g]&&!(e=j[g](e))||(c=e.shift(),f.push({value:c,type:g,matches:e}),h=h.slice(c.length));if(!c)break}return b?h.length:h?fa.error(a):z(a,i).slice(0)};function qa(a){for(var b=0,c=a.length,d="";c>b;b++)d+=a[b].value;return d}function ra(a,b,c){var d=b.dir,e=c&&"parentNode"===d,f=x++;return b.first?function(b,c,f){while(b=b[d])if(1===b.nodeType||e)return a(b,c,f)}:function(b,c,g){var h,i,j,k=[w,f];if(g){while(b=b[d])if((1===b.nodeType||e)&&a(b,c,g))return!0}else while(b=b[d])if(1===b.nodeType||e){if(j=b[u]||(b[u]={}),i=j[b.uniqueID]||(j[b.uniqueID]={}),(h=i[d])&&h[0]===w&&h[1]===f)return k[2]=h[2];if(i[d]=k,k[2]=a(b,c,g))return!0}}}function sa(a){return a.length>1?function(b,c,d){var e=a.length;while(e--)if(!a[e](b,c,d))return!1;return!0}:a[0]}function ta(a,b,c){for(var d=0,e=b.length;e>d;d++)fa(a,b[d],c);return c}function ua(a,b,c,d,e){for(var f,g=[],h=0,i=a.length,j=null!=b;i>h;h++)(f=a[h])&&(c&&!c(f,d,e)||(g.push(f),j&&b.push(h)));return g}function va(a,b,c,d,e,f){return d&&!d[u]&&(d=va(d)),e&&!e[u]&&(e=va(e,f)),ha(function(f,g,h,i){var j,k,l,m=[],n=[],o=g.length,p=f||ta(b||"*",h.nodeType?[h]:h,[]),q=!a||!f&&b?p:ua(p,m,a,h,i),r=c?e||(f?a:o||d)?[]:g:q;if(c&&c(q,r,h,i),d){j=ua(r,n),d(j,[],h,i),k=j.length;while(k--)(l=j[k])&&(r[n[k]]=!(q[n[k]]=l))}if(f){if(e||a){if(e){j=[],k=r.length;while(k--)(l=r[k])&&j.push(q[k]=l);e(null,r=[],j,i)}k=r.length;while(k--)(l=r[k])&&(j=e?J(f,l):m[k])>-1&&(f[j]=!(g[j]=l))}}else r=ua(r===g?r.splice(o,r.length):r),e?e(null,g,r,i):H.apply(g,r)})}function wa(a){for(var b,c,e,f=a.length,g=d.relative[a[0].type],h=g||d.relative[" "],i=g?1:0,k=ra(function(a){return a===b},h,!0),l=ra(function(a){return J(b,a)>-1},h,!0),m=[function(a,c,d){var e=!g&&(d||c!==j)||((b=c).nodeType?k(a,c,d):l(a,c,d));return b=null,e}];f>i;i++)if(c=d.relative[a[i].type])m=[ra(sa(m),c)];else{if(c=d.filter[a[i].type].apply(null,a[i].matches),c[u]){for(e=++i;f>e;e++)if(d.relative[a[e].type])break;return va(i>1&&sa(m),i>1&&qa(a.slice(0,i-1).concat({value:" "===a[i-2].type?"*":""})).replace(Q,"$1"),c,e>i&&wa(a.slice(i,e)),f>e&&wa(a=a.slice(e)),f>e&&qa(a))}m.push(c)}return 
sa(m)}function xa(a,b){var c=b.length>0,e=a.length>0,f=function(f,g,h,i,k){var l,o,q,r=0,s="0",t=f&&[],u=[],v=j,x=f||e&&d.find.TAG("*",k),y=w+=null==v?1:Math.random()||.1,z=x.length;for(k&&(j=g===n||g||k);s!==z&&null!=(l=x[s]);s++){if(e&&l){o=0,g||l.ownerDocument===n||(m(l),h=!p);while(q=a[o++])if(q(l,g||n,h)){i.push(l);break}k&&(w=y)}c&&((l=!q&&l)&&r--,f&&t.push(l))}if(r+=s,c&&s!==r){o=0;while(q=b[o++])q(t,u,g,h);if(f){if(r>0)while(s--)t[s]||u[s]||(u[s]=F.call(i));u=ua(u)}H.apply(i,u),k&&!f&&u.length>0&&r+b.length>1&&fa.uniqueSort(i)}return k&&(w=y,j=v),t};return c?ha(f):f}return h=fa.compile=function(a,b){var c,d=[],e=[],f=A[a+" "];if(!f){b||(b=g(a)),c=b.length;while(c--)f=wa(b[c]),f[u]?d.push(f):e.push(f);f=A(a,xa(e,d)),f.selector=a}return f},i=fa.select=function(a,b,e,f){var i,j,k,l,m,n="function"==typeof a&&a,o=!f&&g(a=n.selector||a);if(e=e||[],1===o.length){if(j=o[0]=o[0].slice(0),j.length>2&&"ID"===(k=j[0]).type&&c.getById&&9===b.nodeType&&p&&d.relative[j[1].type]){if(b=(d.find.ID(k.matches[0].replace(ba,ca),b)||[])[0],!b)return e;n&&(b=b.parentNode),a=a.slice(j.shift().value.length)}i=W.needsContext.test(a)?0:j.length;while(i--){if(k=j[i],d.relative[l=k.type])break;if((m=d.find[l])&&(f=m(k.matches[0].replace(ba,ca),_.test(j[0].type)&&oa(b.parentNode)||b))){if(j.splice(i,1),a=f.length&&qa(j),!a)return H.apply(e,f),e;break}}}return(n||h(a,o))(f,b,!p,e,!b||_.test(a)&&oa(b.parentNode)||b),e},c.sortStable=u.split("").sort(B).join("")===u,c.detectDuplicates=!!l,m(),c.sortDetached=ia(function(a){return 1&a.compareDocumentPosition(n.createElement("div"))}),ia(function(a){return a.innerHTML="<a href='#'></a>","#"===a.firstChild.getAttribute("href")})||ja("type|href|height|width",function(a,b,c){return c?void 0:a.getAttribute(b,"type"===b.toLowerCase()?1:2)}),c.attributes&&ia(function(a){return a.innerHTML="<input/>",a.firstChild.setAttribute("value",""),""===a.firstChild.getAttribute("value")})||ja("value",function(a,b,c){return c||"input"!==a.nodeName.toLowerCase()?void 0:a.defaultValue}),ia(function(a){return null==a.getAttribute("disabled")})||ja(K,function(a,b,c){var d;return c?void 0:a[b]===!0?b.toLowerCase():(d=a.getAttributeNode(b))&&d.specified?d.value:null}),fa}(a);n.find=t,n.expr=t.selectors,n.expr[":"]=n.expr.pseudos,n.uniqueSort=n.unique=t.uniqueSort,n.text=t.getText,n.isXMLDoc=t.isXML,n.contains=t.contains;var u=function(a,b,c){var d=[],e=void 0!==c;while((a=a[b])&&9!==a.nodeType)if(1===a.nodeType){if(e&&n(a).is(c))break;d.push(a)}return d},v=function(a,b){for(var c=[];a;a=a.nextSibling)1===a.nodeType&&a!==b&&c.push(a);return c},w=n.expr.match.needsContext,x=/^<([\w-]+)\s*\/?>(?:<\/\1>|)$/,y=/^.[^:#\[\.,]*$/;function z(a,b,c){if(n.isFunction(b))return n.grep(a,function(a,d){return!!b.call(a,d,a)!==c});if(b.nodeType)return n.grep(a,function(a){return a===b!==c});if("string"==typeof b){if(y.test(b))return n.filter(b,a,c);b=n.filter(b,a)}return n.grep(a,function(a){return h.call(b,a)>-1!==c})}n.filter=function(a,b,c){var d=b[0];return c&&(a=":not("+a+")"),1===b.length&&1===d.nodeType?n.find.matchesSelector(d,a)?[d]:[]:n.find.matches(a,n.grep(b,function(a){return 1===a.nodeType}))},n.fn.extend({find:function(a){var b,c=this.length,d=[],e=this;if("string"!=typeof a)return this.pushStack(n(a).filter(function(){for(b=0;c>b;b++)if(n.contains(e[b],this))return!0}));for(b=0;c>b;b++)n.find(a,e[b],d);return d=this.pushStack(c>1?n.unique(d):d),d.selector=this.selector?this.selector+" "+a:a,d},filter:function(a){return this.pushStack(z(this,a||[],!1))},not:function(a){return 
this.pushStack(z(this,a||[],!0))},is:function(a){return!!z(this,"string"==typeof a&&w.test(a)?n(a):a||[],!1).length}});var A,B=/^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]*))$/,C=n.fn.init=function(a,b,c){var e,f;if(!a)return this;if(c=c||A,"string"==typeof a){if(e="<"===a[0]&&">"===a[a.length-1]&&a.length>=3?[null,a,null]:B.exec(a),!e||!e[1]&&b)return!b||b.jquery?(b||c).find(a):this.constructor(b).find(a);if(e[1]){if(b=b instanceof n?b[0]:b,n.merge(this,n.parseHTML(e[1],b&&b.nodeType?b.ownerDocument||b:d,!0)),x.test(e[1])&&n.isPlainObject(b))for(e in b)n.isFunction(this[e])?this[e](b[e]):this.attr(e,b[e]);return this}return f=d.getElementById(e[2]),f&&f.parentNode&&(this.length=1,this[0]=f),this.context=d,this.selector=a,this}return a.nodeType?(this.context=this[0]=a,this.length=1,this):n.isFunction(a)?void 0!==c.ready?c.ready(a):a(n):(void 0!==a.selector&&(this.selector=a.selector,this.context=a.context),n.makeArray(a,this))};C.prototype=n.fn,A=n(d);var D=/^(?:parents|prev(?:Until|All))/,E={children:!0,contents:!0,next:!0,prev:!0};n.fn.extend({has:function(a){var b=n(a,this),c=b.length;return this.filter(function(){for(var a=0;c>a;a++)if(n.contains(this,b[a]))return!0})},closest:function(a,b){for(var c,d=0,e=this.length,f=[],g=w.test(a)||"string"!=typeof a?n(a,b||this.context):0;e>d;d++)for(c=this[d];c&&c!==b;c=c.parentNode)if(c.nodeType<11&&(g?g.index(c)>-1:1===c.nodeType&&n.find.matchesSelector(c,a))){f.push(c);break}return this.pushStack(f.length>1?n.uniqueSort(f):f)},index:function(a){return a?"string"==typeof a?h.call(n(a),this[0]):h.call(this,a.jquery?a[0]:a):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(a,b){return this.pushStack(n.uniqueSort(n.merge(this.get(),n(a,b))))},addBack:function(a){return this.add(null==a?this.prevObject:this.prevObject.filter(a))}});function F(a,b){while((a=a[b])&&1!==a.nodeType);return a}n.each({parent:function(a){var b=a.parentNode;return b&&11!==b.nodeType?b:null},parents:function(a){return u(a,"parentNode")},parentsUntil:function(a,b,c){return u(a,"parentNode",c)},next:function(a){return F(a,"nextSibling")},prev:function(a){return F(a,"previousSibling")},nextAll:function(a){return u(a,"nextSibling")},prevAll:function(a){return u(a,"previousSibling")},nextUntil:function(a,b,c){return u(a,"nextSibling",c)},prevUntil:function(a,b,c){return u(a,"previousSibling",c)},siblings:function(a){return v((a.parentNode||{}).firstChild,a)},children:function(a){return v(a.firstChild)},contents:function(a){return a.contentDocument||n.merge([],a.childNodes)}},function(a,b){n.fn[a]=function(c,d){var e=n.map(this,b,c);return"Until"!==a.slice(-5)&&(d=c),d&&"string"==typeof d&&(e=n.filter(d,e)),this.length>1&&(E[a]||n.uniqueSort(e),D.test(a)&&e.reverse()),this.pushStack(e)}});var G=/\S+/g;function H(a){var b={};return n.each(a.match(G)||[],function(a,c){b[c]=!0}),b}n.Callbacks=function(a){a="string"==typeof a?H(a):n.extend({},a);var b,c,d,e,f=[],g=[],h=-1,i=function(){for(e=a.once,d=b=!0;g.length;h=-1){c=g.shift();while(++h<f.length)f[h].apply(c[0],c[1])===!1&&a.stopOnFalse&&(h=f.length,c=!1)}a.memory||(c=!1),b=!1,e&&(f=c?[]:"")},j={add:function(){return f&&(c&&!b&&(h=f.length-1,g.push(c)),function d(b){n.each(b,function(b,c){n.isFunction(c)?a.unique&&j.has(c)||f.push(c):c&&c.length&&"string"!==n.type(c)&&d(c)})}(arguments),c&&!b&&i()),this},remove:function(){return n.each(arguments,function(a,b){var c;while((c=n.inArray(b,f,c))>-1)f.splice(c,1),h>=c&&h--}),this},has:function(a){return a?n.inArray(a,f)>-1:f.length>0},empty:function(){return 
f&&(f=[]),this},disable:function(){return e=g=[],f=c="",this},disabled:function(){return!f},lock:function(){return e=g=[],c||(f=c=""),this},locked:function(){return!!e},fireWith:function(a,c){return e||(c=c||[],c=[a,c.slice?c.slice():c],g.push(c),b||i()),this},fire:function(){return j.fireWith(this,arguments),this},fired:function(){return!!d}};return j},n.extend({Deferred:function(a){var b=[["resolve","done",n.Callbacks("once memory"),"resolved"],["reject","fail",n.Callbacks("once memory"),"rejected"],["notify","progress",n.Callbacks("memory")]],c="pending",d={state:function(){return c},always:function(){return e.done(arguments).fail(arguments),this},then:function(){var a=arguments;return n.Deferred(function(c){n.each(b,function(b,f){var g=n.isFunction(a[b])&&a[b];e[f[1]](function(){var a=g&&g.apply(this,arguments);a&&n.isFunction(a.promise)?a.promise().progress(c.notify).done(c.resolve).fail(c.reject):c[f[0]+"With"](this===d?c.promise():this,g?[a]:arguments)})}),a=null}).promise()},promise:function(a){return null!=a?n.extend(a,d):d}},e={};return d.pipe=d.then,n.each(b,function(a,f){var g=f[2],h=f[3];d[f[1]]=g.add,h&&g.add(function(){c=h},b[1^a][2].disable,b[2][2].lock),e[f[0]]=function(){return e[f[0]+"With"](this===e?d:this,arguments),this},e[f[0]+"With"]=g.fireWith}),d.promise(e),a&&a.call(e,e),e},when:function(a){var b=0,c=e.call(arguments),d=c.length,f=1!==d||a&&n.isFunction(a.promise)?d:0,g=1===f?a:n.Deferred(),h=function(a,b,c){return function(d){b[a]=this,c[a]=arguments.length>1?e.call(arguments):d,c===i?g.notifyWith(b,c):--f||g.resolveWith(b,c)}},i,j,k;if(d>1)for(i=new Array(d),j=new Array(d),k=new Array(d);d>b;b++)c[b]&&n.isFunction(c[b].promise)?c[b].promise().progress(h(b,j,i)).done(h(b,k,c)).fail(g.reject):--f;return f||g.resolveWith(k,c),g.promise()}});var I;n.fn.ready=function(a){return n.ready.promise().done(a),this},n.extend({isReady:!1,readyWait:1,holdReady:function(a){a?n.readyWait++:n.ready(!0)},ready:function(a){(a===!0?--n.readyWait:n.isReady)||(n.isReady=!0,a!==!0&&--n.readyWait>0||(I.resolveWith(d,[n]),n.fn.triggerHandler&&(n(d).triggerHandler("ready"),n(d).off("ready"))))}});function J(){d.removeEventListener("DOMContentLoaded",J),a.removeEventListener("load",J),n.ready()}n.ready.promise=function(b){return I||(I=n.Deferred(),"complete"===d.readyState||"loading"!==d.readyState&&!d.documentElement.doScroll?a.setTimeout(n.ready):(d.addEventListener("DOMContentLoaded",J),a.addEventListener("load",J))),I.promise(b)},n.ready.promise();var K=function(a,b,c,d,e,f,g){var h=0,i=a.length,j=null==c;if("object"===n.type(c)){e=!0;for(h in c)K(a,b,h,c[h],!0,f,g)}else if(void 0!==d&&(e=!0,n.isFunction(d)||(g=!0),j&&(g?(b.call(a,d),b=null):(j=b,b=function(a,b,c){return j.call(n(a),c)})),b))for(;i>h;h++)b(a[h],c,g?d:d.call(a[h],h,b(a[h],c)));return e?a:j?b.call(a):i?b(a[0],c):f},L=function(a){return 1===a.nodeType||9===a.nodeType||!+a.nodeType};function M(){this.expando=n.expando+M.uid++}M.uid=1,M.prototype={register:function(a,b){var c=b||{};return a.nodeType?a[this.expando]=c:Object.defineProperty(a,this.expando,{value:c,writable:!0,configurable:!0}),a[this.expando]},cache:function(a){if(!L(a))return{};var b=a[this.expando];return b||(b={},L(a)&&(a.nodeType?a[this.expando]=b:Object.defineProperty(a,this.expando,{value:b,configurable:!0}))),b},set:function(a,b,c){var d,e=this.cache(a);if("string"==typeof b)e[b]=c;else for(d in b)e[d]=b[d];return e},get:function(a,b){return void 0===b?this.cache(a):a[this.expando]&&a[this.expando][b]},access:function(a,b,c){var d;return void 
0===b||b&&"string"==typeof b&&void 0===c?(d=this.get(a,b),void 0!==d?d:this.get(a,n.camelCase(b))):(this.set(a,b,c),void 0!==c?c:b)},remove:function(a,b){var c,d,e,f=a[this.expando];if(void 0!==f){if(void 0===b)this.register(a);else{n.isArray(b)?d=b.concat(b.map(n.camelCase)):(e=n.camelCase(b),b in f?d=[b,e]:(d=e,d=d in f?[d]:d.match(G)||[])),c=d.length;while(c--)delete f[d[c]]}(void 0===b||n.isEmptyObject(f))&&(a.nodeType?a[this.expando]=void 0:delete a[this.expando])}},hasData:function(a){var b=a[this.expando];return void 0!==b&&!n.isEmptyObject(b)}};var N=new M,O=new M,P=/^(?:\{[\w\W]*\}|\[[\w\W]*\])$/,Q=/[A-Z]/g;function R(a,b,c){var d;if(void 0===c&&1===a.nodeType)if(d="data-"+b.replace(Q,"-$&").toLowerCase(),c=a.getAttribute(d),"string"==typeof c){try{c="true"===c?!0:"false"===c?!1:"null"===c?null:+c+""===c?+c:P.test(c)?n.parseJSON(c):c; }catch(e){}O.set(a,b,c)}else c=void 0;return c}n.extend({hasData:function(a){return O.hasData(a)||N.hasData(a)},data:function(a,b,c){return O.access(a,b,c)},removeData:function(a,b){O.remove(a,b)},_data:function(a,b,c){return N.access(a,b,c)},_removeData:function(a,b){N.remove(a,b)}}),n.fn.extend({data:function(a,b){var c,d,e,f=this[0],g=f&&f.attributes;if(void 0===a){if(this.length&&(e=O.get(f),1===f.nodeType&&!N.get(f,"hasDataAttrs"))){c=g.length;while(c--)g[c]&&(d=g[c].name,0===d.indexOf("data-")&&(d=n.camelCase(d.slice(5)),R(f,d,e[d])));N.set(f,"hasDataAttrs",!0)}return e}return"object"==typeof a?this.each(function(){O.set(this,a)}):K(this,function(b){var c,d;if(f&&void 0===b){if(c=O.get(f,a)||O.get(f,a.replace(Q,"-$&").toLowerCase()),void 0!==c)return c;if(d=n.camelCase(a),c=O.get(f,d),void 0!==c)return c;if(c=R(f,d,void 0),void 0!==c)return c}else d=n.camelCase(a),this.each(function(){var c=O.get(this,d);O.set(this,d,b),a.indexOf("-")>-1&&void 0!==c&&O.set(this,a,b)})},null,b,arguments.length>1,null,!0)},removeData:function(a){return this.each(function(){O.remove(this,a)})}}),n.extend({queue:function(a,b,c){var d;return a?(b=(b||"fx")+"queue",d=N.get(a,b),c&&(!d||n.isArray(c)?d=N.access(a,b,n.makeArray(c)):d.push(c)),d||[]):void 0},dequeue:function(a,b){b=b||"fx";var c=n.queue(a,b),d=c.length,e=c.shift(),f=n._queueHooks(a,b),g=function(){n.dequeue(a,b)};"inprogress"===e&&(e=c.shift(),d--),e&&("fx"===b&&c.unshift("inprogress"),delete f.stop,e.call(a,g,f)),!d&&f&&f.empty.fire()},_queueHooks:function(a,b){var c=b+"queueHooks";return N.get(a,c)||N.access(a,c,{empty:n.Callbacks("once memory").add(function(){N.remove(a,[b+"queue",c])})})}}),n.fn.extend({queue:function(a,b){var c=2;return"string"!=typeof a&&(b=a,a="fx",c--),arguments.length<c?n.queue(this[0],a):void 0===b?this:this.each(function(){var c=n.queue(this,a,b);n._queueHooks(this,a),"fx"===a&&"inprogress"!==c[0]&&n.dequeue(this,a)})},dequeue:function(a){return this.each(function(){n.dequeue(this,a)})},clearQueue:function(a){return this.queue(a||"fx",[])},promise:function(a,b){var c,d=1,e=n.Deferred(),f=this,g=this.length,h=function(){--d||e.resolveWith(f,[f])};"string"!=typeof a&&(b=a,a=void 0),a=a||"fx";while(g--)c=N.get(f[g],a+"queueHooks"),c&&c.empty&&(d++,c.empty.add(h));return h(),e.promise(b)}});var S=/[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/.source,T=new RegExp("^(?:([+-])=|)("+S+")([a-z%]*)$","i"),U=["Top","Right","Bottom","Left"],V=function(a,b){return a=b||a,"none"===n.css(a,"display")||!n.contains(a.ownerDocument,a)};function W(a,b,c,d){var e,f=1,g=20,h=d?function(){return d.cur()}:function(){return 
n.css(a,b,"")},i=h(),j=c&&c[3]||(n.cssNumber[b]?"":"px"),k=(n.cssNumber[b]||"px"!==j&&+i)&&T.exec(n.css(a,b));if(k&&k[3]!==j){j=j||k[3],c=c||[],k=+i||1;do f=f||".5",k/=f,n.style(a,b,k+j);while(f!==(f=h()/i)&&1!==f&&--g)}return c&&(k=+k||+i||0,e=c[1]?k+(c[1]+1)*c[2]:+c[2],d&&(d.unit=j,d.start=k,d.end=e)),e}var X=/^(?:checkbox|radio)$/i,Y=/<([\w:-]+)/,Z=/^$|\/(?:java|ecma)script/i,$={option:[1,"<select multiple='multiple'>","</select>"],thead:[1,"<table>","</table>"],col:[2,"<table><colgroup>","</colgroup></table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],_default:[0,"",""]};$.optgroup=$.option,$.tbody=$.tfoot=$.colgroup=$.caption=$.thead,$.th=$.td;function _(a,b){var c="undefined"!=typeof a.getElementsByTagName?a.getElementsByTagName(b||"*"):"undefined"!=typeof a.querySelectorAll?a.querySelectorAll(b||"*"):[];return void 0===b||b&&n.nodeName(a,b)?n.merge([a],c):c}function aa(a,b){for(var c=0,d=a.length;d>c;c++)N.set(a[c],"globalEval",!b||N.get(b[c],"globalEval"))}var ba=/<|&#?\w+;/;function ca(a,b,c,d,e){for(var f,g,h,i,j,k,l=b.createDocumentFragment(),m=[],o=0,p=a.length;p>o;o++)if(f=a[o],f||0===f)if("object"===n.type(f))n.merge(m,f.nodeType?[f]:f);else if(ba.test(f)){g=g||l.appendChild(b.createElement("div")),h=(Y.exec(f)||["",""])[1].toLowerCase(),i=$[h]||$._default,g.innerHTML=i[1]+n.htmlPrefilter(f)+i[2],k=i[0];while(k--)g=g.lastChild;n.merge(m,g.childNodes),g=l.firstChild,g.textContent=""}else m.push(b.createTextNode(f));l.textContent="",o=0;while(f=m[o++])if(d&&n.inArray(f,d)>-1)e&&e.push(f);else if(j=n.contains(f.ownerDocument,f),g=_(l.appendChild(f),"script"),j&&aa(g),c){k=0;while(f=g[k++])Z.test(f.type||"")&&c.push(f)}return l}!function(){var a=d.createDocumentFragment(),b=a.appendChild(d.createElement("div")),c=d.createElement("input");c.setAttribute("type","radio"),c.setAttribute("checked","checked"),c.setAttribute("name","t"),b.appendChild(c),l.checkClone=b.cloneNode(!0).cloneNode(!0).lastChild.checked,b.innerHTML="<textarea>x</textarea>",l.noCloneChecked=!!b.cloneNode(!0).lastChild.defaultValue}();var da=/^key/,ea=/^(?:mouse|pointer|contextmenu|drag|drop)|click/,fa=/^([^.]*)(?:\.(.+)|)/;function ga(){return!0}function ha(){return!1}function ia(){try{return d.activeElement}catch(a){}}function ja(a,b,c,d,e,f){var g,h;if("object"==typeof b){"string"!=typeof c&&(d=d||c,c=void 0);for(h in b)ja(a,h,c,d,b[h],f);return a}if(null==d&&null==e?(e=c,d=c=void 0):null==e&&("string"==typeof c?(e=d,d=void 0):(e=d,d=c,c=void 0)),e===!1)e=ha;else if(!e)return a;return 1===f&&(g=e,e=function(a){return n().off(a),g.apply(this,arguments)},e.guid=g.guid||(g.guid=n.guid++)),a.each(function(){n.event.add(this,b,e,d,c)})}n.event={global:{},add:function(a,b,c,d,e){var f,g,h,i,j,k,l,m,o,p,q,r=N.get(a);if(r){c.handler&&(f=c,c=f.handler,e=f.selector),c.guid||(c.guid=n.guid++),(i=r.events)||(i=r.events={}),(g=r.handle)||(g=r.handle=function(b){return"undefined"!=typeof n&&n.event.triggered!==b.type?n.event.dispatch.apply(a,arguments):void 
0}),b=(b||"").match(G)||[""],j=b.length;while(j--)h=fa.exec(b[j])||[],o=q=h[1],p=(h[2]||"").split(".").sort(),o&&(l=n.event.special[o]||{},o=(e?l.delegateType:l.bindType)||o,l=n.event.special[o]||{},k=n.extend({type:o,origType:q,data:d,handler:c,guid:c.guid,selector:e,needsContext:e&&n.expr.match.needsContext.test(e),namespace:p.join(".")},f),(m=i[o])||(m=i[o]=[],m.delegateCount=0,l.setup&&l.setup.call(a,d,p,g)!==!1||a.addEventListener&&a.addEventListener(o,g)),l.add&&(l.add.call(a,k),k.handler.guid||(k.handler.guid=c.guid)),e?m.splice(m.delegateCount++,0,k):m.push(k),n.event.global[o]=!0)}},remove:function(a,b,c,d,e){var f,g,h,i,j,k,l,m,o,p,q,r=N.hasData(a)&&N.get(a);if(r&&(i=r.events)){b=(b||"").match(G)||[""],j=b.length;while(j--)if(h=fa.exec(b[j])||[],o=q=h[1],p=(h[2]||"").split(".").sort(),o){l=n.event.special[o]||{},o=(d?l.delegateType:l.bindType)||o,m=i[o]||[],h=h[2]&&new RegExp("(^|\\.)"+p.join("\\.(?:.*\\.|)")+"(\\.|$)"),g=f=m.length;while(f--)k=m[f],!e&&q!==k.origType||c&&c.guid!==k.guid||h&&!h.test(k.namespace)||d&&d!==k.selector&&("**"!==d||!k.selector)||(m.splice(f,1),k.selector&&m.delegateCount--,l.remove&&l.remove.call(a,k));g&&!m.length&&(l.teardown&&l.teardown.call(a,p,r.handle)!==!1||n.removeEvent(a,o,r.handle),delete i[o])}else for(o in i)n.event.remove(a,o+b[j],c,d,!0);n.isEmptyObject(i)&&N.remove(a,"handle events")}},dispatch:function(a){a=n.event.fix(a);var b,c,d,f,g,h=[],i=e.call(arguments),j=(N.get(this,"events")||{})[a.type]||[],k=n.event.special[a.type]||{};if(i[0]=a,a.delegateTarget=this,!k.preDispatch||k.preDispatch.call(this,a)!==!1){h=n.event.handlers.call(this,a,j),b=0;while((f=h[b++])&&!a.isPropagationStopped()){a.currentTarget=f.elem,c=0;while((g=f.handlers[c++])&&!a.isImmediatePropagationStopped())a.rnamespace&&!a.rnamespace.test(g.namespace)||(a.handleObj=g,a.data=g.data,d=((n.event.special[g.origType]||{}).handle||g.handler).apply(f.elem,i),void 0!==d&&(a.result=d)===!1&&(a.preventDefault(),a.stopPropagation()))}return k.postDispatch&&k.postDispatch.call(this,a),a.result}},handlers:function(a,b){var c,d,e,f,g=[],h=b.delegateCount,i=a.target;if(h&&i.nodeType&&("click"!==a.type||isNaN(a.button)||a.button<1))for(;i!==this;i=i.parentNode||this)if(1===i.nodeType&&(i.disabled!==!0||"click"!==a.type)){for(d=[],c=0;h>c;c++)f=b[c],e=f.selector+" ",void 0===d[e]&&(d[e]=f.needsContext?n(e,this).index(i)>-1:n.find(e,this,null,[i]).length),d[e]&&d.push(f);d.length&&g.push({elem:i,handlers:d})}return h<b.length&&g.push({elem:this,handlers:b.slice(h)}),g},props:"altKey bubbles cancelable ctrlKey currentTarget detail eventPhase metaKey relatedTarget shiftKey target timeStamp view which".split(" "),fixHooks:{},keyHooks:{props:"char charCode key keyCode".split(" "),filter:function(a,b){return null==a.which&&(a.which=null!=b.charCode?b.charCode:b.keyCode),a}},mouseHooks:{props:"button buttons clientX clientY offsetX offsetY pageX pageY screenX screenY toElement".split(" "),filter:function(a,b){var c,e,f,g=b.button;return null==a.pageX&&null!=b.clientX&&(c=a.target.ownerDocument||d,e=c.documentElement,f=c.body,a.pageX=b.clientX+(e&&e.scrollLeft||f&&f.scrollLeft||0)-(e&&e.clientLeft||f&&f.clientLeft||0),a.pageY=b.clientY+(e&&e.scrollTop||f&&f.scrollTop||0)-(e&&e.clientTop||f&&f.clientTop||0)),a.which||void 0===g||(a.which=1&g?1:2&g?3:4&g?2:0),a}},fix:function(a){if(a[n.expando])return a;var b,c,e,f=a.type,g=a,h=this.fixHooks[f];h||(this.fixHooks[f]=h=ea.test(f)?this.mouseHooks:da.test(f)?this.keyHooks:{}),e=h.props?this.props.concat(h.props):this.props,a=new 
n.Event(g),b=e.length;while(b--)c=e[b],a[c]=g[c];return a.target||(a.target=d),3===a.target.nodeType&&(a.target=a.target.parentNode),h.filter?h.filter(a,g):a},special:{load:{noBubble:!0},focus:{trigger:function(){return this!==ia()&&this.focus?(this.focus(),!1):void 0},delegateType:"focusin"},blur:{trigger:function(){return this===ia()&&this.blur?(this.blur(),!1):void 0},delegateType:"focusout"},click:{trigger:function(){return"checkbox"===this.type&&this.click&&n.nodeName(this,"input")?(this.click(),!1):void 0},_default:function(a){return n.nodeName(a.target,"a")}},beforeunload:{postDispatch:function(a){void 0!==a.result&&a.originalEvent&&(a.originalEvent.returnValue=a.result)}}}},n.removeEvent=function(a,b,c){a.removeEventListener&&a.removeEventListener(b,c)},n.Event=function(a,b){return this instanceof n.Event?(a&&a.type?(this.originalEvent=a,this.type=a.type,this.isDefaultPrevented=a.defaultPrevented||void 0===a.defaultPrevented&&a.returnValue===!1?ga:ha):this.type=a,b&&n.extend(this,b),this.timeStamp=a&&a.timeStamp||n.now(),void(this[n.expando]=!0)):new n.Event(a,b)},n.Event.prototype={constructor:n.Event,isDefaultPrevented:ha,isPropagationStopped:ha,isImmediatePropagationStopped:ha,preventDefault:function(){var a=this.originalEvent;this.isDefaultPrevented=ga,a&&a.preventDefault()},stopPropagation:function(){var a=this.originalEvent;this.isPropagationStopped=ga,a&&a.stopPropagation()},stopImmediatePropagation:function(){var a=this.originalEvent;this.isImmediatePropagationStopped=ga,a&&a.stopImmediatePropagation(),this.stopPropagation()}},n.each({mouseenter:"mouseover",mouseleave:"mouseout",pointerenter:"pointerover",pointerleave:"pointerout"},function(a,b){n.event.special[a]={delegateType:b,bindType:b,handle:function(a){var c,d=this,e=a.relatedTarget,f=a.handleObj;return e&&(e===d||n.contains(d,e))||(a.type=f.origType,c=f.handler.apply(this,arguments),a.type=b),c}}}),n.fn.extend({on:function(a,b,c,d){return ja(this,a,b,c,d)},one:function(a,b,c,d){return ja(this,a,b,c,d,1)},off:function(a,b,c){var d,e;if(a&&a.preventDefault&&a.handleObj)return d=a.handleObj,n(a.delegateTarget).off(d.namespace?d.origType+"."+d.namespace:d.origType,d.selector,d.handler),this;if("object"==typeof a){for(e in a)this.off(e,b,a[e]);return this}return b!==!1&&"function"!=typeof b||(c=b,b=void 0),c===!1&&(c=ha),this.each(function(){n.event.remove(this,a,c,b)})}});var ka=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:-]+)[^>]*)\/>/gi,la=/<script|<style|<link/i,ma=/checked\s*(?:[^=]|=\s*.checked.)/i,na=/^true\/(.*)/,oa=/^\s*<!(?:\[CDATA\[|--)|(?:\]\]|--)>\s*$/g;function
(a,b){return n.nodeName(a,"table")&&n.nodeName(11!==b.nodeType?b:b.firstChild,"tr")?a.getElementsByTagName("tbody")[0]||a.appendChild(a.ownerDocument.createElement("tbody")):a}function qa(a){return a.type=(null!==a.getAttribute("type"))+"/"+a.type,a}function ra(a){var b=na.exec(a.type);return b?a.type=b[1]:a.removeAttribute("type"),a}function sa(a,b){var c,d,e,f,g,h,i,j;if(1===b.nodeType){if(N.hasData(a)&&(f=N.access(a),g=N.set(b,f),j=f.events)){delete g.handle,g.events={};for(e in j)for(c=0,d=j[e].length;d>c;c++)n.event.add(b,e,j[e][c])}O.hasData(a)&&(h=O.access(a),i=n.extend({},h),O.set(b,i))}}function ta(a,b){var c=b.nodeName.toLowerCase();"input"===c&&X.test(a.type)?b.checked=a.checked:"input"!==c&&"textarea"!==c||(b.defaultValue=a.defaultValue)}function ua(a,b,c,d){b=f.apply([],b);var e,g,h,i,j,k,m=0,o=a.length,p=o-1,q=b[0],r=n.isFunction(q);if(r||o>1&&"string"==typeof q&&!l.checkClone&&ma.test(q))return a.each(function(e){var f=a.eq(e);r&&(b[0]=q.call(this,e,f.html())),ua(f,b,c,d)});if(o&&(e=ca(b,a[0].ownerDocument,!1,a,d),g=e.firstChild,1===e.childNodes.length&&(e=g),g||d)){for(h=n.map(_(e,"script"),qa),i=h.length;o>m;m++)j=e,m!==p&&(j=n.clone(j,!0,!0),i&&n.merge(h,_(j,"script"))),c.call(a[m],j,m);if(i)for(k=h[h.length-1].ownerDocument,n.map(h,ra),m=0;i>m;m++)j=h[m],Z.test(j.type||"")&&!N.access(j,"globalEval")&&n.contains(k,j)&&(j.src?n._evalUrl&&n._evalUrl(j.src):n.globalEval(j.textContent.replace(oa,"")))}return a}function va(a,b,c){for(var d,e=b?n.filter(b,a):a,f=0;null!=(d=e[f]);f++)c||1!==d.nodeType||n.cleanData(_(d)),d.parentNode&&(c&&n.contains(d.ownerDocument,d)&&aa(_(d,"script")),d.parentNode.removeChild(d));return a}n.extend({htmlPrefilter:function(a){return a.replace(ka,"<$1></$2>")},clone:function(a,b,c){var d,e,f,g,h=a.cloneNode(!0),i=n.contains(a.ownerDocument,a);if(!(l.noCloneChecked||1!==a.nodeType&&11!==a.nodeType||n.isXMLDoc(a)))for(g=_(h),f=_(a),d=0,e=f.length;e>d;d++)ta(f[d],g[d]);if(b)if(c)for(f=f||_(a),g=g||_(h),d=0,e=f.length;e>d;d++)sa(f[d],g[d]);else sa(a,h);return g=_(h,"script"),g.length>0&&aa(g,!i&&_(a,"script")),h},cleanData:function(a){for(var b,c,d,e=n.event.special,f=0;void 0!==(c=a[f]);f++)if(L(c)){if(b=c[N.expando]){if(b.events)for(d in b.events)e[d]?n.event.remove(c,d):n.removeEvent(c,d,b.handle);c[N.expando]=void 0}c[O.expando]&&(c[O.expando]=void 0)}}}),n.fn.extend({domManip:ua,detach:function(a){return va(this,a,!0)},remove:function(a){return va(this,a)},text:function(a){return K(this,function(a){return void 0===a?n.text(this):this.empty().each(function(){1!==this.nodeType&&11!==this.nodeType&&9!==this.nodeType||(this.textContent=a)})},null,a,arguments.length)},append:function(){return ua(this,arguments,function(a){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var b=pa(this,a);b.appendChild(a)}})},prepend:function(){return ua(this,arguments,function(a){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var b=pa(this,a);b.insertBefore(a,b.firstChild)}})},before:function(){return ua(this,arguments,function(a){this.parentNode&&this.parentNode.insertBefore(a,this)})},after:function(){return ua(this,arguments,function(a){this.parentNode&&this.parentNode.insertBefore(a,this.nextSibling)})},empty:function(){for(var a,b=0;null!=(a=this[b]);b++)1===a.nodeType&&(n.cleanData(_(a,!1)),a.textContent="");return this},clone:function(a,b){return a=null==a?!1:a,b=null==b?a:b,this.map(function(){return n.clone(this,a,b)})},html:function(a){return K(this,function(a){var b=this[0]||{},c=0,d=this.length;if(void 0===a&&1===b.nodeType)return 
b.innerHTML;if("string"==typeof a&&!la.test(a)&&!$[(Y.exec(a)||["",""])[1].toLowerCase()]){a=n.htmlPrefilter(a);try{for(;d>c;c++)b=this[c]||{},1===b.nodeType&&(n.cleanData(_(b,!1)),b.innerHTML=a);b=0}catch(e){}}b&&this.empty().append(a)},null,a,arguments.length)},replaceWith:function(){var a=[];return ua(this,arguments,function(b){var c=this.parentNode;n.inArray(this,a)<0&&(n.cleanData(_(this)),c&&c.replaceChild(b,this))},a)}}),n.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){n.fn[a]=function(a){for(var c,d=[],e=n(a),f=e.length-1,h=0;f>=h;h++)c=h===f?this:this.clone(!0),n(e[h])[b](c),g.apply(d,c.get());return this.pushStack(d)}});var wa,xa={HTML:"block",BODY:"block"};function ya(a,b){var c=n(b.createElement(a)).appendTo(b.body),d=n.css(c[0],"display");return c.detach(),d}function za(a){var b=d,c=xa[a];return c||(c=ya(a,b),"none"!==c&&c||(wa=(wa||n("<iframe frameborder='0' width='0' height='0'/>")).appendTo(b.documentElement),b=wa[0].contentDocument,b.write(),b.close(),c=ya(a,b),wa.detach()),xa[a]=c),c}var Aa=/^margin/,Ba=new RegExp("^("+S+")(?!px)[a-z%]+$","i"),Ca=function(b){var c=b.ownerDocument.defaultView;return c&&c.opener||(c=a),c.getComputedStyle(b)},Da=function(a,b,c,d){var e,f,g={};for(f in b)g[f]=a.style[f],a.style[f]=b[f];e=c.apply(a,d||[]);for(f in b)a.style[f]=g[f];return e},Ea=d.documentElement;!function(){var b,c,e,f,g=d.createElement("div"),h=d.createElement("div");if(h.style){h.style.backgroundClip="content-box",h.cloneNode(!0).style.backgroundClip="",l.clearCloneStyle="content-box"===h.style.backgroundClip,g.style.cssText="border:0;width:8px;height:0;top:0;left:-9999px;padding:0;margin-top:1px;position:absolute",g.appendChild(h);function i(){h.style.cssText="-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;position:relative;display:block;margin:auto;border:1px;padding:1px;top:1%;width:50%",h.innerHTML="",Ea.appendChild(g);var d=a.getComputedStyle(h);b="1%"!==d.top,f="2px"===d.marginLeft,c="4px"===d.width,h.style.marginRight="50%",e="4px"===d.marginRight,Ea.removeChild(g)}n.extend(l,{pixelPosition:function(){return i(),b},boxSizingReliable:function(){return null==c&&i(),c},pixelMarginRight:function(){return null==c&&i(),e},reliableMarginLeft:function(){return null==c&&i(),f},reliableMarginRight:function(){var b,c=h.appendChild(d.createElement("div"));return c.style.cssText=h.style.cssText="-webkit-box-sizing:content-box;box-sizing:content-box;display:block;margin:0;border:0;padding:0",c.style.marginRight=c.style.width="0",h.style.width="1px",Ea.appendChild(g),b=!parseFloat(a.getComputedStyle(c).marginRight),Ea.removeChild(g),h.removeChild(c),b}})}}();function Fa(a,b,c){var d,e,f,g,h=a.style;return c=c||Ca(a),g=c?c.getPropertyValue(b)||c[b]:void 0,""!==g&&void 0!==g||n.contains(a.ownerDocument,a)||(g=n.style(a,b)),c&&!l.pixelMarginRight()&&Ba.test(g)&&Aa.test(b)&&(d=h.width,e=h.minWidth,f=h.maxWidth,h.minWidth=h.maxWidth=h.width=g,g=c.width,h.width=d,h.minWidth=e,h.maxWidth=f),void 0!==g?g+"":g}function Ga(a,b){return{get:function(){return a()?void delete this.get:(this.get=b).apply(this,arguments)}}}var Ha=/^(none|table(?!-c[ea]).+)/,Ia={position:"absolute",visibility:"hidden",display:"block"},Ja={letterSpacing:"0",fontWeight:"400"},Ka=["Webkit","O","Moz","ms"],La=d.createElement("div").style;function Ma(a){if(a in La)return a;var b=a[0].toUpperCase()+a.slice(1),c=Ka.length;while(c--)if(a=Ka[c]+b,a in La)return a}function Na(a,b,c){var d=T.exec(b);return 
d?Math.max(0,d[2]-(c||0))+(d[3]||"px"):b}function Oa(a,b,c,d,e){for(var f=c===(d?"border":"content")?4:"width"===b?1:0,g=0;4>f;f+=2)"margin"===c&&(g+=n.css(a,c+U[f],!0,e)),d?("content"===c&&(g-=n.css(a,"padding"+U[f],!0,e)),"margin"!==c&&(g-=n.css(a,"border"+U[f]+"Width",!0,e))):(g+=n.css(a,"padding"+U[f],!0,e),"padding"!==c&&(g+=n.css(a,"border"+U[f]+"Width",!0,e)));return g}function Pa(b,c,e){var f=!0,g="width"===c?b.offsetWidth:b.offsetHeight,h=Ca(b),i="border-box"===n.css(b,"boxSizing",!1,h);if(d.msFullscreenElement&&a.top!==a&&b.getClientRects().length&&(g=Math.round(100*b.getBoundingClientRect()[c])),0>=g||null==g){if(g=Fa(b,c,h),(0>g||null==g)&&(g=b.style[c]),Ba.test(g))return g;f=i&&(l.boxSizingReliable()||g===b.style[c]),g=parseFloat(g)||0}return g+Oa(b,c,e||(i?"border":"content"),f,h)+"px"}function Qa(a,b){for(var c,d,e,f=[],g=0,h=a.length;h>g;g++)d=a[g],d.style&&(f[g]=N.get(d,"olddisplay"),c=d.style.display,b?(f[g]||"none"!==c||(d.style.display=""),""===d.style.display&&V(d)&&(f[g]=N.access(d,"olddisplay",za(d.nodeName)))):(e=V(d),"none"===c&&e||N.set(d,"olddisplay",e?c:n.css(d,"display"))));for(g=0;h>g;g++)d=a[g],d.style&&(b&&"none"!==d.style.display&&""!==d.style.display||(d.style.display=b?f[g]||"":"none"));return a}n.extend({cssHooks:{opacity:{get:function(a,b){if(b){var c=Fa(a,"opacity");return""===c?"1":c}}}},cssNumber:{animationIterationCount:!0,columnCount:!0,fillOpacity:!0,flexGrow:!0,flexShrink:!0,fontWeight:!0,lineHeight:!0,opacity:!0,order:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{"float":"cssFloat"},style:function(a,b,c,d){if(a&&3!==a.nodeType&&8!==a.nodeType&&a.style){var e,f,g,h=n.camelCase(b),i=a.style;return b=n.cssProps[h]||(n.cssProps[h]=Ma(h)||h),g=n.cssHooks[b]||n.cssHooks[h],void 0===c?g&&"get"in g&&void 0!==(e=g.get(a,!1,d))?e:i[b]:(f=typeof c,"string"===f&&(e=T.exec(c))&&e[1]&&(c=W(a,b,e),f="number"),null!=c&&c===c&&("number"===f&&(c+=e&&e[3]||(n.cssNumber[h]?"":"px")),l.clearCloneStyle||""!==c||0!==b.indexOf("background")||(i[b]="inherit"),g&&"set"in g&&void 0===(c=g.set(a,c,d))||(i[b]=c)),void 0)}},css:function(a,b,c,d){var e,f,g,h=n.camelCase(b);return b=n.cssProps[h]||(n.cssProps[h]=Ma(h)||h),g=n.cssHooks[b]||n.cssHooks[h],g&&"get"in g&&(e=g.get(a,!0,c)),void 0===e&&(e=Fa(a,b,d)),"normal"===e&&b in Ja&&(e=Ja[b]),""===c||c?(f=parseFloat(e),c===!0||isFinite(f)?f||0:e):e}}),n.each(["height","width"],function(a,b){n.cssHooks[b]={get:function(a,c,d){return c?Ha.test(n.css(a,"display"))&&0===a.offsetWidth?Da(a,Ia,function(){return Pa(a,b,d)}):Pa(a,b,d):void 0},set:function(a,c,d){var e,f=d&&Ca(a),g=d&&Oa(a,b,d,"border-box"===n.css(a,"boxSizing",!1,f),f);return g&&(e=T.exec(c))&&"px"!==(e[3]||"px")&&(a.style[b]=c,c=n.css(a,b)),Na(a,c,g)}}}),n.cssHooks.marginLeft=Ga(l.reliableMarginLeft,function(a,b){return b?(parseFloat(Fa(a,"marginLeft"))||a.getBoundingClientRect().left-Da(a,{marginLeft:0},function(){return a.getBoundingClientRect().left}))+"px":void 0}),n.cssHooks.marginRight=Ga(l.reliableMarginRight,function(a,b){return b?Da(a,{display:"inline-block"},Fa,[a,"marginRight"]):void 0}),n.each({margin:"",padding:"",border:"Width"},function(a,b){n.cssHooks[a+b]={expand:function(c){for(var d=0,e={},f="string"==typeof c?c.split(" "):[c];4>d;d++)e[a+U[d]+b]=f[d]||f[d-2]||f[0];return e}},Aa.test(a)||(n.cssHooks[a+b].set=Na)}),n.fn.extend({css:function(a,b){return K(this,function(a,b,c){var d,e,f={},g=0;if(n.isArray(b)){for(d=Ca(a),e=b.length;e>g;g++)f[b[g]]=n.css(a,b[g],!1,d);return f}return void 
0!==c?n.style(a,b,c):n.css(a,b)},a,b,arguments.length>1)},show:function(){return Qa(this,!0)},hide:function(){return Qa(this)},toggle:function(a){return"boolean"==typeof a?a?this.show():this.hide():this.each(function(){V(this)?n(this).show():n(this).hide()})}});function Ra(a,b,c,d,e){return new Ra.prototype.init(a,b,c,d,e)}n.Tween=Ra,Ra.prototype={constructor:Ra,init:function(a,b,c,d,e,f){this.elem=a,this.prop=c,this.easing=e||n.easing._default,this.options=b,this.start=this.now=this.cur(),this.end=d,this.unit=f||(n.cssNumber[c]?"":"px")},cur:function(){var a=Ra.propHooks[this.prop];return a&&a.get?a.get(this):Ra.propHooks._default.get(this)},run:function(a){var b,c=Ra.propHooks[this.prop];return this.options.duration?this.pos=b=n.easing[this.easing](a,this.options.duration*a,0,1,this.options.duration):this.pos=b=a,this.now=(this.end-this.start)*b+this.start,this.options.step&&this.options.step.call(this.elem,this.now,this),c&&c.set?c.set(this):Ra.propHooks._default.set(this),this}},Ra.prototype.init.prototype=Ra.prototype,Ra.propHooks={_default:{get:function(a){var b;return 1!==a.elem.nodeType||null!=a.elem[a.prop]&&null==a.elem.style[a.prop]?a.elem[a.prop]:(b=n.css(a.elem,a.prop,""),b&&"auto"!==b?b:0)},set:function(a){n.fx.step[a.prop]?n.fx.step[a.prop](a):1!==a.elem.nodeType||null==a.elem.style[n.cssProps[a.prop]]&&!n.cssHooks[a.prop]?a.elem[a.prop]=a.now:n.style(a.elem,a.prop,a.now+a.unit)}}},Ra.propHooks.scrollTop=Ra.propHooks.scrollLeft={set:function(a){a.elem.nodeType&&a.elem.parentNode&&(a.elem[a.prop]=a.now)}},n.easing={linear:function(a){return a},swing:function(a){return.5-Math.cos(a*Math.PI)/2},_default:"swing"},n.fx=Ra.prototype.init,n.fx.step={};var Sa,Ta,Ua=/^(?:toggle|show|hide)$/,Va=/queueHooks$/;function Wa(){return a.setTimeout(function(){Sa=void 0}),Sa=n.now()}function Xa(a,b){var c,d=0,e={height:a};for(b=b?1:0;4>d;d+=2-b)c=U[d],e["margin"+c]=e["padding"+c]=a;return b&&(e.opacity=e.width=a),e}function Ya(a,b,c){for(var d,e=(_a.tweeners[b]||[]).concat(_a.tweeners["*"]),f=0,g=e.length;g>f;f++)if(d=e[f].call(c,b,a))return d}function Za(a,b,c){var d,e,f,g,h,i,j,k,l=this,m={},o=a.style,p=a.nodeType&&V(a),q=N.get(a,"fxshow");c.queue||(h=n._queueHooks(a,"fx"),null==h.unqueued&&(h.unqueued=0,i=h.empty.fire,h.empty.fire=function(){h.unqueued||i()}),h.unqueued++,l.always(function(){l.always(function(){h.unqueued--,n.queue(a,"fx").length||h.empty.fire()})})),1===a.nodeType&&("height"in b||"width"in b)&&(c.overflow=[o.overflow,o.overflowX,o.overflowY],j=n.css(a,"display"),k="none"===j?N.get(a,"olddisplay")||za(a.nodeName):j,"inline"===k&&"none"===n.css(a,"float")&&(o.display="inline-block")),c.overflow&&(o.overflow="hidden",l.always(function(){o.overflow=c.overflow[0],o.overflowX=c.overflow[1],o.overflowY=c.overflow[2]}));for(d in b)if(e=b[d],Ua.exec(e)){if(delete b[d],f=f||"toggle"===e,e===(p?"hide":"show")){if("show"!==e||!q||void 0===q[d])continue;p=!0}m[d]=q&&q[d]||n.style(a,d)}else j=void 0;if(n.isEmptyObject(m))"inline"===("none"===j?za(a.nodeName):j)&&(o.display=j);else{q?"hidden"in q&&(p=q.hidden):q=N.access(a,"fxshow",{}),f&&(q.hidden=!p),p?n(a).show():l.done(function(){n(a).hide()}),l.done(function(){var b;N.remove(a,"fxshow");for(b in m)n.style(a,b,m[b])});for(d in m)g=Ya(p?q[d]:0,d,l),d in q||(q[d]=g.start,p&&(g.end=g.start,g.start="width"===d||"height"===d?1:0))}}function $a(a,b){var c,d,e,f,g;for(c in a)if(d=n.camelCase(c),e=b[d],f=a[c],n.isArray(f)&&(e=f[1],f=a[c]=f[0]),c!==d&&(a[d]=f,delete a[c]),g=n.cssHooks[d],g&&"expand"in g){f=g.expand(f),delete a[d];for(c in 
f)c in a||(a[c]=f[c],b[c]=e)}else b[d]=e}function _a(a,b,c){var d,e,f=0,g=_a.prefilters.length,h=n.Deferred().always(function(){delete i.elem}),i=function(){if(e)return!1;for(var b=Sa||Wa(),c=Math.max(0,j.startTime+j.duration-b),d=c/j.duration||0,f=1-d,g=0,i=j.tweens.length;i>g;g++)j.tweens[g].run(f);return h.notifyWith(a,[j,f,c]),1>f&&i?c:(h.resolveWith(a,[j]),!1)},j=h.promise({elem:a,props:n.extend({},b),opts:n.extend(!0,{specialEasing:{},easing:n.easing._default},c),originalProperties:b,originalOptions:c,startTime:Sa||Wa(),duration:c.duration,tweens:[],createTween:function(b,c){var d=n.Tween(a,j.opts,b,c,j.opts.specialEasing[b]||j.opts.easing);return j.tweens.push(d),d},stop:function(b){var c=0,d=b?j.tweens.length:0;if(e)return this;for(e=!0;d>c;c++)j.tweens[c].run(1);return b?(h.notifyWith(a,[j,1,0]),h.resolveWith(a,[j,b])):h.rejectWith(a,[j,b]),this}}),k=j.props;for($a(k,j.opts.specialEasing);g>f;f++)if(d=_a.prefilters[f].call(j,a,k,j.opts))return n.isFunction(d.stop)&&(n._queueHooks(j.elem,j.opts.queue).stop=n.proxy(d.stop,d)),d;return n.map(k,Ya,j),n.isFunction(j.opts.start)&&j.opts.start.call(a,j),n.fx.timer(n.extend(i,{elem:a,anim:j,queue:j.opts.queue})),j.progress(j.opts.progress).done(j.opts.done,j.opts.complete).fail(j.opts.fail).always(j.opts.always)}n.Animation=n.extend(_a,{tweeners:{"*":[function(a,b){var c=this.createTween(a,b);return W(c.elem,a,T.exec(b),c),c}]},tweener:function(a,b){n.isFunction(a)?(b=a,a=["*"]):a=a.match(G);for(var c,d=0,e=a.length;e>d;d++)c=a[d],_a.tweeners[c]=_a.tweeners[c]||[],_a.tweeners[c].unshift(b)},prefilters:[Za],prefilter:function(a,b){b?_a.prefilters.unshift(a):_a.prefilters.push(a)}}),n.speed=function(a,b,c){var d=a&&"object"==typeof a?n.extend({},a):{complete:c||!c&&b||n.isFunction(a)&&a,duration:a,easing:c&&b||b&&!n.isFunction(b)&&b};return d.duration=n.fx.off?0:"number"==typeof d.duration?d.duration:d.duration in n.fx.speeds?n.fx.speeds[d.duration]:n.fx.speeds._default,null!=d.queue&&d.queue!==!0||(d.queue="fx"),d.old=d.complete,d.complete=function(){n.isFunction(d.old)&&d.old.call(this),d.queue&&n.dequeue(this,d.queue)},d},n.fn.extend({fadeTo:function(a,b,c,d){return this.filter(V).css("opacity",0).show().end().animate({opacity:b},a,c,d)},animate:function(a,b,c,d){var e=n.isEmptyObject(a),f=n.speed(b,c,d),g=function(){var b=_a(this,n.extend({},a),f);(e||N.get(this,"finish"))&&b.stop(!0)};return g.finish=g,e||f.queue===!1?this.each(g):this.queue(f.queue,g)},stop:function(a,b,c){var d=function(a){var b=a.stop;delete a.stop,b(c)};return"string"!=typeof a&&(c=b,b=a,a=void 0),b&&a!==!1&&this.queue(a||"fx",[]),this.each(function(){var b=!0,e=null!=a&&a+"queueHooks",f=n.timers,g=N.get(this);if(e)g[e]&&g[e].stop&&d(g[e]);else for(e in g)g[e]&&g[e].stop&&Va.test(e)&&d(g[e]);for(e=f.length;e--;)f[e].elem!==this||null!=a&&f[e].queue!==a||(f[e].anim.stop(c),b=!1,f.splice(e,1));!b&&c||n.dequeue(this,a)})},finish:function(a){return a!==!1&&(a=a||"fx"),this.each(function(){var b,c=N.get(this),d=c[a+"queue"],e=c[a+"queueHooks"],f=n.timers,g=d?d.length:0;for(c.finish=!0,n.queue(this,a,[]),e&&e.stop&&e.stop.call(this,!0),b=f.length;b--;)f[b].elem===this&&f[b].queue===a&&(f[b].anim.stop(!0),f.splice(b,1));for(b=0;g>b;b++)d[b]&&d[b].finish&&d[b].finish.call(this);delete c.finish})}}),n.each(["toggle","show","hide"],function(a,b){var c=n.fn[b];n.fn[b]=function(a,d,e){return null==a||"boolean"==typeof 
a?c.apply(this,arguments):this.animate(Xa(b,!0),a,d,e)}}),n.each({slideDown:Xa("show"),slideUp:Xa("hide"),slideToggle:Xa("toggle"),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(a,b){n.fn[a]=function(a,c,d){return this.animate(b,a,c,d)}}),n.timers=[],n.fx.tick=function(){var a,b=0,c=n.timers;for(Sa=n.now();b<c.length;b++)a=c[b],a()||c[b]!==a||c.splice(b--,1);c.length||n.fx.stop(),Sa=void 0},n.fx.timer=function(a){n.timers.push(a),a()?n.fx.start():n.timers.pop()},n.fx.interval=13,n.fx.start=function(){Ta||(Ta=a.setInterval(n.fx.tick,n.fx.interval))},n.fx.stop=function(){a.clearInterval(Ta),Ta=null},n.fx.speeds={slow:600,fast:200,_default:400},n.fn.delay=function(b,c){return b=n.fx?n.fx.speeds[b]||b:b,c=c||"fx",this.queue(c,function(c,d){var e=a.setTimeout(c,b);d.stop=function(){a.clearTimeout(e)}})},function(){var a=d.createElement("input"),b=d.createElement("select"),c=b.appendChild(d.createElement("option"));a.type="checkbox",l.checkOn=""!==a.value,l.optSelected=c.selected,b.disabled=!0,l.optDisabled=!c.disabled,a=d.createElement("input"),a.value="t",a.type="radio",l.radioValue="t"===a.value}();var ab,bb=n.expr.attrHandle;n.fn.extend({attr:function(a,b){return K(this,n.attr,a,b,arguments.length>1)},removeAttr:function(a){return this.each(function(){n.removeAttr(this,a)})}}),n.extend({attr:function(a,b,c){var d,e,f=a.nodeType;if(3!==f&&8!==f&&2!==f)return"undefined"==typeof a.getAttribute?n.prop(a,b,c):(1===f&&n.isXMLDoc(a)||(b=b.toLowerCase(),e=n.attrHooks[b]||(n.expr.match.bool.test(b)?ab:void 0)),void 0!==c?null===c?void n.removeAttr(a,b):e&&"set"in e&&void 0!==(d=e.set(a,c,b))?d:(a.setAttribute(b,c+""),c):e&&"get"in e&&null!==(d=e.get(a,b))?d:(d=n.find.attr(a,b),null==d?void 0:d))},attrHooks:{type:{set:function(a,b){if(!l.radioValue&&"radio"===b&&n.nodeName(a,"input")){var c=a.value;return a.setAttribute("type",b),c&&(a.value=c),b}}}},removeAttr:function(a,b){var c,d,e=0,f=b&&b.match(G);if(f&&1===a.nodeType)while(c=f[e++])d=n.propFix[c]||c,n.expr.match.bool.test(c)&&(a[d]=!1),a.removeAttribute(c)}}),ab={set:function(a,b,c){return b===!1?n.removeAttr(a,c):a.setAttribute(c,c),c}},n.each(n.expr.match.bool.source.match(/\w+/g),function(a,b){var c=bb[b]||n.find.attr;bb[b]=function(a,b,d){var e,f;return d||(f=bb[b],bb[b]=e,e=null!=c(a,b,d)?b.toLowerCase():null,bb[b]=f),e}});var cb=/^(?:input|select|textarea|button)$/i,db=/^(?:a|area)$/i;n.fn.extend({prop:function(a,b){return K(this,n.prop,a,b,arguments.length>1)},removeProp:function(a){return this.each(function(){delete this[n.propFix[a]||a]})}}),n.extend({prop:function(a,b,c){var d,e,f=a.nodeType;if(3!==f&&8!==f&&2!==f)return 1===f&&n.isXMLDoc(a)||(b=n.propFix[b]||b, e=n.propHooks[b]),void 0!==c?e&&"set"in e&&void 0!==(d=e.set(a,c,b))?d:a[b]=c:e&&"get"in e&&null!==(d=e.get(a,b))?d:a[b]},propHooks:{tabIndex:{get:function(a){var b=n.find.attr(a,"tabindex");return b?parseInt(b,10):cb.test(a.nodeName)||db.test(a.nodeName)&&a.href?0:-1}}},propFix:{"for":"htmlFor","class":"className"}}),l.optSelected||(n.propHooks.selected={get:function(a){var b=a.parentNode;return b&&b.parentNode&&b.parentNode.selectedIndex,null},set:function(a){var b=a.parentNode;b&&(b.selectedIndex,b.parentNode&&b.parentNode.selectedIndex)}}),n.each(["tabIndex","readOnly","maxLength","cellSpacing","cellPadding","rowSpan","colSpan","useMap","frameBorder","contentEditable"],function(){n.propFix[this.toLowerCase()]=this});var eb=/[\t\r\n\f]/g;function fb(a){return 
a.getAttribute&&a.getAttribute("class")||""}n.fn.extend({addClass:function(a){var b,c,d,e,f,g,h,i=0;if(n.isFunction(a))return this.each(function(b){n(this).addClass(a.call(this,b,fb(this)))});if("string"==typeof a&&a){b=a.match(G)||[];while(c=this[i++])if(e=fb(c),d=1===c.nodeType&&(" "+e+" ").replace(eb," ")){g=0;while(f=b[g++])d.indexOf(" "+f+" ")<0&&(d+=f+" ");h=n.trim(d),e!==h&&c.setAttribute("class",h)}}return this},removeClass:function(a){var b,c,d,e,f,g,h,i=0;if(n.isFunction(a))return this.each(function(b){n(this).removeClass(a.call(this,b,fb(this)))});if(!arguments.length)return this.attr("class","");if("string"==typeof a&&a){b=a.match(G)||[];while(c=this[i++])if(e=fb(c),d=1===c.nodeType&&(" "+e+" ").replace(eb," ")){g=0;while(f=b[g++])while(d.indexOf(" "+f+" ")>-1)d=d.replace(" "+f+" "," ");h=n.trim(d),e!==h&&c.setAttribute("class",h)}}return this},toggleClass:function(a,b){var c=typeof a;return"boolean"==typeof b&&"string"===c?b?this.addClass(a):this.removeClass(a):n.isFunction(a)?this.each(function(c){n(this).toggleClass(a.call(this,c,fb(this),b),b)}):this.each(function(){var b,d,e,f;if("string"===c){d=0,e=n(this),f=a.match(G)||[];while(b=f[d++])e.hasClass(b)?e.removeClass(b):e.addClass(b)}else void 0!==a&&"boolean"!==c||(b=fb(this),b&&N.set(this,"__className__",b),this.setAttribute&&this.setAttribute("class",b||a===!1?"":N.get(this,"__className__")||""))})},hasClass:function(a){var b,c,d=0;b=" "+a+" ";while(c=this[d++])if(1===c.nodeType&&(" "+fb(c)+" ").replace(eb," ").indexOf(b)>-1)return!0;return!1}});var gb=/\r/g,hb=/[\x20\t\r\n\f]+/g;n.fn.extend({val:function(a){var b,c,d,e=this[0];{if(arguments.length)return d=n.isFunction(a),this.each(function(c){var e;1===this.nodeType&&(e=d?a.call(this,c,n(this).val()):a,null==e?e="":"number"==typeof e?e+="":n.isArray(e)&&(e=n.map(e,function(a){return null==a?"":a+""})),b=n.valHooks[this.type]||n.valHooks[this.nodeName.toLowerCase()],b&&"set"in b&&void 0!==b.set(this,e,"value")||(this.value=e))});if(e)return b=n.valHooks[e.type]||n.valHooks[e.nodeName.toLowerCase()],b&&"get"in b&&void 0!==(c=b.get(e,"value"))?c:(c=e.value,"string"==typeof c?c.replace(gb,""):null==c?"":c)}}}),n.extend({valHooks:{option:{get:function(a){var b=n.find.attr(a,"value");return null!=b?b:n.trim(n.text(a)).replace(hb," ")}},select:{get:function(a){for(var b,c,d=a.options,e=a.selectedIndex,f="select-one"===a.type||0>e,g=f?null:[],h=f?e+1:d.length,i=0>e?h:f?e:0;h>i;i++)if(c=d[i],(c.selected||i===e)&&(l.optDisabled?!c.disabled:null===c.getAttribute("disabled"))&&(!c.parentNode.disabled||!n.nodeName(c.parentNode,"optgroup"))){if(b=n(c).val(),f)return b;g.push(b)}return g},set:function(a,b){var c,d,e=a.options,f=n.makeArray(b),g=e.length;while(g--)d=e[g],(d.selected=n.inArray(n.valHooks.option.get(d),f)>-1)&&(c=!0);return c||(a.selectedIndex=-1),f}}}}),n.each(["radio","checkbox"],function(){n.valHooks[this]={set:function(a,b){return n.isArray(b)?a.checked=n.inArray(n(a).val(),b)>-1:void 0}},l.checkOn||(n.valHooks[this].get=function(a){return null===a.getAttribute("value")?"on":a.value})});var ib=/^(?:focusinfocus|focusoutblur)$/;n.extend(n.event,{trigger:function(b,c,e,f){var g,h,i,j,l,m,o,p=[e||d],q=k.call(b,"type")?b.type:b,r=k.call(b,"namespace")?b.namespace.split("."):[];if(h=i=e=e||d,3!==e.nodeType&&8!==e.nodeType&&!ib.test(q+n.event.triggered)&&(q.indexOf(".")>-1&&(r=q.split("."),q=r.shift(),r.sort()),l=q.indexOf(":")<0&&"on"+q,b=b[n.expando]?b:new n.Event(q,"object"==typeof b&&b),b.isTrigger=f?2:3,b.namespace=r.join("."),b.rnamespace=b.namespace?new 
RegExp("(^|\\.)"+r.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,b.result=void 0,b.target||(b.target=e),c=null==c?[b]:n.makeArray(c,[b]),o=n.event.special[q]||{},f||!o.trigger||o.trigger.apply(e,c)!==!1)){if(!f&&!o.noBubble&&!n.isWindow(e)){for(j=o.delegateType||q,ib.test(j+q)||(h=h.parentNode);h;h=h.parentNode)p.push(h),i=h;i===(e.ownerDocument||d)&&p.push(i.defaultView||i.parentWindow||a)}g=0;while((h=p[g++])&&!b.isPropagationStopped())b.type=g>1?j:o.bindType||q,m=(N.get(h,"events")||{})[b.type]&&N.get(h,"handle"),m&&m.apply(h,c),m=l&&h[l],m&&m.apply&&L(h)&&(b.result=m.apply(h,c),b.result===!1&&b.preventDefault());return b.type=q,f||b.isDefaultPrevented()||o._default&&o._default.apply(p.pop(),c)!==!1||!L(e)||l&&n.isFunction(e[q])&&!n.isWindow(e)&&(i=e[l],i&&(e[l]=null),n.event.triggered=q,e[q](),n.event.triggered=void 0,i&&(e[l]=i)),b.result}},simulate:function(a,b,c){var d=n.extend(new n.Event,c,{type:a,isSimulated:!0});n.event.trigger(d,null,b),d.isDefaultPrevented()&&c.preventDefault()}}),n.fn.extend({trigger:function(a,b){return this.each(function(){n.event.trigger(a,b,this)})},triggerHandler:function(a,b){var c=this[0];return c?n.event.trigger(a,b,c,!0):void 0}}),n.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error contextmenu".split(" "),function(a,b){n.fn[b]=function(a,c){return arguments.length>0?this.on(b,null,a,c):this.trigger(b)}}),n.fn.extend({hover:function(a,b){return this.mouseenter(a).mouseleave(b||a)}}),l.focusin="onfocusin"in a,l.focusin||n.each({focus:"focusin",blur:"focusout"},function(a,b){var c=function(a){n.event.simulate(b,a.target,n.event.fix(a))};n.event.special[b]={setup:function(){var d=this.ownerDocument||this,e=N.access(d,b);e||d.addEventListener(a,c,!0),N.access(d,b,(e||0)+1)},teardown:function(){var d=this.ownerDocument||this,e=N.access(d,b)-1;e?N.access(d,b,e):(d.removeEventListener(a,c,!0),N.remove(d,b))}}});var jb=a.location,kb=n.now(),lb=/\?/;n.parseJSON=function(a){return JSON.parse(a+"")},n.parseXML=function(b){var c;if(!b||"string"!=typeof b)return null;try{c=(new a.DOMParser).parseFromString(b,"text/xml")}catch(d){c=void 0}return c&&!c.getElementsByTagName("parsererror").length||n.error("Invalid XML: "+b),c};var mb=/#.*$/,nb=/([?&])_=[^&]*/,ob=/^(.*?):[ \t]*([^\r\n]*)$/gm,pb=/^(?:about|app|app-storage|.+-extension|file|res|widget):$/,qb=/^(?:GET|HEAD)$/,rb=/^\/\//,sb={},tb={},ub="*/".concat("*"),vb=d.createElement("a");vb.href=jb.href;function wb(a){return function(b,c){"string"!=typeof b&&(c=b,b="*");var d,e=0,f=b.toLowerCase().match(G)||[];if(n.isFunction(c))while(d=f[e++])"+"===d[0]?(d=d.slice(1)||"*",(a[d]=a[d]||[]).unshift(c)):(a[d]=a[d]||[]).push(c)}}function xb(a,b,c,d){var e={},f=a===tb;function g(h){var i;return e[h]=!0,n.each(a[h]||[],function(a,h){var j=h(b,c,d);return"string"!=typeof j||f||e[j]?f?!(i=j):void 0:(b.dataTypes.unshift(j),g(j),!1)}),i}return g(b.dataTypes[0])||!e["*"]&&g("*")}function yb(a,b){var c,d,e=n.ajaxSettings.flatOptions||{};for(c in b)void 0!==b[c]&&((e[c]?a:d||(d={}))[c]=b[c]);return d&&n.extend(!0,a,d),a}function zb(a,b,c){var d,e,f,g,h=a.contents,i=a.dataTypes;while("*"===i[0])i.shift(),void 0===d&&(d=a.mimeType||b.getResponseHeader("Content-Type"));if(d)for(e in h)if(h[e]&&h[e].test(d)){i.unshift(e);break}if(i[0]in c)f=i[0];else{for(e in c){if(!i[0]||a.converters[e+" "+i[0]]){f=e;break}g||(g=e)}f=f||g}return f?(f!==i[0]&&i.unshift(f),c[f]):void 0}function Ab(a,b,c,d){var 
e,f,g,h,i,j={},k=a.dataTypes.slice();if(k[1])for(g in a.converters)j[g.toLowerCase()]=a.converters[g];f=k.shift();while(f)if(a.responseFields[f]&&(c[a.responseFields[f]]=b),!i&&d&&a.dataFilter&&(b=a.dataFilter(b,a.dataType)),i=f,f=k.shift())if("*"===f)f=i;else if("*"!==i&&i!==f){if(g=j[i+" "+f]||j["* "+f],!g)for(e in j)if(h=e.split(" "),h[1]===f&&(g=j[i+" "+h[0]]||j["* "+h[0]])){g===!0?g=j[e]:j[e]!==!0&&(f=h[0],k.unshift(h[1]));break}if(g!==!0)if(g&&a["throws"])b=g(b);else try{b=g(b)}catch(l){return{state:"parsererror",error:g?l:"No conversion from "+i+" to "+f}}}return{state:"success",data:b}}n.extend({active:0,lastModified:{},etag:{},ajaxSettings:{url:jb.href,type:"GET",isLocal:pb.test(jb.protocol),global:!0,processData:!0,async:!0,contentType:"application/x-www-form-urlencoded; charset=UTF-8",accepts:{"*":ub,text:"text/plain",html:"text/html",xml:"application/xml, text/xml",json:"application/json, text/javascript"},contents:{xml:/\bxml\b/,html:/\bhtml/,json:/\bjson\b/},responseFields:{xml:"responseXML",text:"responseText",json:"responseJSON"},converters:{"* text":String,"text html":!0,"text json":n.parseJSON,"text xml":n.parseXML},flatOptions:{url:!0,context:!0}},ajaxSetup:function(a,b){return b?yb(yb(a,n.ajaxSettings),b):yb(n.ajaxSettings,a)},ajaxPrefilter:wb(sb),ajaxTransport:wb(tb),ajax:function(b,c){"object"==typeof b&&(c=b,b=void 0),c=c||{};var e,f,g,h,i,j,k,l,m=n.ajaxSetup({},c),o=m.context||m,p=m.context&&(o.nodeType||o.jquery)?n(o):n.event,q=n.Deferred(),r=n.Callbacks("once memory"),s=m.statusCode||{},t={},u={},v=0,w="canceled",x={readyState:0,getResponseHeader:function(a){var b;if(2===v){if(!h){h={};while(b=ob.exec(g))h[b[1].toLowerCase()]=b[2]}b=h[a.toLowerCase()]}return null==b?null:b},getAllResponseHeaders:function(){return 2===v?g:null},setRequestHeader:function(a,b){var c=a.toLowerCase();return v||(a=u[c]=u[c]||a,t[a]=b),this},overrideMimeType:function(a){return v||(m.mimeType=a),this},statusCode:function(a){var b;if(a)if(2>v)for(b in a)s[b]=[s[b],a[b]];else x.always(a[x.status]);return this},abort:function(a){var b=a||w;return e&&e.abort(b),z(0,b),this}};if(q.promise(x).complete=r.add,x.success=x.done,x.error=x.fail,m.url=((b||m.url||jb.href)+"").replace(mb,"").replace(rb,jb.protocol+"//"),m.type=c.method||c.type||m.method||m.type,m.dataTypes=n.trim(m.dataType||"*").toLowerCase().match(G)||[""],null==m.crossDomain){j=d.createElement("a");try{j.href=m.url,j.href=j.href,m.crossDomain=vb.protocol+"//"+vb.host!=j.protocol+"//"+j.host}catch(y){m.crossDomain=!0}}if(m.data&&m.processData&&"string"!=typeof m.data&&(m.data=n.param(m.data,m.traditional)),xb(sb,m,c,x),2===v)return x;k=n.event&&m.global,k&&0===n.active++&&n.event.trigger("ajaxStart"),m.type=m.type.toUpperCase(),m.hasContent=!qb.test(m.type),f=m.url,m.hasContent||(m.data&&(f=m.url+=(lb.test(f)?"&":"?")+m.data,delete m.data),m.cache===!1&&(m.url=nb.test(f)?f.replace(nb,"$1_="+kb++):f+(lb.test(f)?"&":"?")+"_="+kb++)),m.ifModified&&(n.lastModified[f]&&x.setRequestHeader("If-Modified-Since",n.lastModified[f]),n.etag[f]&&x.setRequestHeader("If-None-Match",n.etag[f])),(m.data&&m.hasContent&&m.contentType!==!1||c.contentType)&&x.setRequestHeader("Content-Type",m.contentType),x.setRequestHeader("Accept",m.dataTypes[0]&&m.accepts[m.dataTypes[0]]?m.accepts[m.dataTypes[0]]+("*"!==m.dataTypes[0]?", "+ub+"; q=0.01":""):m.accepts["*"]);for(l in m.headers)x.setRequestHeader(l,m.headers[l]);if(m.beforeSend&&(m.beforeSend.call(o,x,m)===!1||2===v))return x.abort();w="abort";for(l 
in{success:1,error:1,complete:1})x[l](m[l]);if(e=xb(tb,m,c,x)){if(x.readyState=1,k&&p.trigger("ajaxSend",[x,m]),2===v)return x;m.async&&m.timeout>0&&(i=a.setTimeout(function(){x.abort("timeout")},m.timeout));try{v=1,e.send(t,z)}catch(y){if(!(2>v))throw y;z(-1,y)}}else z(-1,"No Transport");function z(b,c,d,h){var j,l,t,u,w,y=c;2!==v&&(v=2,i&&a.clearTimeout(i),e=void 0,g=h||"",x.readyState=b>0?4:0,j=b>=200&&300>b||304===b,d&&(u=zb(m,x,d)),u=Ab(m,u,x,j),j?(m.ifModified&&(w=x.getResponseHeader("Last-Modified"),w&&(n.lastModified[f]=w),w=x.getResponseHeader("etag"),w&&(n.etag[f]=w)),204===b||"HEAD"===m.type?y="nocontent":304===b?y="notmodified":(y=u.state,l=u.data,t=u.error,j=!t)):(t=y,!b&&y||(y="error",0>b&&(b=0))),x.status=b,x.statusText=(c||y)+"",j?q.resolveWith(o,[l,y,x]):q.rejectWith(o,[x,y,t]),x.statusCode(s),s=void 0,k&&p.trigger(j?"ajaxSuccess":"ajaxError",[x,m,j?l:t]),r.fireWith(o,[x,y]),k&&(p.trigger("ajaxComplete",[x,m]),--n.active||n.event.trigger("ajaxStop")))}return x},getJSON:function(a,b,c){return n.get(a,b,c,"json")},getScript:function(a,b){return n.get(a,void 0,b,"script")}}),n.each(["get","post"],function(a,b){n[b]=function(a,c,d,e){return n.isFunction(c)&&(e=e||d,d=c,c=void 0),n.ajax(n.extend({url:a,type:b,dataType:e,data:c,success:d},n.isPlainObject(a)&&a))}}),n._evalUrl=function(a){return n.ajax({url:a,type:"GET",dataType:"script",async:!1,global:!1,"throws":!0})},n.fn.extend({wrapAll:function(a){var b;return n.isFunction(a)?this.each(function(b){n(this).wrapAll(a.call(this,b))}):(this[0]&&(b=n(a,this[0].ownerDocument).eq(0).clone(!0),this[0].parentNode&&b.insertBefore(this[0]),b.map(function(){var a=this;while(a.firstElementChild)a=a.firstElementChild;return a}).append(this)),this)},wrapInner:function(a){return n.isFunction(a)?this.each(function(b){n(this).wrapInner(a.call(this,b))}):this.each(function(){var b=n(this),c=b.contents();c.length?c.wrapAll(a):b.append(a)})},wrap:function(a){var b=n.isFunction(a);return this.each(function(c){n(this).wrapAll(b?a.call(this,c):a)})},unwrap:function(){return this.parent().each(function(){n.nodeName(this,"body")||n(this).replaceWith(this.childNodes)}).end()}}),n.expr.filters.hidden=function(a){return!n.expr.filters.visible(a)},n.expr.filters.visible=function(a){return a.offsetWidth>0||a.offsetHeight>0||a.getClientRects().length>0};var Bb=/%20/g,Cb=/\[\]$/,Db=/\r?\n/g,Eb=/^(?:submit|button|image|reset|file)$/i,Fb=/^(?:input|select|textarea|keygen)/i;function Gb(a,b,c,d){var e;if(n.isArray(b))n.each(b,function(b,e){c||Cb.test(a)?d(a,e):Gb(a+"["+("object"==typeof e&&null!=e?b:"")+"]",e,c,d)});else if(c||"object"!==n.type(b))d(a,b);else for(e in b)Gb(a+"["+e+"]",b[e],c,d)}n.param=function(a,b){var c,d=[],e=function(a,b){b=n.isFunction(b)?b():null==b?"":b,d[d.length]=encodeURIComponent(a)+"="+encodeURIComponent(b)};if(void 0===b&&(b=n.ajaxSettings&&n.ajaxSettings.traditional),n.isArray(a)||a.jquery&&!n.isPlainObject(a))n.each(a,function(){e(this.name,this.value)});else for(c in a)Gb(c,a[c],b,e);return d.join("&").replace(Bb,"+")},n.fn.extend({serialize:function(){return n.param(this.serializeArray())},serializeArray:function(){return this.map(function(){var a=n.prop(this,"elements");return a?n.makeArray(a):this}).filter(function(){var a=this.type;return this.name&&!n(this).is(":disabled")&&Fb.test(this.nodeName)&&!Eb.test(a)&&(this.checked||!X.test(a))}).map(function(a,b){var c=n(this).val();return 
null==c?null:n.isArray(c)?n.map(c,function(a){return{name:b.name,value:a.replace(Db,"\r\n")}}):{name:b.name,value:c.replace(Db,"\r\n")}}).get()}}),n.ajaxSettings.xhr=function(){try{return new a.XMLHttpRequest}catch(b){}};var Hb={0:200,1223:204},Ib=n.ajaxSettings.xhr();l.cors=!!Ib&&"withCredentials"in Ib,l.ajax=Ib=!!Ib,n.ajaxTransport(function(b){var c,d;return l.cors||Ib&&!b.crossDomain?{send:function(e,f){var g,h=b.xhr();if(h.open(b.type,b.url,b.async,b.username,b.password),b.xhrFields)for(g in b.xhrFields)h[g]=b.xhrFields[g];b.mimeType&&h.overrideMimeType&&h.overrideMimeType(b.mimeType),b.crossDomain||e["X-Requested-With"]||(e["X-Requested-With"]="XMLHttpRequest");for(g in e)h.setRequestHeader(g,e[g]);c=function(a){return function(){c&&(c=d=h.onload=h.onerror=h.onabort=h.onreadystatechange=null,"abort"===a?h.abort():"error"===a?"number"!=typeof h.status?f(0,"error"):f(h.status,h.statusText):f(Hb[h.status]||h.status,h.statusText,"text"!==(h.responseType||"text")||"string"!=typeof h.responseText?{binary:h.response}:{text:h.responseText},h.getAllResponseHeaders()))}},h.onload=c(),d=h.onerror=c("error"),void 0!==h.onabort?h.onabort=d:h.onreadystatechange=function(){4===h.readyState&&a.setTimeout(function(){c&&d()})},c=c("abort");try{h.send(b.hasContent&&b.data||null)}catch(i){if(c)throw i}},abort:function(){c&&c()}}:void 0}),n.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/\b(?:java|ecma)script\b/},converters:{"text script":function(a){return n.globalEval(a),a}}}),n.ajaxPrefilter("script",function(a){void 0===a.cache&&(a.cache=!1),a.crossDomain&&(a.type="GET")}),n.ajaxTransport("script",function(a){if(a.crossDomain){var b,c;return{send:function(e,f){b=n("<script>").prop({charset:a.scriptCharset,src:a.url}).on("load error",c=function(a){b.remove(),c=null,a&&f("error"===a.type?404:200,a.type)}),d.head.appendChild(b[0])},abort:function(){c&&c()}}}});var Jb=[],Kb=/(=)\?(?=&|$)|\?\?/;n.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var a=Jb.pop()||n.expando+"_"+kb++;return this[a]=!0,a}}),n.ajaxPrefilter("json jsonp",function(b,c,d){var e,f,g,h=b.jsonp!==!1&&(Kb.test(b.url)?"url":"string"==typeof b.data&&0===(b.contentType||"").indexOf("application/x-www-form-urlencoded")&&Kb.test(b.data)&&"data");return h||"jsonp"===b.dataTypes[0]?(e=b.jsonpCallback=n.isFunction(b.jsonpCallback)?b.jsonpCallback():b.jsonpCallback,h?b[h]=b[h].replace(Kb,"$1"+e):b.jsonp!==!1&&(b.url+=(lb.test(b.url)?"&":"?")+b.jsonp+"="+e),b.converters["script json"]=function(){return g||n.error(e+" was not called"),g[0]},b.dataTypes[0]="json",f=a[e],a[e]=function(){g=arguments},d.always(function(){void 0===f?n(a).removeProp(e):a[e]=f,b[e]&&(b.jsonpCallback=c.jsonpCallback,Jb.push(e)),g&&n.isFunction(f)&&f(g[0]),g=f=void 0}),"script"):void 0}),n.parseHTML=function(a,b,c){if(!a||"string"!=typeof a)return null;"boolean"==typeof b&&(c=b,b=!1),b=b||d;var e=x.exec(a),f=!c&&[];return e?[b.createElement(e[1])]:(e=ca([a],b,f),f&&f.length&&n(f).remove(),n.merge([],e.childNodes))};var Lb=n.fn.load;n.fn.load=function(a,b,c){if("string"!=typeof a&&Lb)return Lb.apply(this,arguments);var d,e,f,g=this,h=a.indexOf(" ");return h>-1&&(d=n.trim(a.slice(h)),a=a.slice(0,h)),n.isFunction(b)?(c=b,b=void 0):b&&"object"==typeof 
b&&(e="POST"),g.length>0&&n.ajax({url:a,type:e||"GET",dataType:"html",data:b}).done(function(a){f=arguments,g.html(d?n("<div>").append(n.parseHTML(a)).find(d):a)}).always(c&&function(a,b){g.each(function(){c.apply(this,f||[a.responseText,b,a])})}),this},n.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(a,b){n.fn[b]=function(a){return this.on(b,a)}}),n.expr.filters.animated=function(a){return n.grep(n.timers,function(b){return a===b.elem}).length};function Mb(a){return n.isWindow(a)?a:9===a.nodeType&&a.defaultView}n.offset={setOffset:function(a,b,c){var d,e,f,g,h,i,j,k=n.css(a,"position"),l=n(a),m={};"static"===k&&(a.style.position="relative"),h=l.offset(),f=n.css(a,"top"),i=n.css(a,"left"),j=("absolute"===k||"fixed"===k)&&(f+i).indexOf("auto")>-1,j?(d=l.position(),g=d.top,e=d.left):(g=parseFloat(f)||0,e=parseFloat(i)||0),n.isFunction(b)&&(b=b.call(a,c,n.extend({},h))),null!=b.top&&(m.top=b.top-h.top+g),null!=b.left&&(m.left=b.left-h.left+e),"using"in b?b.using.call(a,m):l.css(m)}},n.fn.extend({offset:function(a){if(arguments.length)return void 0===a?this:this.each(function(b){n.offset.setOffset(this,a,b)});var b,c,d=this[0],e={top:0,left:0},f=d&&d.ownerDocument;if(f)return b=f.documentElement,n.contains(b,d)?(e=d.getBoundingClientRect(),c=Mb(f),{top:e.top+c.pageYOffset-b.clientTop,left:e.left+c.pageXOffset-b.clientLeft}):e},position:function(){if(this[0]){var a,b,c=this[0],d={top:0,left:0};return"fixed"===n.css(c,"position")?b=c.getBoundingClientRect():(a=this.offsetParent(),b=this.offset(),n.nodeName(a[0],"html")||(d=a.offset()),d.top+=n.css(a[0],"borderTopWidth",!0),d.left+=n.css(a[0],"borderLeftWidth",!0)),{top:b.top-d.top-n.css(c,"marginTop",!0),left:b.left-d.left-n.css(c,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var a=this.offsetParent;while(a&&"static"===n.css(a,"position"))a=a.offsetParent;return a||Ea})}}),n.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(a,b){var c="pageYOffset"===b;n.fn[a]=function(d){return K(this,function(a,d,e){var f=Mb(a);return void 0===e?f?f[b]:a[d]:void(f?f.scrollTo(c?f.pageXOffset:e,c?e:f.pageYOffset):a[d]=e)},a,d,arguments.length)}}),n.each(["top","left"],function(a,b){n.cssHooks[b]=Ga(l.pixelPosition,function(a,c){return c?(c=Fa(a,b),Ba.test(c)?n(a).position()[b]+"px":c):void 0})}),n.each({Height:"height",Width:"width"},function(a,b){n.each({padding:"inner"+a,content:b,"":"outer"+a},function(c,d){n.fn[d]=function(d,e){var f=arguments.length&&(c||"boolean"!=typeof d),g=c||(d===!0||e===!0?"margin":"border");return K(this,function(b,c,d){var e;return n.isWindow(b)?b.document.documentElement["client"+a]:9===b.nodeType?(e=b.documentElement,Math.max(b.body["scroll"+a],e["scroll"+a],b.body["offset"+a],e["offset"+a],e["client"+a])):void 0===d?n.css(b,c,g):n.style(b,c,d,g)},b,f?d:void 0,f,null)}})}),n.fn.extend({bind:function(a,b,c){return this.on(a,null,b,c)},unbind:function(a,b){return this.off(a,null,b)},delegate:function(a,b,c,d){return this.on(b,a,c,d)},undelegate:function(a,b,c){return 1===arguments.length?this.off(a,"**"):this.off(b,a||"**",c)},size:function(){return this.length}}),n.fn.andSelf=n.fn.addBack,"function"==typeof define&&define.amd&&define("jquery",[],function(){return n});var Nb=a.jQuery,Ob=a.$;return n.noConflict=function(b){return a.$===n&&(a.$=Ob),b&&a.jQuery===n&&(a.jQuery=Nb),n},b||(a.jQuery=a.$=n),n});
pa
defaults.go
/* * Copyright 2020 The Magma Authors. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package test_utils import "magma/feg/cloud/go/services/feg/obsidian/models" func NewDefaultNetworkConfig() *models.NetworkFederationConfigs { // GyInitMethod_PER_SESSION gyInitMethodPerSession := uint32(1) return &models.NetworkFederationConfigs{ S6a: &models.S6a{ Server: &models.DiameterClientConfigs{ Protocol: "sctp", Retransmits: 3, WatchdogInterval: 1, RetryCount: 5, ProductName: "magma", Host: "magma-fedgw.magma.com", Realm: "magma.com", }, }, Gx: &models.Gx{ Servers: []*models.DiameterClientConfigs{ { Protocol: "tcp", Retransmits: 3, WatchdogInterval: 1, RetryCount: 5, ProductName: "magma", Host: "magma-fedgw.magma.com", Realm: "magma.com", }, }, }, Gy: &models.Gy{ Servers: []*models.DiameterClientConfigs{ { Protocol: "tcp", Retransmits: 3, WatchdogInterval: 1, RetryCount: 5, ProductName: "magma", Host: "magma-fedgw.magma.com", Realm: "magma.com", }, }, InitMethod: &gyInitMethodPerSession, }, Hss: &models.Hss{ Server: &models.DiameterServerConfigs{ Protocol: "tcp", DestHost: "magma.com", DestRealm: "magma.com", }, LteAuthOp: []byte("EREREREREREREREREREREQ=="), LteAuthAmf: []byte("gA"), DefaultSubProfile: &models.SubscriptionProfile{ MaxUlBitRate: 100000000, // 100 Mbps MaxDlBitRate: 200000000, // 200 Mbps }, SubProfiles: make(map[string]models.SubscriptionProfile), StreamSubscribers: false, }, Swx: &models.Swx{ Servers: []*models.DiameterClientConfigs{ &models.DiameterClientConfigs{ Protocol: "sctp", Retransmits: 3, WatchdogInterval: 1, RetryCount: 5, ProductName: "magma", Host: "magma-fedgw.magma.com", Realm: "magma.com", }, }, VerifyAuthorization: false, CacheTTLSeconds: 10800, }, EapAka: &models.EapAka{ Timeout: &models.EapAkaTimeouts{ ChallengeMs: 20000, ErrorNotificationMs: 10000, SessionMs: 43200000, SessionAuthenticatedMs: 5000, }, PlmnIds: []string{}, }, AaaServer: &models.AaaServer{ IDLESessionTimeoutMs: 21600000, AccountingEnabled: false, CreateSessionOnAuth: false, }, ServedNetworkIds: []string{}, Health: &models.Health{ HealthServices: []string{"S6A_PROXY", "SESSION_PROXY"}, UpdateIntervalSecs: 10, CloudDisablePeriodSecs: 10, LocalDisablePeriodSecs: 1, UpdateFailureThreshold: 3, RequestFailureThreshold: 0.50, MinimumRequestThreshold: 1, CPUUtilizationThreshold: 0.90, MemoryAvailableThreshold: 0.90, }, Csfb: &models.Csfb{ Client: &models.SctpClientConfigs{ ServerAddress: "", LocalAddress: "", }, }, } } func NewDefaultGatewayConfig() *models.GatewayFederationConfigs
{ return (*models.GatewayFederationConfigs)(NewDefaultNetworkConfig()) }
task_test.go
/* * Copyright 2014 VMware, Inc. All rights reserved. Licensed under the Apache v2 License.
import ( . "gopkg.in/check.v1" ) func (s *S) Test_WaitTaskCompletion(c *C) { testServer.Response(200, nil, taskExample) task, err := s.vapp.Deploy() _ = testServer.WaitRequest() testServer.Flush() c.Assert(err, IsNil) testServer.Response(200, nil, taskExample) err = task.WaitTaskCompletion() _ = testServer.WaitRequest() testServer.Flush() c.Assert(err, IsNil) } var taskExample = ` <Task cancelRequested="false" endTime="2014-11-10T09:09:31.483Z" expiryTime="2015-02-08T09:09:16.627Z" href="http://localhost:4444/api/task/1b8f926c-eff5-4bea-9b13-4e49bdd50c05" id="urn:vcloud:task:1b8f926c-eff5-4bea-9b13-4e49bdd50c05" name="task" operation="Composed Virtual Application Test API GO4(fdb86157-2e1f-4889-9942-0463836d10e1)" operationName="vdcComposeVapp" serviceNamespace="com.vmware.vcloud" startTime="2014-11-10T09:09:16.627Z" status="success" type="application/vnd.vmware.vcloud.task+xml" xmlns="http://www.vmware.com/vcloud/v1.5" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.vmware.com/vcloud/v1.5 http://10.6.32.3/api/v1.5/schema/master.xsd"> <Owner href="http://localhost:4444/api/vApp/vapp-fdb86157-2e1f-4889-9942-0463836d10e1" name="Test API GO4" type="application/vnd.vmware.vcloud.vApp+xml"/> <User href="http://localhost:4444/api/admin/user/d8ac278a-5b49-4c85-9a81-468838e89eb9" name="[email protected]" type="application/vnd.vmware.admin.user+xml"/> <Organization href="http://localhost:4444/api/org/23bd2339-c55f-403c-baf3-13109e8c8d57" name="M916272752-5793" type="application/vnd.vmware.vcloud.org+xml"/> <Progress>100</Progress> <Details/> </Task> `
*/ package govcloudair
0049_auto_20160804_0509.py
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-08-04 05:09
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('tournament', '0048_auto_20160803_0311'),
    ]

    operations = [
        migrations.AddField(
            model_name='alternate',
            name='season_player',
            field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='tournament.SeasonPlayer'),
        ),
        migrations.AlterUniqueTogether(
            name='alternate',
            unique_together=set([]),
        ),
        migrations.RunSQL('''
            UPDATE tournament_alternate alt
            SET season_player_id = (SELECT id FROM tournament_seasonplayer sp
                                    WHERE sp.season_id = alt.season_id
                                      AND sp.player_id = alt.player_id)
        ''')
    ]
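The RunSQL step above backfills alternate.season_player_id with a correlated subquery. A minimal sketch of the same backfill written with RunPython instead, assuming the tournament.Alternate and tournament.SeasonPlayer models referenced above (illustrative only, not part of the generated migration):

def backfill_season_player(apps, schema_editor):
    # Resolve historical model states; app label and model names are taken from the migration above.
    Alternate = apps.get_model('tournament', 'Alternate')
    SeasonPlayer = apps.get_model('tournament', 'SeasonPlayer')
    for alt in Alternate.objects.filter(season_player__isnull=True):
        # Assumes Alternate has season and player foreign keys, as the SQL above implies.
        alt.season_player = SeasonPlayer.objects.filter(
            season_id=alt.season_id, player_id=alt.player_id).first()
        alt.save(update_fields=['season_player'])

# Used inside operations as:
#     migrations.RunPython(backfill_season_player, migrations.RunPython.noop)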
start.py
import sys import random import string import datetime import logging import subprocess import json import time import requests import urllib.request import ssl # Fixed Scraping: SSL: CERTIFICATE_VERIFY_FAILED error ssl._create_default_https_context = ssl._create_unverified_context HELP = """OPTIONS: --cute (default) sends cute/caring/lovie-dovie messages (okie ❤️🥰😘) --mean sends moodie messages that tend to pick up fights (k.) --hungry sends food related messages (Kk 😋🤤🍕🍩) --random sends messages like a bipolar (k. ❤️🥰😘) -f (frequent) sends him message more frequently -r (reply) adds auto-reply feature, else ghost him without the tag """ subreddit_list = ["BetterEveryLoop", "AnimalsBeingJerks", "meme"] #todo: https://www.twilio.com/blog/2016/09/how-to-receive-and-respond-to-a-text-message-with-python-flask-and-twilio.html def printInfo(mood, settings, appleID): print("Starting AI girlfriend chatbot...") if 'r' in settings: print("Sending messages to " + appleID + " in " + mood[2:] + " mood with auto-reply feature. ") else: print("Sending messages to " + appleID + " in " + mood[2:] + " mood. ") #credit: https://github.com/noahbroyles/iMessageFUN def runAppleScript(applescript): arguments = [item for x in [("-e", l.strip()) for l in applescript.split('\n') if l.strip() != ''] for item in x] proc = subprocess.Popen(["osascript"] + arguments, stdout=subprocess.PIPE) proc.stdout.flush() def sendMessage(message, appleID): script = ''' on run tell application "Messages" set iMessageService to 1st service whose service type = iMessage set boyfriend to buddy "''' + appleID + '''" of iMessageService send "''' + message + '''" to boyfriend end tell end run''' runAppleScript(script) logging.info("Sent" + message + " at " + str(datetime.datetime.now())) def getMeme(subreddit): response = requests.get("https://meme-api.herokuapp.com/gimme/" + subreddit) url = json.loads(response.text)["url"] # apple script cannot send image file: urllib.request.urlretrieve(url, "meme.jpg") return url def getMessage(path, category): with open('messages/' + path, 'r
eID): if "mean" in mood: sendMessage(message, appleID) elif "hungry" in mood: sendMessage(message, appleID) elif "random" in mood: sendMessage(message, appleID) else: x = random.randint(0, 11) if x%11 == 0: message = getMessage("cute.json", "greetings") sendMessage(message, appleID) else: i = random.randint(0, len(subreddit_list) - 1) meme = getMeme(subreddit_list[i]) sendMessage(meme, appleID) if __name__ == "__main__": args = sys.argv if len(args) < 2: print("\n" + HELP + "\n") try: mood = [arg for arg in args if arg.startswith("--")][0] except IndexError: mood = '--cute' try: settings = [arg for arg in args if arg.startswith("-") and not arg.startswith("--")][0] except IndexError: settings = '' appleID = args[-1] for x in appleID: if x not in string.digits and '@' not in appleID: sys.exit("ERROR: Invalid AppleID or Phone number: {}".format(appleID)) printInfo(mood, settings, appleID) logging.basicConfig(filename="message.log", level=logging.INFO) logging.info("Sending message to " + appleID + " with " + mood + " " + settings) while True: try: generateMessage(mood, appleID) if 'f' in settings: time.sleep(5) else: time.sleep(10) except KeyboardInterrupt: print("RAP got interrupted") break except Exception as e: print(e) logging.error(e) break
') as file: data = json.load(file) x = random.randint(0, len(data[category]) - 1) return data[category][x] def generateMessage(mood, appl
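sendMessage above drives Messages.app through osascript. A minimal standalone sketch of that pattern, feeding the AppleScript to osascript over stdin instead of per-line -e flags; the recipient handle below is a made-up example, and like the original this does not escape quotes in the message text. It only works on macOS with Messages signed in to iMessage.

import subprocess

def send_imessage(text, recipient):
    # Same "tell application Messages" script as sendMessage(), collapsed into one call.
    script = '''
    tell application "Messages"
        set svc to 1st service whose service type = iMessage
        send "{0}" to buddy "{1}" of svc
    end tell
    '''.format(text, recipient)
    # osascript reads the script from standard input when no script file is given.
    subprocess.run(["osascript"], input=script, text=True, check=True)

# send_imessage("okie", "+15551234567")  # example handle only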
fdmobilenet.py
""" FD-MobileNet for ImageNet-1K, implemented in Gluon. Original paper: 'FD-MobileNet: Improved MobileNet with A Fast Downsampling Strategy,' https://arxiv.org/abs/1802.03750. """ __all__ = ['fdmobilenet_w1', 'fdmobilenet_w3d4', 'fdmobilenet_wd2', 'fdmobilenet_wd4', 'get_fdmobilenet']
def get_fdmobilenet(width_scale, model_name=None, pretrained=False, ctx=cpu(), root=os.path.join("~", ".mxnet", "models"), **kwargs): """ Create FD-MobileNet model with specific parameters. Parameters: ---------- width_scale : float Scale factor for width of layers. model_name : str or None, default None Model name for loading pretrained model. pretrained : bool, default False Whether to load the pretrained weights for model. ctx : Context, default CPU The context in which to load the pretrained weights. root : str, default '~/.mxnet/models' Location for keeping the model parameters. """ channels = [[32], [64], [128, 128], [256, 256], [512, 512, 512, 512, 512, 1024]] first_stage_stride = True if width_scale != 1.0: channels = [[int(cij * width_scale) for cij in ci] for ci in channels] net = MobileNet( channels=channels, first_stage_stride=first_stage_stride, **kwargs) if pretrained: if (model_name is None) or (not model_name): raise ValueError("Parameter `model_name` should be properly initialized for loading pretrained model.") from .model_store import get_model_file net.load_parameters( filename=get_model_file( model_name=model_name, local_model_store_dir_path=root), ctx=ctx) return net def fdmobilenet_w1(**kwargs): """ FD-MobileNet 1.0x model from 'FD-MobileNet: Improved MobileNet with A Fast Downsampling Strategy,' https://arxiv.org/abs/1802.03750. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. ctx : Context, default CPU The context in which to load the pretrained weights. root : str, default '~/.mxnet/models' Location for keeping the model parameters. """ return get_fdmobilenet(width_scale=1.0, model_name="fdmobilenet_w1", **kwargs) def fdmobilenet_w3d4(**kwargs): """ FD-MobileNet 0.75x model from 'FD-MobileNet: Improved MobileNet with A Fast Downsampling Strategy,' https://arxiv.org/abs/1802.03750. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. ctx : Context, default CPU The context in which to load the pretrained weights. root : str, default '~/.mxnet/models' Location for keeping the model parameters. """ return get_fdmobilenet(width_scale=0.75, model_name="fdmobilenet_w3d4", **kwargs) def fdmobilenet_wd2(**kwargs): """ FD-MobileNet 0.5x model from 'FD-MobileNet: Improved MobileNet with A Fast Downsampling Strategy,' https://arxiv.org/abs/1802.03750. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. ctx : Context, default CPU The context in which to load the pretrained weights. root : str, default '~/.mxnet/models' Location for keeping the model parameters. """ return get_fdmobilenet(width_scale=0.5, model_name="fdmobilenet_wd2", **kwargs) def fdmobilenet_wd4(**kwargs): """ FD-MobileNet 0.25x model from 'FD-MobileNet: Improved MobileNet with A Fast Downsampling Strategy,' https://arxiv.org/abs/1802.03750. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. ctx : Context, default CPU The context in which to load the pretrained weights. root : str, default '~/.mxnet/models' Location for keeping the model parameters. 
""" return get_fdmobilenet(width_scale=0.25, model_name="fdmobilenet_wd4", **kwargs) def _test(): import numpy as np import mxnet as mx pretrained = False models = [ fdmobilenet_w1, fdmobilenet_w3d4, fdmobilenet_wd2, fdmobilenet_wd4, ] for model in models: net = model(pretrained=pretrained) ctx = mx.cpu() if not pretrained: net.initialize(ctx=ctx) net_params = net.collect_params() weight_count = 0 for param in net_params.values(): if (param.shape is None) or (not param._differentiable): continue weight_count += np.prod(param.shape) print("m={}, {}".format(model.__name__, weight_count)) assert (model != fdmobilenet_w1 or weight_count == 2901288) assert (model != fdmobilenet_w3d4 or weight_count == 1833304) assert (model != fdmobilenet_wd2 or weight_count == 993928) assert (model != fdmobilenet_wd4 or weight_count == 383160) x = mx.nd.zeros((1, 3, 224, 224), ctx=ctx) y = net(x) assert (y.shape == (1, 1000)) if __name__ == "__main__": _test()
import os
from mxnet import cpu
from .mobilenet import MobileNet
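get_fdmobilenet rescales every stage's channel counts by width_scale before building the MobileNet backbone. A small sketch of that scaling, mirroring the list comprehension above, for the 0.5x variant:

# Base channel layout used by get_fdmobilenet (one inner list per stage).
channels = [[32], [64], [128, 128], [256, 256], [512, 512, 512, 512, 512, 1024]]

def scale_channels(channels, width_scale):
    # Same computation as in get_fdmobilenet; int() truncates, it does not round.
    return [[int(cij * width_scale) for cij in ci] for ci in channels]

print(scale_channels(channels, 0.5))
# [[16], [32], [64, 64], [128, 128], [256, 256, 256, 256, 256, 512]]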
utils.py
""" Some codes from https://github.com/Newmu/dcgan_code """ from __future__ import division import math import json import random import pprint import scipy.misc import numpy as np from time import gmtime, strftime from six.moves import xrange from glob import glob import cv2 import imageio import tensorflow as tf import tensorflow.contrib.slim as slim pp = pprint.PrettyPrinter() get_stddev = lambda x, k_h, k_w: 1/math.sqrt(k_w*k_h*x.get_shape()[-1]) def show_all_variables(): model_vars = tf.trainable_variables() slim.model_analyzer.analyze_vars(model_vars, print_info=True) def get_image(image_path, input_height, input_width, resize_height=64, resize_width=64, crop=True, grayscale=False): image = imread(image_path, grayscale) try: return transform(image, input_height, input_width, resize_height, resize_width, crop) except ValueError : print("Bad image. filepath: ", image_path) except AttributeError: print("Bad image. filepath: ", image_path) def save_images(images, size, image_path): return imsave(inverse_transform(images), size, image_path) def imread(path, grayscale = False): try: if (grayscale): img = cv2.imread(path) new_img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) return cv2.imread(new_img, flatten = True).astype(np.float) else: img = cv2.imread(path) new_img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) return new_img.astype(np.float) except(TypeError): print(path) #Do def test_images(path_glob): for path in path_glob: imread(path) def merge_images(images, size): return inverse_transform(images) def merge(images, size): h, w = images.shape[1], images.shape[2] if (images.shape[3] in (3,4)): c = images.shape[3] img = np.zeros((h * size[0], w * size[1], c)) for idx, image in enumerate(images): i = idx % size[1] j = idx // size[1] img[j * h:j * h + h, i * w:i * w + w, :] = image return img elif images.shape[3]==1: img = np.zeros((h * size[0], w * size[1])) for idx, image in enumerate(images): i = idx % size[1] j = idx // size[1] img[j * h:j * h + h, i * w:i * w + w] = image[:,:,0] return img else: raise ValueError('in merge(images,size) images parameter ' 'must have dimensions: HxW or HxWx3 or HxWx4') def imsave(images, size, path): image = np.squeeze(merge(images, size)) return scipy.misc.imsave(path, image) def
(x, crop_h, crop_w, resize_h=64, resize_w=64): if crop_w is None: crop_w = crop_h h, w = x.shape[:2] j = int(round((h - crop_h)/2.)) i = int(round((w - crop_w)/2.)) return scipy.misc.imresize( x[j:j+crop_h, i:i+crop_w], [resize_h, resize_w]) def transform(image, input_height, input_width, resize_height=64, resize_width=64, crop=True): if crop: cropped_image = center_crop( image, input_height, input_width, resize_height, resize_width) else: cropped_image = cv2.resize(image, (resize_height, resize_width)) return np.array(cropped_image)/127.5 - 1. def inverse_transform(images): return (images+1.)/2. def make_gif(images, fname, duration=2, true_image=False): import moviepy.editor as mpy def make_frame(t): try: x = images[int(len(images)/duration*t)] except: x = images[-1] if true_image: return x.astype(np.uint8) else: return ((x+1)/2*255).astype(np.uint8) clip = mpy.VideoClip(make_frame, duration=duration) clip.write_gif(fname, fps = len(images) / duration) def visualize(sess, dcgan, config, option): image_frame_dim = int(math.ceil(config.batch_size**.5)) if option == 0: z_sample = np.random.normal(0, 1, size=(config.batch_size, dcgan.z_dim)) z_sample /= np.linalg.norm(z_sample, axis=0) samples = sess.run(dcgan.sampler, feed_dict={dcgan.z: z_sample}) save_images(samples, [image_frame_dim, image_frame_dim], '/content/gdrive/My Drive/samples/test_%s.png' % strftime("%Y%m%d%H%M%S", gmtime())) elif option == 1: values = np.arange(0, 1, 1./config.batch_size) for idx in xrange(100): print(" [*] %d" % idx) z_sample = np.zeros([config.batch_size, dcgan.z_dim]) for kdx, z in enumerate(z_sample): z[idx] = values[kdx] if config.dataset == "mnist": y = np.random.choice(10, config.batch_size) y_one_hot = np.zeros((config.batch_size, 10)) y_one_hot[np.arange(config.batch_size), y] = 1 samples = sess.run(dcgan.sampler, feed_dict={dcgan.z: z_sample, dcgan.y: y_one_hot}) elif config.dataset == 'wikiart': y = np.random.choice(27, config.batch_size) y_one_hot = np.zeros((config.batch_size, 27)) y_one_hot[np.arange(config.batch_size), y] = 1 samples = sess.run(dcgan.sampler, feed_dict={dcgan.z: z_sample, dcgan.y: y_one_hot}) save_images(samples, [image_frame_dim, image_frame_dim], './samples/test_arange_%s.png' % (idx)) elif option == 2: values = np.arange(0, 1, 1./config.batch_size) for idx in [random.randint(0, 99) for _ in xrange(100)]: print(" [*] %d" % idx) z = np.random.uniform(-0.2, 0.2, size=(dcgan.z_dim)) z_sample = np.tile(z, (config.batch_size, 1)) #z_sample = np.zeros([config.batch_size, dcgan.z_dim]) for kdx, z in enumerate(z_sample): z[idx] = values[kdx] if config.dataset == "mnist": y = np.random.choice(10, config.batch_size) y_one_hot = np.zeros((config.batch_size, 10)) y_one_hot[np.arange(config.batch_size), y] = 1 samples = sess.run(dcgan.sampler, feed_dict={dcgan.z: z_sample, dcgan.y: y_one_hot}) elif config.dataset == 'wikiart': y = np.random.choice(27, config.batch_size) y_one_hot = np.zeros((config.batch_size, 27)) y_one_hot[np.arange(config.batch_size), y] = 1 samples = sess.run(dcgan.sampler, feed_dict={dcgan.z: z_sample, dcgan.y: y_one_hot}) try: make_gif(samples, './samples/test_gif_%s.gif' % (idx)) except: save_images(samples, [image_frame_dim, image_frame_dim], './samples/test_%s.png' % strftime("%Y%m%d%H%M%S", gmtime())) elif option == 3: values = np.arange(0, 1, 1./config.batch_size) for idx in xrange(100): print(" [*] %d" % idx) z_sample = np.zeros([config.batch_size, dcgan.z_dim]) for kdx, z in enumerate(z_sample): z[idx] = values[kdx] samples = sess.run(dcgan.sampler, 
feed_dict={dcgan.z: z_sample}) make_gif(samples, './samples/test_gif_%s.gif' % (idx)) elif option == 4: image_set = [] values = np.arange(0, 1, 1./config.batch_size) for idx in xrange(100): print(" [*] %d" % idx) z_sample = np.zeros([config.batch_size, dcgan.z_dim]) for kdx, z in enumerate(z_sample): z[idx] = values[kdx] image_set.append(sess.run(dcgan.sampler, feed_dict={dcgan.z: z_sample})) make_gif(image_set[-1], './samples/test_gif_%s.gif' % (idx)) new_image_set = [merge(np.array([images[idx] for images in image_set]), [10, 10]) \ for idx in list(range(64)) + list(range(63, -1, -1))] make_gif(new_image_set, './samples/test_gif_merged.gif', duration=8) def get_max_end(path_dir, num_len=3, fname_pattern='*.jpg'): max_ = 0 for f in glob(path_dir + fname_pattern): curr = int(f[-num_len-4:-4]) if curr > max_: max_ = curr return max_ def image_manifold_size(num_images): print(num_images) manifold_h = int(np.floor(np.sqrt(num_images))) manifold_w = int(np.ceil(np.sqrt(num_images))) assert manifold_h * manifold_w == num_images return manifold_h, manifold_w if __name__ == '__main__': print('Getting image!') import time start = time.time() get_image("albert-gleizes_acrobats-1916.jpg",256,256,256,256) end = (time.time() - start) print('Took : {:.4f}'.format(end))
center_crop
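transform above maps uint8 pixels into the [-1, 1] range the model works in (divide by 127.5, then subtract 1), and inverse_transform maps results back to [0, 1] for saving. A tiny NumPy sketch of the round trip:

import numpy as np

pixels = np.array([0.0, 127.5, 255.0])
normalized = pixels / 127.5 - 1.0      # transform(): [-1.0, 0.0, 1.0]
recovered = (normalized + 1.0) / 2.0   # inverse_transform(): [0.0, 0.5, 1.0]
print(normalized, recovered)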
util.py
""" Assorted utilities for working with neural networks in AllenNLP. """ # pylint: disable=too-many-lines from collections import defaultdict from typing import Any, Dict, List, Optional, Sequence, Tuple, TypeVar import logging import math import warnings import torch from allennlp.common.checks import ConfigurationError logger = logging.getLogger(__name__) # pylint: disable=invalid-name T = TypeVar('T') def has_tensor(obj) -> bool: """ Given a possibly complex data structure, check if it has any torch.Tensors in it. """ if isinstance(obj, torch.Tensor): return True elif isinstance(obj, dict): return any(has_tensor(value) for value in obj.values()) elif isinstance(obj, (list, tuple)): return any(has_tensor(item) for item in obj) else: return False def move_to_device(obj, cuda_device: int): """ Given a structure (possibly) containing Tensors on the CPU, move all the Tensors to the specified GPU (or do nothing, if they should be on the CPU). """ if cuda_device < 0 or not has_tensor(obj): return obj elif isinstance(obj, torch.Tensor): return obj.cuda(cuda_device) elif isinstance(obj, dict): return {key: move_to_device(value, cuda_device) for key, value in obj.items()} elif isinstance(obj, list): return [move_to_device(item, cuda_device) for item in obj] elif isinstance(obj, tuple): return tuple([move_to_device(item, cuda_device) for item in obj]) else: return obj def batch_tensor_dicts(tensor_dicts: List[Dict[str, torch.Tensor]], remove_trailing_dimension: bool = False) -> Dict[str, torch.Tensor]: """ Takes a list of tensor dictionaries, where each dictionary is assumed to have matching keys, and returns a single dictionary with all tensors with the same key batched together. Parameters ---------- tensor_dicts : ``List[Dict[str, torch.Tensor]]`` The list of tensor dictionaries to batch. remove_trailing_dimension : ``bool`` If ``True``, we will check for a trailing dimension of size 1 on the tensors that are being batched, and remove it if we find it. """ key_to_tensors: Dict[str, List[torch.Tensor]] = defaultdict(list) for tensor_dict in tensor_dicts: for key, tensor in tensor_dict.items(): key_to_tensors[key].append(tensor) batched_tensors = {} for key, tensor_list in key_to_tensors.items(): batched_tensor = torch.stack(tensor_list) if remove_trailing_dimension and all(tensor.size(-1) == 1 for tensor in tensor_list): batched_tensor = batched_tensor.squeeze(-1) batched_tensors[key] = batched_tensor return batched_tensors def get_lengths_from_binary_sequence_mask(mask: torch.Tensor): """ Compute sequence lengths for each batch element in a tensor using a binary mask. Parameters ---------- mask : torch.Tensor, required. A 2D binary mask of shape (batch_size, sequence_length) to calculate the per-batch sequence lengths from. Returns ------- A torch.LongTensor of shape (batch_size,) representing the lengths of the sequences in the batch. """ return mask.long().sum(-1) def get_mask_from_sequence_lengths(sequence_lengths: torch.Tensor, max_length: int) -> torch.Tensor: """ Given a variable of shape ``(batch_size,)`` that represents the sequence lengths of each batch element, this function returns a ``(batch_size, max_length)`` mask variable. For example, if our input was ``[2, 2, 3]``, with a ``max_length`` of 4, we'd return ``[[1, 1, 0, 0], [1, 1, 0, 0], [1, 1, 1, 0]]``. 
We require ``max_length`` here instead of just computing it from the input ``sequence_lengths`` because it lets us avoid finding the max, then copying that value from the GPU to the CPU so that we can use it to construct a new tensor. """ # (batch_size, max_length) ones = sequence_lengths.new_ones(sequence_lengths.size(0), max_length) range_tensor = ones.cumsum(dim=1) return (sequence_lengths.unsqueeze(1) >= range_tensor).long() def sort_batch_by_length(tensor: torch.Tensor, sequence_lengths: torch.Tensor): """ Sort a batch first tensor by some specified lengths. Parameters ---------- tensor : torch.FloatTensor, required. A batch first Pytorch tensor. sequence_lengths : torch.LongTensor, required. A tensor representing the lengths of some dimension of the tensor which we want to sort by. Returns ------- sorted_tensor : torch.FloatTensor The original tensor sorted along the batch dimension with respect to sequence_lengths. sorted_sequence_lengths : torch.LongTensor The original sequence_lengths sorted by decreasing size. restoration_indices : torch.LongTensor Indices into the sorted_tensor such that ``sorted_tensor.index_select(0, restoration_indices) == original_tensor`` permuation_index : torch.LongTensor The indices used to sort the tensor. This is useful if you want to sort many tensors using the same ordering. """ if not isinstance(tensor, torch.Tensor) or not isinstance(sequence_lengths, torch.Tensor): raise ConfigurationError("Both the tensor and sequence lengths must be torch.Tensors.") sorted_sequence_lengths, permutation_index = sequence_lengths.sort(0, descending=True) sorted_tensor = tensor.index_select(0, permutation_index) index_range = sequence_lengths.new_tensor(torch.arange(0, len(sequence_lengths))) # This is the equivalent of zipping with index, sorting by the original # sequence lengths and returning the now sorted indices. _, reverse_mapping = permutation_index.sort(0, descending=False) restoration_indices = index_range.index_select(0, reverse_mapping) return sorted_tensor, sorted_sequence_lengths, restoration_indices, permutation_index def get_final_encoder_states(encoder_outputs: torch.Tensor, mask: torch.Tensor, bidirectional: bool = False) -> torch.Tensor: """ Given the output from a ``Seq2SeqEncoder``, with shape ``(batch_size, sequence_length, encoding_dim)``, this method returns the final hidden state for each element of the batch, giving a tensor of shape ``(batch_size, encoding_dim)``. This is not as simple as ``encoder_outputs[:, -1]``, because the sequences could have different lengths. We use the mask (which has shape ``(batch_size, sequence_length)``) to find the final state for each batch instance. Additionally, if ``bidirectional`` is ``True``, we will split the final dimension of the ``encoder_outputs`` into two and assume that the first half is for the forward direction of the encoder and the second half is for the backward direction. We will concatenate the last state for each encoder dimension, giving ``encoder_outputs[:, -1, :encoding_dim/2]`` concated with ``encoder_outputs[:, 0, encoding_dim/2:]``. """ # These are the indices of the last words in the sequences (i.e. length sans padding - 1). We # are assuming sequences are right padded. 
# Shape: (batch_size,) last_word_indices = mask.sum(1).long() - 1 batch_size, _, encoder_output_dim = encoder_outputs.size() expanded_indices = last_word_indices.view(-1, 1, 1).expand(batch_size, 1, encoder_output_dim) # Shape: (batch_size, 1, encoder_output_dim) final_encoder_output = encoder_outputs.gather(1, expanded_indices) final_encoder_output = final_encoder_output.squeeze(1) # (batch_size, encoder_output_dim) if bidirectional: final_forward_output = final_encoder_output[:, :(encoder_output_dim // 2)] final_backward_output = encoder_outputs[:, 0, (encoder_output_dim // 2):] final_encoder_output = torch.cat([final_forward_output, final_backward_output], dim=-1) return final_encoder_output def get_dropout_mask(dropout_probability: float, tensor_for_masking: torch.Tensor): """ Computes and returns an element-wise dropout mask for a given tensor, where each element in the mask is dropped out with probability dropout_probability. Note that the mask is NOT applied to the tensor - the tensor is passed to retain the correct CUDA tensor type for the mask. Parameters ---------- dropout_probability : float, required. Probability of dropping a dimension of the input. tensor_for_masking : torch.Tensor, required. Returns ------- A torch.FloatTensor consisting of the binary mask scaled by 1/ (1 - dropout_probability). This scaling ensures expected values and variances of the output of applying this mask and the original tensor are the same. """ binary_mask = tensor_for_masking.new_tensor(torch.rand(tensor_for_masking.size()) > dropout_probability) # Scale mask by 1/keep_prob to preserve output statistics. dropout_mask = binary_mask.float().div(1.0 - dropout_probability) return dropout_mask def masked_softmax(vector: torch.Tensor, mask: torch.Tensor, dim: int = -1) -> torch.Tensor: """ ``torch.nn.functional.softmax(vector)`` does not work if some elements of ``vector`` should be masked. This performs a softmax on just the non-masked portions of ``vector``. Passing ``None`` in for the mask is also acceptable; you'll just get a regular softmax. ``vector`` can have an arbitrary number of dimensions; the only requirement is that ``mask`` is broadcastable to ``vector's`` shape. If ``mask`` has fewer dimensions than ``vector``, we will unsqueeze on dimension 1 until they match. If you need a different unsqueezing of your mask, do it yourself before passing the mask into this function. In the case that the input vector is completely masked, this function returns an array of ``0.0``. This behavior may cause ``NaN`` if this is used as the last layer of a model that uses categorical cross-entropy loss. """ if mask is None: result = torch.nn.functional.softmax(vector, dim=dim) else: mask = mask.float() while mask.dim() < vector.dim(): mask = mask.unsqueeze(1) # To limit numerical errors from large vector elements outside the mask, we zero these out. result = torch.nn.functional.softmax(vector * mask, dim=dim) result = result * mask result = result / (result.sum(dim=dim, keepdim=True) + 1e-13) return result def masked_log_softmax(vector: torch.Tensor, mask: torch.Tensor, dim: int = -1) -> torch.Tensor: """ ``torch.nn.functional.log_softmax(vector)`` does not work if some elements of ``vector`` should be masked. This performs a log_softmax on just the non-masked portions of ``vector``. Passing ``None`` in for the mask is also acceptable; you'll just get a regular log_softmax. ``vector`` can have an arbitrary number of dimensions; the only requirement is that ``mask`` is broadcastable to ``vector's`` shape. 
If ``mask`` has fewer dimensions than ``vector``, we will unsqueeze on dimension 1 until they match. If you need a different unsqueezing of your mask, do it yourself before passing the mask into this function. In the case that the input vector is completely masked, the return value of this function is arbitrary, but not ``nan``. You should be masking the result of whatever computation comes out of this in that case, anyway, so the specific values returned shouldn't matter. Also, the way that we deal with this case relies on having single-precision floats; mixing half-precision floats with fully-masked vectors will likely give you ``nans``. If your logits are all extremely negative (i.e., the max value in your logit vector is -50 or lower), the way we handle masking here could mess you up. But if you've got logit values that extreme, you've got bigger problems than this. """ if mask is not None: mask = mask.float() while mask.dim() < vector.dim(): mask = mask.unsqueeze(1) # vector + mask.log() is an easy way to zero out masked elements in logspace, but it # results in nans when the whole vector is masked. We need a very small value instead of a # zero in the mask for these cases. log(1 + 1e-45) is still basically 0, so we can safely # just add 1e-45 before calling mask.log(). We use 1e-45 because 1e-46 is so small it # becomes 0 - this is just the smallest value we can actually use. vector = vector + (mask + 1e-45).log() return torch.nn.functional.log_softmax(vector, dim=dim) def masked_max(vector: torch.Tensor, mask: torch.Tensor, dim: int, keepdim: bool = False, min_val: float = -1e7) -> torch.Tensor: """ To calculate max along certain dimensions on masked values Parameters ---------- vector : ``torch.Tensor`` The vector to calculate max, assume unmasked parts are already zeros mask : ``torch.Tensor`` The mask of the vector. It must be broadcastable with vector. dim : ``int`` The dimension to calculate max keepdim : ``bool`` Whether to keep dimension min_val : ``float`` The minimal value for paddings Returns ------- A ``torch.Tensor`` of including the maximum values. """ one_minus_mask = (1.0 - mask).byte() replaced_vector = vector.masked_fill(one_minus_mask, min_val) max_value, _ = replaced_vector.max(dim=dim, keepdim=keepdim) return max_value def masked_mean(vector: torch.Tensor, mask: torch.Tensor, dim: int, keepdim: bool = False, eps: float = 1e-8) -> torch.Tensor: """ To calculate mean along certain dimensions on masked values Parameters ---------- vector : ``torch.Tensor`` The vector to calculate mean. mask : ``torch.Tensor`` The mask of the vector. It must be broadcastable with vector. dim : ``int`` The dimension to calculate mean keepdim : ``bool`` Whether to keep dimension eps : ``float`` A small value to avoid zero division problem. Returns ------- A ``torch.Tensor`` of including the mean values. """ one_minus_mask = (1.0 - mask).byte() replaced_vector = vector.masked_fill(one_minus_mask, 0.0) value_sum = torch.sum(replaced_vector, dim=dim, keepdim=keepdim) value_count = torch.sum(mask.float(), dim=dim, keepdim=keepdim) return value_sum / value_count.clamp(min=eps) def viterbi_decode(tag_sequence: torch.Tensor, transition_matrix: torch.Tensor, tag_observations: Optional[List[int]] = None): """ Perform Viterbi decoding in log space over a sequence given a transition matrix specifying pairwise (transition) potentials between tags and a matrix of shape (sequence_length, num_tags) specifying unary potentials for possible tags per timestep. 
Parameters ---------- tag_sequence : torch.Tensor, required. A tensor of shape (sequence_length, num_tags) representing scores for a set of tags over a given sequence. transition_matrix : torch.Tensor, required. A tensor of shape (num_tags, num_tags) representing the binary potentials for transitioning between a given pair of tags. tag_observations : Optional[List[int]], optional, (default = None) A list of length ``sequence_length`` containing the class ids of observed elements in the sequence, with unobserved elements being set to -1. Note that it is possible to provide evidence which results in degenerate labellings if the sequences of tags you provide as evidence cannot transition between each other, or those transitions are extremely unlikely. In this situation we log a warning, but the responsibility for providing self-consistent evidence ultimately lies with the user. Returns ------- viterbi_path : List[int] The tag indices of the maximum likelihood tag sequence. viterbi_score : torch.Tensor The score of the viterbi path. """ sequence_length, num_tags = list(tag_sequence.size()) if tag_observations: if len(tag_observations) != sequence_length: raise ConfigurationError("Observations were provided, but they were not the same length " "as the sequence. Found sequence of length: {} and evidence: {}" .format(sequence_length, tag_observations)) else: tag_observations = [-1 for _ in range(sequence_length)] path_scores = [] path_indices = [] if tag_observations[0] != -1: one_hot = torch.zeros(num_tags) one_hot[tag_observations[0]] = 100000. path_scores.append(one_hot) else: path_scores.append(tag_sequence[0, :]) # Evaluate the scores for all possible paths. for timestep in range(1, sequence_length): # Add pairwise potentials to current scores. summed_potentials = path_scores[timestep - 1].unsqueeze(-1) + transition_matrix scores, paths = torch.max(summed_potentials, 0) # If we have an observation for this timestep, use it # instead of the distribution over tags. observation = tag_observations[timestep] # Warn the user if they have passed # invalid/extremely unlikely evidence. if tag_observations[timestep - 1] != -1: if transition_matrix[tag_observations[timestep - 1], observation] < -10000: logger.warning("The pairwise potential between tags you have passed as " "observations is extremely unlikely. Double check your evidence " "or transition potentials!") if observation != -1: one_hot = torch.zeros(num_tags) one_hot[observation] = 100000. path_scores.append(one_hot) else: path_scores.append(tag_sequence[timestep, :] + scores.squeeze()) path_indices.append(paths.squeeze()) # Construct the most likely sequence backwards. viterbi_score, best_path = torch.max(path_scores[-1], 0) viterbi_path = [int(best_path.numpy())] for backward_timestep in reversed(path_indices): viterbi_path.append(int(backward_timestep[viterbi_path[-1]])) # Reverse the backward path. viterbi_path.reverse() return viterbi_path, viterbi_score def get_text_field_mask(text_field_tensors: Dict[str, torch.Tensor], num_wrapping_dims: int = 0) -> torch.LongTensor: """ Takes the dictionary of tensors produced by a ``TextField`` and returns a mask with 0 where the tokens are padding, and 1 otherwise. We also handle ``TextFields`` wrapped by an arbitrary number of ``ListFields``, where the number of wrapping ``ListFields`` is given by ``num_wrapping_dims``. If ``num_wrapping_dims == 0``, the returned mask has shape ``(batch_size, num_tokens)``. 
If ``num_wrapping_dims > 0`` then the returned mask has ``num_wrapping_dims`` extra dimensions, so the shape will be ``(batch_size, ..., num_tokens)``. There could be several entries in the tensor dictionary with different shapes (e.g., one for word ids, one for character ids). In order to get a token mask, we use the tensor in the dictionary with the lowest number of dimensions. After subtracting ``num_wrapping_dims``, if this tensor has two dimensions we assume it has shape ``(batch_size, ..., num_tokens)``, and use it for the mask. If instead it has three dimensions, we assume it has shape ``(batch_size, ..., num_tokens, num_features)``, and sum over the last dimension to produce the mask. Most frequently this will be a character id tensor, but it could also be a featurized representation of each token, etc. If the input ``text_field_tensors`` contains the "mask" key, this is returned instead of inferring the mask. TODO(joelgrus): can we change this? NOTE: Our functions for generating masks create torch.LongTensors, because using torch.ByteTensors makes it easy to run into overflow errors when doing mask manipulation, such as summing to get the lengths of sequences - see below. >>> mask = torch.ones([260]).byte() >>> mask.sum() # equals 260. >>> var_mask = torch.autograd.V(mask) >>> var_mask.sum() # equals 4, due to 8 bit precision - the sum overflows. """ if "mask" in text_field_tensors: return text_field_tensors["mask"] tensor_dims = [(tensor.dim(), tensor) for tensor in text_field_tensors.values()] tensor_dims.sort(key=lambda x: x[0]) smallest_dim = tensor_dims[0][0] - num_wrapping_dims if smallest_dim == 2: token_tensor = tensor_dims[0][1] return (token_tensor != 0).long() elif smallest_dim == 3: character_tensor = tensor_dims[0][1] return ((character_tensor > 0).long().sum(dim=-1) > 0).long() else: raise ValueError("Expected a tensor with dimension 2 or 3, found {}".format(smallest_dim)) def last_dim_softmax(tensor: torch.Tensor, mask: Optional[torch.Tensor] = None) -> torch.Tensor: """ Takes a tensor with 3 or more dimensions and does a masked softmax over the last dimension. We assume the tensor has shape ``(batch_size, ..., sequence_length)`` and that the mask (if given) has shape ``(batch_size, sequence_length)``. .. deprecated:: 0.6.1 ``last_dim_softmax`` was deprecated in favor of just using ``masked_softmax`` in version 0.6.1. It will be removed in version 0.8. """ warnings.warn("``last_dim_softmax`` was deprecated in favor of just using ``masked_softmax`` " "in version 0.6.1. It will be removed in version 0.8.", DeprecationWarning) return masked_softmax(tensor, mask, dim=-1) def last_dim_log_softmax(tensor: torch.Tensor, mask: Optional[torch.Tensor] = None) -> torch.Tensor: """ Takes a tensor with 3 or more dimensions and does a masked log softmax over the last dimension. We assume the tensor has shape ``(batch_size, ..., sequence_length)`` and that the mask (if given) has shape ``(batch_size, sequence_length)``. .. deprecated:: 0.6.1 ``last_dim_log_softmax`` was deprecated in favor of just using ``masked_log_softmax`` in version 0.6.1. It will be removed in version 0.8. """ warnings.warn("``last_dim_log_softmax`` was deprecated in favor of just using " "``masked_log_softmax`` in version 0.6.1. 
It will be removed in version 0.8.", DeprecationWarning) return masked_log_softmax(tensor, mask, dim=-1) def weighted_sum(matrix: torch.Tensor, attention: torch.Tensor) -> torch.Tensor: """ Takes a matrix of vectors and a set of weights over the rows in the matrix (which we call an "attention" vector), and returns a weighted sum of the rows in the matrix. This is the typical computation performed after an attention mechanism. Note that while we call this a "matrix" of vectors and an attention "vector", we also handle higher-order tensors. We always sum over the second-to-last dimension of the "matrix", and we assume that all dimensions in the "matrix" prior to the last dimension are matched in the "vector". Non-matched dimensions in the "vector" must be `directly after the batch dimension`. For example, say I have a "matrix" with dimensions ``(batch_size, num_queries, num_words, embedding_dim)``. The attention "vector" then must have at least those dimensions, and could have more. Both: - ``(batch_size, num_queries, num_words)`` (distribution over words for each query) - ``(batch_size, num_documents, num_queries, num_words)`` (distribution over words in a query for each document) are valid input "vectors", producing tensors of shape: ``(batch_size, num_queries, embedding_dim)`` and ``(batch_size, num_documents, num_queries, embedding_dim)`` respectively. """ # We'll special-case a few settings here, where there are efficient (but poorly-named) # operations in pytorch that already do the computation we need. if attention.dim() == 2 and matrix.dim() == 3: return attention.unsqueeze(1).bmm(matrix).squeeze(1) if attention.dim() == 3 and matrix.dim() == 3: return attention.bmm(matrix) if matrix.dim() - 1 < attention.dim(): expanded_size = list(matrix.size()) for i in range(attention.dim() - matrix.dim() + 1): matrix = matrix.unsqueeze(1) expanded_size.insert(i + 1, attention.size(i + 1)) matrix = matrix.expand(*expanded_size) intermediate = attention.unsqueeze(-1).expand_as(matrix) * matrix return intermediate.sum(dim=-2) def sequence_cross_entropy_with_logits(logits: torch.FloatTensor, targets: torch.LongTensor, weights: torch.FloatTensor, batch_average: bool = None, average: str = "batch", label_smoothing: float = None) -> torch.FloatTensor: """ Computes the cross entropy loss of a sequence, weighted with respect to some user provided weights. Note that the weighting here is not the same as in the :func:`torch.nn.CrossEntropyLoss()` criterion, which is weighting classes; here we are weighting the loss contribution from particular elements in the sequence. This allows loss computations for models which use padding. Parameters ---------- logits : ``torch.FloatTensor``, required. A ``torch.FloatTensor`` of size (batch_size, sequence_length, num_classes) which contains the unnormalized probability for each class. targets : ``torch.LongTensor``, required. A ``torch.LongTensor`` of size (batch, sequence_length) which contains the index of the true class for each corresponding step. weights : ``torch.FloatTensor``, required. A ``torch.FloatTensor`` of size (batch, sequence_length) batch_average : bool, optional, (default = None). A bool indicating whether the loss should be averaged across the batch, or returned as a vector of losses per batch element. .. deprecated:: 0.6.2 ``batch_average`` was deprecated and replaced with the more general ``average`` in version 0.6.2. It will be removed in version 0.8. average: str, optional (default = "batch") If "batch", average the loss across the batches. 
If "token", average the loss across each item in the input. If ``None``, return a vector of losses per batch element. label_smoothing : ``float``, optional (default = None) Whether or not to apply label smoothing to the cross-entropy loss. For example, with a label smoothing value of 0.2, a 4 class classifcation target would look like ``[0.05, 0.05, 0.85, 0.05]`` if the 3rd class was the correct label. Returns ------- A torch.FloatTensor representing the cross entropy loss. If ``average=="batch"`` or ``average=="token"``, the returned loss is a scalar. If ``average is None``, the returned loss is a vector of shape (batch_size,). """ if batch_average is not None: # Maintain old behavior if batch_average: warnings.warn("batch_average=True was deprecated and replaced " "with average='batch' in version 0.6.2. It will be " "removed in version 0.8.", DeprecationWarning) average = "batch" else: warnings.warn("batch_average=False was deprecated and replaced " "with average=None in version 0.6.2. It will be " "removed in version 0.8.", DeprecationWarning) average = None if average not in {None, "token", "batch"}: raise ValueError("Got average f{average}, expected one of " "None, 'token', or 'batch'") # shape : (batch * sequence_length, num_classes) logits_flat = logits.view(-1, logits.size(-1)) # shape : (batch * sequence_length, num_classes) log_probs_flat = torch.nn.functional.log_softmax(logits_flat, dim=-1) # shape : (batch * max_len, 1) targets_flat = targets.view(-1, 1).long() if label_smoothing is not None and label_smoothing > 0.0: num_classes = logits.size(-1) smoothing_value = label_smoothing / num_classes # Fill all the correct indices with 1 - smoothing value. one_hot_targets = torch.zeros_like(log_probs_flat).scatter_(-1, targets_flat, 1.0 - label_smoothing) smoothed_targets = one_hot_targets + smoothing_value negative_log_likelihood_flat = - log_probs_flat * smoothed_targets negative_log_likelihood_flat = negative_log_likelihood_flat.sum(-1, keepdim=True) else: # Contribution to the negative log likelihood only comes from the exact indices # of the targets, as the target distributions are one-hot. Here we use torch.gather # to extract the indices of the num_classes dimension which contribute to the loss. # shape : (batch * sequence_length, 1) negative_log_likelihood_flat = - torch.gather(log_probs_flat, dim=1, index=targets_flat) # shape : (batch, sequence_length) negative_log_likelihood = negative_log_likelihood_flat.view(*targets.size()) # shape : (batch, sequence_length) negative_log_likelihood = negative_log_likelihood * weights.float() if average == "batch": # shape : (batch_size,) per_batch_loss = negative_log_likelihood.sum(1) / (weights.sum(1).float() + 1e-13) num_non_empty_sequences = ((weights.sum(1) > 0).float().sum() + 1e-13) return per_batch_loss.sum() / num_non_empty_sequences elif average == "token": return negative_log_likelihood.sum() / (weights.sum().float() + 1e-13) else: # shape : (batch_size,) per_batch_loss = negative_log_likelihood.sum(1) / (weights.sum(1).float() + 1e-13) return per_batch_loss def replace_masked_values(tensor: torch.Tensor, mask: torch.Tensor, replace_with: float) -> torch.Tensor: """ Replaces all masked values in ``tensor`` with ``replace_with``. ``mask`` must be broadcastable to the same shape as ``tensor``. We require that ``tensor.dim() == mask.dim()``, as otherwise we won't know which dimensions of the mask to unsqueeze. 
This just does ``tensor.masked_fill()``, except the pytorch method fills in things with a mask value of 1, where we want the opposite. You can do this in your own code with ``tensor.masked_fill((1 - mask).byte(), replace_with)``. """ if tensor.dim() != mask.dim(): raise ConfigurationError("tensor.dim() (%d) != mask.dim() (%d)" % (tensor.dim(), mask.dim())) return tensor.masked_fill((1 - mask).byte(), replace_with) def tensors_equal(tensor1: torch.Tensor, tensor2: torch.Tensor, tolerance: float = 1e-12) -> bool: """ A check for tensor equality (by value). We make sure that the tensors have the same shape, then check all of the entries in the tensor for equality. We additionally allow the input tensors to be lists or dictionaries, where we then do the above check on every position in the list / item in the dictionary. If we find objects that aren't tensors as we're doing that, we just defer to their equality check. This is kind of a catch-all method that's designed to make implementing ``__eq__`` methods easier, in a way that's really only intended to be useful for tests. """ # pylint: disable=too-many-return-statements if isinstance(tensor1, (list, tuple)): if not isinstance(tensor2, (list, tuple)) or len(tensor1) != len(tensor2): return False return all([tensors_equal(t1, t2, tolerance) for t1, t2 in zip(tensor1, tensor2)]) elif isinstance(tensor1, dict): if not isinstance(tensor2, dict): return False if tensor1.keys() != tensor2.keys(): return False return all([tensors_equal(tensor1[key], tensor2[key], tolerance) for key in tensor1]) elif isinstance(tensor1, torch.Tensor): if not isinstance(tensor2, torch.Tensor): return False if tensor1.size() != tensor2.size(): return False return ((tensor1 - tensor2).abs().float() < tolerance).all() else: try: return tensor1 == tensor2 except RuntimeError: print(type(tensor1), type(tensor2)) raise def device_mapping(cuda_device: int): """ In order to `torch.load()` a GPU-trained model onto a CPU (or specific GPU), you have to supply a `map_location` function. Call this with the desired `cuda_device` to get the function that `torch.load()` needs. """ def inner_device_mapping(storage: torch.Storage, location) -> torch.Storage: # pylint: disable=unused-argument if cuda_device >= 0: return storage.cuda(cuda_device) else: return storage return inner_device_mapping def combine_tensors(combination: str, tensors: List[torch.Tensor]) -> torch.Tensor: """ Combines a list of tensors using element-wise operations and concatenation, specified by a ``combination`` string. The string refers to (1-indexed) positions in the input tensor list, and looks like ``"1,2,1+2,3-1"``. We allow the following kinds of combinations: ``x``, ``x*y``, ``x+y``, ``x-y``, and ``x/y``, where ``x`` and ``y`` are positive integers less than or equal to ``len(tensors)``. Each of the binary operations is performed elementwise. You can give as many combinations as you want in the ``combination`` string. For example, for the input string ``"1,2,1*2"``, the result would be ``[1;2;1*2]``, as you would expect, where ``[;]`` is concatenation along the last dimension. If you have a fixed, known way to combine tensors that you use in a model, you should probably just use something like ``torch.cat([x_tensor, y_tensor, x_tensor * y_tensor])``. This function adds some complexity that is only necessary if you want the specific combination used to be `configurable`. If you want to do any element-wise operations, the tensors involved in each element-wise operation must have the same shape. 
This function also accepts ``x`` and ``y`` in place of ``1`` and ``2`` in the combination string. """ if len(tensors) > 9: raise ConfigurationError("Double-digit tensor lists not currently supported") combination = combination.replace('x', '1').replace('y', '2') to_concatenate = [_get_combination(piece, tensors) for piece in combination.split(',')] return torch.cat(to_concatenate, dim=-1) def _rindex(sequence: Sequence[T], obj: T) -> int: """ Return zero-based index in the sequence of the last item whose value is equal to obj. Raises a ValueError if there is no such item. Parameters ---------- sequence : ``Sequence[T]`` obj : ``T`` Returns ------- zero-based index associated to the position of the last item equal to obj """ for i in range(len(sequence) - 1, -1, -1): if sequence[i] == obj: return i raise ValueError(f"Unable to find {obj} in sequence {sequence}.") def _get_combination(combination: str, tensors: List[torch.Tensor]) -> torch.Tensor: if combination.isdigit(): index = int(combination) - 1 return tensors[index] else: if len(combination) != 3: raise ConfigurationError("Invalid combination: " + combination) first_tensor = _get_combination(combination[0], tensors) second_tensor = _get_combination(combination[2], tensors) operation = combination[1] if operation == '*': return first_tensor * second_tensor elif operation == '/': return first_tensor / second_tensor elif operation == '+': return first_tensor + second_tensor elif operation == '-': return first_tensor - second_tensor else: raise ConfigurationError("Invalid operation: " + operation) def combine_tensors_and_multiply(combination: str, tensors: List[torch.Tensor], weights: torch.nn.Parameter) -> torch.Tensor: """ Like :func:`combine_tensors`, but does a weighted (linear) multiplication while combining. This is a separate function from ``combine_tensors`` because we try to avoid instantiating large intermediate tensors during the combination, which is possible because we know that we're going to be multiplying by a weight vector in the end. Parameters ---------- combination : ``str`` Same as in :func:`combine_tensors` tensors : ``List[torch.Tensor]`` A list of tensors to combine, where the integers in the ``combination`` are (1-indexed) positions in this list of tensors. These tensors are all expected to have either three or four dimensions, with the final dimension being an embedding. If there are four dimensions, one of them must have length 1. weights : ``torch.nn.Parameter`` A vector of weights to use for the combinations. This should have shape (combined_dim,), as calculated by :func:`get_combined_dim`. 
""" if len(tensors) > 9: raise ConfigurationError("Double-digit tensor lists not currently supported") combination = combination.replace('x', '1').replace('y', '2') pieces = combination.split(',') tensor_dims = [tensor.size(-1) for tensor in tensors] combination_dims = [_get_combination_dim(piece, tensor_dims) for piece in pieces] dims_so_far = 0 to_sum = [] for piece, combination_dim in zip(pieces, combination_dims): weight = weights[dims_so_far:(dims_so_far + combination_dim)] dims_so_far += combination_dim to_sum.append(_get_combination_and_multiply(piece, tensors, weight)) result = to_sum[0] for result_piece in to_sum[1:]: result = result + result_piece return result def _get_combination_and_multiply(combination: str, tensors: List[torch.Tensor], weight: torch.nn.Parameter) -> torch.Tensor: if combination.isdigit(): index = int(combination) - 1 return torch.matmul(tensors[index], weight) else: if len(combination) != 3: raise ConfigurationError("Invalid combination: " + combination) first_tensor = _get_combination(combination[0], tensors) second_tensor = _get_combination(combination[2], tensors) operation = combination[1] if operation == '*': if first_tensor.dim() > 4 or second_tensor.dim() > 4: raise ValueError("Tensors with dim > 4 not currently supported") if first_tensor.dim() == 4: expanded_dim = _rindex(first_tensor.size(), 1) first_tensor = first_tensor.squeeze(expanded_dim) if second_tensor.dim() == 4: expanded_dim = _rindex(second_tensor.size(), 1) second_tensor = second_tensor.squeeze(expanded_dim) intermediate = first_tensor * weight return torch.matmul(intermediate, second_tensor.transpose(-1, -2)).squeeze(-1) elif operation == '/': if first_tensor.dim() > 4 or second_tensor.dim() > 4: raise ValueError("Tensors with dim > 4 not currently supported") if first_tensor.dim() == 4: expanded_dim = _rindex(first_tensor.size(), 1) first_tensor = first_tensor.squeeze(expanded_dim) if second_tensor.dim() == 4: expanded_dim = _rindex(second_tensor.size(), 1) second_tensor = second_tensor.squeeze(expanded_dim) intermediate = first_tensor * weight return torch.matmul(intermediate, second_tensor.pow(-1).transpose(-1, -2)).squeeze(-1) elif operation == '+': return torch.matmul(first_tensor, weight) + torch.matmul(second_tensor, weight) elif operation == '-': return torch.matmul(first_tensor, weight) - torch.matmul(second_tensor, weight) else: raise ConfigurationError("Invalid operation: " + operation) def get_combined_dim(combination: str, tensor_dims: List[int]) -> int: """ For use with :func:`combine_tensors`. This function computes the resultant dimension when calling ``combine_tensors(combination, tensors)``, when the tensor dimension is known. This is necessary for knowing the sizes of weight matrices when building models that use ``combine_tensors``. Parameters ---------- combination : ``str`` A comma-separated list of combination pieces, like ``"1,2,1*2"``, specified identically to ``combination`` in :func:`combine_tensors`. tensor_dims : ``List[int]`` A list of tensor dimensions, where each dimension is from the `last axis` of the tensors that will be input to :func:`combine_tensors`. 
""" if len(tensor_dims) > 9: raise ConfigurationError("Double-digit tensor lists not currently supported") combination = combination.replace('x', '1').replace('y', '2') return sum([_get_combination_dim(piece, tensor_dims) for piece in combination.split(',')]) def _get_combination_dim(combination: str, tensor_dims: List[int]) -> int: if combination.isdigit(): index = int(combination) - 1 return tensor_dims[index] else: if len(combination) != 3: raise ConfigurationError("Invalid combination: " + combination) first_tensor_dim = _get_combination_dim(combination[0], tensor_dims) second_tensor_dim = _get_combination_dim(combination[2], tensor_dims) operation = combination[1] if first_tensor_dim != second_tensor_dim: raise ConfigurationError("Tensor dims must match for operation \"{}\"".format(operation)) return first_tensor_dim def logsumexp(tensor: torch.Tensor, dim: int = -1, keepdim: bool = False) -> torch.Tensor: """ A numerically stable computation of logsumexp. This is mathematically equivalent to `tensor.exp().sum(dim, keep=keepdim).log()`. This function is typically used for summing log probabilities. Parameters ---------- tensor : torch.FloatTensor, required. A tensor of arbitrary size. dim : int, optional (default = -1) The dimension of the tensor to apply the logsumexp to. keepdim: bool, optional (default = False) Whether to retain a dimension of size one at the dimension we reduce over. """ max_score, _ = tensor.max(dim, keepdim=keepdim) if keepdim: stable_vec = tensor - max_score else: stable_vec = tensor - max_score.unsqueeze(dim) return max_score + (stable_vec.exp().sum(dim, keepdim=keepdim)).log() def get_device_of(tensor: torch.Tensor) -> int: """ Returns the device of the tensor. """ if not tensor.is_cuda: return -1 else: return tensor.get_device() def flatten_and_batch_shift_indices(indices: torch.Tensor, sequence_length: int) -> torch.Tensor: """ This is a subroutine for :func:`~batched_index_select`. The given ``indices`` of size ``(batch_size, d_1, ..., d_n)`` indexes into dimension 2 of a target tensor, which has size ``(batch_size, sequence_length, embedding_size)``. This function returns a vector that correctly indexes into the flattened target. The sequence length of the target must be provided to compute the appropriate offsets. .. code-block:: python indices = torch.ones([2,3], dtype=torch.long) # Sequence length of the target tensor. sequence_length = 10 shifted_indices = flatten_and_batch_shift_indices(indices, sequence_length) # Indices into the second element in the batch are correctly shifted # to take into account that the target tensor will be flattened before # the indices are applied. assert shifted_indices == [1, 1, 1, 11, 11, 11] Parameters ---------- indices : ``torch.LongTensor``, required. sequence_length : ``int``, required. The length of the sequence the indices index into. This must be the second dimension of the tensor. Returns ------- offset_indices : ``torch.LongTensor`` """ # Shape: (batch_size) offsets = get_range_vector(indices.size(0), get_device_of(indices)) * sequence_length for _ in range(len(indices.size()) - 1): offsets = offsets.unsqueeze(1) # Shape: (batch_size, d_1, ..., d_n) offset_indices = indices + offsets # Shape: (batch_size * d_1 * ... 
* d_n) offset_indices = offset_indices.view(-1) return offset_indices def batched_index_select(target: torch.Tensor, indices: torch.LongTensor, flattened_indices: Optional[torch.LongTensor] = None) -> torch.Tensor: """ The given ``indices`` of size ``(batch_size, d_1, ..., d_n)`` indexes into the sequence dimension (dimension 2) of the target, which has size ``(batch_size, sequence_length, embedding_size)``. This function returns selected values in the target with respect to the provided indices, which have size ``(batch_size, d_1, ..., d_n, embedding_size)``. This can use the optionally precomputed :func:`~flattened_indices` with size ``(batch_size * d_1 * ... * d_n)`` if given. An example use case of this function is looking up the start and end indices of spans in a sequence tensor. This is used in the :class:`~allennlp.models.coreference_resolution.CoreferenceResolver`. Model to select contextual word representations corresponding to the start and end indices of mentions. The key reason this can't be done with basic torch functions is that we want to be able to use look-up tensors with an arbitrary number of dimensions (for example, in the coref model, we don't know a-priori how many spans we are looking up). Parameters ---------- target : ``torch.Tensor``, required. A 3 dimensional tensor of shape (batch_size, sequence_length, embedding_size). This is the tensor to be indexed. indices : ``torch.LongTensor`` A tensor of shape (batch_size, ...), where each element is an index into the ``sequence_length`` dimension of the ``target`` tensor. flattened_indices : Optional[torch.Tensor], optional (default = None) An optional tensor representing the result of calling :func:~`flatten_and_batch_shift_indices` on ``indices``. This is helpful in the case that the indices can be flattened once and cached for many batch lookups. Returns ------- selected_targets : ``torch.Tensor`` A tensor with shape [indices.size(), target.size(-1)] representing the embedded indices extracted from the batch flattened target tensor. """ if flattened_indices is None: # Shape: (batch_size * d_1 * ... * d_n) flattened_indices = flatten_and_batch_shift_indices(indices, target.size(1)) # Shape: (batch_size * sequence_length, embedding_size) flattened_target = target.view(-1, target.size(-1)) # Shape: (batch_size * d_1 * ... * d_n, embedding_size) flattened_selected = flattened_target.index_select(0, flattened_indices) selected_shape = list(indices.size()) + [target.size(-1)] # Shape: (batch_size, d_1, ..., d_n, embedding_size) selected_targets = flattened_selected.view(*selected_shape) return selected_targets def flattened_index_select(target: torch.Tensor, indices: torch.LongTensor) -> torch.Tensor: """ The given ``indices`` of size ``(set_size, subset_size)`` specifies subsets of the ``target`` that each of the set_size rows should select. The `target` has size ``(batch_size, sequence_length, embedding_size)``, and the resulting selected tensor has size ``(batch_size, set_size, subset_size, embedding_size)``. Parameters ---------- target : ``torch.Tensor``, required. A Tensor of shape (batch_size, sequence_length, embedding_size). indices : ``torch.LongTensor``, required. A LongTensor of shape (set_size, subset_size). All indices must be < sequence_length as this tensor is an index into the sequence_length dimension of the target. Returns ------- selected : ``torch.Tensor``, required. A Tensor of shape (batch_size, set_size, subset_size, embedding_size). """ if indices.dim() != 2:
# Shape: (batch_size, set_size * subset_size, embedding_size) flattened_selected = target.index_select(1, indices.view(-1)) # Shape: (batch_size, set_size, subset_size, embedding_size) selected = flattened_selected.view(target.size(0), indices.size(0), indices.size(1), -1) return selected def get_range_vector(size: int, device: int) -> torch.Tensor: """ Returns a range vector with the desired size, starting at 0. The CUDA implementation is meant to avoid copy data from CPU to GPU. """ if device > -1: return torch.cuda.LongTensor(size, device=device).fill_(1).cumsum(0) - 1 else: return torch.arange(0, size, dtype=torch.long) def bucket_values(distances: torch.Tensor, num_identity_buckets: int = 4, num_total_buckets: int = 10) -> torch.Tensor: """ Places the given values (designed for distances) into ``num_total_buckets``semi-logscale buckets, with ``num_identity_buckets`` of these capturing single values. The default settings will bucket values into the following buckets: [0, 1, 2, 3, 4, 5-7, 8-15, 16-31, 32-63, 64+]. Parameters ---------- distances : ``torch.Tensor``, required. A Tensor of any size, to be bucketed. num_identity_buckets: int, optional (default = 4). The number of identity buckets (those only holding a single value). num_total_buckets : int, (default = 10) The total number of buckets to bucket values into. Returns ------- A tensor of the same shape as the input, containing the indices of the buckets the values were placed in. """ # Chunk the values into semi-logscale buckets using .floor(). # This is a semi-logscale bucketing because we divide by log(2) after taking the log. # We do this to make the buckets more granular in the initial range, where we expect # most values to fall. We then add (num_identity_buckets - 1) because we want these indices # to start _after_ the fixed number of buckets which we specified would only hold single values. logspace_index = (distances.float().log() / math.log(2)).floor().long() + (num_identity_buckets - 1) # create a mask for values which will go into single number buckets (i.e not a range). use_identity_mask = (distances <= num_identity_buckets).long() use_buckets_mask = 1 + (-1 * use_identity_mask) # Use the original values if they are less than num_identity_buckets, otherwise # use the logspace indices. combined_index = use_identity_mask * distances + use_buckets_mask * logspace_index # Clamp to put anything > num_total_buckets into the final bucket. return combined_index.clamp(0, num_total_buckets - 1) def add_sentence_boundary_token_ids(tensor: torch.Tensor, mask: torch.Tensor, sentence_begin_token: Any, sentence_end_token: Any) -> Tuple[torch.Tensor, torch.Tensor]: """ Add begin/end of sentence tokens to the batch of sentences. Given a batch of sentences with size ``(batch_size, timesteps)`` or ``(batch_size, timesteps, dim)`` this returns a tensor of shape ``(batch_size, timesteps + 2)`` or ``(batch_size, timesteps + 2, dim)`` respectively. Returns both the new tensor and updated mask. Parameters ---------- tensor : ``torch.Tensor`` A tensor of shape ``(batch_size, timesteps)`` or ``(batch_size, timesteps, dim)`` mask : ``torch.Tensor`` A tensor of shape ``(batch_size, timesteps)`` sentence_begin_token: Any (anything that can be broadcast in torch for assignment) For 2D input, a scalar with the <S> id. For 3D input, a tensor with length dim. sentence_end_token: Any (anything that can be broadcast in torch for assignment) For 2D input, a scalar with the </S> id. For 3D input, a tensor with length dim. 
Returns ------- tensor_with_boundary_tokens : ``torch.Tensor`` The tensor with the appended and prepended boundary tokens. If the input was 2D, it has shape (batch_size, timesteps + 2) and if the input was 3D, it has shape (batch_size, timesteps + 2, dim). new_mask : ``torch.Tensor`` The new mask for the tensor, taking into account the appended tokens marking the beginning and end of the sentence. """ # TODO: matthewp, profile this transfer sequence_lengths = mask.sum(dim=1).detach().cpu().numpy() tensor_shape = list(tensor.data.shape) new_shape = list(tensor_shape) new_shape[1] = tensor_shape[1] + 2 tensor_with_boundary_tokens = tensor.new_zeros(*new_shape) if len(tensor_shape) == 2: tensor_with_boundary_tokens[:, 1:-1] = tensor tensor_with_boundary_tokens[:, 0] = sentence_begin_token for i, j in enumerate(sequence_lengths): tensor_with_boundary_tokens[i, j + 1] = sentence_end_token new_mask = (tensor_with_boundary_tokens != 0).long() elif len(tensor_shape) == 3: tensor_with_boundary_tokens[:, 1:-1, :] = tensor for i, j in enumerate(sequence_lengths): tensor_with_boundary_tokens[i, 0, :] = sentence_begin_token tensor_with_boundary_tokens[i, j + 1, :] = sentence_end_token new_mask = ((tensor_with_boundary_tokens > 0).long().sum(dim=-1) > 0).long() else: raise ValueError("add_sentence_boundary_token_ids only accepts 2D and 3D input") return tensor_with_boundary_tokens, new_mask def remove_sentence_boundaries(tensor: torch.Tensor, mask: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: """ Remove begin/end of sentence embeddings from the batch of sentences. Given a batch of sentences with size ``(batch_size, timesteps, dim)`` this returns a tensor of shape ``(batch_size, timesteps - 2, dim)`` after removing the beginning and end sentence markers. The sentences are assumed to be padded on the right, with the beginning of each sentence assumed to occur at index 0 (i.e., ``mask[:, 0]`` is assumed to be 1). Returns both the new tensor and updated mask. This function is the inverse of ``add_sentence_boundary_token_ids``. Parameters ---------- tensor : ``torch.Tensor`` A tensor of shape ``(batch_size, timesteps, dim)`` mask : ``torch.Tensor`` A tensor of shape ``(batch_size, timesteps)`` Returns ------- tensor_without_boundary_tokens : ``torch.Tensor`` The tensor after removing the boundary tokens of shape ``(batch_size, timesteps - 2, dim)`` new_mask : ``torch.Tensor`` The new mask for the tensor of shape ``(batch_size, timesteps - 2)``. """ # TODO: matthewp, profile this transfer sequence_lengths = mask.sum(dim=1).detach().cpu().numpy() tensor_shape = list(tensor.data.shape) new_shape = list(tensor_shape) new_shape[1] = tensor_shape[1] - 2 tensor_without_boundary_tokens = tensor.new_zeros(*new_shape) new_mask = tensor.new_zeros((new_shape[0], new_shape[1]), dtype=torch.long) for i, j in enumerate(sequence_lengths): if j > 2: tensor_without_boundary_tokens[i, :(j - 2), :] = tensor[i, 1:(j - 1), :] new_mask[i, :(j - 2)] = 1 return tensor_without_boundary_tokens, new_mask def add_positional_features(tensor: torch.Tensor, min_timescale: float = 1.0, max_timescale: float = 1.0e4): # pylint: disable=line-too-long """ Implements the frequency-based positional encoding described in `Attention is all you Need <https://www.semanticscholar.org/paper/Attention-Is-All-You-Need-Vaswani-Shazeer/0737da0767d77606169cbf4187b83e1ab62f6077>`_ . Adds sinusoids of different frequencies to a ``Tensor``. A sinusoid of a different frequency and phase is added to each dimension of the input ``Tensor``. 
This allows the attention heads to use absolute and relative positions. The number of timescales is equal to hidden_dim / 2 within the range (min_timescale, max_timescale). For each timescale, the two sinusoidal signals sin(timestep / timescale) and cos(timestep / timescale) are generated and concatenated along the hidden_dim dimension. Parameters ---------- tensor : ``torch.Tensor`` a Tensor with shape (batch_size, timesteps, hidden_dim). min_timescale : ``float``, optional (default = 1.0) The smallest timescale to use. max_timescale : ``float``, optional (default = 1.0e4) The largest timescale to use. Returns ------- The input tensor augmented with the sinusoidal frequencies. """ _, timesteps, hidden_dim = tensor.size() timestep_range = get_range_vector(timesteps, get_device_of(tensor)).data.float() # We're generating both cos and sin frequencies, # so half for each. num_timescales = hidden_dim // 2 timescale_range = get_range_vector(num_timescales, get_device_of(tensor)).data.float() log_timescale_increments = math.log(float(max_timescale) / float(min_timescale)) / float(num_timescales - 1) inverse_timescales = min_timescale * torch.exp(timescale_range * -log_timescale_increments) # Broadcasted multiplication - shape (timesteps, num_timescales) scaled_time = timestep_range.unsqueeze(1) * inverse_timescales.unsqueeze(0) # shape (timesteps, 2 * num_timescales) sinusoids = torch.cat([torch.sin(scaled_time), torch.cos(scaled_time)], 1) if hidden_dim % 2 != 0: # if the number of dimensions is odd, the cos and sin # timescales had size (hidden_dim - 1) / 2, so we need # to add a row of zeros to make up the difference. sinusoids = torch.cat([sinusoids, sinusoids.new_zeros(timesteps, 1)], 1) return tensor + sinusoids.unsqueeze(0)
raise ConfigurationError("Indices passed to flattened_index_select had shape {} but " "only 2 dimensional inputs are supported.".format(indices.size()))
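A minimal usage sketch for the masking and decoding utilities defined above. This is illustrative only: it assumes masked_softmax, masked_log_softmax and viterbi_decode are importable from the surrounding module, and the tensor shapes are made up for the example.

import torch

# Assumption: masked_softmax, masked_log_softmax and viterbi_decode from the module above are in scope.
logits = torch.randn(2, 4)                                  # (batch_size, num_tags)
mask = torch.tensor([[1., 1., 0., 0.],                      # 1 = real entry, 0 = padding
                     [1., 1., 1., 1.]])

probs = masked_softmax(logits, mask)            # padded positions receive ~0 probability
log_probs = masked_log_softmax(logits, mask)    # log-space variant, safe for fully/partially masked rows

tag_scores = torch.randn(5, 3)                  # (sequence_length, num_tags) unary potentials
transitions = torch.randn(3, 3)                 # (num_tags, num_tags) pairwise potentials
best_path, best_score = viterbi_decode(tag_scores, transitions)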
lib.rs
// This file is part of Substrate. // Copyright (C) 2020 Parity Technologies (UK) Ltd. // SPDX-License-Identifier: Apache-2.0 // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! Test utils for the transaction pool together with the test runtime. //! //! See [`TestApi`] for more information. use codec::Encode; use parking_lot::RwLock; use sp_runtime::{ generic::{self, BlockId}, traits::{BlakeTwo256, Hash as HashT, Block as _, Header as _}, transaction_validity::{ TransactionValidity, ValidTransaction, TransactionValidityError, InvalidTransaction, TransactionSource, }, }; use std::collections::{HashSet, HashMap, BTreeMap}; use substrate_test_runtime_client::{ runtime::{Index, AccountId, Block, BlockNumber, Extrinsic, Hash, Header, Transfer}, AccountKeyring::{self, *}, }; use sp_blockchain::CachedHeaderMetadata; use futures::future::ready; /// Error type used by [`TestApi`]. #[derive(Debug, derive_more::From, derive_more::Display)] pub struct Error(sp_transaction_pool::error::Error); impl sp_transaction_pool::error::IntoPoolError for Error { fn into_pool_error(self) -> Result<sp_transaction_pool::error::Error, Self> { Ok(self.0) } } impl std::error::Error for Error { fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { Some(&self.0) } } pub enum IsBestBlock { Yes, No, } impl IsBestBlock { pub fn is_best(&self) -> bool { matches!(self, Self::Yes) } } impl From<bool> for IsBestBlock { fn from(is_best: bool) -> Self { if is_best { Self::Yes } else { Self::No } } } #[derive(Default)] pub struct ChainState { pub block_by_number: BTreeMap<BlockNumber, Vec<(Block, IsBestBlock)>>, pub block_by_hash: HashMap<Hash, Block>, pub nonces: HashMap<AccountId, u64>, pub invalid_hashes: HashSet<Hash>, } /// Test Api for transaction pool. pub struct TestApi { valid_modifier: RwLock<Box<dyn Fn(&mut ValidTransaction) + Send + Sync>>, chain: RwLock<ChainState>, validation_requests: RwLock<Vec<Extrinsic>>, } impl TestApi { /// Test Api with Alice nonce set initially. pub fn with_alice_nonce(nonce: u64) -> Self { let api = Self::empty(); api.chain.write().nonces.insert(Alice.into(), nonce); api } /// Default Test Api pub fn empty() -> Self { let api = TestApi { valid_modifier: RwLock::new(Box::new(|_| {})), chain: Default::default(), validation_requests: RwLock::new(Default::default()), }; // Push genesis block api.push_block(0, Vec::new(), true); api } /// Set hook on modify valid result of transaction. pub fn set_valid_modifier(&self, modifier: Box<dyn Fn(&mut ValidTransaction) + Send + Sync>) { *self.valid_modifier.write() = modifier; } /// Push block under given number. pub fn push_block( &self, block_number: BlockNumber, xts: Vec<Extrinsic>, is_best_block: bool, ) -> Header { let parent_hash = { let chain = self.chain.read(); block_number .checked_sub(1) .and_then(|num| { chain.block_by_number .get(&num) .map(|blocks| { blocks[0].0.header.hash() }) }).unwrap_or_default() }; self.push_block_with_parent(parent_hash, xts, is_best_block) } /// Push a block using the given `parent`. 
/// /// Panics if `parent` does not exists. pub fn push_block_with_parent( &self, parent: Hash, xts: Vec<Extrinsic>, is_best_block: bool, ) -> Header { // `Hash::default()` is the genesis parent hash let block_number = if parent == Hash::default() { 0 } else { *self.chain.read() .block_by_hash .get(&parent) .expect("`parent` exists") .header() .number() + 1 }; let header = Header { number: block_number, digest: Default::default(), extrinsics_root: Hash::random(), parent_hash: parent, state_root: Default::default(), }; self.add_block(Block::new(header.clone(), xts), is_best_block); header } /// Add a block to the internal state. pub fn add_block(&self, block: Block, is_best_block: bool) { let hash = block.header.hash(); let block_number = block.header.number().clone(); let mut chain = self.chain.write(); chain.block_by_hash.insert(hash, block.clone()); chain.block_by_number.entry(block_number).or_default().push((block, is_best_block.into())); } fn hash_and_length_inner(ex: &Extrinsic) -> (Hash, usize) { let encoded = ex.encode(); (BlakeTwo256::hash(&encoded), encoded.len()) } /// Mark some transaction is invalid. /// /// Next time transaction pool will try to validate this /// extrinsic, api will return invalid result. pub fn add_invalid(&self, xts: &Extrinsic) { self.chain.write().invalid_hashes.insert( Self::hash_and_length_inner(xts).0 ); } /// Query validation requests received. pub fn validation_requests(&self) -> Vec<Extrinsic> { self.validation_requests.read().clone() } /// get a reference to the chain state pub fn chain(&self) -> &RwLock<ChainState> { &self.chain } /// Increment nonce in the inner state. pub fn increment_nonce(&self, account: AccountId) { let mut chain = self.chain.write(); chain.nonces.entry(account).and_modify(|n| *n += 1).or_insert(1); } /// Calculate a tree route between the two given blocks. pub fn tree_route( &self, from: Hash, to: Hash, ) -> Result<sp_blockchain::TreeRoute<Block>, Error> { sp_blockchain::tree_route(self, from, to) } } impl sc_transaction_graph::ChainApi for TestApi { type Block = Block; type Error = Error; type ValidationFuture = futures::future::Ready<Result<TransactionValidity, Error>>; type BodyFuture = futures::future::Ready<Result<Option<Vec<Extrinsic>>, Error>>; fn validate_transaction( &self, at: &BlockId<Self::Block>, _source: TransactionSource, uxt: sc_transaction_graph::ExtrinsicFor<Self>, ) -> Self::ValidationFuture { self.validation_requests.write().push(uxt.clone()); match self.block_id_to_number(at) { Ok(Some(number)) => { let found_best = self.chain .read() .block_by_number .get(&number) .map(|blocks| blocks.iter().any(|b| b.1.is_best())) .unwrap_or(false); // If there is no best block, we don't know based on which block we should validate // the transaction. (This is not required for this test function, but in real // environment it would fail because of this). 
if !found_best { return ready(Ok( Err(TransactionValidityError::Invalid(InvalidTransaction::Custom(1)).into()) )) } }, Ok(None) => return ready(Ok( Err(TransactionValidityError::Invalid(InvalidTransaction::Custom(2)).into()) )), Err(e) => return ready(Err(e)), } let (requires, provides) = if let Some(transfer) = uxt.try_transfer() { let chain_nonce = self.chain.read().nonces.get(&transfer.from).cloned().unwrap_or(0); let requires = if chain_nonce == transfer.nonce { vec![] } else { vec![vec![chain_nonce as u8]] }; let provides = vec![vec![transfer.nonce as u8]]; (requires, provides) } else { (Vec::new(), vec![uxt.encode()]) }; if self.chain.read().invalid_hashes.contains(&self.hash_and_length(&uxt).0) { return ready(Ok( Err(TransactionValidityError::Invalid(InvalidTransaction::Custom(0)).into()) )) } let mut validity = ValidTransaction { priority: 1, requires, provides, longevity: 64, propagate: true, }; (self.valid_modifier.read())(&mut validity); ready(Ok(Ok(validity))) } fn block_id_to_number( &self, at: &BlockId<Self::Block>, ) -> Result<Option<sc_transaction_graph::NumberFor<Self>>, Error> { Ok(match at { generic::BlockId::Hash(x) => self.chain .read() .block_by_hash .get(x) .map(|b| *b.header.number()), generic::BlockId::Number(num) => Some(*num), }) } fn block_id_to_hash( &self, at: &BlockId<Self::Block>, ) -> Result<Option<sc_transaction_graph::BlockHash<Self>>, Error> { Ok(match at { generic::BlockId::Hash(x) => Some(x.clone()), generic::BlockId::Number(num) => self.chain .read() .block_by_number .get(num) .and_then(|blocks| blocks.iter().find(|b| b.1.is_best()).map(|b| b.0.header().hash())), }) } fn hash_and_length( &self, ex: &sc_transaction_graph::ExtrinsicFor<Self>, ) -> (Hash, usize) { Self::hash_and_length_inner(ex) } fn block_body(&self, id: &BlockId<Self::Block>) -> Self::BodyFuture { futures::future::ready(Ok(match id { BlockId::Number(num) => self.chain .read() .block_by_number .get(num) .map(|b| b[0].0.extrinsics().to_vec()), BlockId::Hash(hash) => self.chain .read() .block_by_hash .get(hash) .map(|b| b.extrinsics().to_vec()), })) } } impl sp_blockchain::HeaderMetadata<Block> for TestApi { type Error = Error; fn header_metadata( &self, hash: Hash, ) -> Result<CachedHeaderMetadata<Block>, Self::Error> { let chain = self.chain.read(); let block = chain.block_by_hash.get(&hash).expect("Hash exists"); Ok(block.header().into()) } fn insert_header_metadata( &self, _: Hash, _: CachedHeaderMetadata<Block>, ) { unimplemented!("Not implemented for tests") } fn remove_header_metadata(&self, _: Hash) { unimplemented!("Not implemented for tests") }
} /// Generate transfer extrinsic with a given nonce. /// /// Part of the test api. pub fn uxt(who: AccountKeyring, nonce: Index) -> Extrinsic { let transfer = Transfer { from: who.into(), to: AccountId::default(), nonce, amount: 1, }; let signature = transfer.using_encoded(|e| who.sign(e)).into(); Extrinsic::Transfer { transfer, signature, exhaust_resources_when_not_first: false } }
setup.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # Note: To use the 'upload' functionality of this file, you must: # $ pip install twine import io import os import sys from shutil import rmtree from setuptools import find_packages, setup, Command # Package meta-data. NAME = 'mypackage' DESCRIPTION = 'My short description for my project.' URL = 'https://github.com/me/myproject' EMAIL = '[email protected]' AUTHOR = 'Awesome Soul' REQUIRES_PYTHON = '>=3.6.0' VERSION = None # What packages are required for this module to be executed? REQUIRED = [ # 'requests', 'maya', 'records', ] # What packages are optional? EXTRAS = { # 'fancy feature': ['django'], } # The rest you shouldn't have to touch too much :) # ------------------------------------------------ # Except, perhaps the License and Trove Classifiers! # If you do change the License, remember to change the Trove Classifier for that! here = os.path.abspath(os.path.dirname(__file__)) # Import the README and use it as the long-description. # Note: this will only work if 'README.md' is present in your MANIFEST.in file! try: with io.open(os.path.join(here, 'README.md'), encoding='utf-8') as f: long_description = '\n' + f.read() except FileNotFoundError: long_description = DESCRIPTION # Load the package's __version__.py module as a dictionary. about = {} if not VERSION: project_slug = NAME.lower().replace("-", "_").replace(" ", "_") with open(os.path.join(here, project_slug, '__version__.py')) as f: exec(f.read(), about) else: about['__version__'] = VERSION class UploadCommand(Command): """Support setup.py upload.""" description = 'Build and publish the package.' user_options = [] @staticmethod def status(s): """Prints things in bold.""" print('\033[1m{0}\033[0m'.format(s)) def initialize_options(self): pass def
(self): pass def run(self): try: self.status('Removing previous builds…') rmtree(os.path.join(here, 'dist')) except OSError: pass self.status('Building Source and Wheel (universal) distribution…') os.system('{0} setup.py sdist bdist_wheel --universal'.format(sys.executable)) self.status('Uploading the package to PyPI via Twine…') os.system('twine upload dist/*') self.status('Pushing git tags…') os.system('git tag v{0}'.format(about['__version__'])) os.system('git push --tags') sys.exit() # Where the magic happens: setup( name=NAME, version=about['__version__'], description=DESCRIPTION, long_description=long_description, long_description_content_type='text/markdown', author=AUTHOR, author_email=EMAIL, python_requires=REQUIRES_PYTHON, url=URL, packages=find_packages(exclude=["tests", "*.tests", "*.tests.*", "tests.*"]), # If your package is a single module, use this instead of 'packages': # py_modules=['mypackage'], # entry_points={ # 'console_scripts': ['mycli=mymodule:cli'], # }, install_requires=REQUIRED, extras_require=EXTRAS, include_package_data=True, license='MIT', classifiers=[ # Trove classifiers # Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy' ], # $ setup.py publish support. cmdclass={ 'upload': UploadCommand, }, )
finalize_options
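With the custom command registered under cmdclass above, publishing is normally triggered as `python setup.py upload` (the file header notes that twine must be installed first). A hedged sketch of invoking that same command programmatically, equivalent to running it from a shell:

import subprocess
import sys

# Runs the UploadCommand registered in cmdclass above; assumes twine is installed
# and that this is executed from the project root, next to setup.py.
subprocess.run([sys.executable, "setup.py", "upload"], check=True)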
emitter.asyncGenerators.functionDeclarations.es5_es5.1.normal.js
import * as swcHelpers from "@swc/helpers"; import regeneratorRuntime from "regenerator-runtime"; function
() { return _f1.apply(this, arguments); } function _f1() { _f1 = // @target: es5 // @lib: esnext // @filename: F1.ts swcHelpers.wrapAsyncGenerator(regeneratorRuntime.mark(function _callee() { return regeneratorRuntime.wrap(function _callee$(_ctx) { while(1)switch(_ctx.prev = _ctx.next){ case 0: case "end": return _ctx.stop(); } }, _callee); })); return _f1.apply(this, arguments); } function f2() { return _f2.apply(this, arguments); } function _f2() { _f2 = // @filename: F2.ts swcHelpers.wrapAsyncGenerator(regeneratorRuntime.mark(function _callee() { var x; return regeneratorRuntime.wrap(function _callee$(_ctx) { while(1)switch(_ctx.prev = _ctx.next){ case 0: _ctx.next = 2; return; case 2: x = _ctx.sent; case 3: case "end": return _ctx.stop(); } }, _callee); })); return _f2.apply(this, arguments); } function f3() { return _f3.apply(this, arguments); } function _f3() { _f3 = // @filename: F3.ts swcHelpers.wrapAsyncGenerator(regeneratorRuntime.mark(function _callee() { var x; return regeneratorRuntime.wrap(function _callee$(_ctx) { while(1)switch(_ctx.prev = _ctx.next){ case 0: _ctx.next = 2; return 1; case 2: x = _ctx.sent; case 3: case "end": return _ctx.stop(); } }, _callee); })); return _f3.apply(this, arguments); } function f4() { return _f4.apply(this, arguments); } function _f4() { _f4 = // @filename: F4.ts swcHelpers.wrapAsyncGenerator(regeneratorRuntime.mark(function _callee() { var x; return regeneratorRuntime.wrap(function _callee$(_ctx) { while(1)switch(_ctx.prev = _ctx.next){ case 0: return _ctx.delegateYield(swcHelpers.asyncGeneratorDelegate(swcHelpers.asyncIterator([ 1 ]), swcHelpers.awaitAsyncGenerator), "t0", 1); case 1: x = _ctx.t0; case 2: case "end": return _ctx.stop(); } }, _callee); })); return _f4.apply(this, arguments); } function f5() { return _f5.apply(this, arguments); } function _f5() { _f5 = // @filename: F5.ts swcHelpers.wrapAsyncGenerator(regeneratorRuntime.mark(function _callee1() { var x; return regeneratorRuntime.wrap(function _callee$(_ctx1) { while(1)switch(_ctx1.prev = _ctx1.next){ case 0: return _ctx1.delegateYield(swcHelpers.asyncGeneratorDelegate(swcHelpers.asyncIterator(swcHelpers.wrapAsyncGenerator(regeneratorRuntime.mark(function _callee() { return regeneratorRuntime.wrap(function _callee$(_ctx) { while(1)switch(_ctx.prev = _ctx.next){ case 0: _ctx.next = 2; return 1; case 2: case "end": return _ctx.stop(); } }, _callee); }))()), swcHelpers.awaitAsyncGenerator), "t0", 1); case 1: x = _ctx1.t0; case 2: case "end": return _ctx1.stop(); } }, _callee1); })); return _f5.apply(this, arguments); } function f6() { return _f6.apply(this, arguments); } function _f6() { _f6 = // @filename: F6.ts swcHelpers.wrapAsyncGenerator(regeneratorRuntime.mark(function _callee() { var x; return regeneratorRuntime.wrap(function _callee$(_ctx) { while(1)switch(_ctx.prev = _ctx.next){ case 0: _ctx.next = 2; return swcHelpers.awaitAsyncGenerator(1); case 2: x = _ctx.sent; case 3: case "end": return _ctx.stop(); } }, _callee); })); return _f6.apply(this, arguments); } function f7() { return _f7.apply(this, arguments); } function _f7() { _f7 = // @filename: F7.ts swcHelpers.wrapAsyncGenerator(regeneratorRuntime.mark(function _callee() { return regeneratorRuntime.wrap(function _callee$(_ctx) { while(1)switch(_ctx.prev = _ctx.next){ case 0: return _ctx.abrupt("return", 1); case 1: case "end": return _ctx.stop(); } }, _callee); })); return _f7.apply(this, arguments); }
f1
__init__.py
from .pun import pun_threshold
from .kapur import kapur_threshold, kapur_multithreshold from .johannsen import johannsen_threshold
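A hedged usage sketch for the thresholding functions re-exported by this __init__.py. The call signature here is an assumption (a 2-D grayscale array in, a scalar threshold out); check the actual .pun/.kapur/.johannsen implementations before relying on it.

import numpy as np

# Assumption: each threshold function accepts a grayscale uint8 image array
# and returns a scalar threshold value.
img = (np.random.rand(64, 64) * 255).astype(np.uint8)
th = kapur_threshold(img)     # re-exported above; pun_threshold / johannsen_threshold are analogous
binary = img > th             # simple binarization using the computed threshold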
backend.js
$(window).load(function() { // Animate loader off screen //$(".se-pre-con").fadeOut("slow"); }); // $(document).keydown(function(e){ // if(e.keyCode==123){ // return false; // } // }); // $(document).on("contextmenu",function(e){ // e.preventDefault(); // }); $(document).ready(function(){ $('.confirm').click(function(){ return confirm('Are you sure ?'); }); // $(document).on("click","input",function(){ // $(this).select(); // }); // $(".addNews form .imag_news ul li input").on('change',function(){ // var input=$(this); // var input_val=input.val();//get the value of input // var extension=input_val.substr(input_val.lastIndexOf(".")+1);//to get tha extension // var Allow_extension=['gif','GIF','jpg',"JPG","PNG","png","jpeg"]; //allow extension // if (jQuery.inArray(extension,Allow_extension)>-1) // { // if (input.context.files && input.context.files[0]) // { // var reader= new FileReader(); // reader.onloadend=function(data){ // input.parent().children(".img").html("<img class='ada' src='"+data.target.result+"'>"); // input.addClass('add-one'); // input.parent().css({"border":"0"}); // input.parent().children("span").css({"display":"block"}) // } // reader.readAsDataURL(input.context.files[0]); // } // input.css({"pointer-events":"none"}); // input.parent().css({"cursor":"not-allowed"}) // } // else{ // // $(".add_remove_photo div span").text("Invalid file extension allowed: jpg,jpeg, gif, png Ok").fadeIn(1000).delay(4000).fadeOut(1000); // // input.val(''); // // input.css({"pointer-events":"auto"}); // alert("Invalid file extension allowed: jpg,jpeg, gif, png Ok"); // } // }); // $(".addNews form ul li span").click(function(){ // $(this).parent().children("input").val('').css({"border":"1px solid #ccc"}); // $(this).parent().children(".img").html(""); // $(this).parent().children("span").css({"display":"none"}); // $(this).parent().children("input").css({"pointer-events":"auto"}); // }) // $(".error").delay(2000).fadeOut(1000) // validate forems news_c.php var error_ar=[]; $(".addNews form .title input").on("blur",function(){ var val=$(this).val(); var length=val.length; var section=$(".addNews form .news_c_click"); var place_err=$(".addNews form .titl_err"); var the_err="<div class='error alert alert-danger col-xs-12 col-sm-12 col-md-12 col-lg-12'>العنوان يجب ان يكون اكثر من 5 احرف</div>"; check_form(length,5,section,place_err,the_err,"title"); }); $(".addNews form .desc textarea").on("blur",function(){ var val=$(this).val(); var length=val.length; var section=$(".addNews form .news_c_click"); var place_err=$(".addNews form .desc_err"); var the_err="<div class='error alert alert-danger col-xs-12 col-sm-12 col-md-12 col-lg-12'>الوصف يجب ان يكون اكثر من 30 حرف</div>"; check_form(length,30,section,place_err,the_err,"desc"); }); function check_form(length,max_length,section,place_err,the_err,this_f){ if(length<max_length){ place_err.html(the_err); error_ar[this_f]='no'; preven_def(section,false); }else{ place_err.children(".error").fadeOut(1000); error_ar[this_f]='yes'; preven_def(section,true); } } function preven_def(section,retuen_q){ if (retuen_q&&(error_ar['title']=='yes' || error_ar['desc']=='yes')) { $(section).unbind('click'); }else{ $(section).on("click",function(){ return false }) } } function ajax_rq(data,url,type,place){ $.ajax({ url : url, type : type, data : data, success:function(get_data){ $(place).html(get_data); }, cashe:false, }); } //start define_type var i=1; var apend_h_1=' <div data-toggle="buttons" class="item_nnn col-xs-12"><div data-toggle="buttons"></div> <label 
class="btn btn-default col-xs-12 col-md-3 pull-right select_s_c">'; var append_h_3='</label><div class="col-xs-12 col-md-3 type_and_num"><label>العدد المفصل:</label><input type="text" class="" name="details_num[]" placeholder="العدد المفصل" ></div>'; var apend_h_4='<div class="col-xs-12 col-md-4 type_and_num"><label>نوع الوحدة :</label><select class=" btn" name="type_[]"><option class="" value="0">----------</option><option class="" value="كرتونة">كرتونة</option><option class="" value="شوال">شوال</option><option class="" value="كيلو">كيلو</option></select></div><div><span class="add_more_def_type col-xs-12 col-md-1 btn btn-success glyphicon glyphicon-plus"></span><span class="delete_more_def_type col-xs-12 col-md-1 btn btn-danger glyphicon glyphicon-minus "></span></div></div> '; var append_5=''; $('.subcat_m .add_in_cat .add_n_f').click(function(){ i++; $('.subcat_m .add_in_cat').append('<div id="row_add_z'+i+'" class="input_name_pr_add col-xs-12"><input type="text" name="name[]" placeholder="اسم الصنف" class="col-xs-12 col-md-10 pull-right name_list" /><button type="button" name="remove" id="'+i+'" class="col-xs-12 col-md-1 btn btn-danger btn_remove glyphicon glyphicon-remove"></button></div><br>'); }); $(document).on('click', '.btn_remove', function(){ var button_id = $(this).attr("id"); $('#row_add_z'+button_id+'').remove(); }); $(".subcat_m .done_a").on("click",function(){ var main_cat_value=$(".subcat_m .main_cat select").val(); var sup_cat_value=$(".subcat_m .sub_cat select").val(); var input_0_value=$(".subcat_m .add_in_cat input").val(); if (main_cat_value == -1||sup_cat_value == -1||input_0_value.length<2) { $(".subcat_m ._alert").html("يجب اكمال البيانات");//set error message to it $(".subcat_m ._alert").fadeIn(1000).delay(1000).fadeOut(1000); }else{ $(".subcat_m").fadeOut(100) x=$('.add_in_cat').find("input") for (var i =0; i<=x.length-1; i++) { if (x[i].value !=''){ var apend_h_2='<input type="checkbox" name="name_s_c[]" autocomplete="off" value="'+x[i].value+'"><span class="glyphicon glyphicon-ok glyphicon-lg"></span><p>'+x[i].value+'</p>'; $(apend_h_1+apend_h_2+append_h_3+apend_h_4+append_5).appendTo(".pop_up_chick_t .items"); } } var h_v=$('.supp_edit_form .select_main_cat').val(); var in_hidden="<div class='dis'><span><input type='hidden' name='main_cat' value='"+h_v+"'></span><span><input type='hidden' name='sub_cat' value='"+sup_cat_value+"'></span><div>"; $(".pop_up_chick_t .items").append(in_hidden); $('.form_check_add_subcat .pop_up_chick_t').delay(500).fadeIn(1000); } }); //////////////////////// //////////////////////////define_type $(document).on("click",".items .add_more_def_type",function(){ var my_dev=$(this).parents(".item_nnn").clone().css({"background-color":"rgb(234, 234, 234)","display":"none"}); my_dev.children(".type_and_num").fadeIn(1000) $(this).parents(".item_nnn").after(my_dev); my_dev.fadeIn(1000) }); $(document).on("click",".items .delete_more_def_type ",function(){ $(this).parents(".item_nnn").fadeOut(1000,function(){$(this).remove()}); }); //checkAll اضافة الاصناف $(".form_check_add_subcat .items .checkAll").click(function () { var element=$(".form_check_add_subcat .items .item_nnn .select_s_c "); if(this.checked){ $('.form_check_add_subcat .items input:checkbox').prop('checked',this.checked); element.addClass("active") }else{ element.removeClass("active"); } }); $(document).on("mousedown",".form_check_add_subcat .items .item_nnn .select_s_c",function(){ var element=$(this).children("input:checkbox"); if(element.attr("active")){ 
element.removeAttr("active"); }else{ element.attr("active"," "); } }); //getall sub cat by main cat //to get the value of select box and send it to page to get the subcat $(".subcat_m .main_cat select").on("change",function(){ var value_selecy=$(this).val(); if (value_selecy != -1) { var data={main_cat:value_selecy} $.ajax({ url:"check_type.php", method:"POST", data:data, success:function(data) { if (data==0) { $(".subcat_m ._alert").html("حدث خطأ يرجى المحاولة مرة أخرى :(");//set error message to it $(".subcat_m ._alert").fadeIn(1000).delay(2000).fadeOut(1000); $(".subcat_m .sub_cat").html("");//to remove the }else{ $(".subcat_m .sub_cat").fadeOut(1000);//to fade out it befor get indo $(".subcat_m .sub_cat").html(data); $(".subcat_m .sub_cat").fadeIn(1000);//to fade out it befor get indo } } }); }else{ $(".subcat_m ._alert").html("يجب تحديد الفئة الرئيسية للصنف");//set error message to it $(".subcat_m ._alert").fadeIn(1000).delay(2000).fadeOut(1000); $(".subcat_m .sub_cat").html("");//to remove the } }); /* **start co_bought **/ $(".add_new_bill .select_type .selectpicker").on("change",function(){ var this_val=$(this).val(); if (this_val!="تاجر جملة" && this_val != "شركة") { $(".add_new_bill .d_err").text("يجب اختيار نوع المورد"); $(".add_new_bill .d_err").fadeIn(1000).delay(2000).fadeOut(1000); $('.add_new_bill .select_type_name_c').html(""); $('.add_new_bill .select_type_name_c').fadeOut(10); }else{ var data={define_type:this_val} $.ajax({ url:"check_type.php", method:"POST", data:data, success:function(data) { $('.add_new_bill .select_type_name_c').fadeOut(1000) $('.add_new_bill .select_type_name_c').html(data) $('.add_new_bill .select_type_name_c').fadeIn(1000) } }); } }); //to get sub cat from main cat $(".add_new_bill .select_main_c .selectpicker").on("change",function(){ var this_val=$(this).val(); if (this_val<="0") { $(".add_new_bill .d_err").text("يجب اختيار الفئات الرئيسية للصنف"); $(".add_new_bill .d_err").fadeIn(1000).delay(2000).fadeOut(1000); }else{ var data={main_cat:this_val} $.ajax({ url:"check_type.php", method:"POST", data:data, success:function(data) { $('.add_new_bill .select_type_subcat').fadeOut(1000) $('.add_new_bill .select_type_subcat').html(data) $('.add_new_bill .select_type_subcat').fadeIn(1000) } }); } }); var mydev_form_add_bill=$(".add_new_bill .add_bill_body")//copy the bill_body to add it after add bill //to get define_type from sub cat (add)->co_bought $(document).on("change",".add_new_bill .add_bill_body .select_type_subcat .selectpicker",function(){ var this_val=$(this).val(); if (this_val<="0") { $(".add_new_bill .d_err").text("يجب اختيار الفئات الفرعية للصنف"); $(".add_new_bill .d_err").fadeIn(1000).delay(2000).fadeOut(1000); }else{ var data={d_f:this_val} $.ajax({ url:"check_type.php", method:"POST", data:data, success:function(data) { $(".supp_edit_form .type_sup_cat").fadeOut(1000) $('.add_new_bill .popup_out .popup .da').html(data) $(".supp_edit_form .type_sup_cat").fadeIn(1000) } }); } }); //get define_type by subcat $(document).on("change",".add_new_bill .select_main_c select",function(){ var this_val=$(this).val();//get th value of this select if (this_val<="0") { $(".add_new_bill .d_err").text("يجب اختيار الفئات الرئيسية للصنف"); $(".add_new_bill .d_err").fadeIn(1000).delay(2000).fadeOut(1000); } }); //onchange سم المورد fadeout main cat $(document).on("change",".add_new_bill .select_type_name_c select",function(){ var this_val=$(this).val();//get th value of this select if (this_val==1) { $(".add_new_bill .d_err").text("يجب اختيار الفئات 
الرئيسية للصنف"); $(".add_new_bill .d_err").fadeIn(1000).delay(2000).fadeOut(1000); }else{// fadein sub cat $(".add_new_bill .select_main_c").fadeIn(1000); } }); /////////////popup $(document).on("click",'.add_new_bill .type_sup_cat .p_type_sup_cat',function(){ $('.add_new_bill .popup_out').fadeToggle(1000) }); $(document).on("click",'.add_new_bill .popup_out .popup .da table tbody tr',function(){ var value=$(this).find("input").val(); var text=$(this).find('.name').text(); $('.add_new_bill .type_sup_cat .p_type_sup_cat').html(text) $('.add_new_bill .type_sup_cat .type_sup_cat_in').html("<input type='hidden' value='"+value+"' name='type_s_cat'>"); $(".supp_edit_form .num_per_unit").fadeIn(1000) $('.add_new_bill .popup_out').fadeOut(100) }); $(document).on("click",'.add_new_bill .type_sup_cat_c .p_type_sup_cat',function(){ $('.add_new_bill .popup_out').fadeToggle(1000); }); $(document).on("click",'.add_new_bill .popup_out .popup .da table tbody tr , .add_new_bill .popup_out .popup ',function(){ var value=$(this).find("input").val(); var text=$(this).find('.name').text(); $('.add_new_bill .type_sup_cat_c .p_type_sup_cat').html(text) $('.add_new_bill .type_sup_cat_c .type_sup_cat_in').html("<input type='hidden' value='"+value+"' name='type_s_cat'>"); $(".supp_edit_form .num_per_unit").fadeIn(1000) $('.add_new_bill .popup_out').fadeOut(100) }); ///////////end popup //////////////// get the cost $(".supp_edit_form .num_per_unit input").on("keyup",function(){ var value=$(this).val(); var cost_per_nu=$(".supp_edit_form .cost_per_unit input").val(); if (isNaN(value)) { $(".pop_up_alert ").fadeIn(100); $(".pop_up_alert .upper .content h3").html("العدد"); $(".pop_up_alert .upper .content p").html("العدد بالوحدة يجب ان يكون رقم"); $(".pop_up_alert .upper").fadeIn(100); $(".cost_per_unit").fadeOut(1000) }else{ if(cost_per_nu != "" && value !=''){ $(".supp_edit_form .total_cost input").val(value*cost_per_nu) } $(".cost_per_unit").fadeIn(1000) } }); $(".supp_edit_form .cost_per_unit input").on("keyup",function(){ var value=$(this).val(); var value_num=$(".supp_edit_form .num_per_unit input").val(); if (!isNaN(value)) { if(value_num == ""){ $(".pop_up_alert ").fadeIn(100); $(".pop_up_alert .upper .content h3").html("العدد"); $(".pop_up_alert .upper .content p").html("يجب كتابة العدد بالوحدة"); $(".pop_up_alert .upper").fadeIn(100); $(this).val(''); $(".cost_per_unit").fadeOut(1000) $(".total_cost").fadeOut(1000); $(".add_new_bill .r_cost").fadeOut(1000); $(".add_new_bill .r_cost").fadeOut(1000); }else{ $(".total_cost").fadeIn(1000); $(".add_new_bill .nots").fadeIn(1000); $(".total_cost input").val(value*value_num); } }else{ $(".pop_up_alert ").fadeIn(100); $(".pop_up_alert .upper .content h3").html("السعر"); $(".pop_up_alert .upper .content p").html("السعر يجب ان يكون رقم"); $(".pop_up_alert .upper").fadeIn(100); } }); $(".supp_edit_form .total_cost input").on("keyup",function(){ var value=$(".supp_edit_form .cost_per_unit input").val(); var value_num=$(".supp_edit_form .num_per_unit input").val(); var this_val=$(this).val() if (!isNaN(value)) { $(".add_new_bill .r_cost input").val(this_val-value*value_num); }else{ $(".pop_up_alert ").fadeIn(100); $(".pop_up_alert .upper .content h3").html("السعر"); $(".pop_up_alert .upper .content p").html("السعر يجب ان يكون رقم"); $(".pop_up_alert .upper").fadeIn(100); } }); // $(".supp_edit_form .tsdedat input").on("keyup ",function(){ // var value=$(".supp_edit_form .total_cost input").val(); // var this_val=$(this).val() // if (!isNaN(value)) { // $(".add_new_bill 
.r_cost input").val(this_val-value); // }else{ // alert("السعر يجب ان يكون رقم"); // } // }); // $(".supp_edit_form .desc_cost input").bind("blur",function(){ // var value_total=$(".supp_edit_form .total_cost input").val(); // var value=$(".supp_edit_form .cost_per_unit input").val(); // var value_num=$(".supp_edit_form .num_per_unit input").val(); // var this_val=$(this).val(); // if(this_val > 0 && !isNaN(value)){ // var desc_=(this_val*value*value_num)/100 // var final_desc=value*value_num-desc_; // $(".add_new_bill .total_cost input").val(final_desc); // var new_v=$(".supp_edit_form .total_cost input").val(); // var tasdedat=$(".supp_edit_form .tsdedat input").val(); // $(".add_new_bill .r_cost input").val(tasdedat-new_v); // }else{ // var tasdedat=$(".supp_edit_form .tsdedat input").val(); // $(".add_new_bill .r_cost input").val(tasdedat-value_total); // $(".total_cost input").val(value*value_num); // } // }); ///////////endof get the cost //check the filed of bill co_bought and make tabel to send $(document).on('mousedown',".add_new_bill .add_bill_body .done_c",function(){ var inputs=$(".add_new_bill .add_bill_body :input").find("input"); var input_val_totale=$('.calc_total_c .total_cost_d input').val(); var input_1= inputs.prevObject[0].value; var input_2= inputs.prevObject[1].value; var input_3= inputs.prevObject[2].value; var input_4= inputs.prevObject[3].value; var input_5= inputs.prevObject[4].value; var input_6= inputs.prevObject[5].value; var input_7= inputs.prevObject[6].value; var input_8= inputs.prevObject[7].value; var input_9= inputs.prevObject[8].value; if (input_1<=0||input_2<=0||input_3<=0||input_4<=0||input_5<=0||input_6<=0||input_7<=0||input_8<=0) { $(".add_new_bill .d_err").text("يجب اكمال البيانات"); $(".add_new_bill .d_err").fadeIn(1000).delay(2000).fadeOut(1000); }else if(isNaN(input_3) || isNaN(input_4)||isNaN(input_5)||isNaN(input_6) ||isNaN(input_7)||isNaN(input_8)){ $(".add_new_bill .d_err2").text("من فضلك اكتب ارقام فى الخانات"); $(".add_new_bill .d_err2").fadeIn(1000).delay(2000).fadeOut(1000); }else{ $('form_s_add_new_bill').fadeIn(1000); var type_supplier="<input type='hidden' name='type_supplier[]' value='"+inputs.prevObject[0].value+"'>";//نوع المورد var name_supplier="<input type='hidden' name='name_supplier[]' value='"+inputs.prevObject[1].selectedOptions[0].outerText+"'>";//اسم المورد var man_cat="<input type='hidden' name='main_cat[]' value='"+inputs.prevObject[2].value+"'>";//الفئات الرئيسية للصنف var sub_cat="<input type='hidden' name='sub_cat[]' value='"+inputs.prevObject[3].value+"'>";//لفئات الفرعية var define_type="<input type='hidden' name='define_type[]' value='"+inputs.prevObject[4].value+"'>";//نوع الصنف var num_per_unit="<input type='hidden' name='num_per_unit[]' value='"+inputs.prevObject[5].value+"'>";//العدد بالوحدة var cost_per_unit="<input type='hidden' name='cost_per_unit[]' value='"+inputs.prevObject[6].value+"'>";//السعر بالوحدة var total_cost="<input type='hidden' name='t_c[]' value='"+inputs.prevObject[7].value+"'>";//اجمالى السعر var supplier_i="<input type='hidden' name='supplier_i[]' value='"+inputs.prevObject[1].value+"'>";//اسم المورد var notes="<input type='hidden' name='notes[]' value='"+inputs.prevObject[8].value+"'>";//الملاحظات var define_type_text=$('.add_new_bill .type_sup_cat .p_type_sup_cat').text(); var the_place=$('.form_s_add_new_bill table tbody'); var 
info_bi="<tr><td>"+inputs.prevObject[0].value+"<span>"+type_supplier+"</span></td><td>"+inputs.prevObject[1].selectedOptions[0].outerText+"<span>"+name_supplier+"</span></td><td>"+define_type_text+"<span>"+define_type+"</span></td><td>"+inputs.prevObject[5].value+"<span>"+num_per_unit+"</span></td><td>"+inputs.prevObject[6].value+"<span>"+cost_per_unit+"</span></td><td class='main_num_delet_w'><span class='num_delet_w'>"+inputs.prevObject[7].value+"</span><span>"+total_cost+"</span> </td><td>"+inputs.prevObject[8].value+"<span>"+notes+"</span><span>"+supplier_i+"</span></td> <td><span class=' r_bi_a btn btn-danger btn_remove glyphicon glyphicon-remove'></span> </td> </tr>"; the_place.append(info_bi) $('.form_s_add_new_bill').fadeIn(1000) $('.calc_total_c .total_cost_d input').val(parseInt(input_val_totale)+parseInt(inputs.prevObject[7].value)) //tsfer $(".add_new_bill .add_bill_body .select_type .selectpicker ").attr("disabled"," "); $(".add_new_bill .add_bill_body .select_type_name_c .selectpicker ").attr("disabled"," "); inputs.prevObject[4].value=0; inputs.prevObject[5].value=0; inputs.prevObject[6].value=0; inputs.prevObject[7].value=0; $('.add_new_bill .type_sup_cat .p_type_sup_cat').text(""); } }); $(document).on("click",'.form_s_add_new_bill table tbody .r_bi_a',function(){ var input_val_totale=$('.calc_total_c .total_cost_d input').val(); var num_=$(this).parents("tr").children(".main_num_delet_w"); var new_num=num_.prevObject[0].cells[5].outerText; $('.calc_total_c .total_cost_d input').val(parseInt(input_val_totale)-parseInt(new_num)) $(this).parents("tr").fadeOut(1000,function(){$(this).remove();}) }) $(document).on('keyup','.calc_total_c .tsdedat input',function(){ var input_val_totale=$('.calc_total_c .total_cost_d input').val(); $('.calc_total_c .elrased_ input').val($(this).val()-input_val_totale); }); $(document).on("click",'.form_s_add_new_bill .done_c',function(){ var input1_val=$('.calc_total_c .elrased_ input').val(); var input1_va2=$('.calc_total_c .tsdedat input').val(); if( isNaN(input1_va2)|| isNaN(input1_va2) ){ $('.calc_total_c .elrased_err').text("يجب كتابة ارقام فقط فى التسديدات"); $(".calc_total_c .elrased_err").fadeIn(1000).delay(2000).fadeOut(1000); $(this).val(0); return false; } else{ $('.calc_total_c .total_cost_d input').removeAttr("disabled"); $('.calc_total_c .elrased_ input').removeAttr("disabled"); } }) //end check //distribution start $(".dis_bution tbody input:hidden,.dis_bution tbody input:text").attr("disabled"," ") $(".dis_bution .checkall").on("click",function () { if(this.checked){ $('.dis_bution input:checkbox').prop('checked',this.checked); $(".dis_bution tbody input:hidden, .dis_bution tbody input:text").removeAttr("disabled"); }else{ $('.dis_bution input:checkbox').prop('checked',this.checked); $(".dis_bution tbody input:hidden, .dis_bution tbody input:text").attr("disabled"," ") } }); $('.dis_bution tbody .cheak_child_box').on('click',function(){ if(this.checked){ $(this).parents("tr").find("input:hidden, input:text").removeAttr("disabled"); }else{ $(this).parents("tr").find("input:hidden,input:text").attr("disabled"," "); } }); $('.dis_bution tbody .shop_input').on('blur',function(){ var val_input_pr=$(this).parents("tr").find(".number_per_unit input").val(); var val_input_2=$(this).parents("tr").find(".shop_input") var shop_1=val_input_2[0].value; var shop_2=val_input_2[1].value; var shop_3=val_input_2[2].value; var shop_4=val_input_2[3].value; if(parseInt(shop_1)>parseInt(val_input_pr) ||parseInt(shop_2)>parseInt(val_input_pr) 
||parseInt(shop_3)>parseInt(val_input_pr) || parseInt(shop_4)>parseInt(val_input_pr) ){ $(".pop_up_alert ").fadeIn(100); $(".pop_up_alert .upper .content h3").html("العدد"); $(".pop_up_alert .upper .content p").html("عدد التوزيع اكثر من المتاح"); $(".pop_up_alert .upper").fadeIn(100); $(this).val(0) } if((parseInt(shop_1)+parseInt(shop_2)+parseInt(shop_3)+parseInt(shop_4))>val_input_pr ){ $(".pop_up_alert ").fadeIn(100); $(".pop_up_alert .upper .content h3").html("العدد"); $(".pop_up_alert .upper .content p").html("عدد التوزيع اكثر من المتاح"); $(".pop_up_alert .upper").fadeIn(100); $(this).val(0) } }); $(document).on("click",".body_cat .b_c li",function(){ var this_val=$(this).children("input").val(); var data={main_cat:this_val,do:'ul'} $.ajax({ url:"check_type.php", method:"POST", data:data, success:function(data) { if(data.length<10){ $('.body_cat .b_subcat').html("<div class='_alert alert alert-danger'>لايوجد فئات فرعية</div>"); $('.body_cat .b_define_type').html("") $(".body_cat .b_subcat").fadeIn(1000) }else{ $('.body_cat .b_subcat').html(data) $(".body_cat .b_subcat").fadeIn(1000) $('.body_cat .b_define_type').html("") } } }); }); $(document).on("click",".body_cat .b_subcat li",function(){ var this_val=$(this).children("input").val(); var data={d_f:this_val,do:'ul'} $.ajax({ url:"check_type.php", method:"POST", data:data, success:function(data) { if(data.length<10){ $('.body_cat .b_define_type').html("<div class='_alert alert alert-danger'>لايوجد اصناف</div>") $(".body_cat .b_define_type").fadeIn(1000) }else{ $('.body_cat .b_define_type').html(data) $(".body_cat .b_define_type").fadeIn(1000) } } }); }); $(document).on("click",".dis_cont .search_h ul .cat",function(){ $('.dis_cont .body_cat').fadeIn(1000); $('.dis_cont .body_bill').fadeOut(1000); }); $(document).on("click",".dis_cont .search_h ul .bill",function(){ $('.dis_cont .body_bill').fadeIn(1000); $('.dis_cont .body_cat').fadeOut(10); }); //dashboard.php $(document).on('click',".right_side .row_main_cat ul li",function(){ var this_val=$(this).children("input").val(); var data={main_cat:this_val,do:'ul'} $.ajax({ url:"check_type.php", method:"POST", data:data, success:function(data) { if(data.length<46){ $('.pr_bill .row_sub_cat ').html("<div class='_alert alert alert-danger'>لايوجد فئات فرعية</div>"); $(".pr_bill .row_sub_cat ").fadeIn(1000).delay(100).fadeOut(1000); $('.pr_bill .right_side .define_type_body ').html(" "); $('.pr_bill .right_side .product_body').html(" "); }else{ $('.pr_bill .row_sub_cat ').html(data) $(".pr_bill .row_sub_cat").fadeIn(1000); $('.pr_bill .right_side .define_type_body').html(" "); $('.pr_bill .right_side .product_body').html(" "); } } }); }); $(document).on('click',".row_sub_cat .body_sub_cat ul li",function(){ var this_val=$(this).children("input").val(); var data={d_f:this_val,do:'all_ul'} $.ajax({ url:"check_type.php", method:"POST", data:data, success:function(data) { if(data.length<10){ $('.pr_bill .right_side .define_type_body ').html("<div class='_alert alert alert-danger'>لايوجد فئات فرعية</div>"); $(".pr_bill .right_side .define_type_body ").fadeIn(1000).delay(2000).fadeOut(1000); $('.pr_bill .right_side .product_body').html(" "); }else if(data.length>10&&data.length<90){ $('.pr_bill .right_side .define_type_body ').html(data); $('.pr_bill .right_side .product_body').html(" "); }else{ $('.pr_bill .right_side .define_type_body ').html(data) $('.pr_bill .right_side .product_body').html(" "); } } }); }); $(document).on('click',".pr_bill .right_side .define_type_body ul li",function(){ var 
this_val_define_num=$(this).children("input").val(); var get="product"; var data={define_type:this_val_define_num,get:get} $.ajax({ url:"check_type.php", method:"POST", data:data, success:function(data) { if(data.length<10){ $('.pr_bill .right_side .product_body').html("<div class='_alert alert alert-danger'>لايوجد فئات فرعية</div>"); $(".pr_bill .right_side .product_body").fadeIn(1000).delay(2000).fadeOut(1000) }else if(data.length>10 && data.length<100){ $('.pr_bill .right_side .product_body').html(data); $(".pr_bill .right_side .product_body").fadeIn(1000).delay(2000).fadeOut(1000) }else{ $('.pr_bill .right_side .product_body').html(data) $(".pr_bill .right_side .product_body").fadeIn(1000) } } }); }); // $(document).on("click",".left_side .items_s table tbody tr td .delete_row",function(){ var print_row_class=$(this).parents('tr').attr('id'); $(this).parents("tr").fadeOut(1000,function(){$(this).remove();}); $(".print_aaa table tbody #"+print_row_class).fadeOut(1000,function(){$(this).remove();}); var all_input=$(this).parents("tr").find("input"); var totale=all_input[4].value; var input_val= $('.left_side .items_s .all_bill_c .totale_cost_bill input').val(); $('.left_side .items_s .all_bill_c .totale_cost_bill input').val(input_val-totale); $(".left_side .items_s .all_bill_c .paied_bill input").val(input_val-totale); ////////////////////////////////////////////////////////////////////////////////// $('.print_aaa .all_bill_c .totale_cost_bill span').text(input_val-totale); $('.print_aaa .all_bill_c .paied_bill span').text(input_val-totale); }) $(document).on("click",".pr_bill .right_side .product_body .t_shop",function(){ var name_snf=$(this).find(".name_snf").text(); var all_input=$(this).find(".inp input"); var this_id=$(this).attr('id'); // console.log($(this).html(), this_id); // for (var i = 0; i<get_sum_of_row_details_num.length; i++) { // total_details_num+=parseFloat(get_sum_of_row_details_num[i].value); // } // if (parseFloat(total_cost)>=all_input[1].value || parseFloat(total_details_num) >= all_input[2].value) { // alert("لا يمكن الاضافة من هذا الصنف"); // return false; // } var get_sum_of_row_details_num=$(".left_side .items_s table tbody #"+this_id+" ") if (get_sum_of_row_details_num.length>0) { var val_element=$(".left_side .items_s table tbody #"+this_id+" .num_per_unit input").val(); if (parseFloat(val_element)>=parseFloat(all_input[1].value)) { $(".pop_up_alert ").fadeIn(100); $(".pop_up_alert .upper .content h3").html("االكمية"); $(".pop_up_alert .upper .content p").html("لا يوجد كمية كافية"); $(".pop_up_alert .upper").fadeIn(100); }else{ var cost_per_unit_=$(".left_side .items_s table tbody #"+this_id+" .cost_per_unit input").val(); $(".left_side .items_s table tbody #"+this_id+" input[name='t_cost_per_one[]']").val(parseFloat(cost_per_unit_)*parseFloat(val_element)); $(".left_side .items_s table tbody #"+this_id+" .num_per_unit input").val(parseFloat(val_element)+1); $(".left_side .items_s table tbody #"+this_id+" .cost_per_unit").fadeIn(500); $(".left_side .items_s table thead .cost_per_unit").fadeIn(500); $(".left_side .items_s table tbody #"+this_id+" .cost_per_unit input").removeAttr("disabled") } }else{ var append_='<tr class="row1 " id="'+this_id+'">'; append_+='<td class="name">'+name_snf+'</td>'; append_+='<td class="num_per_unit"><input type="text" name="num_per_unit[]" value="0"></td>'; append_+='<td class="cost_per_unit"><input type="text" name="cost_per_unit[]" value="0" disabled=""></td>'; // append_+='<td class="details_num"><input type="text" 
name="details_num[]" value="0"></td>'; // append_+='<td class="cost_details_num "><input type="text" name="cost_details_num[]" value="0" disabled=""></td>'; append_+='<td><input type="text" name="t_cost_per_one[]" value="0"></td>'; append_+='<td class="hiddenAmount"><input type="hidden" name="amount[]" value="'+all_input[1].value+'"></td>'; append_+='<td class="hiddenBuy_price"><input type="hidden" name="buy_price[]" value="'+all_input[2].value+'"></td>'; append_+='<td><span class="delete_row glyphicon glyphicon-remove"></span></td>'; append_+='<td class="d_n">'; // append_+='<input type="text" name="define_type[]" value="'+all_input[5].value+'">'; append_+='<input type="text" name="cost_per_unit1[]" disabled=" ">'; append_+='<input type="text" name="shop_id" ></td>'; append_+='<td class="d_n check">'; append_+='<input type="hidden" class="num_per_unt" value="'+all_input[1].value+'" disabled="">'; append_+='<input type="hidden" class="mian_details_num" value="'+all_input[2].value+'" > '; // append_+='<input type="hidden" class="cost_per_unit" value="'+all_input[3].value+'" disabled=""> '; // append_+='<input type="hidden" class="cost_per_details_unit" value="'+all_input[4].value+'"disabled="">'; append_+='<input type="text" name="id_un[]" value="'+all_input[0].value+'">';/////////////////////////////////////////// // append_+='<input type="text" value="'+all_input[6].value+'" disabled=" ">'; append_+='<input type="text" name="user_saler" value="1" ></td> </tr>'; $(".left_side .items_s table tbody").append(append_); // apent to print var append_2='<tr class="row1 " id="'+this_id+'">'; append_2+='<td class="name">'+name_snf+'</td>'; append_2+='<td class="num_per_unit"></td>'; append_2+='<td class="cost_per_unit"></td>'; append_2+='<td class="details_num"></td>'; // append_2+='<td class="cost_details_num "></td>'; // append_2+='<td class="to_c"></td>'; $(".left_side .print_aaa table tbody").append(append_2); var all_input_inta=$('.left_side .items_s table tbody tr input'); var get_sum_of_row=$(".left_side .items_s table tbody tr input[name='num_per_unit[]']"); var total_cost=0; for (var i = 0; i<get_sum_of_row.length; i++) { total_cost+=parseFloat(get_sum_of_row[i].value); } } }) function isFloat(x) { return !!(x % 1); } $(document).on("keyup blur",".left_side .items_s table tbody tr input",function(e){ // if (e.keyCode=="8") { // if ($(this).val().length=="0") { // $(this).val(0); // } // } if ($(this).val().trim().length=="0") { $(this).val(0); } var all_input_row=$(this).parents('tr').find("input"); var print_row_class=$(this).parents('tr').attr('id'); var this_val=$(this).val().trim(); var input_per_p=all_input_row[2].value.trim(); var num_per_unit_first_input=$(".left_side .items_s table tbody tr .num_per_unit input").val(); var num_per_pieces_third_input=$(".left_side .items_s table tbody tr .details_num input").val(); var orignal_numper_num_per_unit=all_input_row[8].value;//all available close unit // var orignal_numper_num_per_pieces=all_input_row[9].value;// available open pieces // var orignal_numper_num_detalis=all_input_row[13].value;//the orignal details number from define type t // var cost_per_unit=all_input_row[10].value;//s3r el krtona aw el shwal // var cost_per_detalis_unit=all_input_row[11].value;//s3r el krtona aw el shwal var get_allpieces_in_row=parseFloat(all_input_row[0].value)+parseFloat(input_per_p); var get_avaliable_detalis_num=(orignal_numper_num_per_unit); var get_the_unit_and_pieces=mode_(get_allpieces_in_row); var 
change_func_to_pieces=get_the_unit_and_pieces[0]+parseFloat(get_the_unit_and_pieces[1]); if (this_val.length>0) { if(!isNaN(this_val)){ var total_row=(parseFloat(all_input_row[2].value)* parseFloat(all_input_row[3].value) )+(parseFloat(all_input_row[1].value)* parseFloat(all_input_row[0].value)); if(change_func_to_pieces>get_avaliable_detalis_num){ $(".pop_up_alert ").fadeIn(100); $(".pop_up_alert .upper .content h3").html("االكمية"); $(".pop_up_alert .upper .content p").html("الكمية اكثر من المتاح"); $(".pop_up_alert .upper").fadeIn(100); $(this).val(0); } }else{ $(".pop_up_alert ").fadeIn(100); $(".pop_up_alert .upper .content h3").html("شروط"); $(".pop_up_alert .upper .content p").html("يجب كتابة ارقام فقط"); $(".pop_up_alert .upper").fadeIn(100); $(this).val(0) } //show the cost per unit if (parseFloat(all_input_row[0].value ) > 0) { $(this).parents('tr').find('.cost_per_unit').fadeIn(500); $(this).parents('tr').find(".cost_per_unit input").removeAttr("disabled"); $('.left_side .items_s table thead .cost_per_unit').fadeIn(500); }else { $(this).parents('tr').find(".cost_per_unit input").attr("disabled",' '); all_input_row[4].value=total_row; $(this).parents('tr').find(".cost_per_unit input").val(0); } $(".print_aaa table tbody #"+print_row_class+" .num_per_unit").text(all_input_row[0].value); $(".print_aaa table tbody #"+print_row_class+" .cost_per_unit").text(all_input_row[1].value); $(".print_aaa table tbody #"+print_row_class+" .details_num").text(all_input_row[2].value); // $(".print_aaa table tbody #"+print_row_class+" .cost_details_num ").text(all_input_row[3].value); // $(".print_aaa table tbody #"+print_row_class+" .to_c ").text(total_row); if (all_input_row[2].value > 0 || all_input_row[0].value > 0) { all_input_row[4].value=(total_row) } check() } }); $(document).on("click",".left_side .items_s table tbody tr input",function(){ if ($(this).val()==0) { $(this).val(" ") } }); //when blur from number per unit $(document).on("keyup",".left_side .items_s table tbody tr .details_num input",function(e){ var all_input_row=$(this).parents('tr').find("input"); var this_val=$(this).val().trim(); var orignal_numper_num_per_unit=all_input_row[8].value;//all available close unit var orignal_numper_num_per_unit=all_input_row[9].value;// available open pieces var orignal_numper_num_detalis=all_input_row[13].value;//the orignal details number from define type t var cost_per_unit=all_input_row[10].value;//s3r el krtona aw el shwal var cost_per_detalis_unit=all_input_row[11].value;//s3r el krtona aw el shwal if (this_val.length>0) { if (parseFloat(all_input_row[2].value ) > 0) { $(this).parents('tr').find(".cost_details_num").fadeIn(500); $(".left_side .items_s table thead .cost_det").fadeIn(500); $(this).parents('tr').find(".cost_details_num input").removeAttr("disabled") }else { $(this).parents('tr').find(".cost_details_num input").attr("disabled",' '); all_input_row[4].value= (parseFloat(all_input_row[2].value)* parseFloat(all_input_row[3].value) )+(parseFloat(all_input_row[1].value)* parseFloat(all_input_row[0].value) ); $(this).parents('tr').find('.cost_details_num input').val(0); } //for put total_cost var get_sum_of_row=$(".left_side .items_s table tbody tr input[name='t_cost_per_one[]']"); var total_cost=0; for (var i = 0; i<get_sum_of_row.length; i++) { total_cost+=parseFloat(get_sum_of_row[i].value); } $('.left_side .items_s .all_bill_c .totale_cost_bill input').val(total_cost); } ////////////////////////////////////////////////////////////////////////////////// }); function check(){ 
var input_num_per_unit=$(".left_side .items_s table tbody tr .num_per_unit input"); var input_cost_per_unit=$(".left_side .items_s table tbody tr .cost_per_unit input"); // var input_num_per_p=$(".left_side .items_s table tbody tr .details_num input"); // var input_cost_per_p=$(".left_side .items_s table tbody tr .cost_details_num input"); var get_sum_of_row=$(".left_side .items_s table tbody tr input[name='t_cost_per_one[]']"); var total_cost=0; for (var i = 0; i<get_sum_of_row.length; i++) { get_sum_of_row[i].value=(parseFloat(input_num_per_unit[i].value)*parseFloat(input_cost_per_unit[i].value) ) total_cost+=parseFloat(get_sum_of_row[i].value); } $('.left_side .items_s .all_bill_c .totale_cost_bill input').val(total_cost); $(".left_side .items_s .all_bill_c .paied_bill input").val(total_cost); ////////////////////////////////////////////////////////////////////////////////// $('.print_aaa .all_bill_c .totale_cost_bill span').text(total_cost); $('.print_aaa .all_bill_c .paied_bill span').text(total_cost); } $(document).on("blur keyup",".left_side .items_s table tbody tr .cost_per_unit input",function(e){ var all_input_row=$(this).parents('tr').find("input"); var this_val=$(this).val().trim(); var user_input_num_per_unit=all_input_row[0].value; var user_input_num_detalis=all_input_row[1].value; var orignal_numper_num_per_unit=all_input_row[8].value; // console.log(user_input_num_per_unit,orignal_numper_num_per_unit); // var orignal_numper_num_detalis=all_input_row[13].value;//to chech el3dad el mofasl // var cost_per_unit=all_input_row[10].value;//s3r el krtona aw el shwal // var cost_per_detalis_unit=all_input_row[11].value;//s3r el krtona aw el shwal // all_input_row[3] amouuuuuuuuuunt if (this_val.length>0) { if (e.type=="keyup") { if (parseFloat(all_input_row[2].value)>0) { all_input_row[4].value=(parseFloat(all_input_row[2].value)* parseFloat(all_input_row[3].value) )+(parseFloat(all_input_row[1].value)* parseFloat(all_input_row[0].value) ) }else{ all_input_row[4].value=(parseFloat(all_input_row[1].value)* parseFloat(all_input_row[0].value) ) } }else{ if ( parseFloat(all_input_row[1].value) < (parseFloat(all_input_row[8].value) )) { $(".pop_up_alert .upper .content h3").html("سعر الوحدة"); $(".pop_up_alert .upper .content p").html("سعر الوحدة قليل"); $(".pop_up_alert ").fadeIn(100); $(".pop_up_alert .upper").fadeIn(100); // $(this).val(parseFloat(all_input_row[10].value)) all_input_row[4].value=(parseFloat(all_input_row[2].value)* parseFloat(all_input_row[3].value) )+(parseFloat(all_input_row[1].value)* parseFloat(all_input_row[0].value) ) all_input_row[1].value = parseFloat(all_input_row[8].value); } } check() } }) $(document).on("blur keyup",".left_side .items_s table tbody tr .cost_details_num input",function(e){ var all_input_row=$(this).parents('tr').find("input"); var this_val=$(this).val().trim(); var user_input_num_per_unit=all_input_row[0].value; var user_input_num_detalis=all_input_row[1].value; var orignal_numper_num_per_unit=all_input_row[8].value;//to checks3r el krtona // var orignal_numper_num_detalis=all_input_row[13].value;//to chech el3dad el mofasl // var cost_per_unit=all_input_row[10].value;//s3r el krtona aw el shwal // var cost_per_detalis_unit=all_input_row[11].value;//s3r el krtona aw el shwal if (this_val.length>0) { if (e.type=="keyup") { if (parseFloat(all_input_row[2].value)>0) { all_input_row[4].value=(parseFloat(all_input_row[2].value)* parseFloat(all_input_row[3].value) )+(parseFloat(all_input_row[1].value)* parseFloat(all_input_row[0].value) ) }else{ 
all_input_row[4].value=(parseFloat(all_input_row[2].value)* parseFloat(all_input_row[3].value) ) } }else{ if ( ( parseFloat(all_input_row[11].value)* parseFloat(all_input_row[2].value) ) > (parseFloat(all_input_row[2].value)* parseFloat(all_input_row[3].value) ) ) { $(".pop_up_alert .upper .content h3").html("السعر باقطعة"); $(".pop_up_alert .upper .content p").html("السعر باقطعة قليل"); $(".pop_up_alert .upper").fadeIn(100); $(".pop_up_alert ").fadeIn(100); $(this).val( parseFloat(all_input_row[11].value) ) all_input_row[4].value=(parseFloat(all_input_row[2].value)* parseFloat(all_input_row[3].value) )+(parseFloat(all_input_row[1].value)* parseFloat(all_input_row[0].value) ) }else{ if (parseFloat(all_input_row[2].value)>0) { all_input_row[4].value=(parseFloat(all_input_row[2].value)* parseFloat(all_input_row[3].value) )+(parseFloat(all_input_row[1].value)* parseFloat(all_input_row[0].value) ) }else{ all_input_row[4].value=(parseFloat(all_input_row[2].value)* parseFloat(all_input_row[3].value) ) } } } check() } // var get_sum_of_row=$(".left_side .items_s table tbody tr input[name='t_cost_per_one[]']"); // var total_cost=0; // for (var i = 0; i<get_sum_of_row.length; i++) { // total_cost+=parseFloat(get_sum_of_row[i].value); // } // $('.left_side .items_s .all_bill_c .totale_cost_bill input').val(total_cost); // $(".left_side .items_s .all_bill_c .paied_bill input").val(total_cost); // ////////////////////////////////////////////////////////////////////////////////// // $('.print_aaa .all_bill_c .totale_cost_bill span').text(total_cost); // $('.print_aaa .all_bill_c .paied_bill span').text(total_cost); }) $(document).on("blur",".left_side .items_s table tbody tr .num_per_unit input",function(){ var all_input_row=$(this).parents('tr').find("input"); var user_input_num_per_unit=all_input_row[0].value; var user_input_num_detalis=all_input_row[1].value; var orignal_numper_num_per_unit=all_input_row[8].value;//to checks3r el krtona // var orignal_numper_num_detalis=all_input_row[13].value;//to chech el3dad el mofasl // var cost_per_unit=all_input_row[10].value;//s3r el krtona aw el shwal // var cost_per_detalis_unit=all_input_row[11].value;//s3r el krtona aw el shwal if (parseFloat(all_input_row[0].value) > parseFloat(all_input_row[3].value)) { $(".pop_up_alert .upper .content h3").html("الكمية"); $(".pop_up_alert .upper .content p").html("الكمية غير كافية"); $(".pop_up_alert ").fadeIn(100); $(".pop_up_alert .upper").fadeIn(100); } // if (parseFloat(all_input_row[0].value )==orignal_numper_num_per_unit) { // all_input_row[2].value=0; // all_input_row[3].value=0; // } }) $(document).on("keyup",".left_side .items_s table tbody tr .num_per_unit input",function(){ var input_val=$(this).val(); if (parseFloat(input_val )<1) { alert_("شروط",'العدد بالوحدة يجب ان يكون اكثر من 0'); $(this).val(0); } }) $(document).on('keyup','.left_side .items_s .all_bill_c .paied_bill input',function(){ var this_val=$(this).val(); var input_val= $('.left_side .items_s .all_bill_c .totale_cost_bill input').val(); $('.left_side .items_s .all_bill_c .reminder input').val(parseFloat(this_val)-parseFloat(input_val)); $('.print_aaa .all_bill_c .paied_bill span').text(this_val); $('.print_aaa .all_bill_c .reminder span').text(parseFloat(this_val)-parseFloat(input_val)); }) $(document).on(' submit','.left_side #form_bill',function(e){ var cus_select=$('.left_side .select_client select ').val(); //for put total_cost var get_sum_of_row=$(".left_side .items_s table tbody tr input[name='t_cost_per_one[]']"); var total_cost=0; for 
(var i = 0; i<get_sum_of_row.length; i++) { total_cost+=parseFloat(get_sum_of_row[i].value); } $('.left_side .items_s .all_bill_c .totale_cost_bill input').val(total_cost); ///////////////////////////////////////////////////////////////////////////////////////////// var get_num_per_unit=$(".left_side .items_s table tbody tr input[name='num_per_unit[]']"); var get_cost_num_per_unit=$(".left_side .items_s table tbody tr input[name='cost_per_unit[]']"); for (var i = 0; i<get_num_per_unit.length; i++) { if ((parseFloat(get_cost_num_per_unit[i].value)<=0 || get_cost_num_per_unit[i].value==" " ) && parseFloat(get_num_per_unit[i].value)>0) { alert_("شروط","يجب كتابة السعر بالوحدة"); e.preventDefault(); } } ////////////////////////////////////////////////////////////////////////////////// var get_details_num=$(".left_side .items_s table tbody tr input[name='details_num[]']"); var get_cost_details_num=$(".left_side .items_s table tbody tr input[name='cost_details_num[]']"); for (var i = 0; i<get_details_num.length; i++) { if ((parseFloat(get_cost_details_num[i].value)<=0 || get_cost_details_num[i].value==" " ) && parseFloat(get_details_num[i].value)>0) { alert_("شروط","يجب كتابة السعر بالقطعة"); e.preventDefault(); } } if (cus_select<=0) { $(".pop_up_alert .upper .content h3").html("شروط"); $(".pop_up_alert .upper .content p").html("يجب اختيار العميل"); $(".pop_up_alert ").fadeIn(100); $(".pop_up_alert .upper").fadeIn(100); e.preventDefault(); } var input_val= $('.left_side .items_s .all_bill_c .totale_cost_bill input').val(); if (input_val<0) { $(".pop_up_alert .upper .content h3").html("شروط"); $(".pop_up_alert .upper .content p").html("اجمالى الفاتورة يجب ان يكون اكثر من 0"); $(".pop_up_alert ").fadeIn(100); $(".pop_up_alert .upper").fadeIn(100); e.preventDefault(); } var input_val_bill= $('.left_side .items_s .all_bill_c .paied_bill input').val(); if (input_val_bill<0) { $(".pop_up_alert .upper .content h3").html("شروط"); $(".pop_up_alert .upper .content p").html("المدفوع يجب ان يكون اكثر من 0"); $(".pop_up_alert ").fadeIn(100); $(".pop_up_alert .upper").fadeIn(100); e.preventDefault(); } if (parseFloat(input_val)==0) { $(".pop_up_alert .upper .content h3").html("شروط"); $(".pop_up_alert .upper .content p").html("يجب اكمال البيانات"); $(".pop_up_alert ").fadeIn(100); $(".pop_up_alert .upper").fadeIn(100); e.preventDefault(); } $('.left_side .items_s .all_bill_c .totale_cost_bill input').removeAttr("disabled"); $('.left_side .items_s .all_bill_c .reminder input').removeAttr("disabled"); $(".left_side .items_s table tbody tr .cost_details_num input").removeAttr("disabled"); $(".left_side .items_s table tbody tr .cost_per_unit input").removeAttr("disabled"); }); // search suppliers $('.suppliers .search_cus input').on('keypress',function(e){ var value=$(this).val(); if(e.which==13){ var data={search:value,do:'tabel'} $.ajax({ url:"check_type.php", method:"POST", data:data, success:function(data) { $('.suppliers table tbody').html(data) } }); } }) //search_cus $('.all_shop .search_cus input').on('keypress',function(e){ var value=$(this).val(); var shop_id=$('.shop_tabel .content_search_shop thead tr input').val(); if(e.which==13){ var data={search:value,do:'search_shop'} $.ajax({ url:"check_type.php", method:"POST", data:data, success:function(data) { $('.content_search_shop').html(data) } }); } }); $('.shop_tabel .search_cus input').on('keypress',function(e){ var value=$(this).val(); var shop_id=$('.suppliers_s .search_cus .s_h input').val(); if(e.which==13){ var 
data={search:value,do:'search_shop_per_one',shopid:shop_id} $.ajax({ url:"check_type.php", method:"POST", data:data, success:function(data) { $('.content_search_shop').html(data) } }); } }); //.left_side .add_user_dash .supp_edit_form .add_cu_dash //dashboard add user $(".pr_bill .left_side .client-add span").click(function(){ $(this).parents('.left_side').find(".add_user_dash").animate({top: "0"},5); }); $(".pr_bill .left_side .add_user_dash .close_user").click(function(){ $(this).parents('.left_side').find(".add_user_dash").animate({top: "-2000px"},5); }) $(".pr_bill .left_side .supp_edit_form .add_cu_dash").on("click",function(){ var input_form=$(this).parents('form').find('input'); var text_b=$(this).parents('form').find('.text').val(); var name=input_form[0].value; var mobile=input_form[1].value; var place=input_form[2].value; var data={name:name,mobile:mobile,place:place,text:text_b} $.ajax({ url:"check_type.php", method:"POST", data:data, success:function(data) { if (data== -1) { alert_("تحذير","اسم العميل موجود من قبل") }else{ $('.left_side .select_client select ').append(data); $(".left_side .add_user_dash").animate({top: "-10000px"},5); input_form[0].value=''; input_form[1].value=''; input_form[2].value=''; $(this).parents('form').find('.text').val(" "); var name_ti=data.innerHTML; $('.name_cust_in_bill').html("اسم العميل :"+name) } } }); }) /////////////////////close main popup $(document).on("click",".pop_up_alert .upper .content button,.pop_up_alert .upper .content span",function(){ $(".pop_up_alert .upper").fadeOut(1000); }); $(document).on("click",'.pr_bill .left_side .print_bill_bill',function(e){ var get_num_per_unit=$(".left_side .items_s table tbody tr input[name='num_per_unit[]']"); var get_cost_num_per_unit=$(".left_side .items_s table tbody tr input[name='cost_per_unit[]']"); for (var i = 0; i<get_num_per_unit.length; i++) { if ((parseFloat(get_cost_num_per_unit[i].value)<=0 || get_cost_num_per_unit[i].value==" " ) && parseFloat(get_num_per_unit[i].value)>0) { alert_("شروط","يجب كتابة السعر بالوحدة"); e.preventDefault(); return false; } } ////////////////////////////////////////////////////////////////////////////////// var get_details_num=$(".left_side .items_s table tbody tr input[name='details_num[]']"); var get_cost_details_num=$(".left_side .items_s table tbody tr input[name='cost_details_num[]']"); for (var i = 0; i<get_details_num.length; i++) { if ((parseFloat(get_cost_details_num[i].value)<=0 || get_cost_details_num[i].value==" " ) && parseFloat(get_details_num[i].value)>0) { alert_("شروط","يجب كتابة السعر بالقطعة"); e.preventDefault(); return false; } } var z=printthis(); }) $(document).on("click",'.print_bill',function(){ var z=printthis(); }) function printthis(){ var data = document.getElementById('print_aaa').innerHTML; var mywindow = window.open('', 'RESETE', 'height=400,width=400'); mywindow.document.write('<html><head><style type="text/css">*{text-align:center;font-weight:bold;font-size:12px;font-family:arial;}@page{size:auto;margin:0mm 0mm 0mm 0mm;}table{border: 1px solid #000000;}</style>'); //mywindow.document.write('<link rel="stylesheet" href="main.css" type="text/css" />'); mywindow.document.write('</head><body >'); mywindow.document.write(data); mywindow.document.write('</body></html>'); mywindow.print(); mywindow.close(); return true; } //gard $('.gard_shop .s_g').on('click',function(){ var from=$(this).parents('.gard_shop').find(".g_from").val(); var to=$(this).parents('.gard_shop').find(".g_to").val(); var sh_i=$(".sh_i_sh input").val(); if 
(from=='' || to=='') { $(".pop_up_alert .upper .content h3").html("شروط"); $(".pop_up_alert .upper .content p").html("يجب تحديد الوقت فى الجرد من و الجرد الى"); $(".pop_up_alert ").fadeIn(100); $(".pop_up_alert .upper").fadeIn(100); }else{ var data={date_from:from,date_to:to,do:'gr_d',sh_i:sh_i} $.ajax({ url:"check_type.php", method:"POST", data:data, success:function(data) { $(".gard_shop_b_1 .content_search_shop ").html(data) } }); } }); $(".gard_shop_b_1 .search_cus input").on("keypress",function(e){ var value=$(this).val(); var shop_id=$(".sh_i_sh input").val(); if(e.which==13){ var data={search:value,make:'gr_name',sh_i:shop_id} $.ajax({ url:"check_type.php", method:"POST", data:data, success:function(data) { $(".gard_shop_b_1 .content_search_shop ").html(data) } }); } }); $('.all_shop_gr .gard_shop_all .s_g').on('click',function(){ var from=$(this).parents('.gard_shop_all').find(".g_from").val(); var to=$(this).parents('.gard_shop_all').find(".g_to").val(); if (from=='' || to=='') { $(".pop_up_alert .upper .content h3").html("شروط"); $(".pop_up_alert .upper .content p").html("يجب تحديد الوقت فى الجرد من و الجرد الى"); $(".pop_up_alert ").fadeIn(100); $(".pop_up_alert .upper").fadeIn(100); }else{ var data={date_from:from,date_to:to,m:'gr_d_all_main'} $.ajax({ url:"check_type.php", method:"POST", data:data, success:function(data) { $(".all_shop_gr .content_search_shop_grd ").html(data) } }); } }); $(".all_shop_gr .search_cus input").on("keypress",function(e){ var value=$(this).val(); if(e.which==13){ var data={search:value,make:'gr_name_all'} $.ajax({ url:"check_type.php", method:"POST", data:data, success:function(data) { $(".all_shop_gr .content_search_shop_grd ").html(data) } }); } }); $(".search_bill_cu .search_cus input").on("keypress",function(e){ var value=$(this).val(); if(e.which==13){ var data={search:value,make:'get_bill_a'} $.ajax({ url:"check_type.php", method:"POST", data:data, success:function(data) { $(".search_bill_cu .distribution_cont ").html(data) } }); } }); $(".bill_p_main_page .search_bill_pr input").on("keypress",function(e){ var value=$(this).val(); if(e.which==13){ var data={search:value,make:'get_bill_prouduct'} $.ajax({ url:"check_type.php", method:"POST", data:data, success:function(data) { $(".bill_p_main_page table tbody ").html(data) } }); } }); $(".right_side .search_cus input").on("keypress",function(e){ var value=$(this).val(); if(e.which==13){ var data={search:value,make:'get_all_p_all'} $.ajax({ url:"check_type.php", method:"POST", data:data, success:function(data) { if(data.length<10){ $('.pr_bill .right_side .product_body').html("<div class='_alert alert alert-danger'>لايوجد فئات فرعية</div>"); $(".pr_bill .right_side .product_body").fadeIn(1000).delay(2000).fadeOut(1000) }else if(data.length>10 && data.length<100){ $('.pr_bill .right_side .product_body').html(data); $(".pr_bill .right_side .product_body").fadeIn(1000).delay(2000).fadeOut(1000) }else{ $('.pr_bill .right_side .product_body').html(data) $(".pr_bill .right_side .product_body").fadeIn(1000) } } }); } }); //checks //to get define_type from sub cat (add)->co_bought $(document).on("change",".check_b .select_type_subcat_ .selectpicker",function(){ var this_val=$(this).val(); if (this_val<="0") { $(".add_new_bill .d_err").text("يجب اختيار الفئات الفرعية للصنف"); $(".add_new_bill .d_err").fadeIn(1000).delay(2000).fadeOut(1000); }else{ var data={define_type:this_val,do:'select_id'} $.ajax({ url:"check_type.php", method:"POST", data:data, success:function(data) { $(".check_b 
.type_sup").fadeOut(1000) $('.check_b .type_sup').html(data) $(".check_b .type_sup").fadeIn(1000) } }); } }); //alert p $(".alert_sh_d .al_").fadeIn(1000,function(){ $(this).parents(".alert_sh_d").fadeIn(100) $(this).fadeIn(1000,function(){ $(this).delay(5000).fadeOut(100,function(){ $(this).parents(".alert_sh_d").fadeOut(100) }) ; }) }) $('.alert_sh_d .dis_this , .alert_sh_d .al_ .dis').on('click',function(){ $(this).parents(".alert_sh_d").fadeOut(1000) ; }); $(document).on("click",".alert_sh_d .al_ .dele",function(){ var e_val=$(this).parents(".alert_sh_d").find('.show_me_a').text() ; var data={edit_s:e_val,do:'ed_che_o_1'} $.ajax({ url:"check_type.php", method:"POST", data:data, success:function(data) { if (data==true) { $(".alert_sh_d").fadeOut(1000) ; } } }); }); $(document).on('click','.desc_def_add .plus_desc_def_select',function(){ var from=$(this).parents('.desc_def_add').find(".bill_from").val(); var to=$(this).parents('.desc_def_add').find(".bill_to").val(); var suo_us=$(this).parents('.desc_def_add').find(".type_sup .selectpicker").val(); var select_type=$(this).parents('.desc_def_add').find(".select_type_subcat_ .selectpicker ").val(); if (from=='' || to=='') { $(".pop_up_alert .upper .content h3").html("شروط"); $(".pop_up_alert .upper .content p").html("يجب تحديد الوقت فى الفواتير من و الفواتير الى"); $(".pop_up_alert ").fadeIn(100); $(".pop_up_alert .upper").fadeIn(100); }else if(suo_us== 'undefined' || suo_us == '' ||suo_us <1 ||select_type<1 ){ $(".pop_up_alert .upper .content h3").html("شروط"); $(".pop_up_alert .upper .content p").html("يجب تحديد النوع"); $(".pop_up_alert ").fadeIn(100); $(".pop_up_alert .upper").fadeIn(100); } else{ var data={date_from:from,date_to:to,supp_i:suo_us,m:'get_sum'} $.ajax({ url:"check_type.php", method:"POST", data:data, success:function(data) { $(".desc_def_add input[name='t_bill']").val(data) } }); } }); $(document).on('click','.desc_def_add .plus_desc_def_select',function(){ var from=$(this).parents('.desc_def_add').find(".bill_from").val(); var to=$(this).parents('.desc_def_add').find(".bill_to").val(); var suo_us=$(this).parents('.desc_def_add').find(".type_sup .selectpicker").val(); var select_type=$(this).parents('.desc_def_add').find(".select_type_subcat_ .selectpicker ").val(); if (from=='' || to=='') { $(".pop_up_alert .upper .content h3").html("شروط"); $(".pop_up_alert .upper .content p").html("يجب تحديد الوقت فى الفواتير من و الفواتير الى"); $(".pop_up_alert ").fadeIn(100); $(".pop_up_alert .upper").fadeIn(100); }else if(suo_us== 'undefined' || suo_us == '' ||suo_us <1 ||select_type<1 ){ $(".pop_up_alert .upper .content h3").html("شروط"); $(".pop_up_alert .upper .content p").html("يجب تحديد النوع"); $(".pop_up_alert ").fadeIn(100); $(".pop_up_alert .upper").fadeIn(100); } else{ var data={date_from:from,date_to:to,supp_i:suo_us,m:'get_sum'} $.ajax({ url:"check_type.php", method:"POST", data:data, success:function(data) { $(".desc_def_add input[name='t_bill']").val(data) } }); } }); $(".desc_def_add input[name='desc']").on("blur",function(){ var this_val=$(this).val(); var t_cost=$(".desc_def_add input[name='t_bill']").val(); if (this_val>100) { alert_("شروط",'قيمة الخصم اكثر من 100%'); $(this).val(0); }else if(this_val==""){ $(this).val(0); $(".desc_def_add input[name='desc_v']").val(0); }else if(isNaN(this_val)){ alert_("شروط",'يجب كتابة ارقام'); $(this).val(0); $(".desc_def_add input[name='desc_v']").val(0); }else{ $(".desc_def_add input[name='desc_v']").val((parseFloat(this_val)*parseFloat(t_cost))/100); } }) $(".bill_desc_edit 
input[name='desc']").on("blur",function(){ var this_val=$(this).val(); var t_cost=$(".bill_desc_edit input[name='t_bill']").val(); if (this_val>100) { alert_("شروط",'قيمة الخصم اكثر من 100%'); $(this).val(0); }else if(this_val==""){ $(this).val(0); $(".bill_desc_edit input[name='desc_v']").val(0); }else if(isNaN(this_val)){ alert_("شروط",'يجب كتابة ارقام'); $(this).val(0); $(".bill_desc_edit input[name='desc_v']").val(0); }else{ $(".bill_desc_edit input[name='desc_v']").val((parseFloat(this_val)*parseFloat(t_cost))/100); } }) $("#desc_def_add").on("submit",function(e){ $(".desc_def_add input[name='t_bill']").removeAttr("disabled"); }) $(document).on("change",".left_side .select_client select",function(){ var name=this.selectedOptions[0].innerHTML; $('.name_cust_in_bill').html("اسم العميل :"+name) $('.name_cust_in_billEn').html("Client Name :"+name) }); //bill_prouduct remove readonly from input $(".edit_form_bill_pr_s").on("click blur keyup focus",function(e){ var this_val=$(this).val(); var all_val=$(".edit_form_bill_pr_s"); if (e.type=="keyup" || e.type=="blur" ) { if (isNaN(all_val[0].value) || isNaN(all_val[1].value) || isNaN(all_val[2].value) ) { alert_("شروط","يجب كتابة ارقام فقط"); $(this).val(this_val) } } if (e.type=="click") { $(this).removeAttr("readonly"); } }); $(document).on("click",".pop_up_request .close",function(){ $(".pop_up_request").fadeOut(100); $(".pop_up_request input").val(0) }); //other shop product popup $(document).on("click",".o_shop",function(){ $(".pop_up_request").fadeIn(100); var pop_up_inputs=$(".pop_up_request").find("input"); var this_input=$(this).find("input"); var product_i=this_input[0].value;//product id var num_per_unit=this_input[1].value;//orginal number per unit var details_num=this_input[2].value;//orginal details_num var define_type_num=this_input[3].value;//orginal details_num var shop_id=this_input[4].value;//orginal details_num var send_num_per_unit=pop_up_inputs[0].value; var send_details_num=pop_up_inputs[1].value; var append_pop="<input type='hidden' value='"+product_i+"'>"; append_pop+="<input type='hidden' value='"+num_per_unit+"'>"; append_pop+="<input type='hidden' value='"+details_num+"'>"; append_pop+="<input type='hidden' value='"+define_type_num+"'>"; append_pop+="<input type='hidden' value='"+shop_id+"'>"; $(".pop_up_request .pop_up_body .inp_h").html(append_pop); }) $(document).on("click",".alert_not .do_ .ac",function(){ var id_log=$(this).parents(".alert_sh_d_n").find(".da input").val(); var this_=$(this); $.ajax({ url:"check_type.php", method:"POST", data:{make:"accept_","i":id_log}, success:function(data) { var new_data=data.trim(); if (new_data=="done") { alert_("تم ","تم"); this_.parents(".alert_sh_d_n").fadeOut(100); } } }); }) $(document).on("click",".alert_not .do_ .dl",function(){ var id_log=$(this).parents(".alert_sh_d_n").find(".da input").val(); var this_=$(this); $.ajax({ url:"check_type.php", method:"POST", data:{make:"delete_","i":id_log}, success:function(data) { var new_data=data.trim(); if (new_data=="done") { alert_("تم ","تم"); this_.parents(".alert_sh_d_n").fadeOut(100); } } }); }) $(document).on("click",".pop_up_request .upper .get",function(){ var pop_up_inputs=$(".pop_up_request").find("input"); var this_input=$(this).parents(".pop_up_request").find(".pop_up_body input"); var product_i =parseFloat(this_input[3].value);//product id var num_per_unit =parseFloat(this_input[4].value);//orginal number per unit var details_num =parseFloat(this_input[5].value);//orginal details_num var 
define_type_num=parseFloat(this_input[6].value);//orginal details_num var proudct_shopid =parseFloat(this_input[7].value);//orginal details_num var send_num_per_unit=parseFloat(this_input[0].value); var send_details_num=parseFloat(this_input[1].value); var send_shop_id=parseFloat(this_input[2].value); var final_details_num_shop=(num_per_unit*define_type_num)+details_num; var final_details_num_shop_need=(send_num_per_unit*define_type_num)+send_details_num; if (final_details_num_shop_need>final_details_num_shop) { alert_("شروط","الكمية اكثر من المتاح فى المتجر "); this_input[0].value=0; this_input[1].value=0; }else if(send_num_per_unit==0&& send_details_num==0){ alert_("شروط","يجب تحديد العدد بالوحدة او العدد بالقطعة"); this_input[0].value=0; this_input[1].value=0; }else if(send_num_per_unit<=-1 || send_details_num<=-1){ alert_("شروط","القيم يجب ان تكون اكثر من 0"); this_input[0].value=0; this_input[1].value=0; }else if(1==2){ // check if the type of this krtona wla kilo to allow point // alert_("شروط","القيم يجب ان تكون اكثر من 0"); // this_input[0].value=0; // this_input[1].value=0; }else{ //send to data base var data={product_i:product_i, send_num_per_unit:send_num_per_unit, send_details_num:send_details_num, send_shop_id:send_shop_id, proudct_shopid:proudct_shopid, make:'get_product'} $.ajax({ url:"check_type.php", method:"POST", data:data, success:function(data) { var data_trim=data.trim(); if (data_trim=="product_id_error"){ alert_("حدث خطأ","حدث خطأ"); this_input[0].value=0; this_input[1].value=0; $(".pop_up_request").fadeOut(100); }else if(data_trim=="product_id_done"){ alert_("تم","تم ارسال طلبك"); $(".pop_up_request").fadeOut(100); this_input[0].value=0; this_input[1].value=0; }else if(data_trim=="log_error"){ alert_("شروط","يجب قبول او رفض الطلب المسبق لانشاء طلب اخر"); $(".pop_up_request").fadeOut(100); this_input[0].value=0; this_input[1].value=0; }else{ $(".pop_up_request").fadeOut(100); this_input[0].value=0; this_input[1].value=0; } } }); } // var orginal_num= mode_(pars,define_type_num);//change all product in another shop to details_num }); //manage_staff 7dor w ensraf $(document).on("click",".control_manage_staff .add",function(){ var this_parent=$(this).parent(".control") var day=$(this).parents(".control_manage_staff").find(".day").text(); var staff_i=$(this).parents(".control_manage_staff").find(".staff").text(); if(isNaN(staff_i)){ alert_("404","حدث خطا"); return null; }else{ var data={day:day,staff_i:staff_i,do:'7dor'} $.ajax({ url:"check_type.php", method:"POST", data:data, success:function(data) { var data_trim=data.trim(); if (data_trim=="7dor_done") { alert_("تم","تمت عملية الحضور"); //change button this_parent.html("<span class='btn btn-danger remove'>إلغاء الحضور</span>"); }else if(data_trim=="error_404"){ alert_("حدث خطا","يرجى المحاولة في وقت لاحق "); }else if(data_trim=="id_not_found"){ alert_("حدث خطا","حدث خطا"); }else{ alert_("حدث خطا","خطا"); } } }); } }); $(document).on("click",".control_manage_staff .remove",function(){ var this_parent=$(this).parent(".control"); var day=$(this).parents(".control_manage_staff").find(".day").text(); var staff_i=$(this).parents(".control_manage_staff").find(".staff").text(); if(isNaN(staff_i)){ alert_("404","حدث خطا"); return null; }else{ var data={day:day,staff_i:staff_i,do:'ensraf'} $.ajax({ url:"check_type.php", method:"POST", data:data, success:function(data) { var data_trim=data.trim(); if (data_trim=="del_done") { alert_("تم","تمت عملية الغاء الحضور"); //change button this_parent.html("<span class='btn btn-success 
add'>حضور</span>"); }else if(data_trim=="error_404"){ alert_("حدث خطا","يرجى المحاولة في وقت لاحق "); }else if(data_trim=="id_not_found"){ alert_("حدث خطا","حدث خطا"); }else{ alert_("حدث خطا","خطا"); } } }); } }); $(".save_customer").on("click",function(){ var id=$(".select_client .selectpicker").val(); var id_shop=$(".select_client .shop_i").val(); document.cookie = "default_customer"+id_shop+"="+id+"; expires=Thu, 18 Dec 2022 12:00:00 UTC"; }); $(document).on("click",".alert_not .alert_sh_d_n .close",function(){ $(this).parents(".alert_sh_d_n").fadeOut(100); }); num_only=function(t){ if(isNaN(t.value)){ alert_("شروط","يجب كتابة ارقام فقط"); t.value=0; } } function mode_(top,bottom){ var step1=Math.floor(top/bottom); var step2=step1*bottom; var step3=top-step2; var final_r=[step1,step3]; return final_r; } function alert_(m_mes,desc_mes){ $(".pop_up_alert ").fadeIn(100); $(".pop_up_alert .upper .content h3").html(m_mes); $(".pop_up_alert .upper .content p").html(desc_mes); $(".pop_up_alert .upper").fadeIn(100); } });//end of code
entry_defs.rs
use crate::core::ribosome::FnComponents; use crate::core::ribosome::HostAccess; use crate::core::ribosome::Invocation; use crate::core::ribosome::ZomesToInvoke; use derive_more::Constructor; use holochain_serialized_bytes::prelude::*; use holochain_types::prelude::*; use std::collections::BTreeMap; #[derive(Debug, Clone)] pub struct EntryDefsInvocation; impl EntryDefsInvocation { #[allow(clippy::new_without_default)] pub fn new() -> Self { Self } } #[derive(Clone, Constructor)] pub struct EntryDefsHostAccess; impl From<&HostAccess> for EntryDefsHostAccess { fn from(_: &HostAccess) -> Self { Self } } impl From<EntryDefsHostAccess> for HostAccess { fn from(entry_defs_host_access: EntryDefsHostAccess) -> Self { Self::EntryDefs(entry_defs_host_access) } } impl From<&EntryDefsHostAccess> for HostFnAccess { fn from(_: &EntryDefsHostAccess) -> Self { Self::none() } } impl Invocation for EntryDefsInvocation { fn zomes(&self) -> ZomesToInvoke { ZomesToInvoke::All } fn fn_components(&self) -> FnComponents { vec!["entry_defs".into()].into() } fn host_input(self) -> Result<ExternIO, SerializedBytesError> { ExternIO::encode(()) } } /// the aggregate result of _all_ entry defs callbacks #[derive(PartialEq, Debug, Clone)] pub enum EntryDefsResult { /// simple mapping between zome and defs Defs(BTreeMap<ZomeName, EntryDefs>), Err(ZomeName, String), } impl From<Vec<(ZomeName, EntryDefsCallbackResult)>> for EntryDefsResult { fn from(callback_results: Vec<(ZomeName, EntryDefsCallbackResult)>) -> Self { callback_results.into_iter().fold( EntryDefsResult::Defs(BTreeMap::new()), |acc, x| match x { // err overrides everything (zome_name, EntryDefsCallbackResult::Err(fail_string)) => { Self::Err(zome_name, fail_string) } // passing callback allows the acc to carry forward (zome_name, EntryDefsCallbackResult::Defs(defs)) => match acc { Self::Defs(mut btreemap) => { btreemap.insert(zome_name, defs); Self::Defs(btreemap) } Self::Err(_, _) => acc, }, }, ) } } #[cfg(test)] mod test { use super::EntryDefsHostAccess; use super::EntryDefsResult; use crate::core::ribosome::Invocation; use crate::core::ribosome::ZomesToInvoke; use crate::fixt::EntryDefsFixturator; use crate::fixt::EntryDefsInvocationFixturator; use crate::fixt::ZomeNameFixturator; use ::fixt::prelude::*; use holochain_types::prelude::*; use holochain_zome_types::entry_def::EntryDefsCallbackResult; use holochain_zome_types::ExternIO; use std::collections::BTreeMap; #[test] /// this is a non-standard fold test because the result is not so simple fn entry_defs_callback_result_fold() { let mut rng = ::fixt::rng(); let mut zome_name_fixturator = ZomeNameFixturator::new(::fixt::Unpredictable); let mut entry_defs_fixturator = EntryDefsFixturator::new(::fixt::Unpredictable); let mut string_fixturator = StringFixturator::new(::fixt::Unpredictable); // zero defs assert_eq!(EntryDefsResult::Defs(BTreeMap::new()), vec![].into(),); // one defs let zome_name = zome_name_fixturator.next().unwrap(); let entry_defs = entry_defs_fixturator.next().unwrap(); assert_eq!( EntryDefsResult::Defs({ let mut tree = BTreeMap::new(); tree.insert(zome_name.clone(), entry_defs.clone()); tree }), vec![(zome_name, EntryDefsCallbackResult::Defs(entry_defs)),].into(), ); // two defs let zome_name_one = zome_name_fixturator.next().unwrap(); let entry_defs_one = entry_defs_fixturator.next().unwrap(); let zome_name_two = zome_name_fixturator.next().unwrap(); let entry_defs_two = entry_defs_fixturator.next().unwrap(); assert_eq!( EntryDefsResult::Defs({ let mut tree = BTreeMap::new(); 
tree.insert(zome_name_one.clone(), entry_defs_one.clone()); tree.insert(zome_name_two.clone(), entry_defs_two.clone()); tree }), vec![ (zome_name_one, EntryDefsCallbackResult::Defs(entry_defs_one)), (zome_name_two, EntryDefsCallbackResult::Defs(entry_defs_two)), ] .into() ); // some err let mut results = vec![]; let number_of_fails = rng.gen_range(1, 3); let number_of_defs = rng.gen_range(0, 3); for _ in 0..number_of_fails { results.push(( zome_name_fixturator.next().unwrap(), EntryDefsCallbackResult::Err(string_fixturator.next().unwrap()), )); } for _ in 0..number_of_defs { results.push(( zome_name_fixturator.next().unwrap(), EntryDefsCallbackResult::Defs(entry_defs_fixturator.next().unwrap()), )); } results.shuffle(&mut rng); let result: EntryDefsResult = results.into(); match result { EntryDefsResult::Err(_, _) => assert!(true), _ => assert!(false), } } #[test] fn entry_defs_host_access() { assert_eq!( HostFnAccess::from(&EntryDefsHostAccess), HostFnAccess::none() ); } #[tokio::test(flavor = "multi_thread")] async fn entry_defs_invocation_zomes() { let entry_defs_invocation = EntryDefsInvocationFixturator::new(::fixt::Unpredictable) .next() .unwrap(); assert_eq!(ZomesToInvoke::All, entry_defs_invocation.zomes(),); } #[tokio::test(flavor = "multi_thread")] async fn entry_defs_invocation_fn_components() { let entry_defs_invocation = EntryDefsInvocationFixturator::new(::fixt::Unpredictable) .next() .unwrap(); let mut expected = vec!["entry_defs"]; for fn_component in entry_defs_invocation.fn_components() { assert_eq!(fn_component, expected.pop().unwrap()); } } #[tokio::test(flavor = "multi_thread")] async fn entry_defs_invocation_host_input() { let entry_defs_invocation = EntryDefsInvocationFixturator::new(::fixt::Unpredictable) .next() .unwrap(); let host_input = entry_defs_invocation.clone().host_input().unwrap(); assert_eq!(host_input, ExternIO::encode(()).unwrap()); } } #[cfg(test)] #[cfg(feature = "slow_tests")] mod slow_tests { use crate::core::ribosome::guest_callback::entry_defs::EntryDefsHostAccess; use crate::core::ribosome::guest_callback::entry_defs::EntryDefsResult; use crate::core::ribosome::RibosomeT; use crate::fixt::curve::Zomes; use crate::fixt::EntryDefsInvocationFixturator; use crate::fixt::RealRibosomeFixturator; use crate::fixt::ZomeCallHostAccessFixturator; use ::fixt::prelude::*; use holochain_types::prelude::*; use holochain_wasm_test_utils::TestWasm; pub use holochain_zome_types::entry_def::EntryVisibility; use std::collections::BTreeMap; #[tokio::test(flavor = "multi_thread")] async fn test_entry_defs_unimplemented() { let ribosome = RealRibosomeFixturator::new(Zomes(vec![TestWasm::Foo])) .next() .unwrap(); let entry_defs_invocation = EntryDefsInvocationFixturator::new(::fixt::Empty) .next() .unwrap(); let result = ribosome .run_entry_defs(EntryDefsHostAccess, entry_defs_invocation) .unwrap(); assert_eq!(result, EntryDefsResult::Defs(BTreeMap::new()),); } #[tokio::test(flavor = "multi_thread")] async fn test_entry_defs_index_lookup()
{ let test_env = holochain_lmdb::test_utils::test_cell_env(); let env = test_env.env(); let mut workspace = crate::core::workflow::CallZomeWorkspace::new(env.clone().into()).unwrap(); crate::core::workflow::fake_genesis(&mut workspace.source_chain) .await .unwrap(); let workspace_lock = crate::core::workflow::CallZomeWorkspaceLock::new(workspace); let mut host_access = fixt!(ZomeCallHostAccess); host_access.workspace = workspace_lock; let output: () = crate::call_test_ribosome!(host_access, TestWasm::EntryDefs, "assert_indexes", ()); assert_eq!(&(), &output); }
#[tokio::test(flavor = "multi_thread")] async fn test_entry_defs_implemented_defs() { let ribosome = RealRibosomeFixturator::new(Zomes(vec![TestWasm::EntryDefs])) .next() .unwrap(); let entry_defs_invocation = EntryDefsInvocationFixturator::new(::fixt::Empty) .next() .unwrap(); let result = ribosome .run_entry_defs(EntryDefsHostAccess, entry_defs_invocation) .unwrap(); assert_eq!( result, EntryDefsResult::Defs({ let mut tree = BTreeMap::new(); let zome_name: ZomeName = "entry_defs".into(); let defs: EntryDefs = vec![ EntryDef { id: "post".into(), visibility: EntryVisibility::Public, crdt_type: CrdtType, required_validations: 5.into(), required_validation_type: Default::default(), }, EntryDef { id: "comment".into(), visibility: EntryVisibility::Private, crdt_type: CrdtType, required_validations: 5.into(), required_validation_type: Default::default(), }, ] .into(); tree.insert(zome_name, defs); tree }), ); } }
rest.rs
use crate::{ api::{ client::{Client, ViewType}, error::{BatchError, CreateError, DeleteError, GetError, StorageError, UpdateError}, experiment::Experiment, limits, run::{Metric, Param, Run, RunData, RunInfo, RunStatus, RunTag}, search::{PageToken, RunList, Search}, }, ExperimentId, RunId, }; use anyhow::{Context, Error}; use serde::{de::DeserializeOwned, Deserialize, Serialize}; use std::{ fmt::Display, io::{Read, Write}, }; #[derive(Deserialize)] struct RestErrorResponse { pub error_code: RestErrorCode, pub message: String, } #[derive(Debug, Clone, thiserror::Error)] pub enum RestError { #[error("{status} {code}: {message}")] Known { status: u16, code: RestErrorCode, message: String, }, #[error("Unknown {status} error:\n{body}")] Unknown { status: u16, body: String }, } #[derive(Debug, Clone, PartialEq, Eq, Deserialize)] #[serde(from = "&str")] pub enum RestErrorCode { ResourceAlreadyExists, ResourceDoesNotExist, InvalidParameterValue, Unknown(String), } impl From<&str> for RestErrorCode { fn from(value: &str) -> Self { match value { "RESOURCE_ALREADY_EXISTS" => RestErrorCode::ResourceAlreadyExists, "RESOURCE_DOES_NOT_EXIST" => RestErrorCode::ResourceDoesNotExist, "INVALID_PARAMETER_VALUE" => RestErrorCode::InvalidParameterValue, _ => return RestErrorCode::Unknown(value.to_owned()), } } } impl Display for RestErrorCode { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{:?}", self) } } #[derive(PartialEq, Eq)] enum RestMethod { Get, Post, } impl RestMethod { fn handler(&self) -> fn (&str) -> ureq::Request { match self { Self::Get => ureq::get, Self::Post => ureq::post, } } } pub struct Server { api_url: String, } fn parse_error(response: ureq::Response) -> RestError { let status = response.status(); let body = response .into_string() .unwrap_or_else(|_| "Could not turn error body into String.".to_string()); let response = serde_json::from_str::<RestErrorResponse>(&body).ok(); if let Some(response) = response { RestError::Known { status, code: response.error_code, message: response.message, } } else { RestError::Unknown { status, body } } } impl Server { pub fn new(api_url: impl Into<String>) -> Self { Server { api_url: api_url.into(), } } fn execute<Ep, Val, Hand, Err>(&mut self, request: Ep, error_handler: Hand) -> Result<Val, Err> where Ep: Endpoint<Value = Val> + EndpointExt, Hand: FnOnce(RestError) -> Err, Err: From<anyhow::Error>, { let url = format!("{}/{}", self.api_url, Ep::PATH); let http_response = if Ep::METHOD == RestMethod::Get { let query_str = Ep::write_request_query_string(&request).context("serializing request failed")?; Ep::METHOD.handler()(&url).query_str(&query_str).call() } else { let buffer = Ep::write_request_body_string(&request).context("serializing request failed")?; Ep::METHOD.handler()(&url).send_string(&buffer) }; if http_response.error() { let error = parse_error(http_response); Err(error_handler(error)) } else { let response_string = http_response .into_string() .context("failed to turn response into string")?; let response = Ep::read_response_string(&response_string) .with_context(|| format!("deserializing response failed:\n{}", &response_string))?; let value = Ep::extract(response); Ok(value) } } } #[allow(unused_variables)] impl Client for Server { fn create_experiment(&mut self, name: &str) -> Result<ExperimentId, CreateError> { let request = CreateExperiment { name, artifact_location: None,
}; self.execute(request, |error| match error { RestError::Known {
code: RestErrorCode::ResourceAlreadyExists, .. } => CreateError::AlreadyExists(name.to_string()), _ => CreateError::Storage(error.into()), }) } fn list_experiments(&mut self, view_type: ViewType) -> Result<Vec<Experiment>, StorageError> { let request = ListExperiments { view_type }; self.execute(request, StorageError::from) } fn get_experiment(&mut self, id: &ExperimentId) -> Result<Experiment, GetError> { let request = GetExperiment { experiment_id: id }; self.execute(request, |error| match error { RestError::Known { code: RestErrorCode::ResourceDoesNotExist, .. } => GetError::DoesNotExist(id.as_ref().to_string()), _ => GetError::Storage(error.into()), }) } fn get_experiment_by_name(&mut self, name: &str) -> Result<Experiment, GetError> { let request = GetExperimentByName { experiment_name: name, }; self.execute(request, |error| match error { RestError::Known { code: RestErrorCode::ResourceDoesNotExist, .. } => GetError::DoesNotExist(name.to_string()), _ => GetError::Storage(error.into()), }) } fn delete_experiment(&mut self, id: &ExperimentId) -> Result<(), DeleteError> { let request = DeleteExperiment { experiment_id: id }; self.execute(request, |error| match error { RestError::Known { code: RestErrorCode::ResourceDoesNotExist, .. } => GetError::DoesNotExist(id.as_ref().to_string()), _ => GetError::Storage(error.into()), }) } fn update_experiment( &mut self, id: &ExperimentId, new_name: Option<&str>, ) -> Result<(), StorageError> { let request = UpdateExperiment { experiment_id: id, new_name, }; self.execute(request, StorageError::from) } fn create_run( &mut self, experiment_id: &ExperimentId, start_time: i64, tags: &[RunTag], ) -> Result<Run, StorageError> { let request = CreateRun { experiment_id, start_time, tags, }; self.execute(request, StorageError::from) } fn delete_run(&mut self, id: &RunId) -> Result<(), DeleteError> { let request = DeleteRun { run_id: id }; self.execute(request, |error| match error { RestError::Known { code: RestErrorCode::ResourceDoesNotExist, .. } => GetError::DoesNotExist(id.as_ref().to_string()), _ => GetError::Storage(error.into()), }) } fn get_run(&mut self, id: &RunId) -> Result<Run, GetError> { let request = GetRun { run_id: id }; self.execute(request, |error| match error { RestError::Known { code: RestErrorCode::ResourceDoesNotExist, .. } => GetError::DoesNotExist(id.as_ref().to_string()), _ => GetError::Storage(error.into()), }) } fn update_run( &mut self, id: &RunId, status: RunStatus, end_time: i64, ) -> Result<RunInfo, UpdateError> { let request = UpdateRun { run_id: id, status, end_time, }; self.execute(request, |error| match error { RestError::Known { code: RestErrorCode::ResourceDoesNotExist, .. 
} => UpdateError::DoesNotExist(id.as_ref().to_string()), _ => UpdateError::Storage(error.into()), }) } fn search_runs( &mut self, experiment_ids: &[&ExperimentId], filter: &str, run_view_type: ViewType, max_results: i32, order_by: Option<&str>, page_token: Option<&str>, ) -> Result<Search, StorageError> { let request = SearchRuns { experiment_ids, filter, run_view_type, max_results, order_by, page_token, }; self.execute(request, StorageError::from) } fn list_run_infos( &mut self, experiment: &ExperimentId, run_view_type: ViewType, max_results: i32, order_by: Option<&str>, page_token: Option<&str>, ) -> Result<RunList, StorageError> { let request = ListRunInfos { experiment_ids: &[experiment], filter: "", run_view_type, max_results, order_by, page_token, }; self.execute(request, StorageError::from) } fn get_metric_history(&mut self, run: &RunId, metric: &str) -> Result<Vec<Metric>, GetError> { let request = GetHistory { run_id: run, metric_key: metric, }; self.execute(request, |error| match error { RestError::Known { code: RestErrorCode::ResourceDoesNotExist, .. } => UpdateError::DoesNotExist(run.as_ref().to_string()), _ => UpdateError::Storage(error.into()), }) } fn log_param(&mut self, run_id: &RunId, key: &str, value: &str) -> Result<(), StorageError> { let request = LogParam { run_id, key, value }; self.execute(request, StorageError::from) } fn log_metric( &mut self, run_id: &RunId, key: &str, value: f64, timestamp: i64, step: i64, ) -> Result<(), StorageError> { let request = LogMetric { run_id, key, value, timestamp, step, }; self.execute(request, StorageError::from) } fn log_batch( &mut self, run: &RunId, metrics: &[Metric], params: &[Param], tags: &[RunTag], ) -> Result<(), BatchError> { if metrics.len() > limits::BATCH_METRICS { return Err(BatchError::ToManyMetrics(metrics.len())); } if params.len() > limits::BATCH_PARAMS { return Err(BatchError::ToManyParams(params.len())); } if tags.len() > limits::BATCH_TAGS { return Err(BatchError::ToManyTags(tags.len())); } let total_len = metrics.len() + params.len() + tags.len(); if total_len > limits::BATCH_TOTAL { return Err(BatchError::ToManyItems(total_len)); } let request = LogBatch { run_id: run, metrics, params, tags, }; self.execute(request, |err| BatchError::Storage(err.into())) } } trait Endpoint { const PATH: &'static str; const METHOD: RestMethod; type Response; type Value; fn extract(response: Self::Response) -> Self::Value; } trait VoidEndpoint { const PATH: &'static str; const METHOD: RestMethod; } trait EndpointExt: Endpoint { fn write_request(request: &Self, writer: impl Write) -> Result<(), Error>; fn read_response(reader: impl Read) -> Result<Self::Response, Error>; fn read_response_string(response: &str) -> Result<Self::Response, Error>; fn write_request_body_string(request: &Self) -> Result<String, Error>; fn write_request_query_string(request: &Self) -> Result<String, Error>; } impl<E> Endpoint for E where E: VoidEndpoint, { const PATH: &'static str = E::PATH; const METHOD: RestMethod = E::METHOD; type Response = VoidResponse; type Value = (); fn extract(_response: Self::Response) -> Self::Value { () } } impl<P, R, V> EndpointExt for P where P: Serialize, R: DeserializeOwned, P: Endpoint<Response = R, Value = V>, { fn write_request(request: &Self, writer: impl Write) -> Result<(), Error> { serde_json::to_writer(writer, &request)?; Ok(()) } fn read_response(reader: impl Read) -> Result<Self::Response, Error> { let response = serde_json::from_reader::<_, R>(reader)?; Ok(response) } fn read_response_string(response: &str) 
-> Result<Self::Response, Error> { let response = serde_json::from_str::<'_, R>(response)?; Ok(response) } fn write_request_body_string(request: &Self) -> Result<String, Error> { Ok(serde_json::to_string(request)?) } fn write_request_query_string(request: &Self) -> Result<String, Error> { Ok(serde_qs::to_string(request)?) } } #[derive(Deserialize)] struct VoidResponse {} #[derive(Debug, Clone, Copy, Serialize)] struct CreateExperiment<'a> { pub name: &'a str, pub artifact_location: Option<&'a str>, } #[derive(Deserialize)] struct CreateExperimentResponse { experiment_id: ExperimentId, } impl Endpoint for CreateExperiment<'_> { const PATH: &'static str = "2.0/mlflow/experiments/create"; const METHOD: RestMethod = RestMethod::Post; type Response = CreateExperimentResponse; type Value = ExperimentId; fn extract(response: Self::Response) -> Self::Value { response.experiment_id } } #[derive(Debug, Clone, Copy, Serialize)] struct GetExperiment<'a> { pub experiment_id: &'a ExperimentId, } #[derive(Deserialize)] struct GetExperimentResponse { experiment: Experiment, } impl Endpoint for GetExperiment<'_> { const PATH: &'static str = "2.0/mlflow/experiments/get"; const METHOD: RestMethod = RestMethod::Get; type Value = Experiment; type Response = GetExperimentResponse; fn extract(response: Self::Response) -> Self::Value { response.experiment } } #[derive(Debug, Clone, Copy, Serialize)] struct UpdateExperiment<'a> { pub experiment_id: &'a ExperimentId, pub new_name: Option<&'a str>, } impl VoidEndpoint for UpdateExperiment<'_> { const PATH: &'static str = "2.0/mlflow/experiments/update"; const METHOD: RestMethod = RestMethod::Post; } #[derive(Debug, Clone, Copy, Serialize)] struct ListExperiments { pub view_type: ViewType, } #[derive(Deserialize)] struct ListExperimentsResponse { experiments: Vec<Experiment>, } impl Endpoint for ListExperiments { const PATH: &'static str = "2.0/mlflow/experiments/list"; const METHOD: RestMethod = RestMethod::Get; type Response = ListExperimentsResponse; type Value = Vec<Experiment>; fn extract(response: Self::Response) -> Self::Value { response.experiments } } #[derive(Debug, Clone, Copy, Serialize)] struct GetExperimentByName<'a> { pub experiment_name: &'a str, } impl Endpoint for GetExperimentByName<'_> { const PATH: &'static str = "2.0/mlflow/experiments/get-by-name"; const METHOD: RestMethod = RestMethod::Get; type Value = Experiment; type Response = GetExperimentResponse; fn extract(response: Self::Response) -> Self::Value { response.experiment } } #[derive(Debug, Clone, Copy, Serialize)] struct DeleteExperiment<'a> { pub experiment_id: &'a ExperimentId, } impl VoidEndpoint for DeleteExperiment<'_> { const PATH: &'static str = "2.0/mlflow/experiments/delete"; const METHOD: RestMethod = RestMethod::Post; } #[derive(Debug, Clone, Copy, Serialize)] struct CreateRun<'a> { pub experiment_id: &'a ExperimentId, pub start_time: i64, pub tags: &'a [RunTag], } #[derive(Deserialize)] struct GetRunResponse { run: Run, } impl Endpoint for CreateRun<'_> { const PATH: &'static str = "2.0/mlflow/runs/create"; const METHOD: RestMethod = RestMethod::Post; type Response = GetRunResponse; type Value = Run; fn extract(response: Self::Response) -> Self::Value { response.run } } #[derive(Debug, Clone, Copy, Serialize)] struct DeleteRun<'a> { pub run_id: &'a RunId, } impl VoidEndpoint for DeleteRun<'_> { const PATH: &'static str = "2.0/mlflow/runs/delete"; const METHOD: RestMethod = RestMethod::Post; } #[derive(Debug, Clone, Copy, Serialize)] struct GetRun<'a> { pub run_id: &'a RunId, 
} impl Endpoint for GetRun<'_> { const PATH: &'static str = ""; const METHOD: RestMethod = RestMethod::Get; type Response = GetRunResponse; type Value = Run; fn extract(response: Self::Response) -> Self::Value { response.run } } #[derive(Debug, Clone, Copy, Serialize)] struct LogParam<'a> { pub run_id: &'a RunId, pub key: &'a str, pub value: &'a str, } impl VoidEndpoint for LogParam<'_> { const PATH: &'static str = "2.0/mlflow/runs/log-parameter"; const METHOD: RestMethod = RestMethod::Post; } #[derive(Debug, Clone, Copy, Serialize)] struct LogMetric<'a> { pub run_id: &'a RunId, pub key: &'a str, pub value: f64, pub timestamp: i64, pub step: i64, } impl VoidEndpoint for LogMetric<'_> { const PATH: &'static str = "2.0/mlflow/runs/log-metric"; const METHOD: RestMethod = RestMethod::Post; } #[derive(Debug, Clone, Copy, Serialize)] struct UpdateRun<'a> { pub run_id: &'a RunId, pub status: RunStatus, pub end_time: i64, } #[derive(Deserialize)] struct UpdateRunResponse { run_info: RunInfo, } impl Endpoint for UpdateRun<'_> { const PATH: &'static str = "2.0/mlflow/runs/update"; const METHOD: RestMethod = RestMethod::Post; type Response = UpdateRunResponse; type Value = RunInfo; fn extract(response: Self::Response) -> Self::Value { response.run_info } } #[derive(Debug, Clone, Copy, Serialize)] struct LogBatch<'a> { pub run_id: &'a RunId, pub metrics: &'a [Metric<'a>], pub params: &'a [Param], pub tags: &'a [RunTag], } impl VoidEndpoint for LogBatch<'_> { const PATH: &'static str = "2.0/mlflow/runs/log-batch"; const METHOD: RestMethod = RestMethod::Post; } #[derive(Debug, Clone, Copy, Serialize)] struct SearchRuns<'a> { pub experiment_ids: &'a [&'a ExperimentId], pub filter: &'a str, pub run_view_type: ViewType, pub max_results: i32, pub order_by: Option<&'a str>, pub page_token: Option<&'a str>, } impl Endpoint for SearchRuns<'_> { const PATH: &'static str = ""; const METHOD: RestMethod = RestMethod::Post; type Response = Search; type Value = Search; fn extract(response: Self::Response) -> Self::Value { response } } #[derive(Debug, Clone, Copy, Serialize)] struct ListRunInfos<'a> { pub experiment_ids: &'a [&'a ExperimentId], pub filter: &'a str, pub run_view_type: ViewType, pub max_results: i32, pub order_by: Option<&'a str>, pub page_token: Option<&'a str>, } #[derive(Deserialize)] struct ListRunInfosRun { info: RunInfo, #[allow(dead_code)] #[serde(default, skip)] data: RunData, } #[derive(Deserialize)] struct ListRunInfosResponse { pub runs: Vec<ListRunInfosRun>, pub next_page_token: PageToken, } impl Endpoint for ListRunInfos<'_> { const PATH: &'static str = SearchRuns::PATH; const METHOD: RestMethod = SearchRuns::METHOD; type Response = ListRunInfosResponse; type Value = RunList; fn extract(response: Self::Response) -> Self::Value { RunList { runs: response.runs.into_iter().map(|r| r.info).collect(), page_token: response.next_page_token, } } } #[derive(Debug, Clone, Copy, Serialize)] struct GetHistory<'a> { pub run_id: &'a RunId, pub metric_key: &'a str, } #[derive(Deserialize)] struct GetHistoryResponse { metrics: Vec<Metric<'static>>, } impl Endpoint for GetHistory<'_> { const PATH: &'static str = "2.0/mlflow/metrics/get-history"; const METHOD: RestMethod = RestMethod::Get; type Response = GetHistoryResponse; type Value = Vec<Metric<'static>>; fn extract(response: Self::Response) -> Self::Value { response.metrics } } #[cfg(test)] mod tests { use super::GetExperimentResponse; #[test] fn parse_get_experiment_response() { let response = r#" { "experiment": { "experiment_id": "1", "name": "T1", 
"artifact_location": "./mlruns/1", "lifecycle_stage": "active" } } "#; let parsed = serde_json::from_str::<GetExperimentResponse>(response).unwrap(); assert_eq!(parsed.experiment.experiment_id.as_ref(), "1"); } }
build.rs
// Copyright © 2015, Peter Atashian
// Licensed under the MIT License <LICENSE.md>
fn main() {
    println!("cargo:rustc-flags=-l slcext");
}
canvas_test_6.py
import cv2 import os import time import subprocess #from matplotlib import pyplot as plt import numpy as np #from test_video import get_predictions_results #cam_capture = cv2.VideoCapture(0) #cv2.destroyAllWindows() """ TODO: 1. Start video at specified time 2. Right click to indicate trimming points 3. Output file name """ frame_time = 10 frame_count = 0 global_trim_time = None crop_started = False class VideoCropTool: def __init__(self, video_path, output_file, output_folder, video_start_time, capture, output_label, time_window_on = False,time_window=3): """ Args: video_path: output_file: output_folder: video_start_time: capture: output_label: time_window_on: time_window: """ self.video_path = video_path self.output_file = output_file self.output_folder = output_folder self.output_label=output_label self.video_start_time = video_start_time self.cap = capture # self.video_start_frame = video_start_frame #for clikc box #self.start = (0,0) self.box_started = False self.box_created = False self.box_finished = False self.start = None self.end = None #for cropping time self.global_trim_time = None self.global_trim_time_secs = None self.crop_started = False self.start_trim_time = None self.end_trim_time = None self.start_trim_time_secs = None self.end_trim_time_secs = None self.time_window = time_window self.time_crop_secs = 0 self.recording = False #result self.result_text = "" #frame properties self.frame_width = 0 self.frame_height = 0 def click_box(self,event, x,y, flags, param): """ Detects and processes left and right clicks of the mouse on the opencv frame Args: event: x: y: flags: param: Returns: None """ #Start drawing the box if the left button is clicked if event == cv2.EVENT_LBUTTONDOWN: self.start = (x, y) self.box_started = True #Drag the box if the mouse is moving elif event == cv2.EVENT_MOUSEMOVE: self.end = (x, y) #Finalize the box if the left button is raised elif event == cv2.EVENT_LBUTTONUP: # global box_created self.final_end = (x, y) self.box_created = True elif event == cv2.EVENT_RBUTTONDOWN: # cropping time starts # global crop_started if self.crop_started != True: self.crop_started = True self.start_trim_time = self.global_trim_time self.start_trim_time_secs = self.global_trim_time_secs self.recording = True else: self.crop_started = False self.trim_end_time = self.global_trim_time #self.box_created = True self.box_finished = True self.end_trim_time = self.global_trim_time self.end_trim_time_secs = self.global_trim_time_secs self.time_crop_secs = self.end_trim_time_secs-self.start_trim_time_secs print('crop time') print(self.time_crop_secs) self.recording = False def crop_and_label(self): """ - Plays back the selected video in an opencv frame and allows for cropping/time selection - Sorts the cropped video into a folder named after the given label Returns: None """ while (self.cap.isOpened()): # Capture frame-by-frame ret, frame = self.cap.read() cv2.namedWindow("Frame") cv2.setMouseCallback("Frame", self.click_box) # get vcap property (height and width) self.frame_width = self.cap.get(cv2.CAP_PROP_FRAME_WIDTH) # float `width` self.frame_height = self.cap.get(cv2.CAP_PROP_FRAME_HEIGHT) # float `height` # global frame_count # frame_count += 1 # r = cv2.selectROI("Image", frame, fromCenter, showCrosshair) if ret == True: if self.box_started: rectangle_thickness=30 if self.box_created: cv2.rectangle(frame, self.start, self.final_end, thickness=rectangle_thickness,color=333) else: cv2.rectangle(frame, self.start, self.end,thickness=rectangle_thickness, color=333) # except: 
# cv2.rectangle(frame, self.start, self.end, color=333) # Display the resulting frame current_time = self.cap.get(cv2.CAP_PROP_POS_MSEC) current_time_in_secs = round(current_time / 1000) self.global_trim_time_secs = current_time_in_secs current_time_secs = current_time_in_secs % 60 current_time_mins = current_time_in_secs // 60 prev_time_in_secs = current_time_in_secs - self.time_window prev_time_secs = prev_time_in_secs % 60 prev_time_mins = prev_time_in_secs // 60 if (current_time_mins // 10 == 0): # single digit current_time_mins_str = "0" + str(current_time_mins) else:
current_time_mins_str = str(current_time_mins)
if (current_time_secs // 10 == 0): # single digit current_time_secs_str = "0" + str(current_time_secs) else: current_time_secs_str = str(current_time_secs) if (prev_time_mins // 10 == 0): # single digit prev_time_mins_str = "0" + str(prev_time_mins) else: prev_time_mins_str = str(prev_time_mins) if (prev_time_secs // 10 == 0): # single digit prev_time_secs_str = "0" + str(prev_time_secs) else: prev_time_secs_str = str(prev_time_secs) # if (self.time_window ): # single digit if (self.time_crop_secs<10): #TIME_WINDOW_STR = "0" + str(self.time_window) TIME_WINDOW_STR = "00:00:"+"0" + str(self.time_crop_secs) else: TIME_WINDOW_STR = "00:00:"+str(self.time_crop_secs) end_time = "00:" + current_time_mins_str + ":" + current_time_secs_str # global global_trim_time self.global_trim_time = end_time start_time = "00:" + prev_time_mins_str + ":" + prev_time_secs_str # cut_time = "00:00:"+TIME_WINDOW_STR text = str(round(current_time, 2)) # try: # result_text = get_predictions_results() # except: org = (50, 50) result_origin = (50, 200) color = (255, 0, 0) thickness = 2 fontScale = 1 font = cv2.FONT_HERSHEY_SIMPLEX cv2.putText(frame, text, org, font, fontScale, color, thickness, cv2.LINE_AA) cv2.putText(frame, self.result_text, result_origin, font, fontScale, color, thickness, cv2.LINE_AA) #Red dot while cropping if self.recording: # Radius of circle radius = 20 # Center coordinates circle_center_coordinates = (int(self.frame_width) - radius - 20, 50) # Red color in BGR circle_color = (0, 0, 255) # Line thickness of -1 px circle_thickness = -1 # Using cv2.circle() method # Draw a circle of red color of thickness -1 px image = cv2.circle(frame, circle_center_coordinates, radius, circle_color, circle_thickness) cv2.imshow('Frame', frame) if self.box_finished: left_arg = "-l " + str(self.start[0]) + " " top_arg = "-t " + str(self.start[1]) + " " width_arg = "-w " + str(self.final_end[0] - self.start[0]) + " " height_arg = "-h " + str(self.final_end[1] -self.start[1]) + " " video_arg = "-f " + self.video_path + " " output_arg = "-o " + self.output_folder + "/" + self.output_label + "/" + self.output_file + " " beginning_arg = "-b " + str(self.start_trim_time_secs) + " " end_arg = "-e " + TIME_WINDOW_STR # print("beginning and end ") # print(beginning_arg) # print(end_arg) crop_time_start = time.time() if not os.path.exists(self.output_folder+"/"+self.output_label): os.makedirs(self.output_folder+"/"+self.output_label) command = "bash " + "crop_tool.sh " + video_arg + left_arg + top_arg + width_arg + height_arg + output_arg + beginning_arg + end_arg os.chmod("./output_command.sh", 0o755) with open("output_command.sh", "w") as text_file: text_file.write('#!/bin/bash') text_file.write("\n") text_file.write(command + "\n") text_file.write('#hello') os.chmod("./output_command.sh", 0o755) subprocess.check_call(["./output_command.sh"]) crop_time_end = time.time() crop_elapsed_time = crop_time_end - crop_time_start print("Crop Time: " + str(crop_elapsed_time)) # video_model_command = "python test_video.py --draw_crop_test.mp4 --arch resnet3d50" # reset self.box_created = False self.box_started = False self.box_finished = False with open("custom_labels.txt", "a+") as text_file: # all_labels = text_file.read() label_exists = False # print('all labels') # print(all_labels) for line in text_file: if line==self.output_label: label_exists=True break if not label_exists: text_file.write("\n") text_file.write(self.output_label) print(self.output_label) # Press Q on keyboard to exit if cv2.waitKey(frame_time) & 0xFF == 
ord('q'): break # Break the loop else: break self.cap.release() cv2.destroyAllWindows() def crop_and_predict(self): """ - Plays back the selected video in an opencv frame and allows for cropping/time selection - Runs the moments in time model and gives the top 5 predictions for the selected segment in the terminal Returns: None """ while (self.cap.isOpened()): # Capture frame-by-frame ret, frame = self.cap.read() cv2.namedWindow("Frame") cv2.setMouseCallback("Frame", self.click_box) # get vcap property (height and width) self.frame_width = self.cap.get(cv2.CAP_PROP_FRAME_WIDTH) # float `width` self.frame_height = self.cap.get(cv2.CAP_PROP_FRAME_HEIGHT) # float `height` # global frame_count # frame_count += 1 # r = cv2.selectROI("Image", frame, fromCenter, showCrosshair) if ret == True: if self.box_started: # print('boxes') # print(self.start) # print(self.end) rectangle_thickness = 10 if self.box_created: cv2.rectangle(frame, self.start, self.final_end, thickness=rectangle_thickness, color=333) else: cv2.rectangle(frame, self.start, self.end, thickness=rectangle_thickness, color=333) # except: # cv2.rectangle(frame, self.start, self.end, color=333) # Display the resulting frame current_time = self.cap.get(cv2.CAP_PROP_POS_MSEC) current_time_in_secs = round(current_time / 1000) current_time_secs = current_time_in_secs % 60 current_time_mins = current_time_in_secs // 60 self.global_trim_time_secs = current_time_in_secs prev_time_in_secs = current_time_in_secs - self.time_window prev_time_secs = prev_time_in_secs % 60 prev_time_mins = prev_time_in_secs // 60 if (current_time_mins // 10 == 0): # single digit current_time_mins_str = "0" + str(current_time_mins) else: current_time_mins_str = str(current_time_mins) if (current_time_secs // 10 == 0): # single digit current_time_secs_str = "0" + str(current_time_secs) else: current_time_secs_str = str(current_time_secs) if (prev_time_mins // 10 == 0): # single digit prev_time_mins_str = "0" + str(prev_time_mins) else: prev_time_mins_str = str(prev_time_mins) if (prev_time_secs // 10 == 0): # single digit prev_time_secs_str = "0" + str(prev_time_secs) else: prev_time_secs_str = str(prev_time_secs) #if (self.time_window // 10 == 0 and self.time_window!=10): # single digit if (self.time_crop_secs < 10): TIME_WINDOW_STR = "00:00:"+"0" + str(self.time_crop_secs) else: TIME_WINDOW_STR = "00:00:"+str(self.time_crop_secs) end_time = "00:" + current_time_mins_str + ":" + current_time_secs_str # global global_trim_time self.global_trim_time = end_time start_time = "00:" + prev_time_mins_str + ":" + prev_time_secs_str # cut_time = "00:00:"+TIME_WINDOW_STR text = str(round(current_time, 2)) # try: # result_text = get_predictions_results() # print(result_text) # except: org = (50, 50) result_origin = (50, 200) color = (255, 0, 0) thickness = 2 fontScale = 1 font = cv2.FONT_HERSHEY_SIMPLEX cv2.putText(frame, text, org, font, fontScale, color, thickness, cv2.LINE_AA) cv2.putText(frame, self.result_text, result_origin, font, fontScale, color, thickness, cv2.LINE_AA) # Red dot while cropping if self.recording: #print('recording') # Radius of circle radius = 20 # Center coordinates circle_center_coordinates = (int(self.frame_width) - radius - 20, 50) # Red color in BGR circle_color = (0, 0, 255) # Line thickness of -1 px circle_thickness = -1 # Using cv2.circle() method # Draw a circle of red color of thickness -1 px cv2.circle(frame, circle_center_coordinates, radius, circle_color, circle_thickness) cv2.imshow('Frame', frame) if self.box_finished: left_arg = "-l " 
+ str(self.start[0]) + " " top_arg = "-t " + str(self.start[1]) + " " width_arg = "-w " + str(self.final_end[0] - self.start[0]) + " " height_arg = "-h " + str(self.final_end[1] -self.start[1]) + " " video_arg = "-f " + self.video_path + " " output_arg = "-o " + self.output_folder + "/" + self.output_file + " " beginning_arg = "-b " + str(self.start_trim_time_secs)+ " " end_arg = "-e " + TIME_WINDOW_STR # print("beginning and end ") print(beginning_arg) print(end_arg) crop_time_start = time.time() command = "bash " + "crop_tool.sh " + video_arg + left_arg + top_arg + width_arg + height_arg + output_arg + beginning_arg + end_arg os.chmod("./output_command.sh", 0o755) with open("output_command.sh", "w") as text_file: text_file.write('#!/bin/bash') text_file.write("\n") text_file.write(command + "\n") text_file.write('#hello') os.chmod("./output_command.sh", 0o755) subprocess.check_call(["./output_command.sh"]) crop_time_end = time.time() crop_elapsed_time = crop_time_end - crop_time_start print("Crop Time: " + str(crop_elapsed_time)) # video_model_command = "python test_video.py --draw_crop_test.mp4 --arch resnet3d50" prediction_time_start = time.time() os.system("python test_video.py --video_file " + self.output_folder+"/"+self.output_file + ".mp4 " + "--arch resnet3d50") prediction_time_end = time.time() prediction_elapsed_time = prediction_time_end - prediction_time_start print("Prediction Time: " + str(prediction_elapsed_time)) # Opening prediction file file1 = open('predictions.txt', 'r') result_text = "" for line in file1: print(line) result_text += line break # just first prediction # result_text += "\n" # reset self.box_created = False self.box_started = False self.box_finished = False # Press Q on keyboard to exit if cv2.waitKey(frame_time) & 0xFF == ord('q'): break # Break the loop else: break self.cap.release() cv2.destroyAllWindows() def main(): TIME_WINDOW = 3 # seconds #video_file_path = 'videos/whats_app_vid_1.mp4' video_file_path = 'videos/IMG_4884.MOV' output_file = "demo_clip" output_folder = "trimmed_videos" output_label = "tossing" result_text = "" video_start_time = 0 # in secs fps = 30 video_start_frame = video_start_time*fps cap = cv2.VideoCapture(video_file_path) cap.set(cv2.CAP_PROP_POS_FRAMES, video_start_frame) my_crop_tool = VideoCropTool(video_file_path, output_file, output_folder, 0, cap, output_label) my_crop_tool.crop_and_predict() #my_crop_tool.crop_and_label() if __name__=="__main__": main()
manager_test.go
package templates import ( "testing" "dfss/dfssp/entities" "github.com/stretchr/testify/assert" ) func TestInit(t *testing.T) { Init() // will panic if any error found in templates } func TestGet(t *testing.T) { contract := entities.NewContract() contract.File.Hash = []byte{0x01, 0x02, 0x11, 0xaa} contract.File.Name = "name.pdf" contract.Comment = "comment" contract.AddSigner(nil, "[email protected]", nil) contract.AddSigner(nil, "[email protected]", nil) s, err := Get("contract", contract) expected := `Dear Sir or Madam, Someone asked you to sign a contract on the DFSS platform. Please download the attached file and open it with the DFSS client. Signers : - [email protected] - [email protected] Contract ID : ` + contract.ID.Hex() + ` Contract name : name.pdf SHA-512 hash : 010211aa Comment : comment Yours faithfully, The DFSS Platform `
assert.Equal(t, nil, err) assert.Equal(t, expected, s) }
204.py
from typing import List  # needed for the List[List[int]] annotation below


class Solution:
    def XXX(self, root: TreeNode) -> List[List[int]]:
        if not root:
            return []
        # Idea: use a queue as a helper. Enqueue the root first, then loop while the queue
        # is not empty, repeatedly dequeuing a node and enqueuing its left and right children.
        res = []
        q = [root]
        count1, count2 = 1, 0
        # The required output groups values level by level, so two counters are kept:
        # count2 counts how many nodes the next level has, while count1 is used to iterate
        # over the current level; once a level has been emitted, the counters are swapped.
        while q:
            temp = []  # temporary list storing all node values of the current level
            for _ in range(count1):  # visit every node of the current level
                p = q.pop(0)
                temp.append(p.val)
                if p.left:
                    q.append(p.left)
                    count2 += 1  # count the nodes of the next level
                if p.right:
                    q.append(p.right)
                    count2 += 1  # count the nodes of the next level
            res.append(temp)
            count1, count2 = count2, 0  # move to the next level and reset the counters
        return res
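# A minimal usage sketch, not part of the original LeetCode snippet: on LeetCode the judge
# supplies TreeNode, so a tiny stand-in is defined here only to illustrate how the BFS above
# is exercised. In a plain script the TreeNode definition would have to come before the
# Solution class, because the type annotation is evaluated at class-definition time.
class TreeNode:
    def __init__(self, val=0, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right

# Example: the tree 3 / (9, 20) / (-, -, 15, 7) traversed level by level.
# Solution().XXX(TreeNode(3, TreeNode(9), TreeNode(20, TreeNode(15), TreeNode(7))))
# -> [[3], [9, 20], [15, 7]]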
io.go
package packages import ( "io" "reflect" "github.com/chyroc/anbko/env" ) func init()
{ env.Packages["io"] = map[string]reflect.Value{ "Copy": reflect.ValueOf(io.Copy), "CopyN": reflect.ValueOf(io.CopyN), "EOF": reflect.ValueOf(io.EOF), "ErrClosedPipe": reflect.ValueOf(io.ErrClosedPipe), "ErrNoProgress": reflect.ValueOf(io.ErrNoProgress), "ErrShortBuffer": reflect.ValueOf(io.ErrShortBuffer), "ErrShortWrite": reflect.ValueOf(io.ErrShortWrite), "ErrUnexpectedEOF": reflect.ValueOf(io.ErrUnexpectedEOF), "LimitReader": reflect.ValueOf(io.LimitReader), "MultiReader": reflect.ValueOf(io.MultiReader), "MultiWriter": reflect.ValueOf(io.MultiWriter), "NewSectionReader": reflect.ValueOf(io.NewSectionReader), "Pipe": reflect.ValueOf(io.Pipe), "ReadAtLeast": reflect.ValueOf(io.ReadAtLeast), "ReadFull": reflect.ValueOf(io.ReadFull), "TeeReader": reflect.ValueOf(io.TeeReader), "WriteString": reflect.ValueOf(io.WriteString), } }
parameter_rules.rs
use cloudtruth_restapi::models::{ParameterRule, ParameterRuleTypeEnum}; use std::fmt; use std::fmt::Formatter; #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub enum ParamRuleType { Max, Min, MaxLen, MinLen, Regex, } #[derive(Clone, Debug)] pub struct ParameterDetailRule { pub id: String, pub rule_type: ParamRuleType, pub constraint: String, pub created_at: String, pub modified_at: String, } impl From<ParameterRuleTypeEnum> for ParamRuleType { fn from(api: ParameterRuleTypeEnum) -> Self { match api { ParameterRuleTypeEnum::Max => Self::Max, ParameterRuleTypeEnum::Min => Self::Min, ParameterRuleTypeEnum::MaxLen => Self::MaxLen, ParameterRuleTypeEnum::MinLen => Self::MinLen, ParameterRuleTypeEnum::Regex => Self::Regex, } } } impl From<ParamRuleType> for ParameterRuleTypeEnum { fn
from
(ct: ParamRuleType) -> Self { match ct { ParamRuleType::Max => Self::Max, ParamRuleType::Min => Self::Min, ParamRuleType::MaxLen => Self::MaxLen, ParamRuleType::MinLen => Self::MinLen, ParamRuleType::Regex => Self::Regex, } } } impl fmt::Display for ParamRuleType { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match self { Self::Max => write!(f, "max"), Self::Min => write!(f, "min"), Self::MaxLen => write!(f, "max-len"), Self::MinLen => write!(f, "min-len"), Self::Regex => write!(f, "regex"), } } } impl From<&ParameterRule> for ParameterDetailRule { fn from(api: &ParameterRule) -> Self { Self { id: api.id.clone(), rule_type: ParamRuleType::from(api._type), constraint: api.constraint.clone(), created_at: api.created_at.clone(), modified_at: api.modified_at.clone(), } } }
model_wrapper.go
// Copyright 2020 The PipeCD Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package mongodb import ( "fmt" "github.com/pipe-cd/pipe/pkg/model" ) // wrapModel returns a wrapper corresponding to the given entity. // A wrapper wraps a model representing BSON a document so that the model comes with "_id". func wrapModel(entity interface{}) (interface{}, error) { switch e := entity.(type) { case *model.Application: if e == nil { return nil, fmt.Errorf("nil entity given") } return &application{ ID: e.GetId(), Application: *e, }, nil case *model.Command: if e == nil { return nil, fmt.Errorf("nil entity given") } return &command{ ID: e.GetId(), Command: *e, }, nil case *model.Deployment: if e == nil { return nil, fmt.Errorf("nil entity given") } return &deployment{ ID: e.GetId(), Deployment: *e, }, nil case *model.Environment: if e == nil { return nil, fmt.Errorf("nil entity given") } return &environment{ ID: e.GetId(), Environment: *e, }, nil case *model.Piped: if e == nil { return nil, fmt.Errorf("nil entity given") } return &piped{ ID: e.GetId(), Piped: *e, }, nil case *model.Project: if e == nil { return nil, fmt.Errorf("nil entity given") } return &project{ ID: e.GetId(), Project: *e, }, nil case *model.APIKey: if e == nil { return nil, fmt.Errorf("nil entity given") } return &apiKey{ ID: e.GetId(), APIKey: *e, }, nil case *model.Event: if e == nil {
return nil, fmt.Errorf("nil entity given") } return &event{
ID: e.GetId(), Event: *e, }, nil default: return nil, fmt.Errorf("%T is not supported", e) } } // extractModel stores the unwrapped model in the value pointed to by e. func extractModel(wrapper interface{}, e interface{}) error { msg := "entity type doesn't correspond to the wrapper type (%T)" switch w := wrapper.(type) { case *application: e, ok := e.(*model.Application) if !ok { return fmt.Errorf(msg, w) } *e = w.Application case *command: e, ok := e.(*model.Command) if !ok { return fmt.Errorf(msg, w) } *e = w.Command case *deployment: e, ok := e.(*model.Deployment) if !ok { return fmt.Errorf(msg, w) } *e = w.Deployment case *environment: e, ok := e.(*model.Environment) if !ok { return fmt.Errorf(msg, w) } *e = w.Environment case *piped: e, ok := e.(*model.Piped) if !ok { return fmt.Errorf(msg, w) } *e = w.Piped case *project: e, ok := e.(*model.Project) if !ok { return fmt.Errorf(msg, w) } *e = w.Project case *apiKey: e, ok := e.(*model.APIKey) if !ok { return fmt.Errorf(msg, w) } *e = w.APIKey case *event: e, ok := e.(*model.Event) if !ok { return fmt.Errorf(msg, w) } *e = w.Event default: return fmt.Errorf("%T is not supported", w) } return nil } type application struct { model.Application `bson:",inline"` ID string `bson:"_id"` } type command struct { model.Command `bson:",inline"` ID string `bson:"_id"` } type deployment struct { model.Deployment `bson:",inline"` ID string `bson:"_id"` } type environment struct { model.Environment `bson:",inline"` ID string `bson:"_id"` } type piped struct { model.Piped `bson:",inline"` ID string `bson:"_id"` } type project struct { model.Project `bson:",inline"` ID string `bson:"_id"` } type apiKey struct { model.APIKey `bson:",inline"` ID string `bson:"_id"` } type event struct { model.Event `bson:",inline"` ID string `bson:"_id"` }
config_test.go
package conf import ( "os" "reflect" "testing" log "github.com/sirupsen/logrus" ) func TestNewConfigFromINI(t *testing.T) { os.Args = []string{"cmd", "-configfile", "test/config.test.ini"} config, err := NewConfig() if err != nil { t.Fatal(err) } // test if auth was read correctly (default is false) if config.AppConf.Auth != true { t.Error("[app].auth != true") } // test if multiple endpoints are read exp := stringSliceFlag{"etcd:2379", "etcd:22379", "etcd:32379"} act := config.EtcdConf.EndPoints if !reflect.DeepEqual(act, exp) { t.Errorf("expected: %v, actual: %v", exp, act) } } func
TestNewConfigFromCMD
(t *testing.T) { os.Args = []string{"cmd", "-version", "-configfile=test/config.test.ini", "-etcdendpoint=testendpoint1", "-etcdendpoint=testendpoint2", "-auth=false"} config, err := NewConfig() log.WithField("config", config).Info("DEBUG: config:") if err != nil { t.Fatal(err) } if !config.PrintVer { t.Error("config.PrintVer: exp: true; act: false") } exp := stringSliceFlag{"testendpoint1", "testendpoint2"} act := config.EtcdConf.EndPoints if !reflect.DeepEqual(act, exp) { t.Errorf("config.EtcdConf.Endpoints: act: %+v; exp: %+v", act, exp) } }
cofollow.py
#!/usr/bin/env python # -*- coding: UTF-8 -*- # # Copyright (c) 2020 ASMlover. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # * Redistributions of source code must retain the above copyright # notice, this list ofconditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materialsprovided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import sys import time import typing import coroutine from typing import Any, Generator, TextIO def
follow
(fp:TextIO, target:Generator[None, str, None], from_end:bool=False) \ -> None: from_end and fp.seek(0, 2) while True: line = fp.readline() if not line: time.sleep(0.1) continue target.send(line) @coroutine.corouine def printer() -> Generator[None, str, None]: while True: line = (yield) print(line,) if __name__ == '__main__': fname = sys.argv[1] if len(sys.argv) > 1 else 'cofollow.py' with open(fname) as fp: follow(fp, printer())
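# A hedged sketch of what the local "coroutine" helper imported above is assumed to provide
# (its source is not shown in this dump): a decorator that primes a generator-based coroutine
# by advancing it to the first yield, so that follow() can call target.send(line) right away.
# The name below is hypothetical; the file itself applies it as @coroutine.corouine.
def priming_decorator(func):
    def start(*args, **kwargs):
        gen = func(*args, **kwargs)
        next(gen)  # run up to the first `yield` so the coroutine is ready to receive values
        return gen
    return start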
verify.py
# -*- coding: utf-8 -*- ''' A few checks to make sure the environment is sane ''' from __future__ import absolute_import # Original Author: Jeff Schroeder <[email protected]> # Import python libs import os import re import sys import stat import errno import socket import logging # Import third party libs if sys.platform.startswith('win'): import win32file else: import resource # Import salt libs from salt.log import is_console_configured from salt.exceptions import SaltClientError import salt.defaults.exitcodes import salt.utils log = logging.getLogger(__name__) def
zmq_version
(): ''' ZeroMQ python bindings >= 2.1.9 are required ''' try: import zmq except Exception: # Return True for local mode return True ver = zmq.__version__ # The last matched group can be None if the version # is something like 3.1 and that will work properly match = re.match(r'^(\d+)\.(\d+)(?:\.(\d+))?', ver) # Fallthrough and hope for the best if not match: msg = "Using untested zmq python bindings version: '{0}'".format(ver) if is_console_configured(): log.warn(msg) else: sys.stderr.write("WARNING {0}\n".format(msg)) return True major, minor, point = match.groups() if major.isdigit(): major = int(major) if minor.isdigit(): minor = int(minor) # point very well could be None if point and point.isdigit(): point = int(point) if major == 2 and minor == 1: # zmq 2.1dev could be built against a newer libzmq if "dev" in ver and not point: msg = 'Using dev zmq module, please report unexpected results' if is_console_configured(): log.warn(msg) else: sys.stderr.write("WARNING: {0}\n".format(msg)) return True elif point and point >= 9: return True elif major > 2 or (major == 2 and minor > 1): return True # If all else fails, gracefully croak and warn the user log.critical('ZeroMQ python bindings >= 2.1.9 are required') if 'salt-master' in sys.argv[0]: msg = ('The Salt Master is unstable using a ZeroMQ version ' 'lower than 2.1.11 and requires this fix: http://lists.zeromq.' 'org/pipermail/zeromq-dev/2011-June/012094.html') if is_console_configured(): log.critical(msg) else: sys.stderr.write('CRITICAL {0}\n'.format(msg)) return False def lookup_family(hostname): ''' Lookup a hostname and determine its address family. The first address returned will be AF_INET6 if the system is IPv6-enabled, and AF_INET otherwise. ''' # If lookups fail, fall back to AF_INET sockets (and v4 addresses). fallback = socket.AF_INET try: hostnames = socket.getaddrinfo( hostname or None, None, socket.AF_UNSPEC, socket.SOCK_STREAM ) if not hostnames: return fallback h = hostnames[0] return h[0] except socket.gaierror: return fallback def verify_socket(interface, pub_port, ret_port): ''' Attempt to bind to the sockets to verify that they are available ''' addr_family = lookup_family(interface) pubsock = socket.socket(addr_family, socket.SOCK_STREAM) retsock = socket.socket(addr_family, socket.SOCK_STREAM) try: pubsock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) pubsock.bind((interface, int(pub_port))) pubsock.close() retsock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) retsock.bind((interface, int(ret_port))) retsock.close() result = True except Exception as exc: if exc.args: msg = ('Unable to bind socket, error: {0}'.format(str(exc))) else: msg = ('Unable to bind socket, this might not be a problem.' ' Is there another salt-master running?') if is_console_configured(): log.warn(msg) else: sys.stderr.write('WARNING: {0}\n'.format(msg)) result = False finally: pubsock.close() retsock.close() return result def verify_files(files, user): ''' Verify that the named files exist and are owned by the named user ''' if salt.utils.is_windows(): return True import pwd # after confirming not running Windows try: pwnam = pwd.getpwnam(user) uid = pwnam[2] except KeyError: err = ('Failed to prepare the Salt environment for user ' '{0}. 
The user is not available.\n').format(user) sys.stderr.write(err) sys.exit(salt.defaults.exitcodes.EX_NOUSER) for fn_ in files: dirname = os.path.dirname(fn_) try: try: os.makedirs(dirname) except OSError as err: if err.errno != errno.EEXIST: raise if not os.path.isfile(fn_): with salt.utils.fopen(fn_, 'w+') as fp_: fp_.write('') except OSError as err: msg = 'Failed to create path "{0}" - {1}\n' sys.stderr.write(msg.format(fn_, err)) sys.exit(err.errno) stats = os.stat(fn_) if uid != stats.st_uid: try: os.chown(fn_, uid, -1) except OSError: pass return True def verify_env(dirs, user, permissive=False, pki_dir=''): ''' Verify that the named directories are in place and that the environment can shake the salt ''' if salt.utils.is_windows(): return True import pwd # after confirming not running Windows try: pwnam = pwd.getpwnam(user) uid = pwnam[2] gid = pwnam[3] groups = salt.utils.get_gid_list(user, include_default=False) except KeyError: err = ('Failed to prepare the Salt environment for user ' '{0}. The user is not available.\n').format(user) sys.stderr.write(err) sys.exit(salt.defaults.exitcodes.EX_NOUSER) for dir_ in dirs: if not dir_: continue if not os.path.isdir(dir_): try: cumask = os.umask(18) # 077 os.makedirs(dir_) # If starting the process as root, chown the new dirs if os.getuid() == 0: os.chown(dir_, uid, gid) os.umask(cumask) except OSError as err: msg = 'Failed to create directory path "{0}" - {1}\n' sys.stderr.write(msg.format(dir_, err)) sys.exit(err.errno) mode = os.stat(dir_) # If starting the process as root, chown the new dirs if os.getuid() == 0: fmode = os.stat(dir_) if fmode.st_uid != uid or fmode.st_gid != gid: if permissive and fmode.st_gid in groups: # Allow the directory to be owned by any group root # belongs to if we say it's ok to be permissive pass else: # chown the file for the new user os.chown(dir_, uid, gid) for subdir in [a for a in os.listdir(dir_) if 'jobs' not in a]: fsubdir = os.path.join(dir_, subdir) if '{0}jobs'.format(os.path.sep) in fsubdir: continue for root, dirs, files in os.walk(fsubdir): for name in files: if name.startswith('.'): continue path = os.path.join(root, name) try: fmode = os.stat(path) except (IOError, OSError): pass if fmode.st_uid != uid or fmode.st_gid != gid: if permissive and fmode.st_gid in groups: pass else: # chown the file for the new user os.chown(path, uid, gid) for name in dirs: path = os.path.join(root, name) fmode = os.stat(path) if fmode.st_uid != uid or fmode.st_gid != gid: if permissive and fmode.st_gid in groups: pass else: # chown the file for the new user os.chown(path, uid, gid) # Allow the pki dir to be 700 or 750, but nothing else. # This prevents other users from writing out keys, while # allowing the use-case of 3rd-party software (like django) # to read in what it needs to integrate. # # If the permissions aren't correct, default to the more secure 700. # If acls are enabled, the pki_dir needs to remain readable, this # is still secure because the private keys are still only readbale # by the user running the master if dir_ == pki_dir: smode = stat.S_IMODE(mode.st_mode) if smode != 448 and smode != 488: if os.access(dir_, os.W_OK): os.chmod(dir_, 448) else: msg = 'Unable to securely set the permissions of "{0}".' msg = msg.format(dir_) if is_console_configured(): log.critical(msg) else: sys.stderr.write("CRITICAL: {0}\n".format(msg)) # Run the extra verification checks zmq_version() def check_user(user): ''' Check user and assign process uid/gid. 
''' if salt.utils.is_windows(): return True if user == salt.utils.get_user(): return True import pwd # after confirming not running Windows try: pwuser = pwd.getpwnam(user) try: if hasattr(os, 'initgroups'): os.initgroups(user, pwuser.pw_gid) else: os.setgroups(salt.utils.get_gid_list(user, include_default=False)) os.setgid(pwuser.pw_gid) os.setuid(pwuser.pw_uid) except OSError: msg = 'Salt configured to run as user "{0}" but unable to switch.' msg = msg.format(user) if is_console_configured(): log.critical(msg) else: sys.stderr.write("CRITICAL: {0}\n".format(msg)) return False except KeyError: msg = 'User not found: "{0}"'.format(user) if is_console_configured(): log.critical(msg) else: sys.stderr.write("CRITICAL: {0}\n".format(msg)) return False return True def list_path_traversal(path): ''' Returns a full list of directories leading up to, and including, a path. So list_path_traversal('/path/to/salt') would return: ['/', '/path', '/path/to', '/path/to/salt'] in that order. This routine has been tested on Windows systems as well. list_path_traversal('c:\\path\\to\\salt') on Windows would return: ['c:\\', 'c:\\path', 'c:\\path\\to', 'c:\\path\\to\\salt'] ''' out = [path] (head, tail) = os.path.split(path) if tail == '': # paths with trailing separators will return an empty string out = [head] (head, tail) = os.path.split(head) while head != out[0]: # loop until head is the same two consecutive times out.insert(0, head) (head, tail) = os.path.split(head) return out def check_path_traversal(path, user='root', skip_perm_errors=False): ''' Walk from the root up to a directory and verify that the current user has access to read each directory. This is used for making sure a user can read all parent directories of the minion's key before trying to go and generate a new key and raising an IOError ''' for tpath in list_path_traversal(path): if not os.access(tpath, os.R_OK): msg = 'Could not access {0}.'.format(tpath) if not os.path.exists(tpath): msg += ' Path does not exist.' else: current_user = salt.utils.get_user() # Make the error message more intelligent based on how # the user invokes salt-call or whatever other script. if user != current_user: msg += ' Try running as user {0}.'.format(user) else: msg += ' Please give {0} read permissions.'.format(user) # We don't need to bail on config file permission errors # if the CLI # process is run with the -a flag if skip_perm_errors: return # Propagate this exception up so there isn't a sys.exit() # in the middle of code that could be imported elsewhere. raise SaltClientError(msg) def check_max_open_files(opts): ''' Check the number of max allowed open files and adjust if needed ''' mof_c = opts.get('max_open_files', 100000) if sys.platform.startswith('win'): # Check the Windows API for more detail on this # http://msdn.microsoft.com/en-us/library/xt874334(v=vs.71).aspx # and the python binding http://timgolden.me.uk/pywin32-docs/win32file.html mof_s = mof_h = win32file._getmaxstdio() else: mof_s, mof_h = resource.getrlimit(resource.RLIMIT_NOFILE) accepted_keys_dir = os.path.join(opts.get('pki_dir'), 'minions') accepted_count = len(os.listdir(accepted_keys_dir)) log.debug( 'This salt-master instance has accepted {0} minion keys.'.format( accepted_count ) ) level = logging.INFO if (accepted_count * 4) <= mof_s: # We check for the soft value of max open files here because that's the # value the user chose to raise to. 
# # The number of accepted keys multiplied by four(4) is lower than the # soft value, everything should be OK return msg = ( 'The number of accepted minion keys({0}) should be lower than 1/4 ' 'of the max open files soft setting({1}). '.format( accepted_count, mof_s ) ) if accepted_count >= mof_s: # This should never occur, it might have already crashed msg += 'salt-master will crash pretty soon! ' level = logging.CRITICAL elif (accepted_count * 2) >= mof_s: # This is way too low, CRITICAL level = logging.CRITICAL elif (accepted_count * 3) >= mof_s: level = logging.WARNING # The accepted count is more than 3 times, WARN elif (accepted_count * 4) >= mof_s: level = logging.INFO if mof_c < mof_h: msg += ('According to the system\'s hard limit, there\'s still a ' 'margin of {0} to raise the salt\'s max_open_files ' 'setting. ').format(mof_h - mof_c) msg += 'Please consider raising this value.' log.log(level=level, msg=msg) def clean_path(root, path, subdir=False): ''' Accepts the root the path needs to be under and verifies that the path is under said root. Pass in subdir=True if the path can result in a subdirectory of the root instead of having to reside directly in the root ''' if not os.path.isabs(root): return '' if not os.path.isabs(path): path = os.path.join(root, path) path = os.path.normpath(path) if subdir: if path.startswith(root): return path else: if os.path.dirname(path) == os.path.normpath(root): return path return '' def valid_id(opts, id_): ''' Returns True if the passed id is valid ''' try: return bool(clean_path(opts['pki_dir'], id_)) except (AttributeError, KeyError) as e: return False def safe_py_code(code): ''' Check a string to see if it has any potentially unsafe routines which could be executed via python, this routine is used to improve the safety of modules such as virtualenv ''' bads = ( 'import', ';', 'subprocess', 'eval', 'open', 'file', 'exec', 'input') for bad in bads: if code.count(bad): return False return True
zmq_version
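The version gate above boils down to regex-parsing the bindings' version string and comparing (major, minor, point) against 2.1.9. A minimal self-contained sketch of that comparison (hypothetical helper name, no live zmq import, and ignoring the original's special handling of dev builds):

import re

def zmq_version_ok(ver, minimum=(2, 1, 9)):
    # "major.minor[.point]"; the point group may be absent (e.g. "3.1").
    match = re.match(r'^(\d+)\.(\d+)(?:\.(\d+))?', ver)
    if not match:
        # Untested version string: mirror the original's warn-and-continue behaviour.
        return True
    major, minor, point = (int(g) if g else 0 for g in match.groups())
    return (major, minor, point) >= minimum

print(zmq_version_ok('2.1.11'))  # True
print(zmq_version_ok('2.1.7'))   # False
print(zmq_version_ok('3.1'))     # True
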
test_layer_norm_op.py
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import sys sys.path.append('../') from auto_scan_test import AutoScanTest, IgnoreReasons from program_config import TensorConfig, ProgramConfig, OpConfig, CxxConfig, TargetType, PrecisionType, DataLayoutType, Place import unittest import hypothesis from hypothesis import given, settings, seed, example, assume import hypothesis.strategies as st import argparse import numpy as np from functools import partial class TestLayerNormOp(AutoScanTest): def __init__(self, *args, **kwargs): AutoScanTest.__init__(self, *args, **kwargs) self.enable_testing_on_place( TargetType.X86, PrecisionType.FP32, DataLayoutType.NCHW, thread=[1, 2]) self.enable_testing_on_place( TargetType.ARM, PrecisionType.FP32, DataLayoutType.NCHW, thread=[1, 2, 4]) def is_program_valid(self, program_config: ProgramConfig, predictor_config: CxxConfig) -> bool: return True def sample_program_configs(self, draw): in_shape = draw( st.lists( st.integers( min_value=1, max_value=64), min_size=4, max_size=4)) epsilon = draw(st.floats(min_value=0.0001, max_value=0.0005)) begin_norm_axis = draw(st.sampled_from([1, 2])) def generate_input(*args, **kwargs): return np.random.random(in_shape).astype(np.float32) channel_dim = 1 for dim in range(begin_norm_axis, 4): channel_dim = channel_dim * in_shape[dim] def generate_scale(*args, **kwargs): return np.random.random([channel_dim]).astype(np.float32) def generate_bias(*args, **kwargs): return np.random.random([channel_dim]).astype(np.float32) run_op = OpConfig( type="layer_norm", inputs={ "X": ["input_data"], "Scale": ["scale_data"], "Bias": ["bias_data"] }, outputs={ "Y": ["output_data"], "Mean": ["mean_data"], "Variance": ["var_data"], }, attrs={"epsilon": epsilon, "begin_norm_axis": begin_norm_axis}) program_config = ProgramConfig( ops=[run_op], weights={}, inputs={ "input_data": TensorConfig(data_gen=partial(generate_input)), "scale_data": TensorConfig(data_gen=partial(generate_scale)), "bias_data": TensorConfig(data_gen=partial(generate_bias)), }, outputs=["output_data", "mean_data", "var_data"]) return program_config def sample_predictor_configs(self): return self.get_predictor_configs(), ["layer_norm"], (5e-5, 5e-5) def add_ignore_pass_case(self): pass def test(self, *args, **kwargs): self.run_and_statis(quant=False, max_examples=25) if __name__ == "__main__":
unittest.main(argv=[''])
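The test above drives Paddle-Lite's layer_norm op with begin_norm_axis of 1 or 2 on 4-D inputs. As an illustration of what the kernel is expected to compute (a NumPy sketch under those assumptions, not the framework's own implementation), normalization runs over all axes from begin_norm_axis onward and the result is rescaled by Scale and shifted by Bias:

import numpy as np

def layer_norm_ref(x, scale, bias, begin_norm_axis=1, epsilon=1e-4):
    # Collapse to [outer, inner], where inner spans the normalized axes.
    inner = int(np.prod(x.shape[begin_norm_axis:]))
    x2 = x.reshape(-1, inner)
    mean = x2.mean(axis=1, keepdims=True)
    var = x2.var(axis=1, keepdims=True)
    y = (x2 - mean) / np.sqrt(var + epsilon)
    y = y * scale.reshape(1, inner) + bias.reshape(1, inner)
    return y.reshape(x.shape), mean.ravel(), var.ravel()

x = np.random.random((2, 3, 4, 5)).astype(np.float32)
y, mean, var = layer_norm_ref(x, np.ones(60, np.float32), np.zeros(60, np.float32))
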
simple_cipher.py
# Simple Cipher Text Generator # Rohan Roy - 2nd Nov 2013 import simplegui import random
message = "" # Helper Function def init(): letter_list = list(LETTER) random.shuffle(letter_list) for ch in LETTER: CIPHER[ch] = letter_list.pop() # Encoding Fuction def encode(): emsg = "" for ch in message: emsg += CIPHER[ch] print message , " encodes to ",emsg # Decoding Function def decode(): dmsg = "" for ch in message: for key,value in CIPHER.items(): if ch == value: dmsg += key print message , " decodes to ", dmsg # Input Message Function def newmsg(msg): global message message = msg label1=label2.set_text(msg) # Frames for the program frame = simplegui.create_frame("SimpleCipher",2,300,300) frame.add_input("Message:", newmsg,200) label1 = frame.add_label("Input Message:") label2 = frame.add_label("",200) frame.add_button("Encode",encode) frame.add_button("Decode",decode) # Initialization of the program init() # Starting of the frame frame.start()
# Global Variables CIPHER = {} LETTER = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890!@#$%&" "'
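Stripped of the simplegui frame, the program above is a plain substitution cipher over a fixed alphabet. A console-only sketch of the same encode/decode round trip (illustrative names, using a reverse-lookup dict rather than the original's linear scan over CIPHER.items()):

import random
import string

letters = string.ascii_letters + string.digits + ' '
cipher = dict(zip(letters, random.sample(letters, len(letters))))
reverse = {v: k for k, v in cipher.items()}

def encode(msg):
    return ''.join(cipher[ch] for ch in msg)

def decode(msg):
    return ''.join(reverse[ch] for ch in msg)

assert decode(encode('Hello World 42')) == 'Hello World 42'
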
etl.go
// Package etl transforms the score system. package etl import "strings" // Transform transforms the score system. func
(oldScore map[int][]string) map[string]int { newScore := make(map[string]int) for key, value := range oldScore { for index := 0; index < len(value); index++ { newScore[strings.ToLower(value[index])] = key } } return newScore }
Transform
credential.rs
extern crate libc; extern crate serde_json; use self::libc::c_char; use utils::cstring::CStringUtils; use utils::error; use utils::error::error_string; use connection; use credential; use std::thread; use std::ptr; use error::credential::CredentialError; use error::ToErrorCode; /// Retrieves Payment Info from a Credential /// /// #Params /// command_handle: command handle to map callback to user context. /// /// credential_handle: credential handle that was provided during creation. Used to identify credential object /// /// cb: Callback that provides Payment Info of a Credential /// /// #Returns /// Error code as a u32 #[no_mangle] #[allow(unused_variables, unused_mut)] pub extern fn vcx_credential_get_payment_info(command_handle: u32, credential_handle: u32, cb: Option<extern fn(xcommand_handle: u32, err: u32, *const c_char)>) -> u32 { check_useful_c_callback!(cb, error::INVALID_OPTION.code_num); thread::spawn(move|| { match credential::get_payment_information(credential_handle) { Ok(p) => { match p { Some(p) => { let info = p.to_string().unwrap_or("{}".to_string()); info!("vcx_credential_get_payment_info(command_handle: {}, rc: {}, msg: {})", command_handle, error::SUCCESS.code_num, info.clone()); let msg = CStringUtils::string_to_cstring(info); cb(command_handle, error::SUCCESS.code_num, msg.as_ptr()) }, None => { let msg = CStringUtils::string_to_cstring(format!("{{}}")); info!("vcx_credential_get_payment_info(command_handle: {}, rc: {}, msg: {})", command_handle, error::SUCCESS.code_num, "{}"); cb(command_handle, error::SUCCESS.code_num, msg.as_ptr()) } } }, Err(e) => { warn!("vcx_credential_get_payment_info(command_handle: {}, rc: {}, msg: {})", command_handle, e.to_error_code(), "{}".to_string()); cb(command_handle, e.to_error_code(), ptr::null_mut()) } } }); error::SUCCESS.code_num } /// Create a Credential object that requests and receives a credential for an institution /// /// #Params /// command_handle: command handle to map callback to user context. /// /// source_id: Institution's personal identification for the credential, should be unique. 
/// /// offer: credential offer received via "vcx_get_credential_offers" /// /// # Example offer -> "[{"msg_type": "CREDENTIAL_OFFER","version": "0.1","to_did": "...","from_did":"...","credential": {"account_num": ["...."],"name_on_account": ["Alice"]},"schema_seq_no": 48,"issuer_did": "...","credential_name": "Account Certificate","credential_id": "3675417066","msg_ref_id": "ymy5nth"}] /// /// cb: Callback that provides credential handle or error status /// /// #Returns /// Error code as a u32 #[no_mangle] #[allow(unused_variables, unused_mut)] pub extern fn vcx_credential_create_with_offer(command_handle: u32, source_id: *const c_char, offer: *const c_char, cb: Option<extern fn(xcommand_handle: u32, err: u32, credential_handle: u32)>) -> u32 { check_useful_c_callback!(cb, error::INVALID_OPTION.code_num); check_useful_c_str!(source_id, error::INVALID_OPTION.code_num); check_useful_c_str!(offer, error::INVALID_OPTION.code_num); info!("vcx_credential_create_with_offer(command_handle: {}, source_id: {}, offer: {})", command_handle, source_id, offer); thread::spawn(move|| { match credential::credential_create_with_offer(&source_id, &offer) { Ok(x) => { info!("vcx_credential_create_with_offer_cb(command_handle: {}, source_id: {}, rc: {}, handle: {})", command_handle, source_id, error_string(0), x); cb(command_handle, error::SUCCESS.code_num, x) }, Err(x) => { warn!("vcx_credential_create_with_offer_cb(command_handle: {}, source_id: {}, rc: {}, handle: {})", command_handle, source_id, x.to_error_code(), 0); cb(command_handle, x.to_error_code(), 0); }, }; }); error::SUCCESS.code_num } /// Retrieve information about a stored credential in user's wallet, including credential id and the credential itself. /// /// #Params /// command_handle: command handle to map callback to user context. /// /// credential_handle: credential handle that was provided during creation. Used to identify credential object /// /// cb: Callback that provides error status of api call, or returns the credential in json format of "{uuid:credential}". /// /// #Returns /// Error code as a u32 #[no_mangle] #[allow(unused_variables, unused_mut)] pub extern fn vcx_get_credential(command_handle: u32, credential_handle: u32, cb: Option<extern fn(xcommand_handle:u32, err: u32, credential: *const c_char)>) -> u32 { check_useful_c_callback!(cb, error::INVALID_OPTION.code_num); if !credential::is_valid_handle(credential_handle) { return CredentialError::InvalidHandle().to_error_code(); } thread::spawn(move|| { match credential::get_credential(credential_handle) { Ok(s) => { info!("vcx_get_credential_cb(commmand_handle: {}, rc: {}, msg: {})", command_handle, error::SUCCESS.code_num, s); let msg = CStringUtils::string_to_cstring(s); cb(command_handle, error::SUCCESS.code_num, msg.as_ptr()); }, Err(e) => { error!("vcx_get_credential_cb(commmand_handle: {}, rc: {}, msg: {})", command_handle, e.to_error_code(), "".to_string()); cb(command_handle, e.to_error_code(), ptr::null_mut()); }, }; }); error::SUCCESS.code_num } /// Create a Credential object that requests and receives a credential for an institution /// /// #Params /// command_handle: command handle to map callback to user context. /// /// source_id: Institution's personal identification for the credential, should be unique. 
/// /// connection_handle: connection to query for credential offer /// /// msg_id: msg_id that contains the credential offer /// /// cb: Callback that provides credential handle or error status /// /// #Returns /// Error code as a u32 #[no_mangle] #[allow(unused_variables, unused_mut)] pub extern fn vcx_credential_create_with_msgid(command_handle: u32, source_id: *const c_char, connection_handle: u32, msg_id: *const c_char, cb: Option<extern fn(xcommand_handle: u32, err: u32, credential_handle: u32, offer: *const c_char)>) -> u32 { check_useful_c_callback!(cb, error::INVALID_OPTION.code_num); check_useful_c_str!(source_id, error::INVALID_OPTION.code_num); check_useful_c_str!(msg_id, error::INVALID_OPTION.code_num); info!("vcx_credential_create_with_msgid(command_handle: {}, source_id: {}, connection_handle: {}, msg_id: {})", command_handle, source_id, connection_handle, msg_id); thread::spawn(move|| { match credential::get_credential_offer_msg(connection_handle, &msg_id) { Ok(offer) => { match credential::credential_create_with_offer(&source_id, &offer) { Ok(handle) => { let offer_string = match credential::get_credential_offer(handle) { Ok(x) => x, Err(_) => offer, }; let c_offer = CStringUtils::string_to_cstring(offer_string); info!("vcx_credential_create_with_offer_cb(command_handle: {}, source_id: {}, rc: {}, handle: {})", command_handle, source_id, error_string(0), handle); cb(command_handle, error::SUCCESS.code_num, handle, c_offer.as_ptr()) }, Err(e) => { warn!("vcx_credential_create_with_offer_cb(command_handle: {}, source_id: {}, rc: {}, handle: {})", command_handle, source_id, e.to_error_code(), 0); cb(command_handle, e.to_error_code(), 0, ptr::null_mut()); }, }; }, Err(e) => cb(command_handle, e.to_error_code(), 0, ptr::null_mut()), }; }); error::SUCCESS.code_num } /// Send a credential request to the connection, called after having received a credential offer /// /// #params /// command_handle: command handle to map callback to user context /// /// credential_handle: credential handle that was provided during creation. 
Used to identify credential object /// /// connection_handle: Connection handle that identifies pairwise connection /// /// cb: Callback that provides error status of credential request /// /// #Returns /// Error code as a u32 #[no_mangle] pub extern fn vcx_credential_send_request(command_handle: u32, credential_handle: u32, connection_handle: u32, payment_handle: u32, cb: Option<extern fn(xcommand_handle: u32, err: u32)>) -> u32 { check_useful_c_callback!(cb, error::INVALID_OPTION.code_num); if !credential::is_valid_handle(credential_handle) { return error::INVALID_CREDENTIAL_HANDLE.code_num; } if !connection::is_valid_handle(connection_handle) { return error::INVALID_CONNECTION_HANDLE.code_num; } let source_id = credential::get_source_id(credential_handle).unwrap_or_default(); info!("vcx_credential_send_request(command_handle: {}, credential_handle: {}, connection_handle: {}), source_id: {:?}", command_handle, credential_handle, connection_handle, source_id); thread::spawn(move|| { match credential::send_credential_request(credential_handle, connection_handle) { Ok(x) => { info!("vcx_credential_send_request_cb(command_handle: {}, rc: {}, source_id: {:?})", command_handle, x.to_string(), source_id); cb(command_handle,x); }, Err(e) => { warn!("vcx_credential_send_request_cb(command_handle: {}, rc: {}), source_id: {:?}", command_handle, e.to_string(), source_id); cb(command_handle,e.to_error_code()); }, }; }); error::SUCCESS.code_num } /// Queries agency for credential offers from the given connection. /// /// #Params /// command_handle: command handle to map callback to user context. /// /// connection_handle: Connection to query for credential offers. /// /// cb: Callback that provides any credential offers and error status of query /// /// #Returns /// Error code as a u32 #[no_mangle] pub extern fn vcx_credential_get_offers(command_handle: u32, connection_handle: u32, cb: Option<extern fn(xcommand_handle: u32, err: u32, credential_offers: *const c_char)>) -> u32 { check_useful_c_callback!(cb, error::INVALID_OPTION.code_num); if !connection::is_valid_handle(connection_handle) { return error::INVALID_CONNECTION_HANDLE.code_num; } info!("vcx_credential_get_offers(command_handle: {}, connection_handle: {})", command_handle, connection_handle); thread::spawn(move|| { match credential::get_credential_offer_messages(connection_handle, None) { Ok(x) => { info!("vcx_credential_get_offers_cb(command_handle: {}, rc: {}, msg: {})", command_handle, x.to_string(), x); let msg = CStringUtils::string_to_cstring(x); cb(command_handle, error::SUCCESS.code_num, msg.as_ptr()); }, Err(x) => { error!("vcx_credential_get_offers_cb(command_handle: {}, rc: {}, msg: null)", command_handle, x.to_string()); cb(command_handle, x.to_error_code(), ptr::null_mut()); }, }; }); error::SUCCESS.code_num } /// Checks for any state change in the credential and updates the the state attribute. If it detects a credential it /// will store the credential in the wallet and update the state. /// /// #Params /// command_handle: command handle to map callback to user context. /// /// credential_handle: Credential handle that was provided during creation. 
Used to identify credential object /// /// cb: Callback that provides most current state of the credential and error status of request /// /// #Returns /// Error code as a u32 #[no_mangle] pub extern fn vcx_credential_update_state(command_handle: u32, credential_handle: u32, cb: Option<extern fn(xcommand_handle: u32, err: u32, state: u32)>) -> u32 { check_useful_c_callback!(cb, error::INVALID_OPTION.code_num); if !credential::is_valid_handle(credential_handle) { return error::INVALID_CREDENTIAL_HANDLE.code_num; } let source_id = credential::get_source_id(credential_handle).unwrap_or_default(); info!("vcx_credential_update_state(command_handle: {}, credential_handle: {}), source_id: {:?}", command_handle, credential_handle, source_id); thread::spawn(move|| { match credential::update_state(credential_handle) { Ok(_) => (), Err(e) => { error!("vcx_credential_update_state_cb(command_handle: {}, rc: {}, state: {}), source_id: {:?}", command_handle, error_string(e), 0, source_id); cb(command_handle, e, 0) } } let state = match credential::get_state(credential_handle) { Ok(s) => { info!("vcx_credential_update_state_cb(command_handle: {}, rc: {}, state: {}), source_id: {:?}", command_handle, error_string(0), s, source_id); cb(command_handle, error::SUCCESS.code_num, s) }, Err(e) => { error!("vcx_credential_update_state_cb(command_handle: {}, rc: {}, state: {}), source_id: {:?}", command_handle, error_string(e.to_error_code()), 0, source_id); cb(command_handle, e.to_error_code(), 0) } }; }); error::SUCCESS.code_num } #[no_mangle] pub extern fn vcx_credential_get_state(command_handle: u32, handle: u32, cb: Option<extern fn(xcommand_handle: u32, err: u32, state: u32)>) -> u32 { check_useful_c_callback!(cb, error::INVALID_OPTION.code_num); if !credential::is_valid_handle(handle) { return error::INVALID_CREDENTIAL_HANDLE.code_num; } let source_id = credential::get_source_id(handle).unwrap_or_default(); info!("vcx_credential_get_state(command_handle: {}, credential_handle: {}), source_id: {:?}", command_handle, handle, source_id); thread::spawn(move|| { match credential::get_state(handle) { Ok(s) => { info!("vcx_credential_get_state_cb(command_handle: {}, rc: {}, state: {}), source_id: {:?}", command_handle, error_string(0), s, source_id); cb(command_handle, error::SUCCESS.code_num, s) }, Err(e) => { error!("vcx_credential_get_state_cb(command_handle: {}, rc: {}, state: {}), source_id: {:?}", command_handle, error_string(e.to_error_code()), 0, source_id); cb(command_handle, e.to_error_code(), 0) } }; }); error::SUCCESS.code_num } /// Takes the credential object and returns a json string of all its attributes /// /// #Params /// command_handle: command handle to map callback to user context. /// /// handle: Credential handle that was provided during creation. 
Used to identify credential object /// /// cb: Callback that provides json string of the credential's attributes and provides error status /// /// #Returns /// Error code as a u32 #[no_mangle] pub extern fn vcx_credential_serialize(command_handle: u32, handle: u32, cb: Option<extern fn(xcommand_handle: u32, err: u32, data: *const c_char)>) -> u32 { check_useful_c_callback!(cb, error::INVALID_OPTION.code_num); if !credential::is_valid_handle(handle) { return error::INVALID_CREDENTIAL_HANDLE.code_num; } let source_id = credential::get_source_id(handle).unwrap_or_default(); info!("vcx_credential_serialize(command_handle: {}, credential_handle: {}), source_id: {:?}", command_handle, handle, source_id); thread::spawn(move|| { match credential::to_string(handle) { Ok(x) => { info!("vcx_credential_serialize_cb(command_handle: {}, rc: {}, data: {}), source_id: {:?}", command_handle, error_string(0), x, source_id); let msg = CStringUtils::string_to_cstring(x); cb(command_handle, error::SUCCESS.code_num, msg.as_ptr()); }, Err(x) => { error!("vcx_credential_serialize_cb(command_handle: {}, rc: {}, data: {}), source_id: {:?}", command_handle, error_string(x), 0, source_id); cb(command_handle,x,ptr::null_mut()); }, }; }); error::SUCCESS.code_num } /// Takes a json string representing an credential object and recreates an object matching the json /// /// #Params /// command_handle: command handle to map callback to user context. /// /// credential_data: json string representing a credential object /// /// /// cb: Callback that provides credential handle and provides error status /// /// #Returns /// Error code as a u32 #[no_mangle] pub extern fn vcx_credential_deserialize(command_handle: u32, credential_data: *const c_char, cb: Option<extern fn(xcommand_handle: u32, err: u32, handle: u32)>) -> u32 { check_useful_c_callback!(cb, error::INVALID_OPTION.code_num); check_useful_c_str!(credential_data, error::INVALID_OPTION.code_num); info!("vcx_credential_deserialize(command_handle: {}, credential_data: {})", command_handle, credential_data); thread::spawn(move|| { match credential::from_string(&credential_data) { Ok(x) => { info!("vcx_credential_deserialize_cb(command_handle: {}, rc: {}, credential_handle: {}), source_id: {:?}", command_handle, error_string(0), x, credential::get_source_id(x).unwrap_or_default()); cb(command_handle, 0, x); }, Err(x) => { error!("vcx_credential_deserialize_cb(command_handle: {}, rc: {}, credential_handle: {}), source_id: {:?}", command_handle, error_string(x), 0, ""); cb(command_handle, x, 0); }, }; }); error::SUCCESS.code_num } /// Releases the credential object by de-allocating memory /// /// #Params /// handle: Credential handle that was provided during creation. Used to access credential object /// /// #Returns /// Success #[no_mangle] pub extern fn vcx_credential_release(handle: u32) -> u32 { let source_id = credential::get_source_id(handle).unwrap_or_default(); match credential::release(handle) { Ok(_) => info!("vcx_credential_release(handle: {}, rc: {}), source_id: {:?}", handle, error_string(0), source_id), Err(e) => error!("vcx_credential_release(handle: {}, rc: {}), source_id: {:?}", handle, error_string(e.to_error_code()), source_id), }; error::SUCCESS.code_num } /// Retrieve the txn associated with paying for the credential /// /// #param /// handle: credential handle that was provided during creation. Used to access credential object. 
/// /// #Callback returns /// PaymentTxn json /// example: { /// "amount":25, /// "inputs":[ /// "pay:null:1_3FvPC7dzFbQKzfG", /// "pay:null:1_lWVGKc07Pyc40m6" /// ], /// "outputs":[ /// {"paymentAddress":"pay:null:FrSVC3IrirScyRh","amount":5,"extra":null}, /// {"paymentAddress":"pov:null:OsdjtGKavZDBuG2xFw2QunVwwGs5IB3j","amount":25,"extra":null} /// ] /// } #[no_mangle] pub extern fn vcx_credential_get_payment_txn(command_handle: u32, handle: u32, cb: Option<extern fn(xcommand_handle: u32, err: u32, txn: *const c_char)>) -> u32 { check_useful_c_callback!(cb, error::INVALID_OPTION.code_num); info!("vcx_credential_get_payment_txn(command_handle: {})", command_handle); thread::spawn(move|| { match credential::get_payment_txn(handle) { Ok(x) => { match serde_json::to_string(&x) { Ok(x) => { info!("vcx_credential_get_payment_txn_cb(command_handle: {}, rc: {}, : {}), source_id: {:?}", command_handle, error_string(0), x, credential::get_source_id(handle).unwrap_or_default()); let msg = CStringUtils::string_to_cstring(x); cb(command_handle, 0, msg.as_ptr()); } Err(_) => { error!("vcx_credential_get_payment_txn_cb(command_handle: {}, rc: {}, txn: {}), source_id: {:?}", command_handle, error_string(error::INVALID_JSON.code_num), "null", credential::get_source_id(handle).unwrap_or_default()); cb(command_handle, error::INVALID_JSON.code_num, ptr::null_mut()); } } }, Err(x) => { error!("vcx_credential_get_payment_txn_cb(command_handle: {}, rc: {}, txn: {}), source_id: {:?}", command_handle, x.to_string(), "null", credential::get_source_id(handle).unwrap_or_default()); cb(command_handle, x.to_error_code(), ptr::null()); }, }; }); error::SUCCESS.code_num } #[cfg(test)] mod tests { extern crate serde_json; use super::*; use std::ffi::CString; use std::time::Duration; use settings; use connection; use api::VcxStateType; use utils::constants::{DEFAULT_SERIALIZED_CREDENTIAL}; pub const BAD_CREDENTIAL_OFFER: &str = r#"{"version": "0.1","to_did": "LtMgSjtFcyPwenK9SHCyb8","from_did": "LtMgSjtFcyPwenK9SHCyb8","credential": {"account_num": ["8BEaoLf8TBmK4BUyX8WWnA"],"name_on_account": ["Alice"]},"schema_seq_no": 48,"issuer_did": "Pd4fnFtRBcMKRVC2go5w3j","credential_name": "Account Certificate","credential_id": "3675417066","msg_ref_id": "ymy5nth"}"#; extern "C" fn create_cb(command_handle: u32, err: u32, credential_handle: u32) { assert_eq!(err, 0); assert!(credential_handle > 0); println!("successfully called create_cb") } extern "C" fn create_with_offer_cb(command_handle: u32, err: u32, credential_handle: u32, offer: *const c_char) { assert_eq!(err, 0); assert!(credential_handle > 0); check_useful_c_str!(offer,()); } extern "C" fn bad_create_cb(command_handle: u32, err: u32, credential_handle: u32) { assert!(err > 0); assert_eq!(credential_handle, 0); } extern "C" fn serialize_cb(handle: u32, err: u32, credential_string: *const c_char) { assert_eq!(err, 0); if credential_string.is_null() { panic!("credential_string is null"); } check_useful_c_str!(credential_string, ()); } extern "C" fn get_credential_cb(handle: u32, err: u32, credential_string: *const c_char) { assert_eq!(err, 0); if credential_string.is_null() { panic!("credential_string is null"); } check_useful_c_str!(credential_string, ()); assert!(credential_string.len() > 100); } extern "C" fn get_invalid_state_credential_cb(handle: u32, err: u32, credential_string: *const c_char) { assert_eq!(err, CredentialError::InvalidState().to_error_code()); } #[test] fn test_vcx_credential_create_with_offer_success() { settings::set_defaults(); 
settings::set_config_value(settings::CONFIG_ENABLE_TEST_MODE,"true"); assert_eq!(vcx_credential_create_with_offer(0, CString::new("test_create").unwrap().into_raw(), CString::new(::utils::constants::CREDENTIAL_OFFER_JSON).unwrap().into_raw(), Some(create_cb)), error::SUCCESS.code_num); thread::sleep(Duration::from_millis(200)); } #[test] fn test_vcx_credential_create_with_offer_fails() { settings::set_defaults(); settings::set_config_value(settings::CONFIG_ENABLE_TEST_MODE,"true"); assert_eq!(vcx_credential_create_with_offer( 0, CString::new("test_create").unwrap().into_raw(), CString::new(BAD_CREDENTIAL_OFFER).unwrap().into_raw(), Some(bad_create_cb)),error::SUCCESS.code_num); thread::sleep(Duration::from_millis(200)); } #[test] fn test_vcx_credential_serialize() { settings::set_defaults(); settings::set_config_value(settings::CONFIG_ENABLE_TEST_MODE,"true"); let handle = credential::credential_create_with_offer("test_vcx_credential_serialize",::utils::constants::CREDENTIAL_OFFER_JSON).unwrap(); assert_eq!(vcx_credential_serialize(0, handle, Some(serialize_cb)), error::SUCCESS.code_num); thread::sleep(Duration::from_millis(200)); } extern "C" fn send_offer_cb(command_handle: u32, err: u32) { if err != 0 {panic!("failed to send credential offer: {}",err)} } #[test] fn test_vcx_credential_send_request()
extern "C" fn init_cb(command_handle: u32, err: u32) { if err != 0 {panic!("create_cb failed: {}", err)} println!("successfully called init_cb") } extern "C" fn deserialize_cb(command_handle: u32, err: u32, credential_handle: u32) { fn formatter(original: &str) -> String { let original_json: serde_json::Value = serde_json::from_str(&original).unwrap(); serde_json::to_string(&original_json).unwrap() } assert_eq!(err, 0); assert!(credential_handle > 0); println!("successfully called deserialize_cb"); let original = formatter(DEFAULT_SERIALIZED_CREDENTIAL); let new = formatter(&credential::to_string(credential_handle).unwrap()); assert_eq!(original, new); } #[test] fn test_vcx_credential_deserialize_succeeds() { settings::set_defaults(); settings::set_config_value(settings::CONFIG_ENABLE_TEST_MODE,"true"); let string = DEFAULT_SERIALIZED_CREDENTIAL; vcx_credential_deserialize(0,CString::new(string).unwrap().into_raw(), Some(deserialize_cb)); thread::sleep(Duration::from_millis(200)); } extern "C" fn get_offers_cb(command_handle: u32, err:u32, offers: *const c_char) { assert_eq!(err,0); check_useful_c_str!(offers, ()); println!("successfully called get_offers_cb: {:?}", offers); } #[test] fn test_vcx_credential_get_new_offers(){ settings::set_defaults(); settings::set_config_value(settings::CONFIG_ENABLE_TEST_MODE,"true"); let cxn = ::connection::build_connection("test_get_new_offers").unwrap(); assert_eq!(error::SUCCESS.code_num as u32, vcx_credential_get_offers(0, cxn, Some(get_offers_cb))); thread::sleep(Duration::from_millis(300)); } #[test] fn test_vcx_credential_create() { settings::set_defaults(); settings::set_config_value(settings::CONFIG_ENABLE_TEST_MODE,"true"); let cxn = ::connection::build_connection("test_vcx_credential_create").unwrap(); assert_eq!(vcx_credential_create_with_msgid(0, CString::new("test_vcx_credential_create").unwrap().into_raw(), cxn, CString::new("123").unwrap().into_raw(), Some(create_with_offer_cb)), error::SUCCESS.code_num); thread::sleep(Duration::from_millis(200)); } extern "C" fn get_state_cb(command_handle: u32, err: u32, state: u32) { assert!(state > 0); println!("successfully called get_state_cb: {}", state); } #[test] fn test_vcx_credential_get_state() { settings::set_defaults(); settings::set_config_value(settings::CONFIG_ENABLE_TEST_MODE,"true"); let handle = credential::from_string(DEFAULT_SERIALIZED_CREDENTIAL).unwrap(); assert!(handle > 0); let rc = vcx_credential_get_state(0,handle,Some(get_state_cb)); assert_eq!(rc, error::SUCCESS.code_num); thread::sleep(Duration::from_millis(300)); } #[test] fn test_vcx_credential_update_state() { settings::set_defaults(); settings::set_config_value(settings::CONFIG_ENABLE_TEST_MODE,"true"); let cxn = ::connection::build_connection("test_credential_update_state").unwrap(); let handle = credential::from_string(DEFAULT_SERIALIZED_CREDENTIAL).unwrap(); ::utils::httpclient::set_next_u8_response(::utils::constants::NEW_CREDENTIAL_OFFER_RESPONSE.to_vec()); assert_eq!(vcx_credential_update_state(0, handle, Some(get_state_cb)), error::SUCCESS.code_num); thread::sleep(Duration::from_millis(300)); assert_eq!(vcx_credential_send_request(0, handle, cxn,0, Some(send_offer_cb)), error::SUCCESS.code_num); thread::sleep(Duration::from_millis(200)); } #[test] fn test_get_credential(){ use utils::constants::SERIALIZED_CREDENTIAL; settings::set_defaults(); let handle = credential::from_string(SERIALIZED_CREDENTIAL).unwrap(); let bad_handle = 1123; let command_handle = 0; assert_eq!(vcx_get_credential(command_handle, handle, 
Some(get_credential_cb)), error::SUCCESS.code_num); thread::sleep(Duration::from_millis(400)); assert_eq!(vcx_get_credential(command_handle, bad_handle, Some(get_credential_cb)), CredentialError::InvalidHandle().to_error_code()); let handle = credential::from_string(DEFAULT_SERIALIZED_CREDENTIAL).unwrap(); assert_eq!(vcx_get_credential(command_handle, handle, Some(get_invalid_state_credential_cb)), error::SUCCESS.code_num); } #[test] fn test_get_payment_txn() { settings::set_defaults(); settings::set_config_value(settings::CONFIG_ENABLE_TEST_MODE,"true"); let handle = credential::from_string(::utils::constants::FULL_CREDENTIAL_SERIALIZED).unwrap(); vcx_credential_get_payment_txn(0, handle, Some(get_offers_cb)); thread::sleep(Duration::from_millis(200)); } }
{ settings::set_defaults(); settings::set_config_value(settings::CONFIG_ENABLE_TEST_MODE,"true"); let handle = credential::credential_create_with_offer("test_send_request",::utils::constants::CREDENTIAL_OFFER_JSON).unwrap(); assert_eq!(credential::get_state(handle).unwrap(),VcxStateType::VcxStateRequestReceived as u32); let connection_handle = connection::build_connection("test_send_credential_offer").unwrap(); assert_eq!(vcx_credential_send_request(0,handle,connection_handle,0, Some(send_offer_cb)), error::SUCCESS.code_num); thread::sleep(Duration::from_millis(1000)); }
RIOP.ts
/* eslint-disable @typescript-eslint/no-explicit-any */ import * as rio from './RIO'; export type RIOP<R, I, O> = rio.RIO<R, I, Promise<O>>; export type InferOutput<T> = T extends RIOP<any, any, infer O> ? O : never; export type TypeFn<T> = T & { Env: rio.InferEnv<T>; Input: rio.InferInput<T>; Output: InferOutput<T>; }; export function
<R, I, O1, O2>(riop: RIOP<R, I, O1>, f: (result: O1) => O2): RIOP<R, I, O2> { return rio.map(riop, (result) => result.then(f)); }
map
flush.go
package simpledb import ( "fmt" "github.com/thomasjungblut/go-sstables/memstore" "github.com/thomasjungblut/go-sstables/sstables" "log" "os" "path/filepath" "sync/atomic" "time" ) func flushMemstoreContinuously(db *DB) { defer func() { db.doneFlushChannel <- true }() err := func(db *DB) error { for flushAction := range db.storeFlushChannel { err := executeFlush(db, flushAction) if err != nil { return err } } return nil }(db) if err != nil { log.Panicf("error while merging sstable at %s, error was %v", db.currentSSTablePath, err) } } func executeFlush(db *DB, flushAction memStoreFlushAction) error { walPath := flushAction.walPath memStoreToFlush := *flushAction.memStore numElements := uint64(memStoreToFlush.Size()) // we can skip if there is nothing to write, usually that indicates a proper "close" was done. if memStoreToFlush.Size() == 0 { log.Printf("no memstore flush necessary due to empty store, skipping\n") return nil } start := time.Now() gen := atomic.AddUint64(&db.currentGeneration, uint64(1)) writePath := filepath.Join(db.basePath, fmt.Sprintf(SSTablePattern, gen)) err := os.MkdirAll(writePath, 0700) if err != nil { return err } err = memStoreToFlush.FlushWithTombstones( sstables.WriteBasePath(writePath), sstables.WithKeyComparator(db.cmp), sstables.BloomExpectedNumberOfElements(numElements)) if err != nil { return err } if walPath != "" { err = os.Remove(walPath) if err != nil { return err }
reader, err := sstables.NewSSTableReader( sstables.ReadBasePath(writePath), sstables.ReadWithKeyComparator(db.cmp), ) if err != nil { return err } elapsedDuration := time.Since(start) totalBytes := reader.MetaData().TotalBytes throughput := float64(totalBytes) / 1024 / 1024 / elapsedDuration.Seconds() log.Printf("done flushing memstore to sstable of size %d bytes (%2.f mb/s) in %v. Path: [%s]\n", totalBytes, throughput, elapsedDuration, writePath) // add the newly created reader into the rotation // note that this CAN block here waiting on a current compaction to finish db.sstableManager.addReader(reader) return nil } func (db *DB) rotateWalAndFlushMemstore() error { walPath, err := db.wal.Rotate() if err != nil { return err } db.storeFlushChannel <- memStoreFlushAction{ memStore: swapMemstore(db), walPath: walPath, } return nil } func swapMemstore(db *DB) *memstore.MemStoreI { storeToFlush := db.memStore.writeStore db.memStore = &RWMemstore{ readStore: storeToFlush, writeStore: memstore.NewMemStore(), } return &storeToFlush }
}
forms.py
from django import forms from django.contrib.auth.forms import UserCreationForm from django.contrib.auth.models import User class RegisterForm(UserCreationForm):
email = forms.EmailField(max_length=200, help_text='Required') class Meta: model = User fields = ('username', 'email', 'password1', 'password2')
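A UserCreationForm subclass like RegisterForm above is normally consumed by a view that binds request.POST, validates, and saves the new User. A hedged sketch of such a view (the import path, URL name and template path are assumptions, not taken from the original project):

from django.shortcuts import redirect, render
from .forms import RegisterForm  # assumes the form lives in the same app

def register(request):
    form = RegisterForm(request.POST or None)
    if request.method == 'POST' and form.is_valid():
        form.save()  # UserCreationForm validates password1/password2 and stores the hash
        return redirect('login')
    return render(request, 'registration/register.html', {'form': form})
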
mock-code-editor-repository-file.service.ts
import { Injectable } from '@angular/core'; import { EMPTY, of } from 'rxjs'; import { ICodeEditorRepositoryFileService } from 'app/exercises/programming/shared/code-editor/service/code-editor-repository.service'; @Injectable({ providedIn: 'root' }) export class
implements ICodeEditorRepositoryFileService { getRepositoryContent = () => EMPTY; getFile = (fileName: string) => EMPTY; createFile = (fileName: string) => EMPTY; createFolder = (fileName: string) => EMPTY; updateFileContent = (fileName: string) => EMPTY; updateFiles = (fileUpdates: Array<{ fileName: string; fileContent: string }>) => of({ fileName: undefined }); renameFile = (fileName: string) => EMPTY; deleteFile = (fileName: string) => EMPTY; }
MockCodeEditorRepositoryFileService
channels.go
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. // See LICENSE.txt for license information. package app import ( "github.com/mattermost/mattermost-server/v6/services/imageproxy" ) // Channels contains all channels related state. type Channels struct { srv *Server imageProxy *imageproxy.ImageProxy } func init() { RegisterProduct("channels", func(s *Server) (Product, error) { return NewChannels(s) }) } func NewChannels(s *Server) (*Channels, error) { return &Channels{ srv: s, imageProxy: imageproxy.MakeImageProxy(s, s.httpService, s.Log), }, nil } func (c *Channels) Start() error {
} func (c *Channels) Stop() error { return nil }
return nil
EditorInfiniteGridHelper.ts
import { Mesh, Color, PlaneBufferGeometry, ShaderMaterial, DoubleSide, Plane, Vector3 } from 'three' import { addIsHelperFlag } from '@xrengine/engine/src/scene/functions/addIsHelperFlag' import { CommandManager } from '../managers/CommandManager' import EditorEvents from '../constants/EditorEvents' import { ObjectLayers } from '@xrengine/engine/src/scene/constants/ObjectLayers' import { setObjectLayers } from '@xrengine/engine/src/scene/functions/setObjectLayers' /** * Original Author: Fyrestar * https://discourse.threejs.org/t/three-infinitegridhelper-anti-aliased/8377 */ const vertexShader = ` varying vec3 worldPosition; uniform float uDistance; void main() { vec3 pos = position.xzy * uDistance; pos.xz += cameraPosition.xz; worldPosition = pos; gl_Position = projectionMatrix * modelViewMatrix * vec4(pos, 1.0); gl_Position.z -= 0.01; } ` const fragmentShader = ` varying vec3 worldPosition; uniform float uSize1; uniform float uSize2; uniform vec3 uColor; uniform float uDistance; float getGrid(float size) { vec2 r = worldPosition.xz / size; vec2 grid = abs(fract(r - 0.5) - 0.5) / fwidth(r); float line = min(grid.x, grid.y); return 1.0 - min(line, 1.0); } void main() { float d = 1.0 - min(distance(cameraPosition.xz, worldPosition.xz) / uDistance, 1.0); float g1 = getGrid(uSize1); float g2 = getGrid(uSize2); gl_FragColor = vec4(uColor.rgb, mix(g2, g1, g1) * pow(d, 3.0)); gl_FragColor.a = mix(0.5 * gl_FragColor.a, gl_FragColor.a, g2); if ( gl_FragColor.a <= 0.0 ) discard; } ` const GRID_INCREAMENT = 1.5 export default class
extends Mesh { plane: Plane intersectionPointWorld: Vector3 intersection: any constructor(size1?, size2?, color?, distance?) { color = color || new Color('white') size1 = size1 || 1 size2 = size2 || 10 distance = distance || 8000 const geometry = new PlaneBufferGeometry(2, 2, 1, 1) const material = new ShaderMaterial({ side: DoubleSide, uniforms: { uSize1: { value: size1 }, uSize2: { value: size2 }, uColor: { value: color }, uDistance: { value: distance } }, transparent: true, vertexShader, fragmentShader, extensions: { derivatives: true } }) super(geometry, material) this.visible = true this.name = 'EditorInfiniteGridHelper' setObjectLayers(this, ObjectLayers.Scene) addIsHelperFlag(this) this.frustumCulled = false this.plane = new Plane(this.up) this.intersectionPointWorld = new Vector3() this.intersection = { distance: 0, point: this.intersectionPointWorld, object: this } } setSize(size) { ;(this.material as any).uniforms.uSize1.value = size ;(this.material as any).uniforms.uSize2.value = size * 10 } raycast(raycaster, intersects) { const point = new Vector3() const intersection = raycaster.ray.intersectPlane(this.plane, point) if (intersection === null) return null this.intersectionPointWorld.copy(point) this.intersectionPointWorld.applyMatrix4(this.matrixWorld) const distance = raycaster.ray.origin.distanceTo(this.intersectionPointWorld) if (distance < raycaster.near || distance > raycaster.far) return null this.intersection.distance = distance intersects.push(this.intersection) } incrementGridHeight() { this.setGridHeight(this.position.y + GRID_INCREAMENT) } decrementGridHeight() { this.setGridHeight(this.position.y - GRID_INCREAMENT) } setGridHeight(value) { this.position.y = value CommandManager.instance.emitEvent(EditorEvents.GRID_HEIGHT_CHANGED, value) } toggleGridVisible() { this.visible = !this.visible CommandManager.instance.emitEvent(EditorEvents.GRID_VISIBILITY_CHANGED, this.visible) } }
EditorInfiniteGridHelper
10-secure-channel-via-streams-initiator.rs
use ockam::{route, stream::Stream, Context, Result, SecureChannel, TcpTransport, Vault, TCP}; #[ockam::node] async fn
(mut ctx: Context) -> Result<()> { let _tcp = TcpTransport::create(&ctx).await?; // Set the address of the Kafka node you created here. (e.g. "192.0.2.1:4000") let hub_node_tcp_address = "<Your node Address copied from hub.ockam.network>"; // Create a vault let vault = Vault::create(&ctx).await?; // Create a stream client let (sender, _receiver) = Stream::new(&ctx) .await? .stream_service("stream_kafka") .index_service("stream_kafka_index") .client_id("secure-channel-over-stream-over-cloud-node-initiator") .connect( route![(TCP, hub_node_tcp_address)], // route to hub "sc-initiator-to-responder", // outgoing stream "sc-responder-to-initiator", // incoming stream ) .await?; // Create a secure channel let secure_channel = SecureChannel::create( &ctx, route![ sender.clone(), // via the "sc-initiator-to-responder" stream "secure_channel_listener" // to the "secure_channel_listener" listener ], &vault, ) .await?; // Send a message ctx.send( route![ secure_channel.address(), // via the secure channel "echoer" // to the "echoer" worker ], "Hello World!".to_string(), ) .await?; // Receive a message from the "sc-responder-to-initiator" stream let reply = ctx.receive_block::<String>().await?; println!("Reply through secure channel via stream: {}", reply); ctx.stop().await }
main
usb.rs
// Copyright 2020 Shift Cryptosecurity AG // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use futures::channel::mpsc; use futures::channel::oneshot; use futures::lock::Mutex; use futures::prelude::*; use hidapi::HidApi; use hidapi_async::Device; use std::collections::{hash_map::Entry, HashMap}; use std::sync::Arc; use std::time::Duration; use std::time::SystemTime; use u2fframing::{U2FFraming, U2fHid, U2fWs}; struct DeviceEntry { acquired: DeviceAcquiredState, product: String, } enum DeviceAcquiredState { Available, Acquired(mpsc::Sender<oneshot::Sender<()>>), } impl std::fmt::Debug for DeviceEntry { fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> { match &self.acquired { DeviceAcquiredState::Available => write!(f, "Avilable ({})", self.product)?, DeviceAcquiredState::Acquired { .. } => write!(f, "Acquired")?, } Ok(()) } } impl DeviceEntry { pub fn new(product: &str) -> Self { DeviceEntry { acquired: DeviceAcquiredState::Available, product: product.to_string(), } } pub fn product(&self) -> &str { &self.product } pub fn acquire(&mut self, tx: mpsc::Sender<oneshot::Sender<()>>) { self.acquired = DeviceAcquiredState::Acquired(tx); } pub async fn
(&mut self) { if let DeviceAcquiredState::Acquired(tx) = &mut self.acquired { // We use a oneshot channel to communicate that the device has been successfully // dropped. The "device_loop" task will first drop the device and then drop this // Sender. let (close_tx, close_rx) = oneshot::channel(); if let Err(_e) = tx.send(close_tx).await { error!("failed to send"); } let _ = close_rx.await; // Error here is expected } self.acquired = DeviceAcquiredState::Available; } } pub struct UsbDevices { devices: Arc<Mutex<HashMap<String, DeviceEntry>>>, hidapi: Arc<Mutex<HidApi>>, } impl Clone for UsbDevices { fn clone(&self) -> Self { UsbDevices { devices: Arc::clone(&self.devices), hidapi: Arc::clone(&self.hidapi), } } } impl UsbDevices { pub fn new() -> Result<Self, Box<dyn std::error::Error>> { Ok(UsbDevices { devices: Default::default(), hidapi: Arc::new(Mutex::new(HidApi::new()?)), }) } pub async fn devices(&self) -> Vec<HashMap<String, String>> { self.devices .lock() .await .iter() .map(|device| { let mut d = HashMap::new(); d.insert( "path".into(), percent_encoding::utf8_percent_encode( &device.0, percent_encoding::NON_ALPHANUMERIC, ) .to_string(), ); d.insert("product".into(), device.1.product().to_string()); d }) .collect() } pub async fn presence_detector( self, mut notify_rx: mpsc::Receiver<()>, ) -> Result<(), Box<dyn std::error::Error>> { loop { // Wait here until we are notified of new request let _ = notify_rx.next().await; info!("Notified!"); let mut last_seen = None; loop { self.refresh().await?; // Stop iterating in case wallets are plugged out and there haven't been any // communication in a while. if self.devices.lock().await.len() == 0 { match last_seen { None => last_seen = Some(SystemTime::now()), Some(last_seen) => { if last_seen.elapsed()? > Duration::from_secs(5) { break; } } } } else { last_seen = None; } tokio::time::delay_for(Duration::from_millis(200)).await; } } } pub async fn refresh(&self) -> Result<(), Box<dyn std::error::Error>> { self.hidapi.lock().await.refresh_devices()?; let mut seen = Vec::new(); let mut devices_guard = self.devices.lock().await; for device in self.hidapi.lock().await.devices() { // TODO(nc): On windows interface_number is -1. How to distinguish hww? 
if device.vendor_id == 0x03eb && device.product_id == 0x2403 && (device.interface_number == 0 || device.interface_number == -1) { let path = match device.path.as_ref().to_str() { Ok(path) => path, Err(e) => { warn!("ignored: {}", e); continue; } }; let product = match device.product_string.as_ref() { Some(product) => product, None => { warn!("ignored: no product"); continue; } }; seen.push(path.to_string()); match devices_guard.entry(path.to_string()) { Entry::Occupied(_) => (), Entry::Vacant(v) => { info!("Found BitBox02 at {}!", path); v.insert(DeviceEntry::new(&product)); } } } } // Remove all devices that wasn't seen devices_guard.retain(|k, _| seen.contains(&k)); Ok(()) } pub async fn acquire_device( &self, path: &str, ) -> Result<Option<(mpsc::Sender<Vec<u8>>, mpsc::Receiver<Vec<u8>>)>, Box<dyn std::error::Error>> { if let Some(device) = self.devices.lock().await.get_mut(path) { // Make sure device is released device.release().await; let (in_tx, in_rx) = mpsc::channel(128); let (out_tx, out_rx) = mpsc::channel(128); let path_cstr = std::ffi::CString::new(path)?; let hiddevice = self.hidapi.lock().await.open_path(&path_cstr)?; let hiddevice = Device::new(hiddevice)?; info!("Successfully acquired device: {}", path); let (on_close_tx, on_close_rx) = mpsc::channel(1); device.acquire(on_close_tx); tokio::spawn(device_loop(hiddevice, in_rx, out_tx, on_close_rx)); Ok(Some((in_tx, out_rx))) } else { Ok(None) } } } async fn handle_msg( device: &mut Device, msg: Vec<u8>, out_tx: &mut mpsc::Sender<Vec<u8>>, ) -> Result<(), Box<dyn std::error::Error>> { let (cid, cmd, _) = u2fframing::parse_header(&msg[..])?; let mut wscodec = U2fWs::with_cid(cid, cmd); let res = wscodec.decode(&msg[..])?.ok_or_else(|| { std::io::Error::new( std::io::ErrorKind::Other, "not enough data in websocket message", ) })?; let mut hidcodec = U2fHid::new(cmd); let mut buf = [0u8; 7 + 7609]; // Maximally supported size by u2f let len = hidcodec.encode(&res[..], &mut buf[..])?; device.write_all(&buf[..len]).await?; let mut len = 0; loop { let this_len = device.read(&mut buf[len..]).await?; len += this_len; let res = hidcodec.decode(&buf[..len])?; if let Some(res) = res { if let Ok(len) = wscodec.encode(&res[..], &mut buf[..]) { if let Err(e) = out_tx.send(buf[..len].to_vec()).await { error!("Failed to send internally: {}", e); } } break; } // Loop to read out more data from device } Ok(()) } async fn device_loop( mut device: Device, mut in_rx: mpsc::Receiver<Vec<u8>>, mut out_tx: mpsc::Sender<Vec<u8>>, mut on_close_rx: mpsc::Receiver<oneshot::Sender<()>>, ) { loop { tokio::select! { msg = in_rx.next() => { if let Some(msg) = msg { if let Err(e) = handle_msg(&mut device, msg, &mut out_tx).await { error!("message ignored: {}", e); } } else { error!("dev channel closed"); return; } }, close_tx = on_close_rx.next() => { if let Some(_close_tx) = close_tx { // We drop the device explitly so that it is dropped before the Sender we were sent drop(device); } else { // When the device is plugged out, the other end of the channel will be dropped and // then this future will resolve to None since the stream has ended. info!("Device was plugged out"); } return; } } } }
release
sound.rs
use amethyst_assets::AssetStorage; use amethyst_audio::{output::OutputWrapper, Source, SourceHandle}; use amethyst_core::{ ecs::*, shrev::{EventChannel, ReaderId}, }; #[cfg(feature = "profiler")] use thread_profiler::profile_scope; use crate::{ event::{UiEvent, UiEventType::*}, event_retrigger::{EventRetrigger, EventRetriggerSystem}, EventReceiver, }; /// Provides an `EventRetriggerSystem` that will handle incoming `UiEvent`s /// and trigger `UiPlaySoundAction`s for entities with attached /// `UiSoundRetrigger` components. pub fn ui_sound_event_retrigger_system( reader_id: ReaderId<UiEvent>, ) -> EventRetriggerSystem<UiSoundRetrigger> { EventRetriggerSystem::<UiSoundRetrigger>::new(reader_id) } /// Action that will trigger a sound to play in `UiSoundSystem`. #[derive(Debug, Clone)] pub struct UiPlaySoundAction(pub SourceHandle); /// Attach this to an entity to play the respective sound when a `UiEvent` /// targets the entity. #[derive(Debug, Clone)] pub struct UiSoundRetrigger { /// The sound that is played when the user begins a click on the entity pub on_click_start: Option<UiPlaySoundAction>, /// The sound that is played when the user ends a click on the entity pub on_click_stop: Option<UiPlaySoundAction>, /// The sound that is played when the user starts hovering over the entity pub on_hover_start: Option<UiPlaySoundAction>, /// The sound that is played when the user stops hovering over the entity pub on_hover_stop: Option<UiPlaySoundAction>, } impl EventRetrigger for UiSoundRetrigger { type In = UiEvent; type Out = UiPlaySoundAction; fn apply<R>(&self, event: &Self::In, out: &mut R) where R: EventReceiver<Self::Out>, { let event_to_trigger = match &event.event_type { ClickStart => &self.on_click_start, ClickStop => &self.on_click_stop, HoverStart => &self.on_hover_start, HoverStop => &self.on_hover_stop, _ => return, }; if let Some(ev) = event_to_trigger {
} } } /// Handles any dispatches `UiPlaySoundAction`s and plays the received /// sounds through the set `Output`. #[derive(Debug)] pub struct UiSoundSystem { event_reader: ReaderId<UiPlaySoundAction>, } impl UiSoundSystem { /// Constructs a default `UiSoundSystem`. Since the `event_reader` /// will automatically be fetched when the system is set up, this should /// always be used to construct the `UiSoundSystem`. pub fn new(event_reader: ReaderId<UiPlaySoundAction>) -> Self { Self { event_reader } } } impl System for UiSoundSystem { fn build(mut self) -> Box<dyn ParallelRunnable> { Box::new( SystemBuilder::new("UiSoundSystem") .write_resource::<EventChannel<UiPlaySoundAction>>() .read_resource::<AssetStorage<Source>>() .read_resource::<OutputWrapper>() .build( move |_commands, _world, (sound_events, audio_storage, audio_output_wrapper), _| { #[cfg(feature = "profiler")] profile_scope!("ui_sound_system"); let event_reader = &mut self.event_reader; for event in sound_events.read(event_reader) { if let Some(sound) = audio_storage.get(&event.0) { if let Some(output) = &audio_output_wrapper.output { log::trace!("Playing sound"); output.play_once(sound, 1.0); } } } }, ), ) } }
out.receive_one(ev);
parser.py
""" CPDParser parses the ConsensusPathDB_human_PPI data file and yields a generated dictionary of values. Source Project: biothings.interactions Author: Greg Taylor: [email protected] """ import hashlib import re from hub.dataload.BiointeractParser import BiointeractParser class CPDParser(BiointeractParser): # Static Constants EMPTY_FIELD = 'NA' SEPARATOR = ',' HUMAN = '_HUMAN' @staticmethod def parse_interaction_participants(entry): """ Parse all interaction participants given as string from the tsv file. The resulting participant identifier strings will be returned with a trailing '_HUMAN' removed at the end. :param entry: a string representing the list :return: list of strings """ vals = CPDParser.parse_list(entry, CPDParser.SEPARATOR) return list(map((lambda x: x.replace(CPDParser.HUMAN, '')), vals)) if vals else None @staticmethod def parse_interaction_publications(entry): """ Parse all interaction publications given as a string from the tsv file. The resulting publication identifier strings will be converted to a list of integers representing pubmed identifiers. :param entry: a string representing the list :return: list of integers """ vals = CPDParser.parse_list(entry, CPDParser.SEPARATOR) return list(map(CPDParser.safe_int, vals)) if vals else None @staticmethod def parse_source_databases(entry): """ Parse all source databases given as a string from the tsv file. :param entry: a string representing the list :return: list of strings """ return CPDParser.parse_list(entry, CPDParser.SEPARATOR) @staticmethod def parse_cpd_tsv_line(line_dict): """ Parse a dictionary representing a tsv line with a key, value pair for each column in the tsv file. :param line_dict: a tsv line dictionary :return: a dictionary representing a parsed biogrid record """ # Replace all empty fields with None r = {k: v if v != CPDParser.EMPTY_FIELD else None for k, v in line_dict.items()} r['interaction_confidence'] = CPDParser.safe_float(r['interaction_confidence']) r['interaction_participants'] = CPDParser.parse_interaction_participants(r['interaction_participants']) r['interaction_publications'] = CPDParser.parse_interaction_publications(r['interaction_publications']) r['source_databases'] = CPDParser.parse_source_databases(r['source_databases']) # Readjust for biothings.api record format new_record = dict() new_record['cpd'] = r new_record['_id'] = CPDParser.compute_id(r['interaction_participants']) # Sweep all empty values new_record = CPDParser.sweep_record(new_record) return new_record @staticmethod def
(f): """ Parse a tab-separated biogrid file opened in binary mode. :param f: file opened for reading in binary mode :return: yields a generator of parsed objects """ for (i, line) in enumerate(f): line = line.strip('\n') # The first commented line is the database description # The second commented line contains the column headers if i == 1: line = line.replace("# ", '') # Delete the comment prefix header_dict = dict(enumerate(line.split('\t'))) print(header_dict) # All subsequent lines contain row data elif i > 1: _r = {} for (pos, val) in enumerate(line.split('\t')): _r[header_dict[pos]] = val yield CPDParser.parse_cpd_tsv_line(_r) @staticmethod def compute_id(participate_lst): """ Calculate an id field given a list of participants (which are gene symbols). :param participate_lst: :return: """ symbols = '-'.join(participate_lst) hash_object = hashlib.md5(symbols.encode('utf-8')) symbol_hash = hash_object.hexdigest() return 'symbol:{}'.format(symbol_hash)
parse_cpd_tsv_file
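A note on the `_id` scheme above: compute_id joins the participant gene symbols and md5-hashes them, so the same participant list always maps to the same document identifier. A minimal self-contained sketch of just that idea (the participant symbols below are hypothetical, and the parse_list/safe_int helpers presumably inherited from BiointeractParser are not needed for it):

import hashlib

def compute_interaction_id(participants):
    # Join the gene symbols and hash them so the same participant list
    # always maps to the same '_id'.
    symbols = '-'.join(participants)
    return 'symbol:{}'.format(hashlib.md5(symbols.encode('utf-8')).hexdigest())

# Hypothetical participants, for illustration only.
print(compute_interaction_id(['TP53', 'MDM2']))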
browser_adapter.js
/** * @fileoverview added by tsickle * Generated from: packages/platform-browser/src/browser/browser_adapter.ts * @suppress {checkTypes,constantProperty,extraRequire,missingOverride,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc */ /** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ import { ɵparseCookieValue as parseCookieValue, ɵsetRootDomAdapter as setRootDomAdapter } from '@angular/common'; import { ɵglobal as global } from '@angular/core'; import { GenericBrowserDomAdapter } from './generic_browser_adapter'; const ɵ0 = /** * @return {?} */ () => { if (global['Node']) { return global['Node'].prototype.contains || (/** * @this {?} * @param {?} node * @return {?} */ function (node) { return !!(this.compareDocumentPosition(node) & 16); }); } return (/** @type {?} */ (undefined)); }; /** @type {?} */ const nodeContains = ((ɵ0))(); /** * A `DomAdapter` powered by full browser DOM APIs. * * \@security Tread carefully! Interacting with the DOM directly is dangerous and * can introduce XSS risks. */ /* tslint:disable:requireParameterType no-console */ export class Brows
nds GenericBrowserDomAdapter { /** * @return {?} */ static makeCurrent() { setRootDomAdapter(new BrowserDomAdapter()); } /** * @param {?} el * @param {?} name * @return {?} */ getProperty(el, name) { return ((/** @type {?} */ (el)))[name]; } /** * @param {?} error * @return {?} */ log(error) { if (window.console) { window.console.log && window.console.log(error); } } /** * @param {?} error * @return {?} */ logGroup(error) { if (window.console) { window.console.group && window.console.group(error); } } /** * @return {?} */ logGroupEnd() { if (window.console) { window.console.groupEnd && window.console.groupEnd(); } } /** * @param {?} el * @param {?} evt * @param {?} listener * @return {?} */ onAndCancel(el, evt, listener) { el.addEventListener(evt, listener, false); // Needed to follow Dart's subscription semantic, until fix of // https://code.google.com/p/dart/issues/detail?id=17406 return (/** * @return {?} */ () => { el.removeEventListener(evt, listener, false); }); } /** * @param {?} el * @param {?} evt * @return {?} */ dispatchEvent(el, evt) { el.dispatchEvent(evt); } /** * @param {?} node * @return {?} */ remove(node) { if (node.parentNode) { node.parentNode.removeChild(node); } return node; } /** * @param {?} el * @return {?} */ getValue(el) { return el.value; } /** * @param {?} tagName * @param {?=} doc * @return {?} */ createElement(tagName, doc) { doc = doc || this.getDefaultDocument(); return doc.createElement(tagName); } /** * @return {?} */ createHtmlDocument() { return document.implementation.createHTMLDocument('fakeTitle'); } /** * @return {?} */ getDefaultDocument() { return document; } /** * @param {?} node * @return {?} */ isElementNode(node) { return node.nodeType === Node.ELEMENT_NODE; } /** * @param {?} node * @return {?} */ isShadowRoot(node) { return node instanceof DocumentFragment; } /** * @param {?} doc * @param {?} target * @return {?} */ getGlobalEventTarget(doc, target) { if (target === 'window') { return window; } if (target === 'document') { return doc; } if (target === 'body') { return doc.body; } return null; } /** * @return {?} */ getHistory() { return window.history; } /** * @return {?} */ getLocation() { return window.location; } /** * @param {?} doc * @return {?} */ getBaseHref(doc) { /** @type {?} */ const href = getBaseElementHref(); return href == null ? null : relativePath(href); } /** * @return {?} */ resetBaseElement() { baseElement = null; } /** * @return {?} */ getUserAgent() { return window.navigator.userAgent; } /** * @return {?} */ performanceNow() { // performance.now() is not available in all browsers, see // http://caniuse.com/#search=performance.now return window.performance && window.performance.now ? window.performance.now() : new Date().getTime(); } /** * @return {?} */ supportsCookies() { return true; } /** * @param {?} name * @return {?} */ getCookie(name) { return parseCookieValue(document.cookie, name); } } /** @type {?} */ let baseElement = null; /** * @return {?} */ function getBaseElementHref() { if (!baseElement) { baseElement = (/** @type {?} */ (document.querySelector('base'))); if (!baseElement) { return null; } } return baseElement.getAttribute('href'); } // based on urlUtils.js in AngularJS 1 /** @type {?} */ let urlParsingNode; /** * @param {?} url * @return {?} */ function relativePath(url) { if (!urlParsingNode) { urlParsingNode = document.createElement('a'); } urlParsingNode.setAttribute('href', url); return (urlParsingNode.pathname.charAt(0) === '/') ? 
urlParsingNode.pathname : '/' + urlParsingNode.pathname; } export { ɵ0 }; //# sourceMappingURL=data:application/json;base64,{"version":3,"file":"browser_adapter.js","sourceRoot":"","sources":["../../../../../../../packages/platform-browser/src/browser/browser_adapter.ts"],"names":[],"mappings":";;;;;;;;;;;;AAQA,OAAO,EAAC,iBAAiB,IAAI,gBAAgB,EAAE,kBAAkB,IAAI,iBAAiB,EAAC,MAAM,iBAAiB,CAAC;AAC/G,OAAO,EAAC,OAAO,IAAI,MAAM,EAAC,MAAM,eAAe,CAAC;AAEhD,OAAO,EAAC,wBAAwB,EAAC,MAAM,2BAA2B,CAAC;;;;AAEP,GAAG,EAAE;IAC/D,IAAI,MAAM,CAAC,MAAM,CAAC,EAAE;QAClB,OAAO,MAAM,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,QAAQ;;;;;QAAI,UAAqB,IAAS;YACxE,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,uBAAuB,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,CAAC;QACrD,CAAC,CAAA,CAAC;KACH;IAED,OAAO,mBAAA,SAAS,EAAO,CAAC;AAC1B,CAAC;;MARK,YAAY,GAAyC,MAQzD,EAAE;;;;;;;;AASJ,MAAM,OAAO,iBAAkB,SAAQ,wBAAwB;;;;IAC7D,MAAM,CAAC,WAAW;QAChB,iBAAiB,CAAC,IAAI,iBAAiB,EAAE,CAAC,CAAC;IAC7C,CAAC;;;;;;IACD,WAAW,CAAC,EAAQ,EAAE,IAAY;QAChC,OAAO,CAAC,mBAAK,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,CAAC;IACzB,CAAC;;;;;IAED,GAAG,CAAC,KAAa;QACf,IAAI,MAAM,CAAC,OAAO,EAAE;YAClB,MAAM,CAAC,OAAO,CAAC,GAAG,IAAI,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;SACjD;IACH,CAAC;;;;;IAED,QAAQ,CAAC,KAAa;QACpB,IAAI,MAAM,CAAC,OAAO,EAAE;YAClB,MAAM,CAAC,OAAO,CAAC,KAAK,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;SACrD;IACH,CAAC;;;;IAED,WAAW;QACT,IAAI,MAAM,CAAC,OAAO,EAAE;YAClB,MAAM,CAAC,OAAO,CAAC,QAAQ,IAAI,MAAM,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC;SACtD;IACH,CAAC;;;;;;;IAED,WAAW,CAAC,EAAQ,EAAE,GAAQ,EAAE,QAAa;QAC3C,EAAE,CAAC,gBAAgB,CAAC,GAAG,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAC;QAC1C,8DAA8D;QAC9D,wDAAwD;QACxD;;;QAAO,GAAG,EAAE;YACV,EAAE,CAAC,mBAAmB,CAAC,GAAG,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAC;QAC/C,CAAC,EAAC;IACJ,CAAC;;;;;;IACD,aAAa,CAAC,EAAQ,EAAE,GAAQ;QAC9B,EAAE,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC;IACxB,CAAC;;;;;IACD,MAAM,CAAC,IAAU;QACf,IAAI,IAAI,CAAC,UAAU,EAAE;YACnB,IAAI,CAAC,UAAU,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;SACnC;QACD,OAAO,IAAI,CAAC;IACd,CAAC;;;;;IACD,QAAQ,CAAC,EAAO;QACd,OAAO,EAAE,CAAC,KAAK,CAAC;IAClB,CAAC;;;;;;IACD,aAAa,CAAC,OAAe,EAAE,GAAc;QAC3C,GAAG,GAAG,GAAG,IAAI,IAAI,CAAC,kBAAkB,EAAE,CAAC;QACvC,OAAO,GAAG,CAAC,aAAa,CAAC,OAAO,CAAC,CAAC;IACpC,CAAC;;;;IACD,kBAAkB;QAChB,OAAO,QAAQ,CAAC,cAAc,CAAC,kBAAkB,CAAC,WAAW,CAAC,CAAC;IACjE,CAAC;;;;IACD,kBAAkB;QAChB,OAAO,QAAQ,CAAC;IAClB,CAAC;;;;;IAED,aAAa,CAAC,IAAU;QACtB,OAAO,IAAI,CAAC,QAAQ,KAAK,IAAI,CAAC,YAAY,CAAC;IAC7C,CAAC;;;;;IAED,YAAY,CAAC,IAAS;QACpB,OAAO,IAAI,YAAY,gBAAgB,CAAC;IAC1C,CAAC;;;;;;IAED,oBAAoB,CAAC,GAAa,EAAE,MAAc;QAChD,IAAI,MAAM,KAAK,QAAQ,EAAE;YACvB,OAAO,MAAM,CAAC;SACf;QACD,IAAI,MAAM,KAAK,UAAU,EAAE;YACzB,OAAO,GAAG,CAAC;SACZ;QACD,IAAI,MAAM,KAAK,MAAM,EAAE;YACrB,OAAO,GAAG,CAAC,IAAI,CAAC;SACjB;QACD,OAAO,IAAI,CAAC;IACd,CAAC;;;;IACD,UAAU;QACR,OAAO,MAAM,CAAC,OAAO,CAAC;IACxB,CAAC;;;;IACD,WAAW;QACT,OAAO,MAAM,CAAC,QAAQ,CAAC;IACzB,CAAC;;;;;IACD,WAAW,CAAC,GAAa;;cACjB,IAAI,GAAG,kBAAkB,EAAE;QACjC,OAAO,IAAI,IAAI,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,YAAY,CAAC,IAAI,CAAC,CAAC;IAClD,CAAC;;;;IACD,gBAAgB;QACd,WAAW,GAAG,IAAI,CAAC;IACrB,CAAC;;;;IACD,YAAY;QACV,OAAO,MAAM,CAAC,SAAS,CAAC,SAAS,CAAC;IACpC,CAAC;;;;IACD,cAAc;QACZ,0DAA0D;QAC1D,6CAA6C;QAC7C,OAAO,MAAM,CAAC,WAAW,IAAI,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,CAAC;YAC1B,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC;IAC7E,CAAC;;;;IAED,eAAe;QACb,OAAO,IAAI,CAAC;IACd,CAAC;;;;;IAED,SAAS,CAAC,IAAY;QACpB,OAAO,gBAAgB,CAAC,QAAQ,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;IACjD,CAAC;CACF;;IAEG,WAAW,GAAqB,IAAI;;;;AACxC,SAAS,kBAAkB;IACzB,IAAI,CAAC,WAAW,EAAE;QAChB,WAAW,GAAG,mBAAA,QAAQ,CAAC,aAAa,CAAC,MAAM,CAAC,EAAC,CAAC;QAC9C,IAAI,CAAC,WAAW,EAAE;YAChB,OAAO,IAAI,CAAC;SACb;K
ACF;IACD,OAAO,WAAW,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC;AAC1C,CAAC;;;IAGG,cAAmB;;;;;AACvB,SAAS,YAAY,CAAC,GAAQ;IAC5B,IAAI,CAAC,cAAc,EAAE;QACnB,cAAc,GAAG,QAAQ,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC;KAC9C;IACD,cAAc,CAAC,YAAY,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IACzC,OAAO,CAAC,cAAc,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;QACzB,GAAG,GAAG,cAAc,CAAC,QAAQ,CAAC;AACrF,CAAC","sourcesContent":["/**\n * @license\n * Copyright Google Inc. All Rights Reserved.\n *\n * Use of this source code is governed by an MIT-style license that can be\n * found in the LICENSE file at https://angular.io/license\n */\n\nimport {ɵparseCookieValue as parseCookieValue, ɵsetRootDomAdapter as setRootDomAdapter} from '@angular/common';\nimport {ɵglobal as global} from '@angular/core';\n\nimport {GenericBrowserDomAdapter} from './generic_browser_adapter';\n\nconst nodeContains: (this: Node, other: Node) => boolean = (() => {\n  if (global['Node']) {\n    return global['Node'].prototype.contains || function(this: Node, node: any) {\n      return !!(this.compareDocumentPosition(node) & 16);\n    };\n  }\n\n  return undefined as any;\n})();\n\n/**\n * A `DomAdapter` powered by full browser DOM APIs.\n *\n * @security Tread carefully! Interacting with the DOM directly is dangerous and\n * can introduce XSS risks.\n */\n/* tslint:disable:requireParameterType no-console */\nexport class BrowserDomAdapter extends GenericBrowserDomAdapter {\n  static makeCurrent() {\n    setRootDomAdapter(new BrowserDomAdapter());\n  }\n  getProperty(el: Node, name: string): any {\n    return (<any>el)[name];\n  }\n\n  log(error: string): void {\n    if (window.console) {\n      window.console.log && window.console.log(error);\n    }\n  }\n\n  logGroup(error: string): void {\n    if (window.console) {\n      window.console.group && window.console.group(error);\n    }\n  }\n\n  logGroupEnd(): void {\n    if (window.console) {\n      window.console.groupEnd && window.console.groupEnd();\n    }\n  }\n\n  onAndCancel(el: Node, evt: any, listener: any): Function {\n    el.addEventListener(evt, listener, false);\n    // Needed to follow Dart's subscription semantic, until fix of\n    // https://code.google.com/p/dart/issues/detail?id=17406\n    return () => {\n      el.removeEventListener(evt, listener, false);\n    };\n  }\n  dispatchEvent(el: Node, evt: any) {\n    el.dispatchEvent(evt);\n  }\n  remove(node: Node): Node {\n    if (node.parentNode) {\n      node.parentNode.removeChild(node);\n    }\n    return node;\n  }\n  getValue(el: any): string {\n    return el.value;\n  }\n  createElement(tagName: string, doc?: Document): HTMLElement {\n    doc = doc || this.getDefaultDocument();\n    return doc.createElement(tagName);\n  }\n  createHtmlDocument(): HTMLDocument {\n    return document.implementation.createHTMLDocument('fakeTitle');\n  }\n  getDefaultDocument(): Document {\n    return document;\n  }\n\n  isElementNode(node: Node): boolean {\n    return node.nodeType === Node.ELEMENT_NODE;\n  }\n\n  isShadowRoot(node: any): boolean {\n    return node instanceof DocumentFragment;\n  }\n\n  getGlobalEventTarget(doc: Document, target: string): EventTarget|null {\n    if (target === 'window') {\n      return window;\n    }\n    if (target === 'document') {\n      return doc;\n    }\n    if (target === 'body') {\n      return doc.body;\n    }\n    return null;\n  }\n  getHistory(): History {\n    return window.history;\n  }\n  getLocation(): Location {\n    return window.location;\n  }\n  getBaseHref(doc: Document): string|null 
{\n    const href = getBaseElementHref();\n    return href == null ? null : relativePath(href);\n  }\n  resetBaseElement(): void {\n    baseElement = null;\n  }\n  getUserAgent(): string {\n    return window.navigator.userAgent;\n  }\n  performanceNow(): number {\n    // performance.now() is not available in all browsers, see\n    // http://caniuse.com/#search=performance.now\n    return window.performance && window.performance.now ? window.performance.now() :\n                                                          new Date().getTime();\n  }\n\n  supportsCookies(): boolean {\n    return true;\n  }\n\n  getCookie(name: string): string|null {\n    return parseCookieValue(document.cookie, name);\n  }\n}\n\nlet baseElement: HTMLElement|null = null;\nfunction getBaseElementHref(): string|null {\n  if (!baseElement) {\n    baseElement = document.querySelector('base')!;\n    if (!baseElement) {\n      return null;\n    }\n  }\n  return baseElement.getAttribute('href');\n}\n\n// based on urlUtils.js in AngularJS 1\nlet urlParsingNode: any;\nfunction relativePath(url: any): string {\n  if (!urlParsingNode) {\n    urlParsingNode = document.createElement('a');\n  }\n  urlParsingNode.setAttribute('href', url);\n  return (urlParsingNode.pathname.charAt(0) === '/') ? urlParsingNode.pathname :\n                                                       '/' + urlParsingNode.pathname;\n}\n"]}
erDomAdapter exte
exr2png.py
import os import sys import pyexr import numpy as np from PIL import Image import re def exec(): filepaths = [] savepaths = [] images = [] maxvalues = [] # Prep variable filelist = os.listdir("output") for file in filelist: if file.endswith(".exr"):
savepath = sys.argv[0][:-len("exr2png.py")] + "../../plots/renders/" image = pyexr.open(filepath).get() images.append(image) maxvalues.append(np.max(image)) filepaths.append(filepath) scenename = re.match(r".*(crown|measure-one|villa|killeroo|hair|ecosys|landscape).*", file)[1] savepaths.append(savepath + scenename + ".png") for i in range(len(images)): #images[i] *= 16 / maxvalues[i] images[i] = np.where(images[i]<=0.0031308,12.92 * images[i], 1.055*(images[i]**(1/2.4)) - 0.055) images[i] = np.clip(images[i], 0, 1) images[i] = (images[i] * 255).astype(np.uint8) Image.fromarray(images[i]).save(savepaths[i]) exec()
filepath = os.path.join("output", file)
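The gamma step inside the loop above is the standard linear-to-sRGB transfer function: a linear segment below 0.0031308 and a 1/2.4 power curve above it. A small stand-alone sketch of just that conversion, assuming the input values are already clipped to [0, 1]:

import numpy as np

def linear_to_srgb(x):
    # sRGB encoding: linear ramp near black, 1/2.4 power curve elsewhere.
    x = np.clip(x, 0.0, 1.0)
    return np.where(x <= 0.0031308, 12.92 * x, 1.055 * np.power(x, 1 / 2.4) - 0.055)

# Hypothetical linear radiance values, for illustration only.
print(linear_to_srgb(np.array([0.0, 0.001, 0.18, 1.0])))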
ovs.go
/*** Copyright 2014 Cisco Systems Inc. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package utils import ( "log" )
cmdStr := "sudo ovs-vsctl show" output, _ := node.RunCommandWithOutput(cmdStr) log.Printf("ovs-vsctl on node %s: \n%s\n", node.Name, output) }
func OvsDumpInfo(node VagrantNode) {
othermsg.go
package cmd import ( "fmt" ) func
(msg, loghost string) error { fmt.Printf("My message is '%s' and I'm logging it to '%s'\n", msg, loghost) return nil }
sendOtherMsg
list.py
# create list a = ['a', 'b', 10, 1000] # Sublists, Indexes of List # Slice range notation generates shallow copy of list simple_list = ['first', 'second', "third"] print 'simple list: ' + str(simple_list) # list index print 'simple index: ' + simple_list[0] # negative index print 'negative index: ' + simple_list[-1] # sublist print 'range 0:1 : ' + str(simple_list[0:1]) # sublist with negative print 'range -2:-1 : ' + str(simple_list[-2: -1]) # default zero as first argument print 'range :1 : ' + str(simple_list[:1]) # default max index value as a second argument print 'range 1: : ' + str(simple_list[1:]) # default min and max: print 'range : : ' + str(simple_list[:]) # list length print 'length of the list: ' + str(simple_list) + ' is: ' + str(len(simple_list)) # substitution of elements in list simple_list[0] = 'substitute' print 'list after substitution: ' + str(simple_list) # remove from list simple_list[1:2] = [] print 'list with removed second element ' + str(simple_list) # nested lists # create nested list nested_list = ['first', ['first_nested', 'second nested'], 'third'] print nested_list # list as a stack (lifo) stack = [] stack.append(1) stack.append(2) print stack.pop() print stack.pop() # filter function example def even_num(x): return x % 2 == 0 print filter(even_num, range(1,10)) print filter(lambda x: x%2, range(1,10)) # map function example def
(x): return x*x*x print map(cube, range(1,10)) print map(lambda x:x*x*x, range(1,10)) def sum_numbers(a,b): return a+b # reduce function example print reduce(sum_numbers, range(1,10)) print reduce(lambda a,b: a+b, range(1,10)) # remove item from list list_with_item_to_remove = [1,2,3,4] print str(list_with_item_to_remove) del list_with_item_to_remove[0] print str(list_with_item_to_remove)
cube
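The snippet above is Python 2 (print statements; map and filter return lists). A rough Python 3 equivalent of the functional examples, where map/filter are lazy iterators and reduce lives in functools:

from functools import reduce

numbers = list(range(1, 10))

# map/filter return iterators in Python 3, so wrap them in list() before printing.
print(list(filter(lambda x: x % 2 == 0, numbers)))  # even numbers
print(list(map(lambda x: x ** 3, numbers)))         # cubes
print(reduce(lambda a, b: a + b, numbers))          # sum of 1..9 = 45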
11_Template_Matching.py
import cv2 import numpy as np from matplotlib import pyplot as plt img = cv2.imread('a.jpg',0) img2 = img.copy() template = cv2.imread('b.jpg',0) w, h = template.shape[::-1] # All the 6 methods for comparison in a list methods = ['cv2.TM_CCOEFF', 'cv2.TM_CCOEFF_NORMED', 'cv2.TM_CCORR', 'cv2.TM_CCORR_NORMED', 'cv2.TM_SQDIFF', 'cv2.TM_SQDIFF_NORMED'] for meth in methods: img = img2.copy() method = eval(meth) # Apply template Matching res = cv2.matchTemplate(img,template,method) min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(res) # If the method is TM_SQDIFF or TM_SQDIFF_NORMED, take minimum if method in [cv2.TM_SQDIFF, cv2.TM_SQDIFF_NORMED]: top_left = min_loc else: top_left = max_loc bottom_right = (top_left[0] + w, top_left[1] + h) cv2.rectangle(img,top_left, bottom_right, 255, 2) plt.subplot(121),plt.imshow(res,cmap = 'gray')
plt.subplot(122),plt.imshow(img,cmap = 'gray') plt.title('Detected Point'), plt.xticks([]), plt.yticks([]) plt.suptitle(meth) plt.show()
plt.title('Matching Result'), plt.xticks([]), plt.yticks([])
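For the squared-difference methods the best match is the minimum of the response map, which is why top_left switches to min_loc above. A NumPy-only sketch of that scoring, using synthetic arrays so it runs without OpenCV or the image files:

import numpy as np

def sqdiff_match(image, template):
    # Slide the template over the image, score each offset by the sum of
    # squared differences, and take the minimum as the best match.
    ih, iw = image.shape
    th, tw = template.shape
    scores = np.empty((ih - th + 1, iw - tw + 1))
    for y in range(scores.shape[0]):
        for x in range(scores.shape[1]):
            patch = image[y:y + th, x:x + tw]
            scores[y, x] = np.sum((patch - template) ** 2)
    return np.unravel_index(np.argmin(scores), scores.shape), scores

# Synthetic example: the template is cut from the image at offset (3, 4),
# so the minimum of the score map should recover that offset.
rng = np.random.default_rng(0)
img = rng.random((10, 12))
tpl = img[3:6, 4:8].copy()
top_left, _ = sqdiff_match(img, tpl)
print(top_left)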
SettingsActions.ts
import { types } from '../reducers/SettingsReducer' import { ISettingsState, IAction, SettingsActions, ISettingsObject } from '../interfaces/Settings.interface' export const useActions = ( state: ISettingsState, dispatch: (value: IAction) => void ): SettingsActions => { function
(initObject: ISettingsObject) { dispatch({ type: types.INIT, payload: initObject }) } return { init } }
init
_readimpute.py
import numpy as np import anndata as ad import pandas as pd def load_met_noimput(matrix_file, path='', save=False): """ read the raw count matrix and convert it into an AnnData object. write down the matrix as .h5ad (AnnData object) if save = True. Return AnnData object """ matrix = [] cell_names = [] feature_names = [] with open(path+matrix_file) as f: line = f.readline()[:-2].split('\t') if line[0] == 'sample_name': feature_names = line[1:] else: matrix.append(line[1:]) cell_names.append(line[0]) if matrix == []: line = f.readline()[:-2].split('\t') matrix.append(line[1:]) cell_names.append(line[0]) for line in f: line = line[:-2].split('\t') matrix.append(line[1:]) cell_names.append(line[0]) matrix = np.array(matrix) if feature_names != []: adata = ad.AnnData(matrix, obs=pd.DataFrame(index=cell_names), var=pd.DataFrame(index=feature_names)) else: adata = ad.AnnData(matrix, obs=pd.DataFrame(index=cell_names)) adata.uns['omic'] = 'methylation' adata.uns['imputation'] = 'no_imputation' if save: adata.write("".join([".".split(matrix_file)[0],'.h5ad'])) return(adata) def imputation_met(adata, number_cell_covered=10, imputation_value='mean', save=None, copy=False): """ Impute missing values in methyaltion level matrices. The imputsation is based on the average methylation value of the given variable. It also filter out variables that are covered in an unsufficient number of cells in order to reduce the feature space to meaningful variables and discard potential coverage biases. Parameters ---------- adata: AnnData object containing 'nan' number_cell_covered: minimum number of cells to be covered in order to retain a variable imputation_value: imputation of the missing value can be made either on the mean or the median Return ------ Return a new AnnData object """ # This step need to be sped up and could be multithread. # Only the mean available for now. And only the minimum number of cells covered and not the variety of the # methylation levels # also, it odes not return the variable annoations and force to add 2 values old_features = adata.var_names.tolist() new_matrix = [] new_features_name = [] means = [] medians = [] feat_nb = 0 length1 = len(adata.X[0,:]) length2 = len(adata.X[:,0]) adata.obs['coverage_cells'] = [length1 - np.isnan(line).sum() for line in adata.X] adata.obs['mean_cell_methylation'] = [np.nansum(line)/length1 for line in adata.X] adata.var['coverage_feature'] = [length2 - np.isnan(line).sum() for line in adata.X.T] adata.var['mean_feature_methylation'] = [np.nansum(line)/length2 for line in adata.X.T] adata2 = adata[:, adata.var['coverage_feature']>=number_cell_covered].copy() for index in range(len(adata2.var_names.tolist())): adata2.X[:,index] = np.nan_to_num(adata2.X[:,index], nan=adata2.var['mean_feature_methylation'][index]) if save!= None: adata2.write(save.rstrip('.h5ad')+'.h5ad') if copy==False: adata = adata2.copy() else: return(adata2) def readandimputematrix(file_name, min_coverage=1):
Parameters ---------- file_name : file name to read and load min_coverage : minimum number of cells covered for which we keep and impute a variable Returns ------- adata : :class:`~anndata.AnnData` Annotated data matrix. """ with open(file_name) as f: file = f.readlines() # separate annotation from data head_var = file[0] head_var = head_var.split('\t') # Then, extract the sample names sample_names = [] data_raw = [] for l in file[1:]: l = l.split('\t') sample_names.append(l[0]) data_raw.append(l[1:]) # clear memory of useless variables del file ########################################## # now, removing empty columns empties = [] partial = [] full = [] for index in range(1, len(data_raw[0])): column = [element[index] for element in data_raw] if len(list(set(column))) == 1: empties.append(index) elif len(list(set(column))) <= min_coverage: partial.append(index) else: full.append(index) ########################################## intermed_matrix = [] name_windows_covered = [] # let's remove the compltetly uninformative columns for index in range(1, len(head_var[1:])): if index in full: intermed_matrix.append([element[index] for element in data_raw]) name_windows_covered.append(head_var[index]) ######################################## # imputing values. imputed_matrix = [] for row in intermed_matrix: imputed_row = [] if "nan" in row: mean = np.mean([float(e) for e in row if e != "nan"]) for element in row: if element == "nan": imputed_row.append(str(mean)) else: imputed_row.append(element) imputed_matrix.append(imputed_row) else: imputed_matrix.append(row) imputed_matrix = np.matrix(imputed_matrix).transpose() return(ad.AnnData(imputed_matrix, obs=pd.DataFrame(index=sample_names), var=pd.DataFrame(index=name_windows_covered))) #return(imputed_matrix, sample_names, name_windows_covered)
""" Temporary function to load and impute methyaltion count matrix into an AnnData object
change_instance_compartment_request_response.go
// Copyright (c) 2016, 2018, 2021, Oracle and/or its affiliates. All rights reserved. // This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. // Code generated. DO NOT EDIT. package core import ( "github.com/oracle/oci-go-sdk/v53/common" "net/http" ) // ChangeInstanceCompartmentRequest wrapper for the ChangeInstanceCompartment operation // // See also // // Click https://docs.cloud.oracle.com/en-us/iaas/tools/go-sdk-examples/latest/core/ChangeInstanceCompartment.go.html to see an example of how to use ChangeInstanceCompartmentRequest. type ChangeInstanceCompartmentRequest struct { // The OCID (https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the instance. InstanceId *string `mandatory:"true" contributesTo:"path" name:"instanceId"` // Request to change the compartment of a given instance. ChangeInstanceCompartmentDetails `contributesTo:"body"` // For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` // parameter to the value of the etag from a previous GET or POST response for that resource. The resource // will be updated or deleted only if the etag you provide matches the resource's current etag value. IfMatch *string `mandatory:"false" contributesTo:"header" name:"if-match"` // Unique identifier for the request. // If you need to contact Oracle about a particular request, please provide the request ID. OpcRequestId *string `mandatory:"false" contributesTo:"header" name:"opc-request-id"` // A token that uniquely identifies a request so it can be retried in case of a timeout or // server error without risk of executing that same action again. Retry tokens expire after 24 // hours, but can be invalidated before then due to conflicting operations (for example, if a resource // has been deleted and purged from the system, then a retry of the original creation request // may be rejected). OpcRetryToken *string `mandatory:"false" contributesTo:"header" name:"opc-retry-token"` // Metadata about the request. This information will not be transmitted to the service, but // represents information that the SDK will consume to drive retry behavior. RequestMetadata common.RequestMetadata } func (request ChangeInstanceCompartmentRequest) String() string { return common.PointerString(request) } // HTTPRequest implements the OCIRequest interface func (request ChangeInstanceCompartmentRequest) HTTPRequest(method, path string, binaryRequestBody *common.OCIReadSeekCloser, extraHeaders map[string]string) (http.Request, error) { return common.MakeDefaultHTTPRequestWithTaggedStructAndExtraHeaders(method, path, request, extraHeaders) } // BinaryRequestBody implements the OCIRequest interface func (request ChangeInstanceCompartmentRequest) BinaryRequestBody() (*common.OCIReadSeekCloser, bool) { return nil, false } // RetryPolicy implements the OCIRetryableRequest interface. This retrieves the specified retry policy. func (request ChangeInstanceCompartmentRequest) RetryPolicy() *common.RetryPolicy { return request.RequestMetadata.RetryPolicy } // ChangeInstanceCompartmentResponse wrapper for the ChangeInstanceCompartment operation type ChangeInstanceCompartmentResponse struct { // The underlying http response RawResponse *http.Response // For optimistic concurrency control. See `if-match`. 
Etag *string `presentIn:"header" name:"etag"` // Unique Oracle-assigned identifier for the request. If you need to contact // Oracle about a particular request, please provide the request ID. OpcRequestId *string `presentIn:"header" name:"opc-request-id"` // The OCID (https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the work request. Use GetWorkRequest (https://docs.cloud.oracle.com/api/#/en/workrequests/20160918/WorkRequest/GetWorkRequest) // with this ID to track the status of the request.
return common.PointerString(response) } // HTTPResponse implements the OCIResponse interface func (response ChangeInstanceCompartmentResponse) HTTPResponse() *http.Response { return response.RawResponse }
OpcWorkRequestId *string `presentIn:"header" name:"opc-work-request-id"` } func (response ChangeInstanceCompartmentResponse) String() string {
download.py
import os import tarfile import email import re import nltk import urlextract import numpy as np import scipy.io as sio from sklearn.base import BaseEstimator, TransformerMixin from nltk.stem import PorterStemmer from html import unescape from email import parser from email.policy import default from six.moves import urllib from collections import Counter DOWNLOAD_ROOT = "http://spamassassin.apache.org/old/publiccorpus/" HAM_URL = DOWNLOAD_ROOT + "20030228_easy_ham.tar.bz2" SPAM_URL = DOWNLOAD_ROOT + "20030228_spam.tar.bz2" SPAM_PATH = os.path.join("datasets", "spam") def fetch_spam_data(spam_url=SPAM_URL, spam_path=SPAM_PATH): if not os.path.isdir(spam_path): os.makedirs(spam_path) for filename, url in (("ham.tar.bz2", HAM_URL), ("spam.tar.bz2", SPAM_URL)): path = os.path.join(spam_path, filename) if not os.path.isfile(path): urllib.request.urlretrieve(url, path) tar_bz2_file = tarfile.open(path) tar_bz2_file.extractall(path=SPAM_PATH) tar_bz2_file.close() # fetch_spam_data() HAM_DIR = os.path.join(SPAM_PATH, "easy_ham") SPAM_DIR = os.path.join(SPAM_PATH, "spam") ham_filenames = [name for name in sorted(os.listdir(HAM_DIR)) if len(name) > 20] spam_filenames = [name for name in sorted(os.listdir(SPAM_DIR)) if len(name) > 20] print(len(ham_filenames)) print(len(spam_filenames)) def load_email(is_spam, filename, spam_path=SPAM_PATH):
ham_emails = [load_email(is_spam=False, filename=name) for name in ham_filenames] spam_emails = [load_email(is_spam=True, filename=name) for name in spam_filenames] print(ham_emails[4].get_content().strip()) print(spam_emails[5].get_content().strip()) def get_email_structure(email): if isinstance(email, str): return email payload = email.get_payload() if isinstance(payload, list): return "multipart({})".format(", ".join([ get_email_structure(sub_email) for sub_email in payload ])) else: return email.get_content_type() def structures_counter(emails): structures = Counter() for email in emails: structure = get_email_structure(email) structures[structure] += 1 return structures print(structures_counter(ham_emails).most_common()) print('\n') print(structures_counter(spam_emails).most_common()) for header, value in spam_emails[0].items(): print(header, ":", value) def html_to_plain_text(html): text = re.sub('<head.*?>.*?</head>', '', html, flags=re.M | re.S | re.I) text = re.sub('<a\s.*?>', ' httpaddr ', text, flags=re.M | re.S | re.I) text = re.sub('<.*?>', '', text, flags=re.M | re.S) text = re.sub(r'(\s*\n)+', '\n', text, flags=re.M | re.S) return unescape(text) html_spam_emails = [email for email in spam_emails if get_email_structure(email) == "text/html"] sample_html_spam = html_spam_emails[7] print("\nSpam email html sample:\n") print(sample_html_spam.get_content().strip()[:1000], "...") print("\nEmail content: \n") print(html_to_plain_text(sample_html_spam.get_content())[:1000], "...") def email_to_text(email): html = None for part in email.walk(): ctype = part.get_content_type() if not ctype in ("text/plain", "text/html"): continue try: content = part.get_content() except: # in case of encoding issues content = str(part.get_payload()) if ctype == "text/plain": return content else: html = content if html: return html_to_plain_text(html) print(email_to_text(sample_html_spam)[:100], "...") try: stemmer = nltk.PorterStemmer() for word in ("Computations", "Computation", "Computing", "Computed", "Compute", "Compulsive"): print(word, "=>", stemmer.stem(word)) except ImportError: print("Error: stemming requires the NLTK module.") stemmer = None try: url_extractor = urlextract.URLExtract() print(url_extractor.find_urls("Will it detect github.com and https://youtu.be/7Pq-S557XQU?t=3m32s")) except ImportError: print("Error: replacing URLs requires the urlextract module.") url_extractor = None class EmailToWordCounterTransformer(BaseEstimator, TransformerMixin): def __init__(self, strip_headers=True, lower_case=True, remove_punctuation=True, replace_urls=True, replace_numbers=True, stemming=True): self.strip_headers = strip_headers self.lower_case = lower_case self.remove_punctuation = remove_punctuation self.replace_urls = replace_urls self.replace_numbers = replace_numbers self.stemming = stemming def fit(self, X, y=None): return self def transform(self, X, y=None): X_transformed = [] for email in X: text = email_to_text(email) or "" if self.lower_case: text = text.lower() text = re.sub("[$]+", " dollar ", text) text = re.sub("[^\s]+@[^\s]+", " emailaddr ", text) if self.replace_urls and url_extractor is not None: urls = list(set(url_extractor.find_urls(text))) urls.sort(key=lambda url: len(url), reverse=True) for url in urls: text = text.replace(url, " httpaddr ") if self.replace_numbers: text = re.sub(r'\d+(?:\.\d*(?:[eE]\d+))?', 'NUMBER', text) if self.remove_punctuation: text = re.sub(r'\W+', ' ', text, flags=re.M) special_chars = [ "<", "[", "^", ">", "+", "?", "!", "'", ".", ",", ":", "*", 
"%", "#", "_", "=" ] for char in special_chars: text = text.replace(str(char), "") word_counts = Counter(text.split()) if self.stemming and stemmer is not None: stemmed_word_counts = Counter() for word, count in word_counts.items(): stemmed_word = stemmer.stem(word) stemmed_word_counts[stemmed_word] += count word_counts = stemmed_word_counts X_transformed.append(word_counts) return np.array(X_transformed) vocab = EmailToWordCounterTransformer().fit_transform(spam_emails) vocab = sum(vocab, Counter()) list = vocab.most_common(1904) vocab = [] for (k, v) in list: vocab.append(k) vocab = sorted(vocab) # SAVE DICTIONARY i = 0 with open('../data/vocab2.txt', 'w') as f: for item in vocab: try: f.write("%s\t%s\n" % (i, item)) i += 1 except: print('error') samples = len(ham_filenames) + len(spam_filenames) vocabList = open('../data/vocab2.txt', "r").read() vocabList = vocabList.split("\n") vocabList_d = {} for ea in vocabList: if ea: [value, key] = ea.split("\t") vocabList_d[key] = value print(vocabList_d) print(email_to_text(spam_emails[0])) def process_email(email_contents): """ Preprocesses the body of an email and returns a list of indices of the words contained in the email. """ # a - Lower case email_contents = email_contents.lower() # b - remove html/xml tags email_contents = re.sub("<[^>]*>", " ", email_contents).split(" ") email_contents = filter(len, email_contents) email_contents = ' '.join(email_contents) # c - Handle URLS email_contents = re.sub("[http|https]://[^\s]*", "httpaddr", email_contents) # d - Handle Email Addresses email_contents = re.sub("[^\s]+@[^\s]+", "emailaddr", email_contents) # e - Handle numbers email_contents = re.sub("[0-9]+", "number", email_contents) # f - Handle $ sign email_contents = re.sub("[$]+", "dollar", email_contents) # Strip all special characters special_chars = [ "<", "[", "^", ">", "+", "?", "!", "'", ".", ",", ":", "*", "%", "#", "_", "=" ] for char in special_chars: email_contents = email_contents.replace(str(char), "") email_contents = email_contents.replace("\n", " ") # Stem the word ps = PorterStemmer() email_contents = [ps.stem(token) for token in email_contents.split(" ")] email_contents = " ".join(email_contents) return email_contents def find_word_indices(processed_email, vocabList_d): # Process the email and return word_indices word_indices = [] for char in processed_email.split(): if len(char) > 1 and char in vocabList_d: word_indices.append(int(vocabList_d[char])) return word_indices def email_features(word_indices, vocabList_d): """ Takes in a word_indices vector and produces a feature vector from the word indices. 
""" n = len(vocabList_d) features = np.zeros((n, 1)) for i in word_indices: features[i] = 1 return features def transform_email_to_features(email_contents, vocabList_d): # print(email_contents) processed_email = process_email(email_contents) word_indices = find_word_indices(processed_email, vocabList_d) features = email_features(word_indices, vocabList_d) return features # train X = [] Y = [] print(len(spam_emails)) print(len(ham_emails)) for i in range(400): sp = email_to_text(spam_emails[i]) if sp: a = transform_email_to_features(sp, vocabList_d) X.append(a.flatten()) Y.append(1) for i in range(2000): em = email_to_text(ham_emails[i]) if em: X.append(transform_email_to_features(em, vocabList_d).flatten()) Y.append(0) sio.savemat('../data/myTrain.mat', {'X': X, 'y': Y}) # test X = [] Y = [] for i in range(401, 500, 1): sp = email_to_text(spam_emails[i]) if sp: a = transform_email_to_features(sp, vocabList_d) X.append(a.flatten()) Y.append(1) for i in range(2001, 2500, 1): em = email_to_text(ham_emails[i]) if em: X.append(transform_email_to_features(em, vocabList_d).flatten()) Y.append(0) sio.savemat('../data/myTest.mat', {'Xtest': X, 'ytest': Y})
directory = "spam" if is_spam else "easy_ham" with open(os.path.join(spam_path, directory, filename), "rb") as f: return parser.BytesParser(policy=email.policy.default).parse(f)
NavHeader.js
import React, { Component } from 'react' import PropTypes from 'prop-types' import theme from 'lib/theme' import raf from 'raf' import { observer, inject } from 'mobx-react' import { CTAButton } from '../CTAButton' import { Button } from 'lib/antd' import { Logo } from '../Logo' import { SaasifyContext } from '../SaasifyContext' import Link from './Link' import styles from './styles.module.css' const isServer = typeof window === 'undefined' @inject('auth') @observer export class
extends Component { static propTypes = { auth: PropTypes.object.isRequired, fixed: PropTypes.bool } static defaultProps = { fixed: false } state = { attached: isServer || window.scrollY > 0, expanded: false } componentDidMount() { if (!isServer) { window.addEventListener('scroll', this._onScroll) this._onScrollAF() } } componentWillUnmount() { if (!isServer) { window.removeEventListener('scroll', this._onScroll) } if (this._scrollRaf) { raf.cancel(this._scrollRaf) this._scrollRaf = null } } handleToggleExpanded = () => { this.setState({ expanded: !this.state.expanded }) } render() { const { auth, fixed } = this.props const { attached, expanded } = this.state return ( <SaasifyContext.Consumer> {(config) => ( <header className={theme( styles, 'container', attached || fixed ? theme(styles, 'attached') : null, expanded ? theme(styles, 'expanded') : null )} style={{ background: attached || fixed || expanded ? theme['@section-fg-color'] : 'transparent', paddingBottom: expanded ? 24 : undefined }} > <div className={theme(styles, 'content')}> <div className={theme(styles, 'primary')}> <Link to='/'> <span className={theme(styles, 'logo-image')}> <Logo className={theme(styles, 'logo')} /> <Logo className={theme( styles, 'logo', theme(styles, 'logo--light') )} light /> </span> {config.logo && config?.header?.displayName !== false && config?.deployment?.saas?.sections?.navHeader ?.displayName !== false && ( <span className={theme(styles, 'logo-text')}> {config?.deployment?.saas?.headerName ? config.deployment.saas.headerName : config?.name} </span> )} </Link> <div className={theme(styles, 'burger')}> <Button type='secondary' inline onClick={this.handleToggleExpanded} icon='menu' /> </div> </div> <div className={theme(styles, 'content-body')}> <div className={theme(styles, 'links')}> {config.header.links.map((link) => { if (typeof link === 'function') { link = link({ config, auth, fixed, attached }) if (!link) return null } return <Link key={link.to || link.href} {...link} /> })} </div> {auth.isAuthenticated ? ( <div className={theme(styles, 'actions')}> {config.header?.login !== false && ( <Link to='/logout' className={theme(styles, 'login')}> <CTAButton type='secondary' inline> Log out </CTAButton> </Link> )} {config.header?.dashboard !== false && ( <Link to='/dashboard'> <CTAButton type='primary' inline> Dashboard </CTAButton> </Link> )} </div> ) : ( <div className={theme(styles, 'actions')}> {config.header?.login !== false && ( <Link to='/login' className={theme(styles, 'login')}> <CTAButton type='secondary' inline> Log in </CTAButton> </Link> )} {config.header?.signup !== false && (config.ctaOnClick ? ( <CTAButton type='primary' inline onClick={config.ctaOnClick} > {config.ctaTextInline || 'Get started'} </CTAButton> ) : ( <Link to='/signup'> <CTAButton type='primary' inline> {config.ctaTextInline || 'Get started'} </CTAButton> </Link> ))} </div> )} </div> </div> </header> )} </SaasifyContext.Consumer> ) } _onScroll = () => { if (!this._scrollRaf) { this._scrollRaf = raf(this._onScrollAF) } } _onScrollAF = () => { this._scrollRaf = null this.setState({ attached: isServer || window.scrollY > 0 }) } }
NavHeader
prescribed_test.py
# import sharpy.utils.settings as settings # import sharpy.utils.exceptions as exceptions # import sharpy.utils.cout_utils as cout import numpy as np import importlib import unittest import os import sharpy.utils.cout_utils as cout class TestCoupledPrescribed(unittest.TestCase):
""" """ @classmethod def setUpClass(cls): # run all the cases generators # case = 'smith_2deg_prescribed' # mod = importlib.import_module('tests.coupled.prescribed.' + case + '.generate_' + case) # case = 'rotating_wing' # mod1 = importlib.import_module('tests.coupled.prescribed.' + case + '.generate_' + case) pass @classmethod def tearDownClass(cls): pass # def test_smith2deg_prescribed(self): # import sharpy.sharpy_main # solver_path = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + # '/smith_2deg_prescribed/smith_2deg_prescribed.sharpy') # sharpy.sharpy_main.main(['', solver_path]) # # # read output and compare # output_path = os.path.dirname(solver_path) + 'output/aero/' # forces_data = np.genfromtxt(output_path + 'smith_2deg_prescribed_aeroforces.csv') # self.assertAlmostEqual(forces_data[-1, 3], -3.728e1, 1) def test_rotating_wing(self): # import sharpy.sharpy_main # solver_path = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + # '/rotating_wing/rotating_wing.sharpy') # sharpy.sharpy_main.main(['', solver_path]) cout.cout_wrap('No tests for prescribed dynamic configurations (yet)!', 1) pass
main.rs
// Copyright 2018 Grove Enterprises LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::fs::File; use std::io::prelude::*; use std::io::{Error, ErrorKind}; use std::time::Duration; use std::str; use std::thread; extern crate clap; extern crate datafusion; extern crate etcd; extern crate futures; extern crate futures_timer; extern crate hyper; extern crate serde; extern crate serde_json; extern crate tokio_core; extern crate uuid; use clap::{Arg, App};
use etcd::Client; use etcd::kv; use datafusion::exec::*; use futures::future::{ok, loop_fn, Future, Loop}; use futures::Stream; use hyper::{Method, StatusCode}; use hyper::client::HttpConnector; use hyper::header::{ContentLength}; use hyper::server::{Http, Request, Response, Service}; use tokio_core::reactor::Core; use uuid::Uuid; const VERSION: &'static str = env!("CARGO_PKG_VERSION"); fn main() { let matches = App::new("DataFusion Worker Node") .version(VERSION) .arg(Arg::with_name("ETCD") .help("etcd endpoints") .long("etcd") .value_name("URL") .required(true) .takes_value(true)) .arg(Arg::with_name("BIND") .long("bind") .help("IP address and port to bind to") .default_value("0.0.0.0:8080") .takes_value(true)) .arg(Arg::with_name("DATADIR") .long("data_dir") .help("Location of data files") .required(true) .takes_value(true)) .arg(Arg::with_name("WEBROOT") .long("webroot") .help("Location of HTML files") .default_value("./src/bin/worker/") .takes_value(true)) .get_matches(); let uuid = Uuid::new_v5(&uuid::NAMESPACE_DNS, "datafusion"); let bind_addr_str = matches.value_of("BIND").unwrap().to_string(); let bind_addr = bind_addr_str.parse().unwrap(); let www_root = matches.value_of("WEBROOT").unwrap().to_string(); let data_dir = matches.value_of("DATADIR").unwrap().to_string(); let etcd_endpoints = matches.value_of("ETCD").unwrap(); // create futures event loop let mut core = Core::new().unwrap(); let handle = core.handle(); println!("Worker {} listening on {} and serving content from {}", uuid, bind_addr, www_root); thread::spawn(move || { let server = Http::new() .bind(&bind_addr, move|| Ok(Worker { www_root: www_root.clone(), data_dir: data_dir.clone() })).unwrap(); server.run().unwrap(); }); // start a loop to register with etcd every 5 seconds with a ttl of 10 seconds match Client::new(&handle, &[etcd_endpoints], None) { Ok(etcd) => { let heartbeat_loop = loop_fn(Membership::new(etcd, uuid, bind_addr_str), |client| { client.register() .and_then(|(client, done)| { if done { Ok(Loop::Break(client)) } else { Ok(Loop::Continue(client)) } }) }); match core.run(heartbeat_loop) { Ok(_) => println!("Heartbeat loop finished"), Err(e) => println!("Heartbeat loop failed: {:?}", e), } } Err(e) => println!("Failed to connect to etcd: {:?}", e) } } /// Worker struct to store state struct Worker { www_root: String, data_dir: String } impl Worker { fn load_static_file(&self, filename: &str) -> String { let mut f = File::open(&filename).expect("file not found"); let mut contents = String::new(); f.read_to_string(&mut contents) .expect("something went wrong reading the file"); contents } } impl Service for Worker { type Request = Request; type Response = Response; type Error = hyper::Error; type Future = Box<Future<Item=Self::Response, Error=Self::Error>>; fn call(&self, req: Request) -> Self::Future { match req.method() { &Method::Get => { // all UI calls are GET //println!("path={:?}", req.path()); // is this a known file and/or valid path? 
let filename = match req.path() { "/" => Some("/index.html"), "/css/main.css" => Some("/css/main.css"), _ => None }; // server page, or a 404 not found error match filename { Some(f) => { let fqpath = format!("{}/{}", self.www_root, f); let content = self.load_static_file(&fqpath); Box::new(futures::future::ok( Response::new() .with_header(ContentLength(content.len() as u64)) .with_body(content))) } _ => { let fqpath = format!("{}/{}", self.www_root, "/404.html"); let content = self.load_static_file(&fqpath); Box::new(futures::future::ok( Response::new() .with_status(StatusCode::NotFound) .with_header(ContentLength(content.len() as u64)) .with_body(content))) } } } &Method::Post => { // all REST calls are POST let data_dir = self.data_dir.clone(); Box::new(req.body().concat2() .and_then(move |body| { let json = str::from_utf8(&body).unwrap(); //println!("{}", json); println!("Received request"); //println!("Request: {}", json_str); // this is a crude POC that demonstrates the worker receiving a plan, executing it, // and returning a result set //TODO: should stream results to client, not build a result set in memory //TODO: decide on a more appropriate format to return than csv //TODO: should not create a new execution context each time ok(match serde_json::from_str(&json) { Ok(plan) => { //println!("Plan: {:?}", plan); // create execution context let mut ctx = ExecutionContext::local(data_dir.clone()); match plan { PhysicalPlan::Interactive { plan } => { match ctx.create_execution_plan(data_dir.clone(), &plan) { Ok(exec) => { let it = exec.scan(&ctx); let mut result_set = "".to_string(); it.for_each(|t| { match t { Ok(row) => { result_set += &row.to_string() }, Err(e) => { result_set += &format!("ERROR: {:?}", e) } } result_set += "\n"; }); Response::new() .with_status(StatusCode::Ok) .with_header(ContentLength(result_set.len() as u64)) .with_body(result_set) }, Err(e) => error_response(format!("Failed to create execution plan: {:?}", e)) } }, PhysicalPlan::Write { plan, filename } => { println!("Writing dataframe to {}", filename); let df = DF { plan: plan }; match ctx.write(Box::new(df), &filename) { Ok(count) => { println!("Wrote {} rows to {}", count, filename); Response::new().with_status(StatusCode::Ok) }, Err(e) => error_response(format!("Failed to create execution plan: {:?}", e)) } } //_ => error_response(format!("Unsupported execution plan")) } }, Err(e) => error_response(format!("Failed to parse execution plan: {:?}", e)) }) })) } _ => { Box::new(futures::future::ok( Response::new().with_status(StatusCode::NotFound) )) } } } } fn error_response(msg: String) -> Response { Response::new() .with_status(StatusCode::BadRequest) .with_header(ContentLength(msg.len() as u64)) .with_body(msg) } struct Membership { etcd: Client<HttpConnector>, uuid: Uuid, bind_address: String } impl Membership { fn new(etcd: Client<HttpConnector>, uuid: Uuid, bind_address: String) -> Self { Membership { etcd, uuid, bind_address } } fn register(self) -> Box<Future<Item=(Self,bool),Error=Error>> { let key = format!("/datafusion/workers/{}", self.uuid); Box::new(kv::set(&self.etcd, &key, &self.bind_address, Some(10)) .and_then(|_etcd_response| { println!("Registered with etcd: {} -> {}", self.uuid, self.bind_address); thread::sleep(Duration::from_millis(5000)); ok((self,false)) }) .map_err(|_| Error::from(ErrorKind::NotFound))) } }
box_ref.rs
use std::error::Error; use std::marker::PhantomData; use super::null_ptr_error; use super::{FromForeign, InputType}; pub struct
<T>(PhantomData<T>); impl<T> InputType for BoxRefMarshaler<T> { type Foreign = *mut T; type ForeignTraitObject = (); } // impl<'a, T> FromForeign<*mut Box<T>, &'a Box<T>> for BoxRefMarshaler<T> { // type Error = Box<dyn Error>; // #[inline(always)] // unsafe fn from_foreign(foreign: *mut Box<T>) -> Result<&'a Box<T>, Self::Error> { // log::debug!( // "<BoxMarshaler<{ty}> as FromForeign<*mut Box<T>, &'a Box<T>>>::from_foreign({:?})", // foreign, // ty = std::any::type_name::<T>() // ); // if foreign.is_null() { // return Err(null_ptr_error()); // } // Ok(unsafe { &*foreign as &'a Box<T> }) // } // } impl<'a, T> FromForeign<*mut T, &'a T> for BoxRefMarshaler<T> { type Error = Box<dyn Error>; #[inline(always)] unsafe fn from_foreign(foreign: *mut T) -> Result<&'a T, Self::Error> { log::debug!( "<BoxMarshaler<{ty}> as FromForeign<*mut Box<T>, &'a mut Box<T>>>::from_foreign({:?})", foreign, ty = std::any::type_name::<T>() ); if foreign.is_null() { return Err(null_ptr_error()); } // let mut boxed = unsafe { Box::from_raw(foreign as *mut _ as *mut _) }; // let ptr = &mut boxed as *mut _; // std::mem::forget(boxed); // // let ptr = unsafe { std::mem::transmute::<*mut T, *mut Box<T>>(foreign) }; Ok(&*foreign) } }
BoxRefMarshaler
index.tsx
import {connect} from "@@/plugin-dva/exports"; import React from "react"; import {Card} from "antd"; import TagTypeItem from "@/pages/welcome/parts/TagTypeCard/components"; import style from './style.less';
import {ConnectState} from "@/models/connect"; import {HomeModelStateType} from "@/pages/welcome/model"; import {TagTypeCount} from "@/services/dashboard"; export interface TagTypeCardPropsType { home: HomeModelStateType } const TagTypeCard = ({home}: TagTypeCardPropsType) => { return ( <Card title="标签类别"> { home.tagTypeCount && home.tagTypeCount.map((item: TagTypeCount) => ( <div className={style.item} key={item.name}> <TagTypeItem name={item.name} value={item.total}/> </div> )) } </Card> ); }; export default connect(({home}: ConnectState) => ({home}))(TagTypeCard);
look_up_tables.py
#! /usr/bin/env python3 # # Copyright 2018 California Institute of Technology # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # ISOFIT: Imaging Spectrometer Optimal FITting # Author: David R Thompson, [email protected] # import os import numpy as np import logging import ray from collections import OrderedDict import subprocess import time import atexit from isofit.core import common from isofit.configs import Config from isofit.configs.sections.radiative_transfer_config import RadiativeTransferEngineConfig from isofit.configs.sections.statevector_config import StateVectorElementConfig from isofit.configs.sections.implementation_config import ImplementationConfig ### Functions ### @ray.remote def spawn_rt(cmd, local_dir=None): """Run a CLI command.""" print(cmd) # Add a very slight timing offset to prevent all subprocesses # starting simultaneously time.sleep(float(np.random.random(1))*2) subprocess.call(cmd, shell=True, cwd=local_dir) ### Classes ### class FileExistsError(Exception): """FileExistsError with a message.""" def __init__(self, message): super(FileExistsError, self).__init__(message) class TabularRT: """A model of photon transport including the atmosphere.""" def __init__(self, engine_config: RadiativeTransferEngineConfig, full_config: Config): self.implementation_config: ImplementationConfig = full_config.implementation self.wl, self.fwhm = common.load_wavelen(full_config.forward_model.instrument.wavelength_file) if engine_config.wavelength_range is not None: valid_wl = np.logical_and(self.wl >= engine_config.wavelength_range[0], self.wl <= engine_config.wavelength_range[1]) self.wl = self.wl[valid_wl] self.fwhm = self.fwhm[valid_wl] self.n_chan = len(self.wl) self.auto_rebuild = full_config.implementation.rte_auto_rebuild self.configure_and_exit = full_config.implementation.rte_configure_and_exit # We use a sorted dictionary here so that filenames for lookup # table (LUT) grid points are always constructed the same way, with # consistent dimesion ordering). Every state vector element has # a lookup table dimension, but some lookup table dimensions # (like geometry parameters) may not be in the state vector. # TODO: enforce a requirement that makes all SV elements be inside the LUT full_lut_grid = full_config.forward_model.radiative_transfer.lut_grid # selectively get lut components that are in this particular RTE self.lut_grid_config = OrderedDict() if engine_config.lut_names is not None: lut_names = engine_config.lut_names else: lut_names = full_config.forward_model.radiative_transfer.lut_grid.keys() for key, value in full_lut_grid.items(): if key in lut_names: self.lut_grid_config[key] = value # selectively get statevector components that are in this particular RTE full_sv_names = full_config.forward_model.radiative_transfer.statevector.get_element_names() self.statevector_names = full_sv_names self.lut_dir = engine_config.lut_path self.n_point = len(self.lut_grid_config) self.n_state = len(self.statevector_names) self.luts = {} # Retrieved variables. 
We establish scaling, bounds, and # initial guesses for each state vector element. The state # vector elements are all free parameters in the RT lookup table, # and they all have associated dimensions in the LUT grid. self.bounds, self.scale, self.init = [], [], [] self.prior_mean, self.prior_sigma = [], [] for key in self.statevector_names: element: StateVectorElementConfig = full_config.forward_model.radiative_transfer.statevector.get_single_element_by_name( key) self.bounds.append(element.bounds) self.scale.append(element.scale) self.init.append(element.init) self.prior_sigma.append(element.prior_sigma) self.prior_mean.append(element.prior_mean) self.bounds = np.array(self.bounds) self.scale = np.array(self.scale) self.init = np.array(self.init) self.prior_mean = np.array(self.prior_mean) self.prior_sigma = np.array(self.prior_sigma) self.lut_dims = [] self.lut_grids = [] self.lut_names = [] self.lut_interp_types = [] for key, grid_values in self.lut_grid_config.items(): # do some quick checks on the values if len(grid_values) == 1: err = 'Only 1 value in LUT grid {}. ' +\ '1-d LUT grids cannot be interpreted.'.format(key) raise ValueError(err) if grid_values != sorted(grid_values): logging.error('Lookup table grid needs ascending order') raise ValueError('Lookup table grid needs ascending order') # Store the values self.lut_grids.append(grid_values) self.lut_dims.append(len(grid_values)) self.lut_names.append(key) # Store in an indication of the type of value each key is # (normal - n, degree - d, radian - r) if key in self.angular_lut_keys_radians: self.lut_interp_types.append('r') elif key in self.angular_lut_keys_degrees: self.lut_interp_types.append('d') else: self.lut_interp_types.append('n') # Cast as array for faster reference later self.lut_interp_types = np.array(self.lut_interp_types) # "points" contains all combinations of grid points # We will have one filename prefix per point self.points = common.combos(self.lut_grids) self.files = self.get_lut_filenames() def build_lut(self, rebuild=False): """Each LUT is associated with a source directory. We build a lookup table by: (1) defining the LUT dimensions, state vector names, and the
          (2) running the radiative transfer solver if needed, with each
              run defining a different point in the LUT; and
          (3) loading the LUTs, one per key atmospheric coefficient vector,
              into memory as VectorInterpolator objects."""

        # Build the list of radiative transfer run commands. This
        # rebuild_cmd() function will be overridden by the child class to
        # perform setup activities unique to each RTM.
        rebuild_cmds = []
        for point, fn in zip(self.points, self.files):
            try:
                cmd = self.rebuild_cmd(point, fn)
                rebuild_cmds.append(cmd)
            except FileExistsError:
                pass

        if self.configure_and_exit:
            raise SystemExit  # sys.exit(0)
        elif len(rebuild_cmds) > 0 and self.auto_rebuild:
            logging.info("Rebuilding radiative transfer lookup table")

            # Check that the LUT directory is there; create it if not
            if os.path.isdir(self.lut_dir) is False:
                os.mkdir(self.lut_dir)

            # Make the LUT calls (in parallel if specified)
            results = ray.get([spawn_rt.remote(rebuild_cmd, self.lut_dir)
                               for rebuild_cmd in rebuild_cmds])

    def get_lut_filenames(self):
        files = []
        for point in self.points:
            outf = '_'.join(['%s-%6.4f' % (n, x)
                             for n, x in zip(self.lut_names, point)])
            files.append(outf)
        return files

    def summarize(self, x_RT, geom):
        """Summary of state vector."""

        if len(x_RT) < 1:
            return ''
        return 'Atmosphere: ' + ' '.join(['%s: %5.3f' % (si, xi)
                                          for si, xi in zip(self.statevector_names, x_RT)])
grid of values;
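For context, a minimal sketch of how the LUT point combinations and file name prefixes above are produced. It assumes common.combos behaves like a Cartesian product over the grid value lists (the real ISOFIT helper may differ) and uses made-up grid values purely for illustration:

from collections import OrderedDict
from itertools import product

# Hypothetical LUT grid standing in for lut_grid_config (values ascending).
lut_grid_config = OrderedDict([
    ('H2OSTR', [1.0, 2.0, 3.0]),
    ('AOT550', [0.05, 0.2]),
])

lut_names = list(lut_grid_config.keys())
lut_grids = list(lut_grid_config.values())

# Assumed equivalent of common.combos: every combination of grid points.
points = list(product(*lut_grids))

# One file name prefix per point, mirroring get_lut_filenames().
files = ['_'.join(['%s-%6.4f' % (n, x) for n, x in zip(lut_names, point)])
         for point in points]

print(files[0])  # 'H2OSTR-1.0000_AOT550-0.0500'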
0042_job_receive_emails.py
# -*- coding: utf-8 -*-
# Generated by Django 1.11.25 on 2019-10-29 17:24
from __future__ import unicode_literals

from django.db import migrations, models


class
(migrations.Migration):

    dependencies = [
        ("core", "0041_jobapplication_output_sent"),
    ]

    operations = [
        migrations.AddField(
            model_name="job",
            name="receive_emails",
            field=models.BooleanField(default=True, verbose_name="Enviar emails?"),
        ),
    ]
Migration
lib.rs
#![cfg_attr(not(feature = "std"), no_std)] #![allow(clippy::unused_unit)] use sp_std::vec::Vec; use ethereum_types::BigEndianHash; use frame_support::{ dispatch::{DispatchError, DispatchResult}, pallet_prelude::*, }; use hex_literal::hex; use module_evm::{ExitReason, ExitSucceed}; use primitive_types::H256; use sp_core::{H160, U256}; use sp_runtime::SaturatedConversion; use support::{EVMBridge as EVMBridgeTrait, ExecutionMode, InvokeContext, EVM}; type AccountIdOf<T> = <T as frame_system::Config>::AccountId; type BalanceOf<T> = <<T as Config>::EVM as EVM<AccountIdOf<T>>>::Balance; mod mock; mod tests; pub use module::*; #[frame_support::pallet] pub mod module { use super::*; /// EvmBridge module trait #[pallet::config] pub trait Config: frame_system::Config { type EVM: EVM<AccountIdOf<Self>>; } #[pallet::error] pub enum Error<T> { ExecutionFail, ExecutionRevert, ExecutionFatal, ExecutionError, InvalidReturnValue, } #[pallet::pallet] pub struct Pallet<T>(PhantomData<T>); #[pallet::hooks] impl<T: Config> Hooks<T::BlockNumber> for Pallet<T> {} #[pallet::call] impl<T: Config> Pallet<T> {} } impl<T: Config> EVMBridgeTrait<AccountIdOf<T>, BalanceOf<T>> for Pallet<T> { fn name(context: InvokeContext) -> Result<Vec<u8>, DispatchError> { // ERC20.name method hash let input = hex!["06fdde03"].to_vec(); let info = T::EVM::execute(context, input, Default::default(), 2_100_000, 0, ExecutionMode::View)?; Self::handle_exit_reason(info.exit_reason)?; Self::decode_string(info.output.as_slice().to_vec()) } fn symbol(context: InvokeContext) -> Result<Vec<u8>, DispatchError> { // ERC20.symbol method hash let input = hex!["95d89b41"].to_vec(); let info = T::EVM::execute(context, input, Default::default(), 2_100_000, 0, ExecutionMode::View)?; Self::handle_exit_reason(info.exit_reason)?; Self::decode_string(info.output.as_slice().to_vec()) } fn
(context: InvokeContext) -> Result<u8, DispatchError> { // ERC20.decimals method hash let input = hex!["313ce567"].to_vec(); let info = T::EVM::execute(context, input, Default::default(), 2_100_000, 0, ExecutionMode::View)?; Self::handle_exit_reason(info.exit_reason)?; ensure!(info.output.len() == 32, Error::<T>::InvalidReturnValue); let value = U256::from(info.output.as_slice()).saturated_into::<u8>(); Ok(value) } fn total_supply(context: InvokeContext) -> Result<BalanceOf<T>, DispatchError> { // ERC20.totalSupply method hash let input = hex!("18160ddd").to_vec(); let info = T::EVM::execute(context, input, Default::default(), 2_100_000, 0, ExecutionMode::View)?; Self::handle_exit_reason(info.exit_reason)?; let value = U256::from(info.output.as_slice()).saturated_into::<u128>(); Ok(value.saturated_into::<BalanceOf<T>>()) } fn balance_of(context: InvokeContext, address: H160) -> Result<BalanceOf<T>, DispatchError> { // ERC20.balanceOf method hash let mut input = hex!("70a08231").to_vec(); // append address input.extend_from_slice(H256::from(address).as_bytes()); let info = T::EVM::execute(context, input, Default::default(), 2_100_000, 0, ExecutionMode::View)?; Self::handle_exit_reason(info.exit_reason)?; Ok(U256::from(info.output.as_slice()) .saturated_into::<u128>() .saturated_into::<BalanceOf<T>>()) } fn transfer(context: InvokeContext, to: H160, value: BalanceOf<T>) -> DispatchResult { // ERC20.transfer method hash let mut input = hex!("a9059cbb").to_vec(); // append receiver address input.extend_from_slice(H256::from(to).as_bytes()); // append amount to be transferred input.extend_from_slice(H256::from_uint(&U256::from(value.saturated_into::<u128>())).as_bytes()); let storage_limit = if context.origin == Default::default() { 0 } else { 1_000 }; let info = T::EVM::execute( context, input, Default::default(), 2_100_000, storage_limit, ExecutionMode::Execute, )?; Self::handle_exit_reason(info.exit_reason)?; // return value is true. let mut bytes = [0u8; 32]; U256::from(1).to_big_endian(&mut bytes); // Check return value to make sure not calling on empty contracts. ensure!( !info.output.is_empty() && info.output == bytes, Error::<T>::InvalidReturnValue ); Ok(()) } fn get_origin() -> Option<AccountIdOf<T>> { T::EVM::get_origin() } fn set_origin(origin: AccountIdOf<T>) { T::EVM::set_origin(origin); } } impl<T: Config> Pallet<T> { fn handle_exit_reason(exit_reason: ExitReason) -> Result<(), DispatchError> { match exit_reason { ExitReason::Succeed(ExitSucceed::Returned) => Ok(()), ExitReason::Succeed(ExitSucceed::Stopped) => Ok(()), ExitReason::Succeed(_) => Err(Error::<T>::ExecutionFail.into()), ExitReason::Revert(_) => Err(Error::<T>::ExecutionRevert.into()), ExitReason::Fatal(_) => Err(Error::<T>::ExecutionFatal.into()), ExitReason::Error(_) => Err(Error::<T>::ExecutionError.into()), } } fn decode_string(output: Vec<u8>) -> Result<Vec<u8>, DispatchError> { // output is 32-byte aligned and consists of 3 parts: // - part 1: 32 byte, the offset of its description is passed in the position of // the corresponding parameter or return value. // - part 2: 32 byte, string length // - part 3: string data ensure!( output.len() >= 64 && output.len() % 32 == 0, Error::<T>::InvalidReturnValue ); let offset = U256::from_big_endian(&output[0..32]); let length = U256::from_big_endian(&output[offset.as_usize()..offset.as_usize() + 32]); ensure!( // output is 32-byte aligned. ensure total_length >= offset + string length + string data length. 
output.len() >= offset.as_usize() + 32 + length.as_usize(), Error::<T>::InvalidReturnValue ); let mut data = Vec::new(); data.extend_from_slice(&output[offset.as_usize() + 32..offset.as_usize() + 32 + length.as_usize()]); Ok(data.to_vec()) } }
decimals
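The decode_string helper in the suffix above unpacks the standard ABI encoding of a dynamic string return value: a 32-byte offset word, a 32-byte length word at that offset, then the raw bytes. A small sketch of that layout, written in Python for brevity and fed a hand-built payload rather than real chain output:

def decode_abi_string(output: bytes) -> bytes:
    """Parse a 32-byte-aligned ABI-encoded string return value."""
    assert len(output) >= 64 and len(output) % 32 == 0
    offset = int.from_bytes(output[0:32], 'big')                # where the length word starts
    length = int.from_bytes(output[offset:offset + 32], 'big')  # string length in bytes
    assert len(output) >= offset + 32 + length
    return output[offset + 32:offset + 32 + length]

# Hand-built example: offset 32, length 3, data b'ACA' padded to 32 bytes.
payload = ((32).to_bytes(32, 'big') +
           (3).to_bytes(32, 'big') +
           b'ACA'.ljust(32, b'\x00'))
print(decode_abi_string(payload))  # b'ACA'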
detectionoutput_ext.py
""" Copyright (C) 2018-2020 Intel Corporation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from math import log import numpy as np from extensions.ops.detectionoutput_onnx import ExperimentalDetectronDetectionOutput from mo.front.extractor import FrontExtractorOp from mo.front.onnx.extractors.utils import onnx_attr class ExperimentalDetectronDetectionOutputFrontExtractor(FrontExtractorOp): op = 'ExperimentalDetectronDetectionOutput' enabled = True @classmethod def extract(cls, node): attrs = dict(class_agnostic_box_regression=onnx_attr(node, 'class_agnostic_box_regression', 'i', 0), max_detections_per_image=onnx_attr(node, 'max_detections_per_image', 'i', 100), nms_threshold=onnx_attr(node, 'nms_threshold', 'f', 0.5), num_classes=onnx_attr(node, 'num_classes', 'i', 81), post_nms_count=onnx_attr(node, 'post_nms_count', 'i', 2000), score_threshold=onnx_attr(node, 'score_threshold', 'f', 0.05),
        ExperimentalDetectronDetectionOutput.update_node_stat(node, attrs)
        return cls.enabled
                     max_delta_log_wh=onnx_attr(node, 'max_delta_log_wh', 'f', log(1000. / 16.)),
                     deltas_weights=np.array(onnx_attr(node, 'deltas_weights', 'floats', [10., 10., 5., 5.]),
                                             dtype=np.float32)
                     )
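As a quick illustration of the defaults above: if the ONNX node carries none of these attributes, and assuming onnx_attr simply returns the supplied default in that case, the extracted attrs would come out roughly as follows:

from math import log
import numpy as np

# Values produced purely from the defaults in the extractor above.
attrs = dict(
    class_agnostic_box_regression=0,
    max_detections_per_image=100,
    nms_threshold=0.5,
    num_classes=81,
    post_nms_count=2000,
    score_threshold=0.05,
    max_delta_log_wh=log(1000. / 16.),  # natural log, roughly 4.135
    deltas_weights=np.array([10., 10., 5., 5.], dtype=np.float32),
)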
client.py
# coding=utf-8
import os
import pickle

import requests

from .errors import CloudFuncError


class CloudFuncClient:

    def
(self, serve_address: str = None):
        if serve_address is None:
            # Use .get() so a missing address trips the assert below
            # rather than raising an opaque KeyError.
            serve_address = os.environ.get('CLOUDFUNC_SERVE_ADDRESS')
        assert serve_address is not None, 'cloudfunc-serve address is not given'
        self.serve_address = serve_address
        self.session = requests.Session()

    def run(self, cloud_func_name: str, *args, **kwargs):
        data = pickle.dumps((args, kwargs))
        try:
            resp = self.session.post(
                f'http://{self.serve_address}/cloud-funcs/run',
                params={'name': cloud_func_name},
                data=data,
                headers={'Content-Type': 'application/octet-stream'}
            )
        except Exception as e:
            raise CloudFuncError(e)
        else:
            try:
                resp.raise_for_status()
            except requests.HTTPError:
                raise CloudFuncError(resp.text)
            return pickle.loads(resp.content)
__init__
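A short usage sketch for the client defined above; the address and function name are hypothetical, and a cloudfunc-serve instance is assumed to be reachable there:

# Hypothetical usage; requires a running cloudfunc-serve at the given address.
client = CloudFuncClient('localhost:8080')

# Arguments are pickled, POSTed to /cloud-funcs/run, and the unpickled
# response body is returned (CloudFuncError is raised on any failure).
result = client.run('resize_image', 'path/to/img.png', width=256)
print(result)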
license_output_inner.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import

from datetime import date, datetime  # noqa: F401
from typing import List, Dict  # noqa: F401

from fuji_server.models.base_model_ import Model
from fuji_server import util


class LicenseOutputInner(Model):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """
"""LicenseOutputInner - a model defined in Swagger :param license: The license of this LicenseOutputInner. # noqa: E501 :type license: str :param osi_approved: The osi_approved of this LicenseOutputInner. # noqa: E501 :type osi_approved: bool :param details_url: The details_url of this LicenseOutputInner. # noqa: E501 :type details_url: str """ self.swagger_types = {'license': str, 'osi_approved': bool, 'details_url': str} self.attribute_map = {'license': 'license', 'osi_approved': 'OSI_approved', 'details_url': 'details_url'} self._license = license self._osi_approved = osi_approved self._details_url = details_url @classmethod def from_dict(cls, dikt) -> 'LicenseOutputInner': """Returns the dict as a model :param dikt: A dict. :type: dict :return: The License_output_inner of this LicenseOutputInner. # noqa: E501 :rtype: LicenseOutputInner """ return util.deserialize_model(dikt, cls) @property def license(self) -> str: """Gets the license of this LicenseOutputInner. :return: The license of this LicenseOutputInner. :rtype: str """ return self._license @license.setter def license(self, license: str): """Sets the license of this LicenseOutputInner. :param license: The license of this LicenseOutputInner. :type license: str """ self._license = license @property def osi_approved(self) -> bool: """Gets the osi_approved of this LicenseOutputInner. :return: The osi_approved of this LicenseOutputInner. :rtype: bool """ return self._osi_approved @osi_approved.setter def osi_approved(self, osi_approved: bool): """Sets the osi_approved of this LicenseOutputInner. :param osi_approved: The osi_approved of this LicenseOutputInner. :type osi_approved: bool """ self._osi_approved = osi_approved @property def details_url(self) -> str: """Gets the details_url of this LicenseOutputInner. :return: The details_url of this LicenseOutputInner. :rtype: str """ return self._details_url @details_url.setter def details_url(self, details_url: str): """Sets the details_url of this LicenseOutputInner. :param details_url: The details_url of this LicenseOutputInner. :type details_url: str """ self._details_url = details_url
def __init__(self, license: str = None, osi_approved: bool = False, details_url: str = None): # noqa: E501
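A small sketch of building the model above from a dict; it assumes util.deserialize_model honours attribute_map, so the OSI flag travels under the 'OSI_approved' key rather than 'osi_approved':

# Hypothetical payload, keyed per attribute_map.
payload = {
    'license': 'Apache-2.0',
    'OSI_approved': True,
    'details_url': 'https://spdx.org/licenses/Apache-2.0.html',
}

model = LicenseOutputInner.from_dict(payload)
print(model.license, model.osi_approved, model.details_url)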
create.rs
use std::convert::TryFrom;
use std::error::Error;
use std::sync::Arc;

use diesel::insert_into;

use crate::common::{AppError, MainPooledConnection};
use crate::diesel::RunQueryDsl;
use crate::modules::restaurant::business::CreateRestaurantStorage;
use crate::modules::restaurant::model::schema::restaurants;
use crate::modules::restaurant::model::schema::RestaurantEntity;
use crate::modules::restaurant::model::{RestaurantCreate, ENTITY_NAME};
use crate::modules::restaurant::storage::SqlStorage;

impl CreateRestaurantStorage for SqlStorage {
    fn create(&self, data: &RestaurantCreate) -> Result<i32, Box<dyn Error + Send + Sync>>
}

pub fn new_restaurant_storage(
    connection: MainPooledConnection,
) -> Arc<dyn CreateRestaurantStorage> {
    return Arc::new(SqlStorage::new(connection));
}
{
        let entity = RestaurantEntity::try_from(data)?;
        let result = insert_into(restaurants::dsl::restaurants)
            .values(&entity)
            .execute(&self.connection);
        let result = result.map_err(|e| Box::new(e))?;
        if result == 0 {
            return Err(Box::new(AppError::new_cannot_create_entity(
                ENTITY_NAME,
                None,
            )));
        }
        return Ok(entity.id.unwrap_or_default());
    }