code
stringlengths
26
870k
docstring
stringlengths
1
65.6k
func_name
stringlengths
1
194
language
stringclasses
1 value
repo
stringlengths
8
68
path
stringlengths
5
182
url
stringlengths
46
251
license
stringclasses
4 values
def MessageToFlatDict( msg: message.Message, transform: Callable[[descriptor.FieldDescriptor, Any], Any], ) -> Dict[str, Any]: """Converts the given Protocol Buffers message to a flat dictionary. Fields of nested messages will be represented through keys of a path with dots. Consider the following Protocol Buffers message: foo { bar: 42 baz { quux: "thud" } } Its representation as a flat Python dictionary is the following: { "foo.bar": 42, "foo.baz.quux": "thud" } Args: msg: A message to convert. transform: A transformation to apply to primitive values. Returns: A flat dictionary corresponding to the given message. """ # Using ordered dictionary guarantees stable order of fields in the result. result = dict() def Recurse(msg: message.Message, prev: Tuple[str, ...]) -> None: fields = sorted(msg.ListFields(), key=lambda field: field[0].name) for field, value in fields: curr = prev + (field.name,) if field.type == descriptor.FieldDescriptor.TYPE_MESSAGE: Recurse(value, curr) else: result[".".join(curr)] = transform(field, value) Recurse(msg, ()) return result
Converts the given Protocol Buffers message to a flat dictionary. Fields of nested messages will be represented through keys of a path with dots. Consider the following Protocol Buffers message: foo { bar: 42 baz { quux: "thud" } } Its representation as a flat Python dictionary is the following: { "foo.bar": 42, "foo.baz.quux": "thud" } Args: msg: A message to convert. transform: A transformation to apply to primitive values. Returns: A flat dictionary corresponding to the given message.
MessageToFlatDict
python
google/grr
api_client/python/grr_api_client/utils.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/utils.py
Apache-2.0
def Xor(bytestr: bytes, key: int) -> bytes:
  """Applies a single-byte XOR mask to the input.

  Args:
    bytestr: Raw bytes to transform.
    key: An integer in [0, 255] xored into every byte.

  Returns:
    A new `bytes` object with every byte xored with `key`.
  """
  return bytes(octet ^ key for octet in bytestr)
Returns a `bytes` object where each byte has been xored with key.
Xor
python
google/grr
api_client/python/grr_api_client/utils.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/utils.py
Apache-2.0
def __init__(self, chunks: Iterator[bytes]) -> None:
  """Initializes the stream wrapper.

  Args:
    chunks: An iterator yielding consecutive chunks of raw bytes.
  """
  super().__init__()
  # Holds leftover bytes of a partially consumed chunk between reads.
  self._buf = io.BytesIO()
  self._chunks = chunks
Initializes the object.
__init__
python
google/grr
api_client/python/grr_api_client/utils.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/utils.py
Apache-2.0
def AEADDecrypt(stream: IO[bytes], key: bytes) -> IO[bytes]:
  """Decrypts given file-like object using AES algorithm in GCM mode.

  Refer to the encryption documentation to learn about the details of the
  format that this function allows to decode.

  Args:
    stream: A file-like object to decrypt.
    key: A secret key used for decrypting the data.

  Returns:
    A file-like object with decrypted data.
  """
  aesgcm = aead.AESGCM(key)

  def Generate() -> Iterator[bytes]:
    # Buffered reader should accept `IO[bytes]` but for now it accepts only
    # `RawIOBase` (which is a concrete base class for all I/O implementations).
    reader = io.BufferedReader(stream)  # pytype: disable=wrong-arg-types

    # We abort early if there is no data in the stream. Otherwise we would try
    # to read nonce and fail.
    if not reader.peek():
      return

    # Each chunk on the wire is: nonce | ciphertext | tag. The chunk index and
    # a last-chunk flag are bound in as associated data so chunks cannot be
    # reordered or truncated without failing authentication.
    for idx in itertools.count():
      nonce = reader.read(_AEAD_NONCE_SIZE)

      # As long there is some data in the buffer (and there should be because of
      # the initial check) there should be a fixed-size nonce prepended to each
      # chunk.
      if len(nonce) != _AEAD_NONCE_SIZE:
        raise EOFError(f"Incorrect nonce length: {len(nonce)}")

      # The extra 16 bytes account for the AES-GCM authentication tag that is
      # appended to each encrypted chunk.
      chunk = reader.read(_AEAD_CHUNK_SIZE + 16)

      # `BufferedReader#peek` will return non-empty byte string if there is more
      # data available in the stream.
      is_last = reader.peek() == b""  # pylint: disable=g-explicit-bool-comparison

      adata = _AEAD_ADATA_FORMAT.pack(idx, is_last)

      yield aesgcm.decrypt(nonce, chunk, adata)

      if is_last:
        break

  return io.BufferedReader(_Unchunked(Generate()))
Decrypts given file-like object using AES algorithm in GCM mode. Refer to the encryption documentation to learn about the details of the format that this function allows to decode. Args: stream: A file-like object to decrypt. key: A secret key used for decrypting the data. Returns: A file-like object with decrypted data.
AEADDecrypt
python
google/grr
api_client/python/grr_api_client/utils.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/utils.py
Apache-2.0
def RegisterProtoDescriptors(
    db: symbol_database.SymbolDatabase,
    *additional_descriptors: descriptor.FileDescriptor,
) -> None:
  """Registers all API-related descriptors in a given symbol DB."""
  # keep-sorted start
  api_proto_modules = (
      artifact_pb2,
      client_pb2,
      config_pb2,
      containers_pb2,
      cron_pb2,
      crowdstrike_pb2,
      deprecated_pb2,
      dummy_pb2,
      flow_pb2,
      flows_pb2,
      hunt_pb2,
      jobs_pb2,
      large_file_pb2,
      metadata_pb2,
      osquery_pb2,
      output_plugin_pb2,
      pipes_pb2,
      read_low_level_pb2,
      reflection_pb2,
      signed_commands_pb2,
      stats_pb2,
      timeline_pb2,
      user_pb2,
      vfs_pb2,
      yara_pb2,
  )
  # keep-sorted end
  for module in api_proto_modules:
    db.RegisterFileDescriptor(module.DESCRIPTOR)

  db.RegisterFileDescriptor(
      wrappers_pb2.DESCRIPTOR
  )  # type: ignore[attr-defined]

  # Callers may supply extra file descriptors to register as well.
  for d in additional_descriptors:
    db.RegisterFileDescriptor(d)
Registers all API-related descriptors in a given symbol DB.
RegisterProtoDescriptors
python
google/grr
api_client/python/grr_api_client/utils.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/utils.py
Apache-2.0
def ListGrrBinaries(
    context: api_context.GrrApiContext,
) -> utils.ItemsIterator[GrrBinary]:
  """Lists all registered Grr binaries."""

  def ToBinary(data) -> GrrBinary:
    # Wrap each raw API item into a `GrrBinary` bound to the same context.
    return GrrBinary(data=data, context=context)

  raw_items = context.SendIteratorRequest("ListGrrBinaries", None)
  return utils.MapItemsIterator(ToBinary, raw_items)
Lists all registered Grr binaries.
ListGrrBinaries
python
google/grr
api_client/python/grr_api_client/config.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/config.py
Apache-2.0
def CreateFlowRunnerArgs(self) -> flows_pb2.FlowRunnerArgs:
  """Returns a fresh, empty flow runner args proto for the caller to fill."""
  return flows_pb2.FlowRunnerArgs()
Creates flow runner args object.
CreateFlowRunnerArgs
python
google/grr
api_client/python/grr_api_client/types.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/types.py
Apache-2.0
def CreateHuntRunnerArgs(self) -> flows_pb2.HuntRunnerArgs:
  """Returns a fresh, empty hunt runner args proto for the caller to fill."""
  return flows_pb2.HuntRunnerArgs()
Creates hunt runner args object.
CreateHuntRunnerArgs
python
google/grr
api_client/python/grr_api_client/types.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/types.py
Apache-2.0
def CreateFlowArgs(
    self,
    flow_name: str,
) -> Any:
  """Creates flow arguments object for a flow with a given name.

  Flow descriptors are fetched from the server once and cached on the
  instance for subsequent calls.

  Args:
    flow_name: Name of a registered flow.

  Returns:
    A fresh copy of the default arguments proto for the given flow.

  Raises:
    UnknownFlowName: If no flow with the given name is registered.
    TypeError: If the server response has an unexpected type.
  """
  if not self._flow_descriptors:
    # Lazily fetch and cache the descriptors of all flows on first use.
    self._flow_descriptors = {}

    result = self._context.SendRequest("ListFlowDescriptors", None)
    if not isinstance(result, flow_pb2.ApiListFlowDescriptorsResult):
      raise TypeError(f"Unexpected response type: {type(result)}")

    for item in result.items:
      self._flow_descriptors[item.name] = item

  try:
    flow_descriptor = self._flow_descriptors[flow_name]
  except KeyError:
    # Suppress the internal `KeyError` context so callers see a clean
    # domain-level error instead of "During handling of the above...".
    raise UnknownFlowName(flow_name) from None

  # Copy so callers can mutate the returned proto without corrupting the
  # cached descriptor's defaults.
  return utils.CopyProto(utils.UnpackAny(flow_descriptor.default_args))
Creates flow arguments object for a flow with a given name.
CreateFlowArgs
python
google/grr
api_client/python/grr_api_client/types.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/types.py
Apache-2.0
def UnpackAny(
    self,
    proto_any: any_pb2.Any,
) -> Union[message.Message, utils.UnknownProtobuf]:
  """Resolves the type and unpacks the given protobuf `Any` object.

  Thin convenience wrapper around `utils.UnpackAny`.
  """
  return utils.UnpackAny(proto_any)
Resolves the type and unpacks the given protobuf Any object.
UnpackAny
python
google/grr
api_client/python/grr_api_client/types.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/types.py
Apache-2.0
def UploadArtifact(self, yaml: str) -> None:
  # pylint: disable=line-too-long
  # fmt: off
  """Uploads the given [YAML artifact definition][1] to the GRR server.

  [1]: https://artifacts.readthedocs.io/en/latest/sources/Format-specification.html

  Args:
    yaml: YAML with the artifact definition.

  Returns:
    Nothing.
  """
  # pylint: enable=line-too-long
  # fmt: on
  # Delegates to the module-level helper, forwarding this API's context.
  return artifact.UploadArtifact(context=self._context, yaml=yaml)
Uploads the given [YAML artifact definition][1] to the GRR server. [1]: https://artifacts.readthedocs.io/en/latest/sources/Format-specification.html Args: yaml: YAML with the artifact definition. Returns: Nothing.
UploadArtifact
python
google/grr
api_client/python/grr_api_client/api.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/api.py
Apache-2.0
def UploadYaraSignature(self, signature: str) -> bytes:
  """Uploads the specified YARA signature.

  Args:
    signature: A YARA signature to upload.

  Returns:
    A reference to the uploaded blob.
  """
  blob_ref = yara.UploadYaraSignature(signature, context=self._context)
  return blob_ref
Uploads the specified YARA signature. Args: signature: A YARA signature to upload. Returns: A reference to the uploaded blob.
UploadYaraSignature
python
google/grr
api_client/python/grr_api_client/api.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/api.py
Apache-2.0
def GetOpenApiDescription(self) -> Dict[str, Any]:
  """Returns the OpenAPI description of the GRR API as a dictionary.

  Thin wrapper delegating to `metadata.GetOpenApiDescription`.
  """
  return metadata.GetOpenApiDescription(context=self._context)
Returns the OpenAPI description of the GRR API as a dictionary.
GetOpenApiDescription
python
google/grr
api_client/python/grr_api_client/api.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/api.py
Apache-2.0
def InitHttp(
    api_endpoint: str,
    page_size: Optional[int] = None,
    auth: Optional[Tuple[str, str]] = None,
    proxies: Optional[Dict[str, str]] = None,
    verify: Optional[bool] = None,
    cert: Optional[bytes] = None,
    trust_env: Optional[bool] = None,
    validate_version: Optional[bool] = None,
) -> GrrApi:
  """Initializes a GRR API object backed by an HTTP connector.

  All parameters are forwarded verbatim to `connectors.HttpConnector`.
  """
  http_connector = connectors.HttpConnector(
      api_endpoint=api_endpoint,
      page_size=page_size,
      auth=auth,
      proxies=proxies,
      verify=verify,
      cert=cert,
      trust_env=trust_env,
      validate_version=validate_version,
  )
  return GrrApi(connector=http_connector)
Inits a GRR API object with a HTTP connector.
InitHttp
python
google/grr
api_client/python/grr_api_client/api.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/api.py
Apache-2.0
def Get(self):
  """Fetches and returns a fully populated ClientApproval object."""
  request = user_pb2.ApiGetClientApprovalArgs(
      client_id=self.client_id,
      approval_id=self.approval_id,
      username=self.username,
  )
  response = self._context.SendRequest("GetClientApproval", request)
  return ClientApproval(
      data=response,
      username=self._context.username,
      context=self._context,
  )
Fetch and return a proper ClientApproval object.
Get
python
google/grr
api_client/python/grr_api_client/client.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/client.py
Apache-2.0
def WaitUntilValid(self, timeout=None):
  """Wait until the approval is valid (i.e. - approved).

  Polls `self.Get` until the fetched approval's `is_valid` flag is set.

  Args:
    timeout: timeout in seconds. None means default timeout (1 hour). 0 means
      no timeout (wait forever).

  Returns:
    The refreshed approval object once it becomes valid.

  Raises:
    PollTimeoutError: if timeout is reached.
  """
  return utils.Poll(
      generator=self.Get, condition=lambda f: f.data.is_valid, timeout=timeout
  )
Wait until the approval is valid (i.e. - approved). Args: timeout: timeout in seconds. None means default timeout (1 hour). 0 means no timeout (wait forever). Returns: The refreshed approval object once it becomes valid. Raises: PollTimeoutError: if timeout is reached.
WaitUntilValid
python
google/grr
api_client/python/grr_api_client/client.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/client.py
Apache-2.0
def File(self, path):
  """Returns a reference to a file with a given path on client's VFS.

  The reference is lazy: no request is sent until it is used.
  """
  return vfs.FileRef(
      client_id=self.client_id,
      path=path,
      context=self._context,
  )
Returns a reference to a file with a given path on client's VFS.
File
python
google/grr
api_client/python/grr_api_client/client.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/client.py
Apache-2.0
def Flow(self, flow_id):
  """Return a reference to a flow with a given id on this client.

  The reference is lazy: no request is sent until it is used.
  """
  return flow.FlowRef(
      client_id=self.client_id,
      flow_id=flow_id,
      context=self._context,
  )
Return a reference to a flow with a given id on this client.
Flow
python
google/grr
api_client/python/grr_api_client/client.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/client.py
Apache-2.0
def CreateFlow(self, name=None, args=None, runner_args=None):
  """Starts a new flow with the given name on this client.

  Args:
    name: Name of the flow to start (required).
    args: Optional flow arguments proto; packed into the request as `Any`.
    runner_args: Optional `FlowRunnerArgs` proto copied into the request.

  Returns:
    A `Flow` object describing the newly created flow.

  Raises:
    ValueError: If `name` is empty.
  """
  if not name:
    raise ValueError("name can't be empty")

  request = flow_pb2.ApiCreateFlowArgs(client_id=self.client_id)
  request.flow.name = name

  if runner_args:
    request.flow.runner_args.CopyFrom(runner_args)
  if args:
    # Manually pack `args` into the Any field of the request.
    request.flow.args.type_url = utils.GetTypeUrl(args)
    request.flow.args.value = args.SerializeToString()

  response = self._context.SendRequest("CreateFlow", request)
  return flow.Flow(data=response, context=self._context)
Create new flow on this client.
CreateFlow
python
google/grr
api_client/python/grr_api_client/client.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/client.py
Apache-2.0
def Interrogate(self):
  """Run an Interrogate Flow on this client."""
  request = client_pb2.ApiInterrogateClientArgs(client_id=self.client_id)
  response = self._context.SendRequest("InterrogateClient", request)
  # Return a populated Flow, similar to the behavior of CreateFlow().
  return self.Flow(response.operation_id).Get()
Run an Interrogate Flow on this client.
Interrogate
python
google/grr
api_client/python/grr_api_client/client.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/client.py
Apache-2.0
def ListFlows(self):
  """List flows that ran on this client."""

  def ToFlow(data):
    # Wrap each raw API item in a `Flow` bound to this client's context.
    return flow.Flow(data=data, context=self._context)

  request = flow_pb2.ApiListFlowsArgs(client_id=self.client_id)
  raw_items = self._context.SendIteratorRequest("ListFlows", request)
  return utils.MapItemsIterator(ToFlow, raw_items)
List flows that ran on this client.
ListFlows
python
google/grr
api_client/python/grr_api_client/client.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/client.py
Apache-2.0
def Approval(self, username, approval_id):
  """Returns a lazy reference to a client approval.

  Args:
    username: Name of the user the approval belongs to.
    approval_id: Identifier of the approval.
  """
  return ClientApprovalRef(
      client_id=self.client_id,
      username=username,
      approval_id=approval_id,
      context=self._context,
  )
Returns a reference to an approval.
Approval
python
google/grr
api_client/python/grr_api_client/client.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/client.py
Apache-2.0
def CreateApproval(
    self,
    reason=None,
    notified_users=None,
    email_cc_addresses=None,
    expiration_duration_days=0,
):
  """Create a new approval for the current user to access this client.

  Args:
    reason: Justification for the access request (required).
    notified_users: Users to notify about the request (required).
    email_cc_addresses: Optional extra addresses to CC.
    expiration_duration_days: Validity period in days; 0 means the server
      default.

  Raises:
    ValueError: If `reason` or `notified_users` is empty.
  """
  if not reason:
    raise ValueError("reason can't be empty")
  if not notified_users:
    raise ValueError("notified_users list can't be empty.")

  if expiration_duration_days != 0:
    # Convert "now + N days" into microseconds since the epoch.
    expiration_seconds = time.time() + expiration_duration_days * 24 * 3600
    expiration_time_us = int(expiration_seconds * 1e6)
  else:
    expiration_time_us = 0

  approval = user_pb2.ApiClientApproval(
      reason=reason,
      notified_users=notified_users,
      email_cc_addresses=email_cc_addresses or [],
      expiration_time_us=expiration_time_us,
  )
  args = user_pb2.ApiCreateClientApprovalArgs(
      client_id=self.client_id,
      approval=approval,
  )

  data = self._context.SendRequest("CreateClientApproval", args)
  return ClientApproval(
      data=data, username=self._context.username, context=self._context
  )
Create a new approval for the current user to access this client.
CreateApproval
python
google/grr
api_client/python/grr_api_client/client.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/client.py
Apache-2.0
def _ProcessLabels(self, labels):
  """Validates the `labels` argument of AddLabels/RemoveLabels.

  Args:
    labels: An iterable of label strings (but not a bare string/bytes).

  Returns:
    The labels materialized as a list.

  Raises:
    TypeError: If `labels` is a string, bytes, not iterable, or contains a
      non-string element.
    ValueError: If `labels` is empty.
  """
  # A bare string is iterable, but almost certainly a caller mistake.
  if isinstance(labels, (str, bytes)):
    raise TypeError(
        "'labels' argument is expected to be an "
        "iterable of strings, not {!r}.".format(labels)
    )
  if not isinstance(labels, abc.Iterable):
    raise TypeError(
        "Expected iterable container, but got {!r} instead.".format(labels)
    )

  materialized = list(labels)
  if not materialized:
    raise ValueError("Labels iterable can't be empty.")

  for label in materialized:
    if not isinstance(label, str):
      raise TypeError(
          "Expected labels as strings, got {!r} instead.".format(label)
      )

  return materialized
Checks that 'labels' arguments for AddLabels/RemoveLabels is correct.
_ProcessLabels
python
google/grr
api_client/python/grr_api_client/client.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/client.py
Apache-2.0
def Get(self):
  """Fetch client's data and return a proper Client object."""
  request = client_pb2.ApiGetClientArgs(client_id=self.client_id)
  response = self._context.SendRequest("GetClient", request)
  return Client(data=response, context=self._context)
Fetch client's data and return a proper Client object.
Get
python
google/grr
api_client/python/grr_api_client/client.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/client.py
Apache-2.0
def KillFleetspeak(self, force: bool) -> None:
  """Kills fleetspeak on the given client.

  Args:
    force: Whether to force-kill the fleetspeak process.
  """
  args = client_pb2.ApiKillFleetspeakArgs(
      client_id=self.client_id,
      force=force,
  )
  self._context.SendRequest("KillFleetspeak", args)
Kills fleetspeak on the given client.
KillFleetspeak
python
google/grr
api_client/python/grr_api_client/client.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/client.py
Apache-2.0
def RestartFleetspeakGrrService(self) -> None:
  """Restarts the GRR fleetspeak service on the given client."""
  args = client_pb2.ApiRestartFleetspeakGrrServiceArgs(
      client_id=self.client_id,
  )
  self._context.SendRequest("RestartFleetspeakGrrService", args)
Restarts the GRR fleetspeak service on the given client.
RestartFleetspeakGrrService
python
google/grr
api_client/python/grr_api_client/client.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/client.py
Apache-2.0
def DeleteFleetspeakPendingMessages(self) -> None:
  """Deletes fleetspeak messages pending for the given client."""
  args = client_pb2.ApiDeleteFleetspeakPendingMessagesArgs(
      client_id=self.client_id,
  )
  self._context.SendRequest("DeleteFleetspeakPendingMessages", args)
Deletes fleetspeak messages pending for the given client.
DeleteFleetspeakPendingMessages
python
google/grr
api_client/python/grr_api_client/client.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/client.py
Apache-2.0
def GetFleetspeakPendingMessageCount(self) -> int:
  """Returns the number of fleetspeak messages pending for the given client."""
  args = client_pb2.ApiGetFleetspeakPendingMessageCountArgs(
      client_id=self.client_id,
  )
  response = self._context.SendRequest(
      "GetFleetspeakPendingMessageCount", args
  )
  return response.count
Returns the number of fleetspeak messages pending for the given client.
GetFleetspeakPendingMessageCount
python
google/grr
api_client/python/grr_api_client/client.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/client.py
Apache-2.0
def GetFleetspeakPendingMessages(
    self, offset: int = 0, limit: int = 0, want_data: bool = False
) -> Sequence[client_pb2.ApiFleetspeakMessage]:
  """Returns messages pending for the given client.

  Args:
    offset: Index of the first message to return.
    limit: Maximum number of messages to return (0 = no limit).
    want_data: Whether to include message payloads in the result.
  """
  args = client_pb2.ApiGetFleetspeakPendingMessagesArgs(
      client_id=self.client_id,
      offset=offset,
      limit=limit,
      want_data=want_data,
  )
  response = self._context.SendRequest("GetFleetspeakPendingMessages", args)
  return response.messages
Returns messages pending for the given client.
GetFleetspeakPendingMessages
python
google/grr
api_client/python/grr_api_client/client.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/client.py
Apache-2.0
def SearchClients(query=None, context=None):
  """Lists clients matching the given search query."""
  args = client_pb2.ApiSearchClientsArgs(query=query)
  found = context.SendIteratorRequest("SearchClients", args)
  return utils.MapItemsIterator(
      lambda data: Client(data=data, context=context), found
  )
List clients conforming to a given query.
SearchClients
python
google/grr
api_client/python/grr_api_client/client.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/client.py
Apache-2.0
def Get(self) -> "GrrUser":
  """Fetches user's data and returns it wrapped in a GrrUser object."""
  request = user_management_pb2.ApiGetGrrUserArgs(username=self.username)
  response = self._context.SendRequest("GetGrrUser", request)
  if not isinstance(response, user_pb2.ApiGrrUser):
    raise TypeError(f"Unexpected response type: '{type(response)}'")
  return GrrUser(data=response, context=self._context)
Fetches user's data and returns it wrapped in a GrrUser object.
Get
python
google/grr
api_client/python/grr_api_client/root.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/root.py
Apache-2.0
def Delete(self):
  """Deletes the user."""
  request = user_management_pb2.ApiDeleteGrrUserArgs(username=self.username)
  self._context.SendRequest("DeleteGrrUser", request)
Deletes the user.
Delete
python
google/grr
api_client/python/grr_api_client/root.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/root.py
Apache-2.0
def Modify(
    self,
    user_type: Optional[int] = None,
    password: Optional[str] = None,
    email: Optional[str] = None,
) -> "GrrUser":
  """Modifies user's type and/or password.

  Only the fields that are not `None` are sent to the server.

  Raises:
    TypeError: If the server response has an unexpected type.
  """
  request = user_management_pb2.ApiModifyGrrUserArgs(username=self.username)
  if user_type is not None:
    request.user_type = user_type
  if password is not None:
    request.password = password
  if email is not None:
    request.email = email

  response = self._context.SendRequest("ModifyGrrUser", request)
  if not isinstance(response, user_pb2.ApiGrrUser):
    raise TypeError(f"Unexpected response type: '{type(response)}'")

  return GrrUser(data=response, context=self._context)
Modifies user's type and/or password.
Modify
python
google/grr
api_client/python/grr_api_client/root.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/root.py
Apache-2.0
def Upload(self, fd: IO[bytes], sign_fn: Callable[[bytes], bytes]):
  """Uploads data from a given stream and signs them with a given key.

  Args:
    fd: Binary stream to read the payload from.
    sign_fn: Callable producing an RSA PKCS1v15 signature for a chunk.
  """
  args = binary_management_pb2.ApiUploadGrrBinaryArgs(
      type=self.binary_type, path=self.path
  )

  # Read fixed-size chunks until EOF (`read` returns b"" at end of stream).
  for chunk in iter(lambda: fd.read(self.__class__.CHUNK_SIZE), b""):
    blob = args.blobs.add()
    blob.data = chunk
    blob.signature = sign_fn(chunk)
    blob.signature_type = blob.RSA_PKCS1v15
    blob.digest = hashlib.sha256(chunk).digest()
    blob.digest_type = blob.SHA256

  self._context.SendRequest("UploadGrrBinary", args)
Uploads data from a given stream and signs them with a given key.
Upload
python
google/grr
api_client/python/grr_api_client/root.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/root.py
Apache-2.0
def CreateGrrUser(
    self,
    username: str,
    user_type: Optional[int] = None,
    password: Optional[str] = None,
    email: Optional[str] = None,
) -> GrrUser:
  """Creates a new GRR user of a given type with a given username/password.

  Only the optional fields that are not `None` are sent to the server.

  Raises:
    ValueError: If `username` is empty.
    TypeError: If the server response has an unexpected type.
  """
  if not username:
    raise ValueError("Username can't be empty.")

  request = user_management_pb2.ApiCreateGrrUserArgs(username=username)
  if user_type is not None:
    request.user_type = user_type
  if password is not None:
    request.password = password
  if email is not None:
    request.email = email

  response = self._context.SendRequest("CreateGrrUser", request)
  if not isinstance(response, user_pb2.ApiGrrUser):
    raise TypeError(f"Unexpected response type: '{type(response)}'")

  return GrrUser(data=response, context=self._context)
Creates a new GRR user of a given type with a given username/password.
CreateGrrUser
python
google/grr
api_client/python/grr_api_client/root.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/root.py
Apache-2.0
def GrrUser(
    self,
    username: str,
) -> GrrUserRef:
  """Returns a lazy reference to the GRR user with the given username."""
  return GrrUserRef(username=username, context=self._context)
Returns a reference to a GRR user.
GrrUser
python
google/grr
api_client/python/grr_api_client/root.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/root.py
Apache-2.0
def ListGrrUsers(self) -> utils.ItemsIterator:
  """Lists all registered GRR users."""

  def ToUser(data):
    # Wrap each raw API item in a `GrrUser` bound to the same context.
    return GrrUser(data=data, context=self._context)

  request = user_management_pb2.ApiListGrrUsersArgs()
  raw_items = self._context.SendIteratorRequest("ListGrrUsers", request)
  return utils.MapItemsIterator(ToUser, raw_items)
Lists all registered GRR users.
ListGrrUsers
python
google/grr
api_client/python/grr_api_client/root.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/root.py
Apache-2.0
def CreateSignedCommands(
    self,
    commands: signed_commands_pb2.ApiSignedCommands,
) -> None:
  """Uploads the given signed commands to the GRR server.

  Args:
    commands: Signed commands to register on the server.
  """
  args = signed_commands_pb2.ApiCreateSignedCommandsArgs()
  args.signed_commands.extend(commands.signed_commands)
  self._context.SendRequest("CreateSignedCommands", args)
Uploads the given signed commands to the GRR server.
CreateSignedCommands
python
google/grr
api_client/python/grr_api_client/root.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/root.py
Apache-2.0
def DeleteAllSignedCommands(self):
  """Deletes all signed commands."""
  # This endpoint takes no arguments.
  self._context.SendRequest("DeleteAllSignedCommands", args=None)
Deletes all signed commands.
DeleteAllSignedCommands
python
google/grr
api_client/python/grr_api_client/root.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/root.py
Apache-2.0
def Get(self) -> "HuntApproval":
  """Fetches and returns a fully populated HuntApproval object.

  Raises:
    TypeError: If the server response has an unexpected type.
  """
  request = user_pb2.ApiGetHuntApprovalArgs(
      hunt_id=self.hunt_id,
      approval_id=self.approval_id,
      username=self.username,
  )
  response = self._context.SendRequest("GetHuntApproval", request)
  if not isinstance(response, user_pb2.ApiHuntApproval):
    raise TypeError(f"Unexpected response type: '{type(response)}'")

  return HuntApproval(
      data=response, username=self._context.username, context=self._context
  )
Fetch and return a proper HuntApproval object.
Get
python
google/grr
api_client/python/grr_api_client/hunt.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/hunt.py
Apache-2.0
def CreateApproval(
    self,
    reason: str,
    notified_users: Sequence[str],
    email_cc_addresses: Optional[Sequence[str]] = None,
) -> HuntApproval:
  """Create a new approval for the current user to access this hunt.

  Args:
    reason: Justification for the access request.
    notified_users: Users to notify about the request.
    email_cc_addresses: Optional extra addresses to CC.

  Returns:
    The newly created `HuntApproval`.

  Raises:
    ValueError: If `reason` or `notified_users` is empty.
    TypeError: If the server response has an unexpected type.
  """
  if not reason:
    raise ValueError("reason can't be empty")

  if not notified_users:
    raise ValueError("notified_users list can't be empty.")

  if email_cc_addresses is None:
    email_cc_addresses = []

  approval = user_pb2.ApiHuntApproval(
      reason=reason,
      notified_users=notified_users,
      email_cc_addresses=email_cc_addresses,
  )
  args = user_pb2.ApiCreateHuntApprovalArgs(
      hunt_id=self.hunt_id, approval=approval
  )

  data = self._context.SendRequest("CreateHuntApproval", args)
  if not isinstance(data, user_pb2.ApiHuntApproval):
    # Capitalized for consistency with the equivalent check in every other
    # API method in this package.
    raise TypeError(f"Unexpected response type: '{type(data)}'")

  return HuntApproval(
      data=data, username=self._context.username, context=self._context
  )
Create a new approval for the current user to access this hunt.
CreateApproval
python
google/grr
api_client/python/grr_api_client/hunt.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/hunt.py
Apache-2.0
def Modify(
    self,
    client_limit: Optional[int] = None,
    client_rate: Optional[int] = None,
    duration: Optional[int] = None,
) -> "Hunt":
  """Modifies a number of hunt arguments.

  Only the fields that are not `None` are sent to the server.

  Raises:
    TypeError: If the server response has an unexpected type.
  """
  request = hunt_pb2.ApiModifyHuntArgs(hunt_id=self.hunt_id)
  if client_limit is not None:
    request.client_limit = client_limit
  if client_rate is not None:
    request.client_rate = client_rate
  if duration is not None:
    request.duration = duration

  response = self._context.SendRequest("ModifyHunt", request)
  if not isinstance(response, hunt_pb2.ApiHunt):
    raise TypeError(f"Unexpected response type: '{type(response)}'")

  return Hunt(data=response, context=self._context)
Modifies a number of hunt arguments.
Modify
python
google/grr
api_client/python/grr_api_client/hunt.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/hunt.py
Apache-2.0
def Get(self) -> "Hunt":
  """Fetch hunt's data and return proper Hunt object.

  Raises:
    TypeError: If the server response has an unexpected type.
  """
  request = hunt_pb2.ApiGetHuntArgs(hunt_id=self.hunt_id)
  response = self._context.SendRequest("GetHunt", request)
  if not isinstance(response, hunt_pb2.ApiHunt):
    raise TypeError(f"Unexpected response type: '{type(response)}'")
  return Hunt(data=response, context=self._context)
Fetch hunt's data and return proper Hunt object.
Get
python
google/grr
api_client/python/grr_api_client/hunt.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/hunt.py
Apache-2.0
def CreateHunt(
    flow_name: str,
    flow_args: message.Message,
    hunt_runner_args: flows_pb2.HuntRunnerArgs,
    context: context_lib.GrrApiContext,
) -> Hunt:
  """Creates a new hunt.

  Args:
    flow_name: String with a name of a flow that will run on all the clients
      in the hunt.
    flow_args: Flow arguments to be used. A proto, that depends on a flow.
    hunt_runner_args: flows_pb2.HuntRunnerArgs instance. Used to specify
      description, client_rule_set, output_plugins and other useful hunt
      attributes.
    context: API context.

  Raises:
    ValueError: if flow_name is empty.
    TypeError: if the server response has an unexpected type.

  Returns:
    Hunt object corresponding to the created hunt.
  """
  if not flow_name:
    raise ValueError("flow_name can't be empty")

  request = hunt_pb2.ApiCreateHuntArgs(flow_name=flow_name)
  if hunt_runner_args:
    request.hunt_runner_args.CopyFrom(hunt_runner_args)
  if flow_args:
    request.flow_args.Pack(flow_args)

  response = context.SendRequest("CreateHunt", request)
  if not isinstance(response, hunt_pb2.ApiHunt):
    raise TypeError(f"Unexpected response type: '{type(response)}'")

  return Hunt(data=response, context=context)
Creates a new hunt. Args: flow_name: String with a name of a flow that will run on all the clients in the hunt. flow_args: Flow arguments to be used. A proto, that depends on a flow. hunt_runner_args: flows_pb2.HuntRunnerArgs instance. Used to specify description, client_rule_set, output_plugins and other useful hunt attributes. context: API context. Raises: ValueError: if flow_name is empty. Returns: Hunt object corresponding to the created hunt.
CreateHunt
python
google/grr
api_client/python/grr_api_client/hunt.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/hunt.py
Apache-2.0
def ListHunts(context: context_lib.GrrApiContext) -> utils.ItemsIterator[Hunt]:
  """List all GRR hunts."""

  def ToHunt(data) -> Hunt:
    # Wrap each raw API item in a `Hunt` bound to the same context.
    return Hunt(data=data, context=context)

  raw_items = context.SendIteratorRequest(
      "ListHunts", hunt_pb2.ApiListHuntsArgs()
  )
  return utils.MapItemsIterator(ToHunt, raw_items)
List all GRR hunts.
ListHunts
python
google/grr
api_client/python/grr_api_client/hunt.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/hunt.py
Apache-2.0
def ListHuntApprovals(
    context: context_lib.GrrApiContext,
) -> utils.ItemsIterator[HuntApproval]:
  """Lists all hunt approvals belonging to the requesting user."""
  raw_items = context.SendIteratorRequest(
      "ListHuntApprovals", user_pb2.ApiListHuntApprovalsArgs()
  )
  return utils.MapItemsIterator(
      lambda data: HuntApproval(
          data=data, username=context.username, context=context
      ),
      raw_items,
  )
List all hunt approvals belonging to requesting user.
ListHuntApprovals
python
google/grr
api_client/python/grr_api_client/hunt.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/hunt.py
Apache-2.0
def GetCollectedTimelineBody(
    self,
    timestamp_subsecond_precision: bool = True,
    inode_ntfs_file_reference_format: bool = False,
    backslash_escape: bool = True,
    carriage_return_escape: bool = False,
    non_printable_escape: bool = False,
) -> utils.BinaryChunkIterator:
  """Fetches timeline content in the body format.

  The flag arguments are forwarded verbatim to the server-side body-format
  options of the `GetCollectedTimeline` handler.

  Returns:
    An iterator over binary chunks of the response.
  """
  args = timeline_pb2.ApiGetCollectedTimelineArgs(
      client_id=self.client_id,
      flow_id=self.flow_id,
      format=timeline_pb2.ApiGetCollectedTimelineArgs.BODY,
  )

  body_opts = args.body_opts
  body_opts.timestamp_subsecond_precision = timestamp_subsecond_precision
  body_opts.inode_ntfs_file_reference_format = inode_ntfs_file_reference_format
  body_opts.backslash_escape = backslash_escape
  body_opts.carriage_return_escape = carriage_return_escape
  body_opts.non_printable_escape = non_printable_escape

  return self._context.SendStreamingRequest("GetCollectedTimeline", args)
Fetches timeline content in the body format.
GetCollectedTimelineBody
python
google/grr
api_client/python/grr_api_client/flow.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/flow.py
Apache-2.0
def Get(self) -> "Flow":
  """Fetches this flow's data and returns a fresh Flow object."""
  args = flow_pb2.ApiGetFlowArgs(
      client_id=self.client_id, flow_id=self.flow_id
  )
  response = self._context.SendRequest("GetFlow", args)
  if not isinstance(response, flow_pb2.ApiFlow):
    raise TypeError(f"Unexpected response type: {type(response)}")
  return Flow(data=response, context=self._context)
Fetch flow's data and return proper Flow object.
Get
python
google/grr
api_client/python/grr_api_client/flow.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/flow.py
Apache-2.0
def WaitUntilDone(
    self,
    timeout: int = utils.DEFAULT_POLL_TIMEOUT,
) -> "Flow":
  """Waits until the flow completes.

  Args:
    timeout: timeout in seconds. None means default timeout (1 hour).
      0 means no timeout (wait forever).

  Returns:
    Fresh flow object.

  Raises:
    PollTimeoutError: if timeout is reached.
    FlowFailedError: if the flow is not successful.
  """

  def NotRunning(flow_obj):
    return flow_obj.data.state != flow_pb2.ApiFlow.State.RUNNING

  result = utils.Poll(generator=self.Get, condition=NotRunning, timeout=timeout)

  # Anything other than TERMINATED (e.g. ERROR, CLIENT_CRASHED) is a failure.
  if result.data.state != flow_pb2.ApiFlow.State.TERMINATED:
    raise errors.FlowFailedError(
        "Flow %s (%s) failed: %s"
        % (self.flow_id, self.client_id, result.data.context.current_state)
    )
  return result
Wait until the flow completes. Args: timeout: timeout in seconds. None means default timeout (1 hour). 0 means no timeout (wait forever). Returns: Fresh flow object. Raises: PollTimeoutError: if timeout is reached. FlowFailedError: if the flow is not successful.
WaitUntilDone
python
google/grr
api_client/python/grr_api_client/flow.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/flow.py
Apache-2.0
def GetLargeFileEncryptionKey(self) -> bytes:
  """Retrieves the encryption key of the large file collection flow.

  Raises:
    ValueError: if this flow is not a CollectLargeFileFlow.
  """
  if self.data.name != "CollectLargeFileFlow":
    raise ValueError(f"Incorrect flow type: '{self.data.name}'")

  # Index the flow's state entries by key, then unpack the stored key bytes.
  state = {}
  for item in self.data.state_data.items:
    state[item.key] = item.value

  key_wrapper = wrappers_pb2.BytesValue()
  state["encryption_key"].Unpack(key_wrapper)
  return key_wrapper.value
Retrieves the encryption key of the large file collection flow.
GetLargeFileEncryptionKey
python
google/grr
api_client/python/grr_api_client/flow.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/flow.py
Apache-2.0
def DecryptLargeFile(
    self,
    input_path: Optional[str] = None,
    output_path: Optional[str] = None,
) -> None:
  """Decrypts a file collected by a large file collection flow.

  Args:
    input_path: Path to read the encrypted file, if not set read from stdin.
    output_path: Path to write the decrypted file, if not set write to stdout.

  Raises:
    ValueError: if this flow is not a CollectLargeFileFlow.
  """
  if self.data.name != "CollectLargeFileFlow":
    raise ValueError(f"Incorrect flow type: '{self.data.name}'")

  key = self.GetLargeFileEncryptionKey()

  # Fall back to the standard binary streams when no paths are given.
  if not input_path:
    input_context = contextlib.nullcontext(sys.stdin.buffer)
  else:
    input_context = open(input_path, mode="rb")

  if not output_path:
    output_context = contextlib.nullcontext(sys.stdout.buffer)
  else:
    output_context = open(output_path, mode="wb")

  with input_context as input_stream:
    with output_context as output_stream:
      decrypted = utils.AEADDecrypt(input_stream, key)
      shutil.copyfileobj(decrypted, output_stream)
Decrypts a file from a large file collection flow. Args: input_path: Path to read the encrypted file, if not set read from stdin. output_path: Path to write the decrypted file, if not set write to stdout.
DecryptLargeFile
python
google/grr
api_client/python/grr_api_client/flow.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/flow.py
Apache-2.0
def ListArtifacts(
    context: api_context.GrrApiContext,
) -> utils.ItemsIterator[Artifact]:
  """Lists all artifacts registered with the GRR server."""
  raw_items = context.SendIteratorRequest(
      "ListArtifacts", api_artifact_pb2.ApiListArtifactsArgs()
  )

  def ToArtifact(data):
    return Artifact(data=data, context=context)

  return utils.MapItemsIterator(ToArtifact, raw_items)
Lists all registered Grr artifacts.
ListArtifacts
python
google/grr
api_client/python/grr_api_client/artifact.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/artifact.py
Apache-2.0
def UploadArtifact(
    context: api_context.GrrApiContext,
    yaml: str,
) -> None:
  # pylint: disable=line-too-long
  # fmt: off
  """Uploads the given [YAML artifact definition][1] to the GRR server.

  [1]: https://artifacts.readthedocs.io/en/latest/sources/Format-specification.html

  Args:
    context: GRR API context to use.
    yaml: YAML with the artifact definition.

  Returns:
    Nothing.
  """
  # pylint: enable=line-too-long
  # fmt: on
  args = api_artifact_pb2.ApiUploadArtifactArgs(artifact=yaml)
  context.SendRequest("UploadArtifact", args)
Uploads the given [YAML artifact definition][1] to the GRR server. [1]: https://artifacts.readthedocs.io/en/latest/sources/Format-specification.html Args: context: GRR API context to use. yaml: YAML with the artifact definition. Returns: Nothing.
UploadArtifact
python
google/grr
api_client/python/grr_api_client/artifact.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/artifact.py
Apache-2.0
def _GeneratePages(
    self,
    handler_name: str,
    args: Any,
) -> Iterator[message.Message]:
  """Yields successive result pages for a paginated handler."""
  offset = args.offset
  while True:
    # Issue one page-sized request at the current offset; the caller's args
    # are copied so the original message is never mutated.
    page_args = utils.CopyProto(args)
    page_args.offset = offset
    page_args.count = self.connector.page_size

    page = self.connector.SendRequest(handler_name, page_args)
    if page is None:
      raise TypeError(f"No response returned for '{handler_name}'")
    if not hasattr(page, "items"):
      raise TypeError(
          f"Incorrect result type for '{handler_name}': {type(page)}"
      )

    yield page

    # An empty page signals that all items have been consumed.
    if not page.items:
      break
    offset += self.connector.page_size
Generates iterator pages.
_GeneratePages
python
google/grr
api_client/python/grr_api_client/context.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/context.py
Apache-2.0
def SendIteratorRequest(
    self,
    handler_name: str,
    args: Any,
) -> utils.ItemsIterator:
  """Sends an iterator request.

  Args:
    handler_name: Name of the API handler to call.
    args: Request arguments; pagination is used only when they expose a
      `count` field.

  Returns:
    An `ItemsIterator` over the result items. For paginated handlers the
    pages after the first are fetched lazily as the iterator is consumed.

  Raises:
    TypeError: if a handler response does not carry an `items` field.
  """
  if not args or not hasattr(args, "count"):
    # Non-paginated handler: a single request returns all items at once.
    result = self.connector.SendRequest(handler_name, args)

    if not hasattr(result, "items"):
      detail = f"Incorrect result type for '{handler_name}': {type(result)}"
      raise TypeError(detail)

    total_count = getattr(result, "total_count", None)
    return utils.ItemsIterator(items=result.items, total_count=total_count)
  else:
    # Paginated handler: fetch the first page eagerly (to read total_count),
    # chain the remaining pages lazily behind it.
    pages = self._GeneratePages(handler_name, args)

    first_page = next(pages)
    total_count = getattr(first_page, "total_count", None)

    def PageItems(page: message.Message) -> Iterator[message.Message]:
      if not hasattr(page, "items"):
        detail = f"Incorrect page type for '{handler_name}': {type(page)}"
        raise TypeError(detail)

      return page.items

    next_pages_items = itertools.chain.from_iterable(map(PageItems, pages))
    all_items = itertools.chain(PageItems(first_page), next_pages_items)

    if args.count:
      # Honour an explicit item limit requested by the caller.
      all_items = itertools.islice(all_items, args.count)

    return utils.ItemsIterator(items=all_items, total_count=total_count)
Sends an iterator request.
SendIteratorRequest
python
google/grr
api_client/python/grr_api_client/context.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/context.py
Apache-2.0
def WaitUntilDone(
    self,
    timeout: Optional[int] = None,
) -> "FileOperation":
  """Waits until this operation is done.

  Args:
    timeout: timeout in seconds. None means default timeout (1 hour).
      0 means no timeout (wait forever).

  Returns:
    Operation object with refreshed target_file.

  Raises:
    PollTimeoutError: if timeout is reached.
  """

  def NotRunning(state):
    return state != self.__class__.RunningState()

  utils.Poll(generator=self.GetState, condition=NotRunning, timeout=timeout)
  # Refresh the target file so callers see post-operation metadata.
  self.target_file = self.target_file.Get()
  return self
Wait until the operation is done. Args: timeout: timeout in seconds. None means default timeout (1 hour). 0 means no timeout (wait forever). Returns: Operation object with refreshed target_file. Raises: PollTimeoutError: if timeout is reached.
WaitUntilDone
python
google/grr
api_client/python/grr_api_client/vfs.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/vfs.py
Apache-2.0
def ListFiles(self) -> utils.ItemsIterator["File"]:
  """Lists files under this directory."""
  args = vfs_pb2.ApiListFilesArgs(
      client_id=self.client_id, file_path=self.path
  )
  raw_items = self._context.SendIteratorRequest("ListFiles", args)

  def ToFile(data: message.Message) -> "File":
    if not isinstance(data, vfs_pb2.ApiFile):
      raise TypeError(f"Unexpected response type: {type(data)}")
    return File(client_id=self.client_id, data=data, context=self._context)

  return utils.MapItemsIterator(ToFile, raw_items)
Lists files under the directory.
ListFiles
python
google/grr
api_client/python/grr_api_client/vfs.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/vfs.py
Apache-2.0
def Get(self) -> "File":
  """Fetches this file's details and returns a fresh File object."""
  args = vfs_pb2.ApiGetFileDetailsArgs(
      client_id=self.client_id, file_path=self.path
  )
  response = self._context.SendRequest("GetFileDetails", args)
  if not isinstance(response, vfs_pb2.ApiGetFileDetailsResult):
    raise TypeError(f"Unexpected result type: {type(response)}")
  return File(
      client_id=self.client_id, data=response.file, context=self._context
  )
Fetch file's data and return proper File object.
Get
python
google/grr
api_client/python/grr_api_client/vfs.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/vfs.py
Apache-2.0
def SendRequest(
    self,
    handler_name: str,
    args: Optional[message.Message],
) -> Optional[message.Message]:
  """Sends a request to the GRR server.

  Abstract method: concrete connector subclasses must override it.

  Args:
    handler_name: A handler to which the request should be delivered to.
    args: Arguments of the request to pass to the handler.

  Returns:
    A response from the server (if any).

  Raises:
    NotImplementedError: always, unless overridden by a subclass.
  """
  raise NotImplementedError()
Sends a request to the GRR server. Args: handler_name: A handler to which the request should be delivered to. args: Arguments of the request to pass to the handler. Returns: A response from the server (if any).
SendRequest
python
google/grr
api_client/python/grr_api_client/connectors/abstract.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/connectors/abstract.py
Apache-2.0
def SendStreamingRequest(
    self,
    handler_name: str,
    args: message.Message,
) -> utils.BinaryChunkIterator:
  """Sends a streaming request to the GRR server.

  Abstract method: concrete connector subclasses must override it.

  Args:
    handler_name: A handler to which the request should be delivered to.
    args: Arguments of the request to pass to the handler.

  Returns:
    An iterator over binary chunks that the server responded with.

  Raises:
    NotImplementedError: always, unless overridden by a subclass.
  """
  raise NotImplementedError()
Sends a streaming request to the GRR server. Args: handler_name: A handler to which the request should be delivered to. args: Arguments of the request to pass to the handler. Returns: An iterator over binary chunks that the server responded with.
SendStreamingRequest
python
google/grr
api_client/python/grr_api_client/connectors/abstract.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/connectors/abstract.py
Apache-2.0
def FromJson(
    cls,
    json_str: str,
) -> "VersionTuple":
  """Creates a version tuple from a JSON response.

  The JSON response must be a serialized variant of the
  `ApiGetGrrVersionResult` message.

  Args:
    json_str: A string object with version information JSON data.

  Returns:
    Parsed version tuple.
  """
  proto = metadata_pb2.ApiGetGrrVersionResult()
  json_format.Parse(json_str, proto, ignore_unknown_fields=True)
  return cls.FromProto(proto)
Creates a version tuple from a JSON response. The JSON response must be serialized variant of the `ApiGetGrrVersionResult` message. Args: json_str: A string object with version information JSON data. Returns: Parsed version tuple.
FromJson
python
google/grr
api_client/python/grr_api_client/connectors/http.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/connectors/http.py
Apache-2.0
def FromProto(
    cls,
    proto: metadata_pb2.ApiGetGrrVersionResult,
) -> "VersionTuple":
  """Creates a version tuple from a server response.

  Args:
    proto: A server response with version information.

  Returns:
    Parsed version tuple.
  """
  return VersionTuple(
      major=proto.major,
      minor=proto.minor,
      revision=proto.revision,
      release=proto.release,
  )
Creates a version tuple from a server response. Args: proto: A server response with version information. Returns: Parsed version tuple.
FromProto
python
google/grr
api_client/python/grr_api_client/connectors/http.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/connectors/http.py
Apache-2.0
def FromString(cls, string: str) -> "VersionTuple":
  """Creates a version tuple from a version string (like '1.3.3.post7').

  Args:
    string: A version string.

  Returns:
    Parsed version tuple.

  Raises:
    ValueError: if the string does not match the expected version format.
  """
  match = _VERSION_STRING_PATTERN.match(string)
  if match is None:
    raise ValueError(f"Incorrect version string: {string!r}")

  release_str = match[4]
  # TODO(hanuszczak): Replace with `str.removeprefix` once we support only
  # Python 3.9+.
  if release_str.startswith("post"):
    release_str = release_str[len("post"):]

  return VersionTuple(
      major=int(match[1]),
      minor=int(match[2]),
      revision=int(match[3]),
      release=int(release_str),
  )
Creates a version tuple from a version string (like '1.3.3.post7'). Args: string: A version string. Returns: Parsed version tuple.
FromString
python
google/grr
api_client/python/grr_api_client/connectors/http.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/connectors/http.py
Apache-2.0
def _FetchVersion(self) -> Optional[VersionTuple]:
  """Fetches version information about the GRR server.

  Note that it might be the case that the server version is so old that it
  does not have the method for retrieving server version. In such case, the
  method will return `None`.

  Returns:
    A message with version descriptor (if possible).
  """
  response = self.session.get(
      url=f"{self.api_endpoint}/api/v2/metadata/version",
      headers={
          "x-csrftoken": self.csrf_token,
          "x-requested-with": "XMLHttpRequest",
      },
      cookies={
          "csrftoken": self.csrf_token,
      },
  )

  try:
    self._CheckResponseStatus(response)
  except errors.Error:
    # Old servers do not expose the version endpoint at all.
    return None

  json_str = response.content.decode("utf-8").lstrip(self.JSON_PREFIX)
  return VersionTuple.FromJson(json_str)
Fetches version information about the GRR server. Note that it might be the case that the server version is so old that it does not have the method for retrieving server version. In such case, the method will return `None`. Returns: A message with version descriptor (if possible).
_FetchVersion
python
google/grr
api_client/python/grr_api_client/connectors/http.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/connectors/http.py
Apache-2.0
def _ValidateVersion(self): """Validates that the API client is compatible the GRR server. In case version is impossible to validate (e.g. we are not running from a PIP package), this function does nothing and skips validation. Raises: VersionMismatchError: If the API client is incompatible with the server. """ api_client_version = self.api_client_version server_version = self.server_version if api_client_version is None or server_version is None: # If either of the versions is unspecified, we cannot properly validate. return if api_client_version < server_version: raise errors.VersionMismatchError( server_version=server_version, api_client_version=api_client_version)
Validates that the API client is compatible the GRR server. In case version is impossible to validate (e.g. we are not running from a PIP package), this function does nothing and skips validation. Raises: VersionMismatchError: If the API client is incompatible with the server.
_ValidateVersion
python
google/grr
api_client/python/grr_api_client/connectors/http.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/connectors/http.py
Apache-2.0
def server_version(self) -> Optional[VersionTuple]:
  """Returns the server version tuple, fetching it on first access."""
  if self._server_version is None:
    self._server_version = self._FetchVersion()

  return self._server_version
Retrieves (lazily) the version server tuple.
server_version
python
google/grr
api_client/python/grr_api_client/connectors/http.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/connectors/http.py
Apache-2.0
def api_client_version(self) -> Optional[VersionTuple]:
  """Returns the API client version tuple, computed lazily (if possible)."""
  if self._api_client_version is not None:
    return self._api_client_version

  try:
    distribution = pkg_resources.get_distribution("grr_api_client")
  except pkg_resources.DistributionNotFound:
    # Distribution might not be available if we are not running from within
    # a PIP package. In such case, it is not possible to retrieve version.
    return None

  self._api_client_version = VersionTuple.FromString(distribution.version)
  return self._api_client_version
Retrieves (lazily) the API client version tuple (if possible).
api_client_version
python
google/grr
api_client/python/grr_api_client/connectors/http.py
https://github.com/google/grr/blob/master/api_client/python/grr_api_client/connectors/http.py
Apache-2.0
def make_ui_files():
  """Builds necessary assets from sources."""
  static_dir = "grr_response_server/gui/static"
  ui_dir = "grr_response_server/gui/ui"

  # Install node_modules, but keep package(-lock).json frozen.
  # Using shell=True, otherwise npm is not found in a nodeenv-built
  # virtualenv on Windows.
  subprocess.check_call("npm ci", shell=True, cwd=static_dir)
  subprocess.check_call("npm run gulp compile", shell=True, cwd=static_dir)

  # Compile UI v2.
  subprocess.check_call("npm ci", shell=True, cwd=ui_dir)
  subprocess.check_call("npm run ng build --prod", shell=True, cwd=ui_dir)
Builds necessary assets from sources.
make_ui_files
python
google/grr
grr/server/setup.py
https://github.com/google/grr/blob/master/grr/server/setup.py
Apache-2.0
def get_config():
  """Returns (relative path to version.ini, ConfigParser with its data)."""
  # Try the local version.ini first, then the repository-root copy.
  candidates = ["version.ini", os.path.join("..", "..", "version.ini")]
  for rel_ini_path in candidates:
    ini_path = os.path.join(THIS_DIRECTORY, rel_ini_path)
    if os.path.exists(ini_path):
      break
  else:
    raise RuntimeError("Couldn't find version.ini")

  config = configparser.ConfigParser()
  config.read(ini_path)
  return rel_ini_path, config
Get relative path to version.ini file and the INI parser with its data.
get_config
python
google/grr
grr/server/setup.py
https://github.com/google/grr/blob/master/grr/server/setup.py
Apache-2.0
def testEventNotification(self):
  """Test that events are sent to listeners."""
  # Reset the listener's record so the assertion below only sees events
  # published by this test.
  TestListener.received_events = []
  event = rdf_flows.GrrMessage(
      session_id=rdfvalue.SessionID(flow_name="SomeFlow"),
      name="test message",
      payload=rdf_paths.PathSpec(path="foobar", pathtype="TSK"),
      source="aff4:/C.0000000000000001",
      auth_state="AUTHENTICATED",
  )

  events.Events.PublishEvent("TestEvent", event, username=self.test_username)

  # Make sure the source is correctly propagated.
  self.assertEqual(TestListener.received_events[0], event)
Test that events are sent to listeners.
testEventNotification
python
google/grr
grr/server/grr_response_server/events_test.py
https://github.com/google/grr/blob/master/grr/server/grr_response_server/events_test.py
Apache-2.0
def Register(cls: Type[base.ExportConverter]):
  """Adds an ExportConverter class to the global registry.

  Args:
    cls: ExportConverter class to register.
  """
  _EXPORT_CONVERTER_REGISTRY.add(cls)
Registers an ExportConversion class. Args: cls: ExportConversion class.
Register
python
google/grr
grr/server/grr_response_server/export_converters_registry.py
https://github.com/google/grr/blob/master/grr/server/grr_response_server/export_converters_registry.py
Apache-2.0
def Unregister(cls: Type[base.ExportConverter]):
  """Removes an ExportConverter class from the global registry.

  Args:
    cls: ExportConverter class to unregister.
  """
  _EXPORT_CONVERTER_REGISTRY.remove(cls)
Unregisters an ExportConversion class. Args: cls: ExportConversion class to be unregistered.
Unregister
python
google/grr
grr/server/grr_response_server/export_converters_registry.py
https://github.com/google/grr/blob/master/grr/server/grr_response_server/export_converters_registry.py
Apache-2.0
def ClearExportConverters():
  """Empties the converters registry (and thus its cached values)."""
  _EXPORT_CONVERTER_REGISTRY.clear()
Clears converters registry and its cached values.
ClearExportConverters
python
google/grr
grr/server/grr_response_server/export_converters_registry.py
https://github.com/google/grr/blob/master/grr/server/grr_response_server/export_converters_registry.py
Apache-2.0
def GetConvertersByClass(value_cls):
  """Returns all converters that take the given value class as input.

  Falls back to the data-agnostic converter when no registered converter
  matches.
  """
  matching = [
      converter
      for converter in _EXPORT_CONVERTER_REGISTRY
      if converter.input_rdf_type == value_cls
  ]
  return matching or [data_agnostic.DataAgnosticExportConverter]
Returns all converters that take given value as an input value.
GetConvertersByClass
python
google/grr
grr/server/grr_response_server/export_converters_registry.py
https://github.com/google/grr/blob/master/grr/server/grr_response_server/export_converters_registry.py
Apache-2.0
def testReceiveMessages(self):
  """Tests receiving messages."""
  client_id = "C.1234567890123456"
  flow_id = "12345678"
  data_store.REL_DB.WriteClientMetadata(client_id)

  # Bracket the flow-request creation with two timestamps so we can later
  # assert that the stored request was written in between them.
  before_flow_create = data_store.REL_DB.Now()
  _, req = self._FlowSetup(client_id, flow_id)
  after_flow_create = data_store.REL_DB.Now()

  session_id = "%s/%s" % (client_id, flow_id)
  # Nine authenticated responses (response ids 1..9) for request 1.
  messages = [
      rdf_flows.GrrMessage(
          request_id=1,
          response_id=i,
          session_id=session_id,
          auth_state="AUTHENTICATED",
          payload=rdfvalue.RDFInteger(i),
      )
      for i in range(1, 10)
  ]

  ReceiveMessages(client_id, messages)
  received = data_store.REL_DB.ReadAllFlowRequestsAndResponses(
      client_id, flow_id
  )
  self.assertLen(received, 1)
  received_request = received[0][0]
  self.assertEqual(received_request.client_id, req.client_id)
  self.assertEqual(received_request.flow_id, req.flow_id)
  self.assertEqual(received_request.request_id, req.request_id)
  self.assertBetween(
      received_request.timestamp, before_flow_create, after_flow_create
  )
  self.assertLen(received[0][1], 9)
Tests receiving messages.
testReceiveMessages
python
google/grr
grr/server/grr_response_server/frontend_lib_test.py
https://github.com/google/grr/blob/master/grr/server/grr_response_server/frontend_lib_test.py
Apache-2.0
def AddFile(self, hash_id: rdf_objects.HashID, metadata: FileMetadata):
  """Add a new file to the file store.

  Abstract method: concrete file store implementations must override it.

  Args:
    hash_id: Hash id of the file to add.
    metadata: File metadata (a tuple of hash client path and blob
      references).

  Raises:
    NotImplementedError: always, unless overridden by a subclass.
  """
  raise NotImplementedError()
Add a new file to the file store.
AddFile
python
google/grr
grr/server/grr_response_server/file_store.py
https://github.com/google/grr/blob/master/grr/server/grr_response_server/file_store.py
Apache-2.0
def AddFiles(self, hash_id_metadatas: Dict[rdf_objects.HashID, FileMetadata]):
  """Adds multiple files to the file store.

  The default implementation simply delegates to `AddFile` for every entry;
  subclasses may override it with a more efficient batch implementation.

  Args:
    hash_id_metadatas: A dictionary mapping hash ids to file metadata (a
      tuple of hash client path and blob references).
  """
  for hash_id, metadata in hash_id_metadatas.items():
    self.AddFile(hash_id, metadata)
Adds multiple files to the file store. Args: hash_id_metadatas: A dictionary mapping hash ids to file metadata (a tuple of hash client path and blob references).
AddFiles
python
google/grr
grr/server/grr_response_server/file_store.py
https://github.com/google/grr/blob/master/grr/server/grr_response_server/file_store.py
Apache-2.0
def _GetChunk(
    self,
) -> tuple[Optional[bytes], Optional[rdf_objects.BlobReference]]:
  """Fetches a chunk corresponding to the current offset.

  Returns:
    A `(chunk, reference)` pair, or `(None, None)` when the current offset
    is not covered by any known blob reference.

  Raises:
    BlobNotFoundError: if the referenced blob is missing from the blob
      store.
  """
  # Find the blob reference whose [offset, offset + size) range contains
  # the current read position.
  found_ref = None
  for ref in self._blob_refs:
    if self._offset >= ref.offset and self._offset < (ref.offset + ref.size):
      found_ref = ref
      break

  if not found_ref:
    return None, None

  # If self._current_ref == found_ref, then simply return previously found
  # chunk. Otherwise, update self._current_chunk value.
  if self._current_ref != found_ref:
    self._current_ref = found_ref

    blob_id = models_blob.BlobID(found_ref.blob_id)
    data = data_store.BLOBS.ReadBlobs([blob_id])
    if data[blob_id] is None:
      raise BlobNotFoundError(blob_id)
    self._current_chunk = data[blob_id]

  return self._current_chunk, self._current_ref
Fetches a chunk corresponding to the current offset.
_GetChunk
python
google/grr
grr/server/grr_response_server/file_store.py
https://github.com/google/grr/blob/master/grr/server/grr_response_server/file_store.py
Apache-2.0
def Read(self, length: Optional[int] = None) -> bytes:
  """Reads up to `length` bytes starting at the current offset.

  Args:
    length: Number of bytes to read. If None, reads to the end of the file
      (subject to the Server.max_unbound_read_size limit).

  Returns:
    The bytes read; may be shorter than `length` if the data runs out.

  Raises:
    OversizedReadError: if an unbounded read would exceed the configured
      limit.
  """
  if length is None:
    length = self._length - self._offset

    # Only enforce limit when length is not specified manually.
    if length > self._max_unbound_read:
      raise OversizedReadError(
          "Attempted to read %d bytes when Server.max_unbound_read_size is %d"
          % (length, self._max_unbound_read)
      )

  result = io.BytesIO()
  remaining = length
  while remaining > 0:
    chunk, ref = self._GetChunk()
    if not chunk:
      break

    # Take the part of the chunk at/after the cursor, capped to what the
    # caller still wants.
    part = chunk[self._offset - ref.offset :][:remaining]
    if not part:
      break

    result.write(part)
    # Advance by the bytes actually consumed so that Tell() stays in sync
    # with the returned data. (The previous code advanced by
    # min(length, len(part)) per chunk, which overshot the offset whenever a
    # bounded read ended mid-chunk after crossing a chunk boundary.)
    self._offset += len(part)
    remaining -= len(part)

  return result.getvalue()
Reads data.
Read
python
google/grr
grr/server/grr_response_server/file_store.py
https://github.com/google/grr/blob/master/grr/server/grr_response_server/file_store.py
Apache-2.0
def Tell(self) -> int:
  """Returns the current position of the reading cursor."""
  return self._offset
Returns current reading cursor position.
Tell
python
google/grr
grr/server/grr_response_server/file_store.py
https://github.com/google/grr/blob/master/grr/server/grr_response_server/file_store.py
Apache-2.0
def Seek(self, offset: int, whence=os.SEEK_SET) -> None:
  """Moves the reading cursor.

  Args:
    offset: Position delta, interpreted according to `whence`.
    whence: One of os.SEEK_SET (absolute), os.SEEK_CUR (relative to the
      current position) or os.SEEK_END (relative to the end).

  Raises:
    ValueError: if `whence` is not one of the supported modes.
  """
  if whence == os.SEEK_SET:
    new_offset = offset
  elif whence == os.SEEK_CUR:
    new_offset = self._offset + offset
  elif whence == os.SEEK_END:
    new_offset = self._length + offset
  else:
    raise ValueError("Invalid whence argument: %s" % whence)
  self._offset = new_offset
Moves the reading cursor.
Seek
python
google/grr
grr/server/grr_response_server/file_store.py
https://github.com/google/grr/blob/master/grr/server/grr_response_server/file_store.py
Apache-2.0
def size(self) -> int:
  """Returns the size of the hashed data."""
  return self._length
Size of the hashed data.
size
python
google/grr
grr/server/grr_response_server/file_store.py
https://github.com/google/grr/blob/master/grr/server/grr_response_server/file_store.py
Apache-2.0
def hash_id(self) -> rdf_objects.HashID:
  """Returns the hash ID identifying the hashed data."""
  return self._hash_id
Hash ID identifying hashed data.
hash_id
python
google/grr
grr/server/grr_response_server/file_store.py
https://github.com/google/grr/blob/master/grr/server/grr_response_server/file_store.py
Apache-2.0
def AddFilesWithUnknownHashes(
    client_path_blob_refs: Dict[
        db.ClientPath, Iterable[rdf_objects.BlobReference]
    ],
    use_external_stores: bool = True,
) -> Dict[db.ClientPath, rdf_objects.SHA256HashID]:
  """Adds new files consisting of given blob references.

  Args:
    client_path_blob_refs: A dictionary mapping `db.ClientPath` instances to
      lists of blob references.
    use_external_stores: A flag indicating if the files should also be added
      to external file stores.

  Returns:
    A dictionary mapping `db.ClientPath` to hash ids of the file.

  Raises:
    BlobNotFoundError: If one of the referenced blobs cannot be found.
    InvalidBlobSizeError: if reference's blob size is different from an
      actual blob size.
    InvalidBlobOffsetError: if reference's blob offset is different from an
      actual blob offset.
  """
  hash_id_blob_refs = dict()
  client_path_hash_id = dict()
  metadatas = dict()

  # Flatten the mapping into (client_path, proto_blob_ref) pairs so the blob
  # reads below can be batched across all files at once.
  all_client_path_blob_refs = list()
  for client_path, blob_refs in client_path_blob_refs.items():
    if blob_refs:
      for blob_ref in blob_refs:
        blob_ref = mig_objects.ToProtoBlobReference(blob_ref)
        all_client_path_blob_refs.append((client_path, blob_ref))
    else:
      # Make sure empty files (without blobs) are correctly handled.
      # Their hash is the SHA-256 of the empty byte string, and they get an
      # empty blob-reference list and metadata entry up front.
      hash_id = rdf_objects.SHA256HashID.FromData(b"")
      client_path_hash_id[client_path] = hash_id
      hash_id_blob_refs[hash_id] = []
      metadatas[hash_id] = FileMetadata(client_path=client_path, blob_refs=[])

  # Per-path running state: expected next offset, incremental SHA-256, and
  # the references that passed verification.
  client_path_offset = collections.defaultdict(lambda: 0)
  client_path_sha256 = collections.defaultdict(hashlib.sha256)
  verified_client_path_blob_refs = collections.defaultdict(list)

  # Blobs are fetched in batches of _BLOBS_READ_BATCH_SIZE to bound the
  # amount of data held in memory at once.
  client_path_blob_ref_batches = collection.Batch(
      items=all_client_path_blob_refs, size=_BLOBS_READ_BATCH_SIZE
  )
  for client_path_blob_ref_batch in client_path_blob_ref_batches:
    blob_id_batch = set(
        models_blob.BlobID(blob_ref.blob_id)
        for _, blob_ref in client_path_blob_ref_batch
    )
    blobs = data_store.BLOBS.ReadAndWaitForBlobs(
        blob_id_batch, timeout=BLOBS_READ_TIMEOUT
    )
    for client_path, blob_ref in client_path_blob_ref_batch:
      blob = blobs[models_blob.BlobID(blob_ref.blob_id)]
      if blob is None:
        raise BlobNotFoundError(blob_ref.blob_id)

      # Verify that the reference's size and offset agree with the actual
      # blob contents before accepting it.
      offset = client_path_offset[client_path]
      if blob_ref.size != len(blob):
        raise InvalidBlobSizeError(
            "Got conflicting size information for blob %s: %d vs %d."
            % (blob_ref.blob_id, blob_ref.size, len(blob))
        )
      if blob_ref.offset != offset:
        raise InvalidBlobOffsetError(
            "Got conflicting offset information for blob %s: %d vs %d."
            % (blob_ref.blob_id, blob_ref.offset, offset)
        )

      verified_client_path_blob_refs[client_path].append(blob_ref)
      client_path_offset[client_path] = offset + len(blob)
      client_path_sha256[client_path].update(blob)

  # Finalize the incremental hashes and record the verified references for
  # every non-empty file.
  for client_path in client_path_sha256.keys():
    sha256 = client_path_sha256[client_path].digest()
    hash_id = rdf_objects.SHA256HashID.FromSerializedBytes(sha256)

    client_path_hash_id[client_path] = hash_id
    hash_id_blob_refs[hash_id] = verified_client_path_blob_refs[client_path]

  data_store.REL_DB.WriteHashBlobReferences(hash_id_blob_refs)

  if use_external_stores:
    for client_path in verified_client_path_blob_refs.keys():
      metadatas[client_path_hash_id[client_path]] = FileMetadata(
          client_path=client_path,
          blob_refs=list(
              map(
                  mig_objects.ToRDFBlobReference,
                  verified_client_path_blob_refs[client_path],
              )
          ),
      )

    # Note: `metadatas` also contains the entries created above for empty
    # files, so those are forwarded to external stores as well.
    EXTERNAL_FILE_STORE.AddFiles(metadatas)

  return client_path_hash_id
Adds new files consisting of given blob references. Args: client_path_blob_refs: A dictionary mapping `db.ClientPath` instances to lists of blob references. use_external_stores: A flag indicating if the files should also be added to external file stores. Returns: A dictionary mapping `db.ClientPath` to hash ids of the file. Raises: BlobNotFoundError: If one of the referenced blobs cannot be found. InvalidBlobSizeError: if reference's blob size is different from an actual blob size. InvalidBlobOffsetError: if reference's blob offset is different from an actual blob offset.
AddFilesWithUnknownHashes
python
google/grr
grr/server/grr_response_server/file_store.py
https://github.com/google/grr/blob/master/grr/server/grr_response_server/file_store.py
Apache-2.0
def AddFileWithUnknownHash(
    client_path: db.ClientPath,
    blob_refs: Sequence[rdf_objects.BlobReference],
    use_external_stores: bool = True,
) -> Dict[db.ClientPath, rdf_objects.SHA256HashID]:
  """Adds a single new file consisting of the given blob references.

  Thin convenience wrapper around `AddFilesWithUnknownHashes` for one path.
  """
  precondition.AssertType(client_path, db.ClientPath)
  precondition.AssertIterableType(blob_refs, rdf_objects.BlobReference)

  hash_ids = AddFilesWithUnknownHashes(
      {client_path: blob_refs}, use_external_stores=use_external_stores
  )
  return hash_ids[client_path]
Add a new file consisting of given blob IDs.
AddFileWithUnknownHash
python
google/grr
grr/server/grr_response_server/file_store.py
https://github.com/google/grr/blob/master/grr/server/grr_response_server/file_store.py
Apache-2.0
def CheckHashes(
    hash_ids: Collection[rdf_objects.SHA256HashID],
) -> Dict[rdf_objects.SHA256HashID, bool]:
  """Checks if files with given hashes are present in the file store.

  Args:
    hash_ids: A list of SHA256HashID objects.

  Returns:
    A dict where SHA256HashID objects are keys. The corresponding value is
    True if blob references exist for the hash and False otherwise.
  """
  return {
      k: bool(v)
      for k, v in data_store.REL_DB.ReadHashBlobReferences(hash_ids).items()
  }
Checks if files with given hashes are present in the file store. Args: hash_ids: A list of SHA256HashID objects. Returns: A dict where SHA256HashID objects are keys. Corresponding values may be False (if hash id is not present) or True if it is present.
CheckHashes
python
google/grr
grr/server/grr_response_server/file_store.py
https://github.com/google/grr/blob/master/grr/server/grr_response_server/file_store.py
Apache-2.0
def OpenFile(
    client_path: db.ClientPath,
    max_timestamp: Optional[rdfvalue.RDFDatetime] = None,
) -> BlobStream:
  """Opens latest content of a given file for reading.

  Args:
    client_path: A path to a file.
    max_timestamp: If specified, will open the last collected version with a
      timestamp equal or lower than max_timestamp. If not specified, will
      simply open the latest version.

  Returns:
    A file like object with random access support.

  Raises:
    FileNotFoundError: if the path was never present in the database.
    FileHasNoContentError: if the file was never collected.
    MissingBlobReferencesError: if one of the blobs was not found.
  """
  proto_path_info = data_store.REL_DB.ReadLatestPathInfosWithHashBlobReferences(
      [client_path], max_timestamp=max_timestamp
  )[client_path]

  path_info = None
  if proto_path_info:
    path_info = mig_objects.ToRDFPathInfo(proto_path_info)

  if path_info is None:
    # If path_info returned by ReadLatestPathInfosWithHashBlobReferences
    # is None, do one more ReadPathInfo call to check if this path info
    # was ever present in the database.
    try:
      data_store.REL_DB.ReadPathInfo(
          client_path.client_id, client_path.path_type, client_path.components
      )
    except db.UnknownPathError:
      raise FileNotFoundError(client_path)

    # If the given path info is present in the database, but there are
    # no suitable hash blob references associated with it, raise
    # FileHasNoContentError instead of FileNotFoundError.
    raise FileHasNoContentError(client_path)

  hash_id = rdf_objects.SHA256HashID.FromSerializedBytes(
      path_info.hash_entry.sha256.AsBytes()
  )
  blob_references = data_store.REL_DB.ReadHashBlobReferences([hash_id])[hash_id]
  if blob_references is None:
    # The hash is known but its blob references are gone — this indicates an
    # inconsistency between path info and the blob-reference table.
    raise MissingBlobReferencesError(
        "File hash was expected to have corresponding "
        "blob references, but they were not found: %r" % hash_id
    )

  blob_references = list(map(mig_objects.ToRDFBlobReference, blob_references))
  return BlobStream(client_path, blob_references, hash_id)
Opens latest content of a given file for reading. Args: client_path: A path to a file. max_timestamp: If specified, will open the last collected version with a timestamp equal or lower than max_timestamp. If not specified, will simply open the latest version. Returns: A file like object with random access support. Raises: FileHasNoContentError: if the file was never collected. MissingBlobReferencesError: if one of the blobs was not found.
OpenFile
python
google/grr
grr/server/grr_response_server/file_store.py
https://github.com/google/grr/blob/master/grr/server/grr_response_server/file_store.py
Apache-2.0
def __init__(
    self,
    client_path: db.ClientPath,
    data: bytes,
    chunk_index: int,
    total_chunks: int,
    offset: int,
    total_size: int,
) -> None:
  """Initializes a StreamedFileChunk.

  Args:
    client_path: db.ClientPath identifying the file this chunk belongs to.
    data: Raw bytes of the chunk's contents.
    chunk_index: Zero-based index of this chunk within the file's chunk
      sequence.
    total_chunks: Total number of chunks the file consists of.
    offset: Offset of this chunk, in bytes, from the beginning of the file.
    total_size: Total size of the file, in bytes.
  """
  # Plain value object: store every argument, in parameter order.
  self.client_path = client_path
  self.data = data
  self.chunk_index = chunk_index
  self.total_chunks = total_chunks
  self.offset = offset
  self.total_size = total_size
Initializes StreamedFileChunk object. Args: client_path: db.ClientPath identifying the file. data: bytes with chunk's contents. chunk_index: Index of this chunk (relative to the sequence of chunks corresponding to the file). total_chunks: Total number of chunks corresponding to a given file. offset: Offset of this chunk in bytes from the beginning of the file. total_size: Total size of the file in bytes.
__init__
python
google/grr
grr/server/grr_response_server/file_store.py
https://github.com/google/grr/blob/master/grr/server/grr_response_server/file_store.py
Apache-2.0
def StreamFilesChunks(
    client_paths: Collection[db.ClientPath],
    max_timestamp: Optional[rdfvalue.RDFDatetime] = None,
    max_size: Optional[int] = None,
) -> Iterable[StreamedFileChunk]:
  """Streams contents of given files.

  Args:
    client_paths: db.ClientPath objects describing paths to files.
    max_timestamp: If specified, then for every requested file will open the
      last collected version of the file with a timestamp equal or lower
      than max_timestamp. If not specified, will simply open a latest
      version for each file.
    max_size: If specified, only the chunks covering max_size bytes will be
      returned.

  Yields:
    StreamedFileChunk objects for every file read. Chunks will be returned
    sequentially, their order will correspond to the client_paths order.
    Files having no content will simply be ignored.

  Raises:
    BlobNotFoundError: if one of the blobs wasn't found while streaming.
  """
  proto_path_infos_by_cp = (
      data_store.REL_DB.ReadLatestPathInfosWithHashBlobReferences(
          client_paths, max_timestamp=max_timestamp
      )
  )
  # NOTE(review): this copy loop is a no-op — every value (None or not) is
  # carried over unchanged. It looks like a leftover from a proto/RDF
  # conversion step; confirm before simplifying.
  path_infos_by_cp = {}
  for k, v in proto_path_infos_by_cp.items():
    path_infos_by_cp[k] = None
    if v is not None:
      path_infos_by_cp[k] = v

  # Paths with no path info (falsy `pi`) are skipped entirely, which is how
  # "files having no content are ignored" is implemented.
  hash_ids_by_cp = {}
  for cp, pi in path_infos_by_cp.items():
    if pi:
      hash_ids_by_cp[cp] = rdf_objects.SHA256HashID.FromSerializedBytes(
          pi.hash_entry.sha256
      )

  blob_refs_by_hash_id = data_store.REL_DB.ReadHashBlobReferences(
      hash_ids_by_cp.values()
  )

  # Build the full, ordered schedule of chunks to stream. Order follows
  # client_paths, and within a path the blob-reference order.
  all_chunks = []
  for cp in client_paths:
    try:
      hash_id = hash_ids_by_cp[cp]
    except KeyError:
      continue

    try:
      blob_refs = blob_refs_by_hash_id[hash_id]
    except KeyError:
      continue

    num_blobs = len(blob_refs)
    total_size = 0
    for ref in blob_refs:
      total_size += ref.size

    cur_size = 0
    for i, ref in enumerate(blob_refs):
      blob_id = models_blob.BlobID(ref.blob_id)
      all_chunks.append((cp, blob_id, i, num_blobs, ref.offset, total_size))

      # Stop scheduling chunks for this path once max_size bytes are covered
      # (the chunk that crosses the boundary is still included).
      cur_size += ref.size
      if max_size is not None and cur_size >= max_size:
        break

  # Fetch blobs in read-ahead batches and yield chunks in schedule order.
  for batch in collection.Batch(all_chunks, STREAM_CHUNKS_READ_AHEAD):
    blobs = data_store.BLOBS.ReadBlobs(
        [blob_id for _, blob_id, _, _, _, _ in batch]
    )
    for cp, blob_id, i, num_blobs, offset, total_size in batch:
      blob_data = blobs[blob_id]
      if blob_data is None:
        raise BlobNotFoundError(blob_id)

      yield StreamedFileChunk(cp, blob_data, i, num_blobs, offset, total_size)
Streams contents of given files. Args: client_paths: db.ClientPath objects describing paths to files. max_timestamp: If specified, then for every requested file will open the last collected version of the file with a timestamp equal or lower than max_timestamp. If not specified, will simply open a latest version for each file. max_size: If specified, only the chunks covering max_size bytes will be returned. Yields: StreamedFileChunk objects for every file read. Chunks will be returned sequentially, their order will correspond to the client_paths order. Files having no content will simply be ignored. Raises: BlobNotFoundError: if one of the blobs wasn't found while streaming.
StreamFilesChunks
python
google/grr
grr/server/grr_response_server/file_store.py
https://github.com/google/grr/blob/master/grr/server/grr_response_server/file_store.py
Apache-2.0
def RetrieveIP4Info(self, ip):
  """Resolves an IPv4 address to a (category, description) pair.

  Private addresses are reported as internal without any lookup; public
  addresses are reverse-resolved via DNS, falling back to a generic label
  when resolution fails.
  """
  if ip.is_private:
    return (IPInfo.INTERNAL, "Internal IP address.")
  try:
    # External IP: attempt a reverse DNS lookup (NI_NAMEREQD makes
    # getnameinfo fail instead of returning the numeric address).
    host, _ = socket.getnameinfo((str(ip), 0), socket.NI_NAMEREQD)
    return (IPInfo.EXTERNAL, host)
  except (socket.error, socket.herror, socket.gaierror):
    return (IPInfo.EXTERNAL, "Unknown IP address.")
Retrieves information for an IP4 address.
RetrieveIP4Info
python
google/grr
grr/server/grr_response_server/ip_resolver.py
https://github.com/google/grr/blob/master/grr/server/grr_response_server/ip_resolver.py
Apache-2.0
def RetrieveIP6Info(self, ip):
  """Retrieves information for an IP6 address.

  IPv6 reverse lookups are not implemented; every address is reported as
  internal.
  """
  del ip  # Unused.
  return (IPInfo.INTERNAL, "Internal IP6 address.")
Retrieves information for an IP6 address.
RetrieveIP6Info
python
google/grr
grr/server/grr_response_server/ip_resolver.py
https://github.com/google/grr/blob/master/grr/server/grr_response_server/ip_resolver.py
Apache-2.0
def IPResolverInitOnce():
  """Instantiates the configured IP resolver and stores it globally."""
  global IP_RESOLVER

  resolver_class_name = config.CONFIG["Server.ip_resolver_class"]
  logging.debug("Using ip resolver: %s", resolver_class_name)

  resolver_class = IPResolverBase.GetPlugin(resolver_class_name)
  IP_RESOLVER = resolver_class()
Initializes IP resolver.
IPResolverInitOnce
python
google/grr
grr/server/grr_response_server/ip_resolver.py
https://github.com/google/grr/blob/master/grr/server/grr_response_server/ip_resolver.py
Apache-2.0
def Sign(self, command: execute_signed_command_pb2.Command) -> bytes:
  """Signs a command and returns the signature.

  Args:
    command: The command proto to sign.

  Returns:
    The signature as raw bytes.
  """
Signs a command and returns the signature.
Sign
python
google/grr
grr/server/grr_response_server/command_signer.py
https://github.com/google/grr/blob/master/grr/server/grr_response_server/command_signer.py
Apache-2.0
def Verify(
    self,
    signature: bytes,
    command: execute_signed_command_pb2.Command,
) -> None:
  """Validates a signature for given data with a verification key.

  Args:
    signature: Signature to verify.
    command: Command that was signed.

  Raises:
    CommandSignatureValidationError: If the signature is invalid.
  """
Validates a signature for given data with a verification key. Args: signature: Signature to verify. command: Command that was signed. Raises: CommandSignatureValidationError: Invalid signature
Verify
python
google/grr
grr/server/grr_response_server/command_signer.py
https://github.com/google/grr/blob/master/grr/server/grr_response_server/command_signer.py
Apache-2.0
def testNotMatchingArgTypeRaises(self):
  """Check that flows reject not matching args type."""
  # DummyArgs is not the args type declared by CallStateFlow, so flow
  # creation must fail immediately with a TypeError.
  with self.assertRaises(TypeError):
    flow.StartFlow(
        client_id=self.client_id,
        flow_cls=CallStateFlow,
        flow_args=dummy.DummyArgs(),
    )
Check that flows reject not matching args type.
testNotMatchingArgTypeRaises
python
google/grr
grr/server/grr_response_server/flow_test.py
https://github.com/google/grr/blob/master/grr/server/grr_response_server/flow_test.py
Apache-2.0
def testCallState(self):
  """Tests the ability to chain flow states via CallState."""
  # Reset the class-level flag; the flow's chained state sets it to True.
  CallStateFlow.success = False

  # Run the flow in the simulated way.
  flow_test_lib.StartAndRunFlow(CallStateFlow, client_id=self.client_id)

  self.assertEqual(CallStateFlow.success, True)
Test the ability to chain flows.
testCallState
python
google/grr
grr/server/grr_response_server/flow_test.py
https://github.com/google/grr/blob/master/grr/server/grr_response_server/flow_test.py
Apache-2.0
def testCallStateProto(self):
  """Tests the ability to chain states via CallStateProto."""
  # Reset the class-level flag so the assertion below can only pass if the
  # chained state actually executed.
  CallStateProtoFlow.success = False

  # Run the flow in the simulated way.
  flow_test_lib.StartAndRunFlow(CallStateProtoFlow, client_id=self.client_id)

  self.assertTrue(CallStateProtoFlow.success)
Test the ability to chain states.
testCallStateProto
python
google/grr
grr/server/grr_response_server/flow_test.py
https://github.com/google/grr/blob/master/grr/server/grr_response_server/flow_test.py
Apache-2.0
def testBrokenChainedFlowProto(self):
  # TODO: Add child flow with arguments and check that they are
  # passed correctly from parent to child.
  class BrokenParentCallFlowProto(flow_base.FlowBase):
    """This flow will launch a broken child flow."""

    # This is a global flag which will be set when the flow runs.
    success = False

    def Start(self):
      # Call the child flow.
      self.CallFlowProto("BrokenChildFlow", next_state="ReceiveHello")

    def ReceiveHello(self, responses):
      # NOTE(review): success is only recorded when the child returned no
      # responses AND a non-OK status — i.e. the child's failure reached the
      # parent. Any responses or an OK status means the error was swallowed.
      if responses or responses.status.status == "OK":
        raise RuntimeError("Error not propagated to parent")
      BrokenParentCallFlowProto.success = True

  # The parent flow does not fail, just assert the child does.
  flow_test_lib.StartAndRunFlow(
      BrokenParentCallFlowProto,
      client_mock=ClientMock(),
      client_id=self.client_id,
      check_flow_errors=False,
  )
  self.assertEqual(BrokenParentCallFlowProto.success, True)
This flow will launch a broken child flow.
testBrokenChainedFlowProto
python
google/grr
grr/server/grr_response_server/flow_test.py
https://github.com/google/grr/blob/master/grr/server/grr_response_server/flow_test.py
Apache-2.0
def testLimitPropagation(self):
  """This tests that client actions are limited properly."""
  client_mock = action_mocks.CPULimitClientMock(
      user_cpu_usage=[10],
      system_cpu_usage=[10],
      network_usage=[1000],
      runtime_us=[rdfvalue.Duration.From(1, rdfvalue.SECONDS)],
  )

  flow_test_lib.StartAndRunFlow(
      flow_test_lib.CPULimitFlow,
      client_mock=client_mock,
      client_id=self.client_id,
      cpu_limit=1000,
      network_bytes_limit=10000,
      runtime_limit=rdfvalue.Duration.From(5, rdfvalue.SECONDS),
  )

  # Each client call consumes 10+10 CPU units, 1000 network bytes and one
  # second of runtime, so the remaining budget sent to the client shrinks
  # accordingly on each of the three calls.
  # (An accidental verbatim duplicate of the networklimit assertion was
  # removed.)
  self.assertEqual(client_mock.storage["cpulimit"], [1000, 980, 960])
  self.assertEqual(client_mock.storage["networklimit"], [10000, 9000, 8000])
  self.assertEqual(
      client_mock.storage["runtimelimit"],
      [
          rdfvalue.Duration.From(5, rdfvalue.SECONDS),
          rdfvalue.Duration.From(4, rdfvalue.SECONDS),
          rdfvalue.Duration.From(3, rdfvalue.SECONDS),
      ],
  )
This tests that client actions are limited properly.
testLimitPropagation
python
google/grr
grr/server/grr_response_server/flow_test.py
https://github.com/google/grr/blob/master/grr/server/grr_response_server/flow_test.py
Apache-2.0