grr.lib.parsers.Parser.GetClassesByArtifact

Here are examples of the Python API grr.lib.parsers.Parser.GetClassesByArtifact, taken from open source projects.
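
All four examples below follow the same pattern: GetClassesByArtifact is a class method that takes an artifact name and returns the registered Parser subclasses that declare support for that artifact. A minimal sketch of the call, assuming the parser registry is populated; the artifact name "WindowsRunKeys" is illustrative, not taken from the examples:

from grr.lib import parsers

# GetClassesByArtifact returns parser *classes*, not instances; every
# example on this page instantiates them before use.
processor_classes = parsers.Parser.GetClassesByArtifact("WindowsRunKeys")
for processor_cls in processor_classes:
  print("%s -> %s" % (processor_cls.__name__, processor_cls.output_types))

If no parser supports the artifact, the result is empty; Examples 3 and 4 below check for exactly that case.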

4 Examples

Example 1

Project: grr
Source File: artifact_registry.py
  def GetArtifactParserDependencies(self):
    """Return the set of knowledgebase path dependencies required by the parser.

    Returns:
      A set of strings naming the required kb objects, e.g.
      ["users.appdata", "systemroot"]
    """
    deps = set()
    processors = parsers.Parser.GetClassesByArtifact(self.name)
    for parser in processors:
      deps.update(parser.knowledgebase_dependencies)
    return deps
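
Example 1 works because each returned class carries its metadata as class attributes. Here is a hypothetical parser showing the attributes the examples on this page consume; the class and artifact names are invented, and supported_artifacts is assumed to be the declaration GetClassesByArtifact matches on:

from grr.lib import parsers

class HypotheticalRunKeysParser(parsers.Parser):
  """Invented parser; only the attribute names are taken from this page."""
  supported_artifacts = ["WindowsRunKeys"]        # assumed registration hook
  knowledgebase_dependencies = ["users.appdata"]  # read by Example 1
  output_types = ["RunKey"]                       # read by Example 2
  process_together = False                        # read by Examples 3 and 4

With a class like this registered, GetArtifactParserDependencies for the matching artifact would return {"users.appdata"}.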

Example 2

Project: grr
Source File: artifact.py
  def BuildArtifactDescriptors(self, artifacts):
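    """Builds one ArtifactDescriptor, with parser metadata, per artifact."""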
    result = []
    for artifact_val in artifacts:
      descriptor = artifact_registry.ArtifactDescriptor(
          artifact=artifact_val,
          artifact_source=artifact_val.ToPrettyJson(extended=True),
          dependencies=sorted(artifact_val.GetArtifactDependencies()),
          path_dependencies=sorted(artifact_val.GetArtifactPathDependencies()),
          error_message=artifact_val.error_message,
          is_custom=artifact_val.loaded_from.startswith("datastore:"))

      for processor in parsers.Parser.GetClassesByArtifact(artifact_val.name):
        descriptor.processors.append(
            artifact_registry.ArtifactProcessorDescriptor(
                name=processor.__name__,
                output_types=processor.output_types,
                description=processor.GetDescription()))

      result.append(descriptor)

    return result
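
Stripped of the ArtifactDescriptor plumbing, the inner loop above shows the three pieces of per-parser metadata being read, all of them available on the class without instantiation. The same data as plain dicts, for reference (a sketch; the artifact name is illustrative):

from grr.lib import parsers

processor_info = [
    {"name": cls.__name__,
     "output_types": cls.output_types,
     "description": cls.GetDescription()}  # called on the class, as above
    for cls in parsers.Parser.GetClassesByArtifact("WindowsRunKeys")]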

Example 3

Project: grr
Source File: checks.py
  def _RunProcessors(self, artifact_name, responses):
    """Manages processing of raw data from the artifact collection.

    The raw data and parsed results are stored in different result contexts:
    Anomaly, Parser and Raw. Demuxing these results makes the specific data
    types available to checks working in different contexts.

    Then, iterate over the parsers that should be applied to the raw data and
    map the resulting rdfvalues to the Parser context.

    Args:
      artifact_name: The name of the artifact being processed as a string.
      responses: Input from previous states as an rdfvalue.Dict
    """
    source = responses.request_data.GetItem("source", None)

    # Find all the parsers that should apply to an artifact.
    processors = parsers.Parser.GetClassesByArtifact(artifact_name)
    saved_responses = {}
    # For each item of collected host data, identify whether to parse
    # immediately or once all the artifact data is collected.
    # Then, send the host data for parsing and demuxing.
    for response in responses:
      if processors:
        for processor_cls in processors:
          processor = processor_cls()
          if processor.process_together:
            # Store the response until we have them all.
            processor_name = processor.__class__.__name__
            saved_responses.setdefault(processor_name, []).append(response)
          else:
            # Process the response immediately
            self._ProcessData(processor, response, artifact_name, source)

    # If we were saving responses, process them now:
    for processor_name, responses_list in saved_responses.items():
      processor = parsers.Parser.classes[processor_name]()
      self._ProcessData(processor, responses_list, artifact_name, source)
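
Examples 3 and 4 share a demultiplexing pattern worth isolating: parsers whose process_together flag is set receive all of an artifact's responses in one batch once collection is complete, while the rest parse each response as it arrives. A skeleton of that pattern, with a hypothetical handle() callback standing in for _ProcessData/_ParseResponses:

from grr.lib import parsers

def DemuxResponses(artifact_name, responses, handle):
  """Routes each response to immediate or batched parsing (a sketch)."""
  processor_classes = parsers.Parser.GetClassesByArtifact(artifact_name)
  saved = {}
  for response in responses:
    for processor_cls in processor_classes:
      if processor_cls().process_together:
        # Buffer until all responses for the artifact have arrived.
        saved.setdefault(processor_cls.__name__, []).append(response)
      else:
        handle(processor_cls(), response)
  # Batched parsers are recovered by name from the class registry, just as
  # both examples do with parsers.Parser.classes.
  for name, batch in saved.items():
    handle(parsers.Parser.classes[name](), batch)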

Example 4

Project: grr
Source File: collectors.py
  @flow.StateHandler()
  def ProcessCollected(self, responses):
    """Each individual collector will call back into here.

    Args:
      responses: Responses from the collection.

    Raises:
      artifact_utils.ArtifactDefinitionError: On bad definition.
      artifact_utils.ArtifactProcessingError: On failure to process.
    """
    flow_name = self.__class__.__name__
    artifact_name = responses.request_data["artifact_name"]
    source = responses.request_data.GetItem("source", None)

    if responses.success:
      self.Log("Artifact data collection %s completed successfully in flow %s "
               "with %d responses", artifact_name, flow_name, len(responses))
    else:
      self.Log("Artifact %s data collection failed. Status: %s.", artifact_name,
               responses.status)
      if not self.CallFallback(artifact_name, responses.request_data):
        self.state.failed_count += 1
        self.state.artifacts_failed.append(artifact_name)
      return

    output_collection_map = {}
    aff4_output_map = {}

    # Now process the responses.
    processors = parsers.Parser.GetClassesByArtifact(artifact_name)
    saved_responses = {}
    for response in responses:
      if processors and self.args.apply_parsers:
        for processor in processors:
          processor_obj = processor()
          if processor_obj.process_together:
            # Store the response until we have them all.
            saved_responses.setdefault(processor.__name__, []).append(response)
          else:
            # Process the response immediately
            self._ParseResponses(processor_obj, response, responses,
                                 artifact_name, source, aff4_output_map,
                                 output_collection_map)
      else:
        # We don't have any defined processors for this artifact.
        self._ParseResponses(None, response, responses, artifact_name, source,
                             aff4_output_map, output_collection_map)

    # If we were saving responses, process them now:
    for processor_name, responses_list in saved_responses.items():
      processor_obj = parsers.Parser.classes[processor_name]()
      self._ParseResponses(processor_obj, responses_list, responses,
                           artifact_name, source, aff4_output_map,
                           output_collection_map)

    # Flush the results to the objects.
    if self.args.split_output_by_artifact:
      self._FinalizeSplitCollection(output_collection_map)
    if self.args.store_results_in_aff4:
      self._FinalizeMappedAFF4Locations(artifact_name, aff4_output_map)
    if self.state.client_anomalies:
      with aff4.FACTORY.Create(
          self.client_id.Add("anomalies"),
          collects.RDFValueCollection,
          token=self.token,
          mode="rw") as store:
        for anomaly_value in self.state.client_anomalies:
          store.Add(anomaly_value)
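
One case Example 4 covers that Example 3 does not: when GetClassesByArtifact returns an empty list (or apply_parsers is off), each response is still forwarded with a None processor so the raw data is kept. A caller can mirror that check defensively (a sketch; store_raw is a hypothetical sink playing the role of _ParseResponses(None, ...)):

from grr.lib import parsers

def KeepRawIfUnparsed(artifact_name, responses, store_raw):
  """Stores responses raw when no parser is registered (a sketch)."""
  if not parsers.Parser.GetClassesByArtifact(artifact_name):
    for response in responses:
      store_raw(response)  # hypothetical sink for unparsed data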