diff --git a/CHANGELOG.md b/CHANGELOG.md
index f8f1ecda9a..57d01727a0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,10 +7,16 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Added
+* Added support for listing `%SystemDrive%\Users` as a supplementary mechanism
+  for collecting user profiles on Windows (in addition to using data from the
+  registry).
+
### Removed
* Removed the `ListFlowApplicableParsers` API method.
* Removed the `ListParsedFlowResults` API method.
+* Removed support for the `GREP` artifact source (it was internal to GRR and
+  not part of the [official specification](https://artifacts.readthedocs.io/en/latest/sources/Format-specification.html)).
## [3.4.7.4] - 2024-05-28
diff --git a/Dockerfile b/Dockerfile
index 02ea7612a7..30ab1edbfc 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,6 +1,6 @@
# A Docker image capable of running all GRR components.
#
-# See https://hub.docker.com/r/grrdocker/grr/
+# See https://github.com/google/grr/pkgs/container/grr
#
# We have configured Github Actions to trigger an image build every
# time a new PUSH happens in the GRR GitHub repository.
diff --git a/api_client/python/grr_api_client/client.py b/api_client/python/grr_api_client/client.py
index 03a735db23..34e87292d2 100644
--- a/api_client/python/grr_api_client/client.py
+++ b/api_client/python/grr_api_client/client.py
@@ -2,12 +2,12 @@
"""Clients-related part of GRR API client library."""
from collections import abc
+import time
from typing import Sequence
from grr_api_client import flow
from grr_api_client import utils
from grr_api_client import vfs
-from grr_response_core.lib import rdfvalue
from grr_response_proto.api import client_pb2
from grr_response_proto.api import flow_pb2
from grr_response_proto.api import user_pb2
@@ -209,10 +209,9 @@ def CreateApproval(
expiration_time_us = 0
if expiration_duration_days != 0:
- expiration_time_us = (
- rdfvalue.RDFDatetime.Now()
- + rdfvalue.Duration.From(expiration_duration_days, rdfvalue.DAYS)
- ).AsMicrosecondsSinceEpoch()
+ expiration_time_us = int(
+ (time.time() + expiration_duration_days * 24 * 3600) * 1e6
+ )
approval = user_pb2.ApiClientApproval(
reason=reason,
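
The new expiration arithmetic above replaces the `rdfvalue` datetime helpers with plain epoch math: days are converted to seconds, added to `time.time()`, and the sum is expressed as microseconds since epoch. A minimal sanity-check sketch (the duration value is hypothetical):

```python
import time

# Hypothetical approval duration.
expiration_duration_days = 28

# Days -> seconds -> microseconds since epoch, as in the diff above.
expiration_time_us = int(
    (time.time() + expiration_duration_days * 24 * 3600) * 1e6
)

# Convert back to a human-readable date to eyeball the result.
print(time.strftime("%Y-%m-%d", time.gmtime(expiration_time_us / 1e6)))
```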
diff --git a/api_client/python/grr_api_client/flow.py b/api_client/python/grr_api_client/flow.py
index a04538f2f1..9898048a45 100644
--- a/api_client/python/grr_api_client/flow.py
+++ b/api_client/python/grr_api_client/flow.py
@@ -11,7 +11,6 @@
from grr_api_client import context as api_context
from grr_api_client import errors
from grr_api_client import utils
-from grr_response_core.lib.util import aead
from grr_response_proto.api import flow_pb2
from grr_response_proto.api import osquery_pb2
from grr_response_proto.api import timeline_pb2
@@ -268,5 +267,5 @@ def DecryptLargeFile(
with input_context as input_stream:
with output_context as output_stream:
- decrypted_stream = aead.Decrypt(input_stream, encryption_key)
+ decrypted_stream = utils.AEADDecrypt(input_stream, encryption_key)
shutil.copyfileobj(decrypted_stream, output_stream)
diff --git a/api_client/python/grr_api_client/utils.py b/api_client/python/grr_api_client/utils.py
index 3a30cf9be3..9e1f1dca63 100644
--- a/api_client/python/grr_api_client/utils.py
+++ b/api_client/python/grr_api_client/utils.py
@@ -1,6 +1,9 @@
#!/usr/bin/env python
"""Utility functions and classes for GRR API client library."""
+import io
+import itertools
+import struct
import time
from typing import Any
from typing import Callable
@@ -11,6 +14,8 @@
from typing import TypeVar
from typing import Union
+from cryptography.hazmat.primitives.ciphers import aead
+
from google.protobuf import any_pb2
from google.protobuf import wrappers_pb2
from google.protobuf import descriptor
@@ -307,6 +312,97 @@ def Xor(bytestr: bytes, key: int) -> bytes:
return bytes([byte ^ key for byte in bytestr])
+class _Unchunked(io.RawIOBase, IO[bytes]): # pytype: disable=signature-mismatch # overriding-return-type-checks
+ """A raw file-like object that reads chunk stream on demand."""
+
+ def __init__(self, chunks: Iterator[bytes]) -> None:
+ """Initializes the object."""
+ super().__init__()
+ self._chunks = chunks
+ self._buf = io.BytesIO()
+
+ def readable(self) -> bool:
+ return True
+
+ def readall(self) -> bytes:
+ return b"".join(self._chunks)
+
+ def readinto(self, buf: bytearray) -> int:
+ if self._buf.tell() == len(self._buf.getbuffer()):
+ self._buf.seek(0, io.SEEK_SET)
+ self._buf.truncate()
+ self._buf.write(next(self._chunks, b""))
+ self._buf.seek(0, io.SEEK_SET)
+
+ return self._buf.readinto(buf)
+
+
+def AEADDecrypt(stream: IO[bytes], key: bytes) -> IO[bytes]:
+ """Decrypts given file-like object using AES algorithm in GCM mode.
+
+ Refer to the encryption documentation to learn about the details of the format
+ that this function allows to decode.
+
+ Args:
+ stream: A file-like object to decrypt.
+ key: A secret key used for decrypting the data.
+
+ Returns:
+ A file-like object with decrypted data.
+ """
+ aesgcm = aead.AESGCM(key)
+
+ def Generate() -> Iterator[bytes]:
+    # Buffered reader should accept `IO[bytes]` but for now it accepts only
+    # `RawIOBase` (the abstract base class for raw I/O implementations).
+ reader = io.BufferedReader(stream) # pytype: disable=wrong-arg-types
+
+ # We abort early if there is no data in the stream. Otherwise we would try
+    # to read the nonce and fail.
+ if not reader.peek():
+ return
+
+ for idx in itertools.count():
+ nonce = reader.read(_AEAD_NONCE_SIZE)
+
+      # As long as there is some data in the buffer (and there should be,
+      # because of the initial check) there should be a fixed-size nonce
+      # prepended to each chunk.
+ if len(nonce) != _AEAD_NONCE_SIZE:
+ raise EOFError(f"Incorrect nonce length: {len(nonce)}")
+
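+      # One chunk of ciphertext plus its 16-byte GCM authentication tag.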
+ chunk = reader.read(_AEAD_CHUNK_SIZE + 16)
+
+      # `BufferedReader#peek` will return a non-empty byte string if there is
+      # more data available in the stream.
+ is_last = reader.peek() == b"" # pylint: disable=g-explicit-bool-comparison
+
+ adata = _AEAD_ADATA_FORMAT.pack(idx, is_last)
+
+ yield aesgcm.decrypt(nonce, chunk, adata)
+
+ if is_last:
+ break
+
+ return io.BufferedReader(_Unchunked(Generate()))
+
+
+# We use 12 bytes (96 bits) as it is the IV length recommended by NIST for
+# best performance [1]. See AESGCM documentation for more details.
+#
+# [1]: https://csrc.nist.gov/publications/detail/sp/800-38d/final
+_AEAD_NONCE_SIZE = 12
+
+# Because chunk size is crucial to the security of the whole procedure, we
+# don't let users pick their own chunk size. Instead, we use fixed-size chunks
+# of 4 mebibytes.
+_AEAD_CHUNK_SIZE = 4 * 1024 * 1024
+
+# As associated data for each encrypted chunk we use an integer denoting the
+# chunk index, followed by a byte indicating whether it is the last chunk.
+_AEAD_ADATA_FORMAT = struct.Struct("!Q?")
+
+
def RegisterProtoDescriptors(
db: symbol_database.SymbolDatabase,
*additional_descriptors: descriptor.FileDescriptor,
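
For context, here is a minimal sketch of the encryption side implied by the format `AEADDecrypt` consumes: each chunk is prefixed with a fresh 12-byte nonce, the ciphertext carries a 16-byte GCM tag, and every chunk is authenticated against the packed (chunk index, is-last) associated data. The helper name `AEADEncrypt` and the in-memory output are illustrative assumptions, not part of this change:

```python
import io
import os
import struct

from cryptography.hazmat.primitives.ciphers import aead

from grr_api_client import utils

_NONCE_SIZE = 12
_CHUNK_SIZE = 4 * 1024 * 1024
_ADATA_FORMAT = struct.Struct("!Q?")


def AEADEncrypt(stream, key: bytes) -> io.BytesIO:
  """Encrypts a file-like object into the chunked stream `AEADDecrypt` reads."""
  aesgcm = aead.AESGCM(key)
  out = io.BytesIO()

  idx = 0
  # Assumes `stream.read(n)` returns exactly `n` bytes until EOF (true for
  # `io.BytesIO`): every non-final chunk must be exactly `_CHUNK_SIZE` long.
  chunk = stream.read(_CHUNK_SIZE)
  while True:
    next_chunk = stream.read(_CHUNK_SIZE)
    is_last = not next_chunk

    nonce = os.urandom(_NONCE_SIZE)  # A fresh nonce for every chunk.
    adata = _ADATA_FORMAT.pack(idx, is_last)
    out.write(nonce)
    out.write(aesgcm.encrypt(nonce, chunk, adata))  # Appends the 16-byte tag.

    if is_last:
      break
    chunk, idx = next_chunk, idx + 1

  out.seek(0)
  return out


# Round trip through the decryption path added above.
key = os.urandom(32)
encrypted = AEADEncrypt(io.BytesIO(b"foobarbazquxnorf"), key)
assert utils.AEADDecrypt(encrypted, key).read() == b"foobarbazquxnorf"
```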
diff --git a/api_client/python/grr_api_client/utils_test.py b/api_client/python/grr_api_client/utils_test.py
index 5544af6e67..b3b89d1e70 100644
--- a/api_client/python/grr_api_client/utils_test.py
+++ b/api_client/python/grr_api_client/utils_test.py
@@ -1,8 +1,11 @@
#!/usr/bin/env python
import io
+import os
import struct
from absl.testing import absltest
+from cryptography import exceptions
+from cryptography.hazmat.primitives.ciphers import aead
from google.protobuf import empty_pb2
from google.protobuf import timestamp_pb2
@@ -96,5 +99,69 @@ def testDecodeSeveralChunks(self):
self.assertEqual(b"".join(decoded), content)
+class AEADDecryptTest(absltest.TestCase):
+
+ def testReadExact(self):
+ key = os.urandom(32)
+
+ aesgcm = aead.AESGCM(key)
+ nonce = os.urandom(utils._AEAD_NONCE_SIZE)
+ adata = utils._AEAD_ADATA_FORMAT.pack(0, True)
+ encrypted = io.BytesIO(
+ nonce + aesgcm.encrypt(nonce, b"foobarbazquxnorf", adata)
+ )
+
+ decrypted = utils.AEADDecrypt(encrypted, key)
+ self.assertEqual(decrypted.read(3), b"foo")
+ self.assertEqual(decrypted.read(3), b"bar")
+ self.assertEqual(decrypted.read(3), b"baz")
+ self.assertEqual(decrypted.read(3), b"qux")
+ self.assertEqual(decrypted.read(4), b"norf")
+
+ self.assertEqual(decrypted.read(), b"")
+
+ def testIncorrectNonceLength(self):
+ key = os.urandom(32)
+
+ buf = io.BytesIO()
+
+ nonce = os.urandom(utils._AEAD_NONCE_SIZE - 1)
+ buf.write(nonce)
+ buf.seek(0, io.SEEK_SET)
+
+ with self.assertRaisesRegex(EOFError, "nonce length"):
+ utils.AEADDecrypt(buf, key).read()
+
+ def testIncorrectTag(self):
+ key = os.urandom(32)
+ aesgcm = aead.AESGCM(key)
+
+ buf = io.BytesIO()
+
+ nonce = os.urandom(utils._AEAD_NONCE_SIZE)
+ buf.write(nonce)
+ buf.write(aesgcm.encrypt(nonce, b"foo", b"QUUX"))
+ buf.seek(0, io.SEEK_SET)
+
+ with self.assertRaises(exceptions.InvalidTag):
+ utils.AEADDecrypt(buf, key).read()
+
+ def testIncorrectData(self):
+ key = os.urandom(32)
+ aesgcm = aead.AESGCM(key)
+
+ buf = io.BytesIO()
+
+ nonce = os.urandom(utils._AEAD_NONCE_SIZE)
+ adata = utils._AEAD_ADATA_FORMAT.pack(0, True)
+ buf.write(nonce)
+ buf.write(aesgcm.encrypt(nonce, b"foo", adata))
+ buf.getbuffer()[-1] ^= 0b10101010 # Corrupt last byte.
+ buf.seek(0, io.SEEK_SET)
+
+ with self.assertRaises(exceptions.InvalidTag):
+ utils.AEADDecrypt(buf, key).read()
+
+
if __name__ == "__main__":
absltest.main()
diff --git a/appveyor/windows_templates/build_windows_templates.py b/appveyor/windows_templates/build_windows_templates.py
index 456ae19f3e..59f1ea5a75 100644
--- a/appveyor/windows_templates/build_windows_templates.py
+++ b/appveyor/windows_templates/build_windows_templates.py
@@ -11,54 +11,67 @@
import subprocess
import sys
import time
-
from typing import Callable
parser = argparse.ArgumentParser(description="Build windows templates.")
parser.add_argument(
- "--build_dir", default=r"C:\grrbuild", help="GRR build directory.")
+ "--build_dir", default=r"C:\grrbuild", help="GRR build directory."
+)
parser.add_argument(
"--grr_src",
default=r"C:\grrbuild\grr",
- help="Location of the grr src code. If it doesn't exist "
- " at this path we'll try to check it out from github.")
+ help=(
+ "Location of the grr src code. If it doesn't exist "
+ " at this path we'll try to check it out from github."
+ ),
+)
parser.add_argument(
"--output_dir",
default=r"C:\grrbuild\output",
- help="Destination directory for the templates.")
+ help="Destination directory for the templates.",
+)
parser.add_argument(
"--test_repack_install",
action="store_true",
default=False,
- help="Test repacking by calling repack on the template after building,"
- "then try and install the result. For use by integration tests. If you use "
- "this option you must run as admin.")
+ help=(
+ "Test repacking by calling repack on the template after building,then"
+ " try and install the result. For use by integration tests. If you use"
+ " this option you must run as admin."
+ ),
+)
parser.add_argument(
"--wheel_dir",
default=None,
- help="A directory that will be passed to pip as the wheel-dir parameter.")
+ help="A directory that will be passed to pip as the wheel-dir parameter.",
+)
parser.add_argument(
"--expect_service_running",
dest="expect_service_running",
action="store_true",
- help="Triggers whether after installation the GRR service should be "
- "running or not. Used for testing the installation.")
+ help=(
+ "Triggers whether after installation the GRR service should be "
+ "running or not. Used for testing the installation."
+ ),
+)
parser.add_argument(
"--noexpect_service_running",
dest="expect_service_running",
- action="store_false")
+ action="store_false",
+)
parser.set_defaults(expect_service_running=True)
parser.add_argument(
"--config",
default="",
- help="Path to the config file to be used when building templates.")
+ help="Path to the config file to be used when building templates.",
+)
args = parser.parse_args()
@@ -79,13 +92,16 @@ def _FileRetryLoop(path: str, f: Callable[[], None]) -> None:
return
except OSError as e:
attempts += 1
- if (e.errno == errno.EACCES and
- attempts < _FILE_RETRY_LOOP_RETRY_TIME_SECS):
+ if (
+ e.errno == errno.EACCES
+ and attempts < _FILE_RETRY_LOOP_RETRY_TIME_SECS
+ ):
# The currently installed GRR process may stick around for a few
# seconds after the service is terminated (keeping the contents of
# the installation directory locked).
- logging.info("Permission-denied error while trying to process %s.",
- path)
+ logging.info(
+ "Permission-denied error while trying to process %s.", path
+ )
time.sleep(1)
else:
raise
@@ -99,27 +115,6 @@ def _Rename(src: str, dst: str) -> None:
_FileRetryLoop(src, lambda: os.rename(src, dst))
-def _RmTreePseudoTransactional(path: str) -> None:
- """Removes `path`.
-
- Makes sure that either `path` is gone or that it is still present as
- it was.
-
- Args:
- path: The path to remove.
- """
- temp_path = f"{path}_orphaned_{int(time.time())}"
- logging.info("Trying to rename %s -> %s.", path, temp_path)
-
- _Rename(path, temp_path)
-
- try:
- logging.info("Trying to remove %s.", temp_path)
- _RmTree(temp_path)
- except: # pylint: disable=bare-except
- logging.info("Failed to remove %s. Ignoring.", temp_path, exc_info=True)
-
-
def _VerboseCheckCall(params):
logging.info("Running: %s", params)
@@ -141,8 +136,9 @@ def SetupVars(self):
self.virtualenv64 = os.path.join(args.build_dir, "python_64")
self.grr_client_build64 = "grr_client_build"
- self.virtualenv_python64 = os.path.join(self.virtualenv64,
- r"Scripts\python.exe")
+ self.virtualenv_python64 = os.path.join(
+ self.virtualenv64, r"Scripts\python.exe"
+ )
self.git = r"git"
@@ -180,13 +176,17 @@ def Clean(self):
def GitCheckoutGRR(self):
os.chdir(args.build_dir)
subprocess.check_call(
- [self.git, "clone", "https://github.com/google/grr.git"])
+ [self.git, "clone", "https://github.com/google/grr.git"]
+ )
def MakeProtoSdist(self):
os.chdir(os.path.join(args.grr_src, "grr/proto"))
subprocess.check_call([
- self.virtualenv_python64, "setup.py", "sdist", "--formats=zip",
- "--dist-dir=%s" % args.build_dir
+ self.virtualenv_python64,
+ "setup.py",
+ "sdist",
+ "--formats=zip",
+ "--dist-dir=%s" % args.build_dir,
])
return glob.glob(
os.path.join(args.build_dir, "grr_response_proto-*.zip")
@@ -195,8 +195,12 @@ def MakeProtoSdist(self):
def MakeCoreSdist(self):
os.chdir(os.path.join(args.grr_src, "grr/core"))
subprocess.check_call([
- self.virtualenv_python64, "setup.py", "sdist", "--formats=zip",
- "--dist-dir=%s" % args.build_dir, "--no-sync-artifacts"
+ self.virtualenv_python64,
+ "setup.py",
+ "sdist",
+ "--formats=zip",
+ "--dist-dir=%s" % args.build_dir,
+ "--no-sync-artifacts",
])
return glob.glob(
os.path.join(args.build_dir, "grr_response_core-*.zip")
@@ -205,8 +209,11 @@ def MakeCoreSdist(self):
def MakeClientSdist(self):
os.chdir(os.path.join(args.grr_src, "grr/client/"))
subprocess.check_call([
- self.virtualenv_python64, "setup.py", "sdist", "--formats=zip",
- "--dist-dir=%s" % args.build_dir
+ self.virtualenv_python64,
+ "setup.py",
+ "sdist",
+ "--formats=zip",
+ "--dist-dir=%s" % args.build_dir,
])
return glob.glob(
os.path.join(args.build_dir, "grr_response_client-*.zip")
@@ -215,8 +222,11 @@ def MakeClientSdist(self):
def MakeClientBuilderSdist(self):
os.chdir(os.path.join(args.grr_src, "grr/client_builder/"))
subprocess.check_call([
- self.virtualenv_python64, "setup.py", "sdist", "--formats=zip",
- "--dist-dir=%s" % args.build_dir
+ self.virtualenv_python64,
+ "setup.py",
+ "sdist",
+ "--formats=zip",
+ "--dist-dir=%s" % args.build_dir,
])
return glob.glob(
os.path.join(args.build_dir, "grr_response_client_builder-*.zip")
@@ -242,8 +252,12 @@ def BuildTemplates(self):
"""
if args.config:
build_args = [
- "--verbose", "--config", args.config, "build", "--output",
- args.output_dir
+ "--verbose",
+ "--config",
+ args.config,
+ "build",
+ "--output",
+ args.output_dir,
]
else:
build_args = ["--verbose", "build", "--output", args.output_dir]
@@ -268,9 +282,11 @@ def _WixToolsPath(self) -> str:
def _RepackTemplates(self):
"""Repack templates with a dummy config."""
dummy_config = os.path.join(
- args.grr_src, "grr/test/grr_response_test/test_data/dummyconfig.yaml")
- template_amd64 = glob.glob(os.path.join(args.output_dir,
- "*_amd64*.zip")).pop()
+ args.grr_src, "grr/test/grr_response_test/test_data/dummyconfig.yaml"
+ )
+ template_amd64 = glob.glob(
+ os.path.join(args.output_dir, "*_amd64*.zip")
+ ).pop()
fleetspeak_config = os.path.join(
args.grr_src,
@@ -345,8 +361,9 @@ def _CheckInstallSuccess(self):
raise RuntimeError("Install failed, no files at: %s" % self.install_path)
try:
- output = subprocess.check_output(["sc", "query", self.service_name],
- encoding="utf-8")
+ output = subprocess.check_output(
+ ["sc", "query", self.service_name], encoding="utf-8"
+ )
service_running = "RUNNING" in output
except subprocess.CalledProcessError as e:
output = e.output
@@ -370,13 +387,15 @@ def _CheckInstallSuccess(self):
if self.expect_service_running:
if not service_running:
raise RuntimeError(
- "GRR service not running after install, sc query output: %s" %
- output)
+ "GRR service not running after install, sc query output: %s"
+ % output
+ )
else:
if service_running:
raise RuntimeError(
"GRR service running after install with expect_service_running == "
- "False, sc query output: %s" % output)
+ "False, sc query output: %s" % output
+ )
def _InstallInstallers(self):
"""Install the installer built by RepackTemplates."""
diff --git a/colab/grr_colab/__init__.py b/colab/grr_colab/__init__.py
index dc4a3567c7..df2f808875 100644
--- a/colab/grr_colab/__init__.py
+++ b/colab/grr_colab/__init__.py
@@ -424,7 +424,6 @@ def collect(
args = flows_pb2.ArtifactCollectorFlowArgs()
args.artifact_list.append(artifact)
- args.apply_parsers = True
try:
ac = self._client.CreateFlow(name='ArtifactCollectorFlow', args=args)
diff --git a/colab/grr_colab/client_test.py b/colab/grr_colab/client_test.py
index 3550505643..a261ad1fdb 100644
--- a/colab/grr_colab/client_test.py
+++ b/colab/grr_colab/client_test.py
@@ -22,7 +22,6 @@
from grr_response_server import data_store
from grr_response_server.rdfvalues import mig_objects
from grr.test_lib import osquery_test_lib
-from grr.test_lib import parser_test_lib
from grr.test_lib import test_lib
@@ -88,7 +87,6 @@ def testWithHostname_MultipleClients(self):
self.assertEqual(context.exception.hostname, hostname)
self.assertItemsEqual([client_id1, client_id2], context.exception.clients)
- @parser_test_lib.WithAllParsers
def testWithHostname_NoClients(self):
hostname = 'noclients.loc.group.example.com'
@@ -217,7 +215,6 @@ def testHostname(self):
client = grr_colab.Client.with_id(ClientTest.FAKE_CLIENT_ID)
self.assertEqual(client.hostname, hostname)
- @parser_test_lib.WithAllParsers
def testHostname_AfterInterrogate(self):
data_store.REL_DB.WriteClientMetadata(client_id=ClientTest.FAKE_CLIENT_ID)
@@ -237,7 +234,6 @@ def testIfaces(self):
self.assertLen(client.ifaces, 1)
self.assertEqual(client.ifaces[0].ifname, ifname)
- @parser_test_lib.WithAllParsers
def testIfaces_AfterInterrogate(self):
data_store.REL_DB.WriteClientMetadata(client_id=ClientTest.FAKE_CLIENT_ID)
@@ -281,7 +277,6 @@ def testArch(self):
client = grr_colab.Client.with_id(ClientTest.FAKE_CLIENT_ID)
self.assertEqual(client.arch, arch)
- @parser_test_lib.WithAllParsers
def testArch_AfterInterrogate(self):
data_store.REL_DB.WriteClientMetadata(client_id=ClientTest.FAKE_CLIENT_ID)
@@ -300,7 +295,6 @@ def testKernel(self):
client = grr_colab.Client.with_id(ClientTest.FAKE_CLIENT_ID)
self.assertEqual(client.kernel, kernel)
- @parser_test_lib.WithAllParsers
def testKernel_AfterInterrogate(self):
data_store.REL_DB.WriteClientMetadata(client_id=ClientTest.FAKE_CLIENT_ID)
@@ -389,7 +383,6 @@ def ProcessApproval():
finally:
thread.join()
- @parser_test_lib.WithAllParsers
def testInterrogate(self):
data_store.REL_DB.WriteClientMetadata(client_id=ClientTest.FAKE_CLIENT_ID)
client = grr_colab.Client.with_id(ClientTest.FAKE_CLIENT_ID)
@@ -497,7 +490,6 @@ def testOsquery_WithoutApproval(self):
self.assertEqual(context.exception.client_id, ClientTest.FAKE_CLIENT_ID)
- @parser_test_lib.WithAllParsers
def testCollect(self):
data_store.REL_DB.WriteClientMetadata(client_id=ClientTest.FAKE_CLIENT_ID)
diff --git a/grr/client/grr_response_client/client_actions/timeline.py b/grr/client/grr_response_client/client_actions/timeline.py
index aaa712f78f..6f01aa6004 100644
--- a/grr/client/grr_response_client/client_actions/timeline.py
+++ b/grr/client/grr_response_client/client_actions/timeline.py
@@ -4,13 +4,13 @@
import hashlib
import os
import stat as stat_mode
-from typing import Iterator
-from typing import Optional
+from typing import Iterator, Optional
import psutil
from grr_response_client import actions
from grr_response_core.lib import rdfvalue
+from grr_response_core.lib.rdfvalues import mig_timeline
from grr_response_core.lib.rdfvalues import protodict as rdf_protodict
from grr_response_core.lib.rdfvalues import timeline as rdf_timeline
from grr_response_core.lib.util import iterator
@@ -33,7 +33,10 @@ def Run(self, args: rdf_timeline.TimelineArgs) -> None:
"""Executes the client action."""
fstype = GetFilesystemType(args.root)
entries = iterator.Counted(Walk(args.root))
- for entry_batch in rdf_timeline.TimelineEntry.SerializeStream(entries):
+ proto_entries = (
+ mig_timeline.ToProtoTimelineEntry(entry) for entry in entries
+ )
+ for entry_batch in rdf_timeline.SerializeTimelineEntryStream(proto_entries):
entry_batch_blob = rdf_protodict.DataBlob(data=entry_batch)
self.SendReply(entry_batch_blob, session_id=self._TRANSFER_STORE_ID)
diff --git a/grr/client/grr_response_client/client_startup.py b/grr/client/grr_response_client/client_startup.py
index 01fded0c63..c9944d0148 100644
--- a/grr/client/grr_response_client/client_startup.py
+++ b/grr/client/grr_response_client/client_startup.py
@@ -6,7 +6,6 @@
from grr_response_core import config
from grr_response_core.config import contexts
from grr_response_core.lib import config_lib
-from grr_response_core.lib.parsers import all as all_parsers
def ClientInit():
@@ -17,7 +16,6 @@ def ClientInit():
config_lib.ParseConfigCommandLine()
client_logging.LogInit()
- all_parsers.Register()
if not config.CONFIG.ContextApplied(contexts.CLIENT_BUILD_CONTEXT):
config.CONFIG.Persist("Client.labels")
diff --git a/grr/core/grr_response_core/artifacts/README.md b/grr/core/grr_response_core/artifacts/README.md
index 24704fffb3..6808d0b728 100644
--- a/grr/core/grr_response_core/artifacts/README.md
+++ b/grr/core/grr_response_core/artifacts/README.md
@@ -1,13 +1,12 @@
# This directory is reserved for external artifacts
-The Makefile removes ``*.yaml`` from this directory when syncing the external
-repo located [here] (https://github.com/ForensicArtifacts/artifacts).
+The Makefile removes `*.yaml` from this directory when syncing the external repo
+located [here](https://github.com/ForensicArtifacts/artifacts).
## Where artifacts go
-- Private artifacts should go in ``artifacts/local``
-- Public artifacts that are non GRR specific should be submitted to the external
-repo.
-- Public artifacts that call GRR functions with ``LIST_FILES``,
- ``GRR_CLIENT_ACTION``, ``GREP`` etc. should live in
- ``artifacts/flow_templates``
+- Private artifacts should go in `artifacts/local`.
+- Public artifacts that are non-GRR-specific should be submitted to the
+  external repo.
+- Public artifacts that call GRR functions with `LIST_FILES`,
+ `GRR_CLIENT_ACTION` etc. should live in `artifacts/flow_templates`.
diff --git a/grr/core/grr_response_core/artifacts/flow_templates/linux.yaml b/grr/core/grr_response_core/artifacts/flow_templates/linux.yaml
index b958309724..0d55447ea1 100644
--- a/grr/core/grr_response_core/artifacts/flow_templates/linux.yaml
+++ b/grr/core/grr_response_core/artifacts/flow_templates/linux.yaml
@@ -13,17 +13,6 @@ labels: [Users]
provides: [users.homedir, users.username, users.last_logon, users.full_name]
supported_os: [Linux]
---
-name: LinuxPasswdHomedirs
-doc: Grep passwd file for user homedirs.
-sources:
-- type: GREP
- attributes:
- paths: ['/etc/passwd']
- content_regex_list: ["^%%users.username%%:[^:]*:[^:]*:[^:]*:[^:]*:[^:]+:[^:]*\n"]
-provides: [users.homedir, users.full_name]
-labels: [Authentication]
-supported_os: [Linux]
----
name: RedhatYumPackagesList
doc: Linux output of yum list installed.
sources:
diff --git a/grr/core/grr_response_core/config/gui.py b/grr/core/grr_response_core/config/gui.py
index e529767d5e..005301e838 100644
--- a/grr/core/grr_response_core/config/gui.py
+++ b/grr/core/grr_response_core/config/gui.py
@@ -8,9 +8,12 @@
config_lib.DEFINE_integer("AdminUI.port", 8000, "port to listen on")
config_lib.DEFINE_integer(
- "AdminUI.port_max", None, "If set and AdminUI.port is in use, attempt to "
+ "AdminUI.port_max",
+ None,
+ "If set and AdminUI.port is in use, attempt to "
"use ports between AdminUI.port and "
- "AdminUI.port_max.")
+ "AdminUI.port_max.",
+)
# Override this if you want to access admin ui extenally. Make sure it is
# secured (i.e. AdminUI.webauth_manager is not NullWebAuthManager)!
@@ -19,86 +22,124 @@
config_lib.DEFINE_string(
"AdminUI.document_root",
"%(grr_response_server/gui/static@grr-response-server|resource)",
- "The main path to the static HTML pages.")
+ "The main path to the static HTML pages.",
+)
config_lib.DEFINE_string(
"AdminUI.template_root",
"%(grr_response_server/gui/templates@grr-response-server|resource)",
- "The main path to the templates.")
+ "The main path to the templates.",
+)
config_lib.DEFINE_string(
- "AdminUI.webauth_manager", "NullWebAuthManager",
- "The web auth manager for controlling access to the UI.")
+ "AdminUI.webauth_manager",
+ "NullWebAuthManager",
+ "The web auth manager for controlling access to the UI.",
+)
config_lib.DEFINE_string(
- "AdminUI.remote_user_header", "X-Remote-User",
+ "AdminUI.remote_user_header",
+ "X-Remote-User",
"Header containing authenticated user's username. "
- "Used by RemoteUserWebAuthManager.")
+ "Used by RemoteUserWebAuthManager.",
+)
config_lib.DEFINE_string(
- "AdminUI.remote_email_header", "X-Remote-Extra-Email",
+ "AdminUI.remote_email_header",
+ "X-Remote-Extra-Email",
"Header containing authenticated user's e-mail address. "
"If present, the e-mail address of a newly created GRR user will be set "
"to the header's value. "
- "Used by RemoteUserWebAuthManager.")
+ "Used by RemoteUserWebAuthManager.",
+)
config_lib.DEFINE_list(
- "AdminUI.remote_user_trusted_ips", ["127.0.0.1"],
+ "AdminUI.remote_user_trusted_ips",
+ ["127.0.0.1"],
"Only requests coming from these IPs will be processed "
- "by RemoteUserWebAuthManager.")
+ "by RemoteUserWebAuthManager.",
+)
-config_lib.DEFINE_string("AdminUI.firebase_api_key", None,
- "Firebase API key. Used by FirebaseWebAuthManager.")
-config_lib.DEFINE_string("AdminUI.firebase_auth_domain", None,
- "Firebase API key. Used by FirebaseWebAuthManager.")
config_lib.DEFINE_string(
- "AdminUI.firebase_auth_provider", "GoogleAuthProvider",
+ "AdminUI.firebase_api_key",
+ None,
+ "Firebase API key. Used by FirebaseWebAuthManager.",
+)
+config_lib.DEFINE_string(
+ "AdminUI.firebase_auth_domain",
+ None,
+ "Firebase API key. Used by FirebaseWebAuthManager.",
+)
+config_lib.DEFINE_string(
+ "AdminUI.firebase_auth_provider",
+ "GoogleAuthProvider",
"Firebase auth provider (see "
"https://firebase.google.com/docs/auth/web/start). Used by "
- "FirebaseWebAuthManager.")
+ "FirebaseWebAuthManager.",
+)
config_lib.DEFINE_string(
- "AdminUI.csrf_secret_key", "CHANGE_ME",
+ "AdminUI.csrf_secret_key",
+ "CHANGE_ME",
"This is a secret key that should be set in the server "
- "config. It is used in CSRF protection.")
+ "config. It is used in CSRF protection.",
+)
-config_lib.DEFINE_bool("AdminUI.enable_ssl", False,
- "Turn on SSL. This needs AdminUI.ssl_cert to be set.")
+config_lib.DEFINE_bool(
+ "AdminUI.enable_ssl",
+ False,
+ "Turn on SSL. This needs AdminUI.ssl_cert to be set.",
+)
-config_lib.DEFINE_string("AdminUI.ssl_cert_file", "",
- "The SSL certificate to use.")
+config_lib.DEFINE_string(
+ "AdminUI.ssl_cert_file", "", "The SSL certificate to use."
+)
config_lib.DEFINE_string(
- "AdminUI.ssl_key_file", None,
+ "AdminUI.ssl_key_file",
+ None,
"The SSL key to use. The key may also be part of the cert file, in which "
- "case this can be omitted.")
+ "case this can be omitted.",
+)
-config_lib.DEFINE_string("AdminUI.url", "http://localhost:8000/",
- "The direct external URL for the user interface.")
+config_lib.DEFINE_string(
+ "AdminUI.url",
+ "http://localhost:8000/",
+ "The direct external URL for the user interface.",
+)
config_lib.DEFINE_bool(
- "AdminUI.use_precompiled_js", False,
+ "AdminUI.use_precompiled_js",
+ False,
"If True - use Closure-compiled JS bundle. This flag "
- "is experimental and is not properly supported yet.")
+ "is experimental and is not properly supported yet.",
+)
config_lib.DEFINE_string(
- "AdminUI.export_command", "/usr/bin/grr_api_shell "
- "'%(AdminUI.url)'", "Command to show in the fileview for downloading the "
- "files from the command line.")
+ "AdminUI.export_command",
+ "/usr/bin/grr_api_shell '%(AdminUI.url)'",
+ "Command to show in the fileview for downloading the "
+ "files from the command line.",
+)
-config_lib.DEFINE_string("AdminUI.heading", "",
- "Dashboard heading displayed in the Admin UI.")
+config_lib.DEFINE_string(
+ "AdminUI.heading", "", "Dashboard heading displayed in the Admin UI."
+)
-config_lib.DEFINE_string("AdminUI.report_url",
- "https://github.com/google/grr/issues",
- "URL of the 'Report a problem' link.")
+config_lib.DEFINE_string(
+ "AdminUI.report_url",
+ "https://github.com/google/grr/issues",
+ "URL of the 'Report a problem' link.",
+)
-config_lib.DEFINE_string("AdminUI.help_url", "/help/index.html",
- "URL of the 'Help' link.")
+config_lib.DEFINE_string(
+ "AdminUI.help_url", "/help/index.html", "URL of the 'Help' link."
+)
config_lib.DEFINE_string(
"AdminUI.docs_location",
"https://grr-doc.readthedocs.io/en/v%(Source.version_major)."
"%(Source.version_minor).%(Source.version_revision)",
- "Base path for GRR documentation. ")
+ "Base path for GRR documentation. ",
+)
# This accepts a comma-separated list of multiple plugins. Ideally, we'd use
@@ -123,102 +164,133 @@
)
config_lib.DEFINE_semantic_struct(
- rdf_config.AdminUIClientWarningsConfigOption, "AdminUI.client_warnings",
- None, "List of per-client-label warning messages to be shown.")
+ rdf_config.AdminUIHuntConfig,
+ "AdminUI.hunt_config",
+ None,
+ "List of labels to include or exclude by default when hunts are created,"
+ " and warning message to be shown.",
+)
+
+config_lib.DEFINE_semantic_struct(
+ rdf_config.AdminUIClientWarningsConfigOption,
+ "AdminUI.client_warnings",
+ None,
+ "List of per-client-label warning messages to be shown.",
+)
config_lib.DEFINE_string(
- "AdminUI.analytics_id", None,
+ "AdminUI.analytics_id",
+ None,
"The Google Analytics ID to use for logging interactions when users access "
"the web UI. If None (default), no Analytics script will be included and "
- "no events will be logged.")
+ "no events will be logged.",
+)
config_lib.DEFINE_bool(
- "AdminUI.rapid_hunts_enabled", True,
+ "AdminUI.rapid_hunts_enabled",
+ True,
"If True, enabled 'rapid hunts' feature in the Hunts Wizard. Rapid hunts "
"support will automatically set client rate to 0 in FileFinder hunts "
- "matching certain criteria (no recursive globs, no file downloads, etc).")
-
-# Temporary option that allows limiting access to legacy UI renderers. Useful
-# when giving access to GRR AdminUI to parties that have to use the HTTP API
-# only.
-# TODO(user): remove as soon as legacy rendering system is removed.
-config_lib.DEFINE_list(
- "AdminUI.legacy_renderers_allowed_groups", [],
- "Users belonging to these groups can access legacy GRR renderers, "
- "which are still used for some GRR features (manage binaries, legacy "
- "browse virtual filesystem pane, etc). If this option is not set, then "
- "no additional checks are performed when legacy renderers are used.")
+ "matching certain criteria (no recursive globs, no file downloads, etc).",
+)
config_lib.DEFINE_string(
- "AdminUI.debug_impersonate_user", None,
+ "AdminUI.debug_impersonate_user",
+ None,
"NOTE: for debugging purposes only! If set, every request AdminUI gets "
"will be attributed to the specified user. Useful for checking how AdminUI "
- "looks like for an access-restricted user.")
+ "looks like for an access-restricted user.",
+)
config_lib.DEFINE_bool(
- "AdminUI.headless", False,
+ "AdminUI.headless",
+ False,
"When running in headless mode, AdminUI ignores checks for JS/CSS compiled "
"bundles being present. AdminUI.headless=True should be used to run "
- "the AdminUI as an API endpoint only.")
+ "the AdminUI as an API endpoint only.",
+)
# Configuration requirements for Cloud IAP Setup.
config_lib.DEFINE_string(
- "AdminUI.google_cloud_project_id", None,
+ "AdminUI.google_cloud_project_id",
+ None,
"Cloud Project ID for IAP. This must be set if "
- "the IAPWebAuthManager is used.")
+ "the IAPWebAuthManager is used.",
+)
config_lib.DEFINE_string(
- "AdminUI.google_cloud_backend_service_id", None,
+ "AdminUI.google_cloud_backend_service_id",
+ None,
"GCP Cloud Backend Service ID for IAP. This must be set if "
- "the IAPWebAuthManager is used.")
+ "the IAPWebAuthManager is used.",
+)
config_lib.DEFINE_string(
- "AdminUI.profile_image_url", None,
+ "AdminUI.profile_image_url",
+ None,
"URL to user's profile images. The placeholder {username} is replaced with "
- "the actual value. E.g. https://avatars.example.com/{username}.jpg")
+ "the actual value. E.g. https://avatars.example.com/{username}.jpg",
+)
-config_lib.DEFINE_bool("AdminUI.csp_enabled", False,
- "If True, enable the Content Security Policy header.")
+config_lib.DEFINE_bool(
+ "AdminUI.csp_enabled",
+ False,
+ "If True, enable the Content Security Policy header.",
+)
config_lib.DEFINE_string(
- "AdminUI.csp_policy", "{}",
+ "AdminUI.csp_policy",
+ "{}",
"A JSON string of keys to lists of values to include in the Content "
- "Security Policy header. E.g. {\"default-src\": [\"https:\"]}")
+ 'Security Policy header. E.g. {"default-src": ["https:"]}',
+)
config_lib.DEFINE_bool(
- "AdminUI.csp_report_only", True,
+ "AdminUI.csp_report_only",
+ True,
"If True, set the Content Security Policy header to 'report only' mode. "
- "This flag has no effect if AdminUI.csp_enabled is False.")
+ "This flag has no effect if AdminUI.csp_enabled is False.",
+)
config_lib.DEFINE_bool(
- "AdminUI.trusted_types_enabled", True,
+ "AdminUI.trusted_types_enabled",
+ True,
"If True, enable the Trusted Types feature of the Content Security Policy "
"header. Combined with setting 'AdminUI.trusted_types_report_only' to "
"True, this setting will have no effect on the behavior of GRR - it will "
"only report Trusted Types violations in your browser developer console. "
"Trusted Types can prevent most common XSS attacks, see "
- "https://web.dev/trusted-types/ for more information.")
+ "https://web.dev/trusted-types/ for more information.",
+)
config_lib.DEFINE_bool(
- "AdminUI.trusted_types_report_only", True,
+ "AdminUI.trusted_types_report_only",
+ True,
"If True, set the Trusted Types Content Security Policy header to 'report "
"only' mode. When in 'report only' mode, Trusted Types violations will be "
"logged to the browser developer console, but the behavior of GRR will "
"not change. When this flag is set to False, Trusted Types rules will be "
"enforced. This flag has no effect if AdminUI.trusted_types_enabled is "
- "False. See https://web.dev/trusted-types/ for more information.")
+ "False. See https://web.dev/trusted-types/ for more information.",
+)
config_lib.DEFINE_string(
- "AdminUI.csp_report_uri", None,
- "URL to report Content Security Policy violations to.")
+ "AdminUI.csp_report_uri",
+ None,
+ "URL to report Content Security Policy violations to.",
+)
config_lib.DEFINE_list(
- "AdminUI.csp_include_url_prefixes", ["/v2"],
+ "AdminUI.csp_include_url_prefixes",
+ ["/v2"],
"Only requests for URLs with these prefixes will have a Content Security "
- "Policy header added. Leave empty to include all URLs.")
+ "Policy header added. Leave empty to include all URLs.",
+)
config_lib.DEFINE_list(
- "AdminUI.csp_exclude_url_prefixes", [],
+ "AdminUI.csp_exclude_url_prefixes",
+ [],
"Requests for URLs with these prefixes will not have a Content Security "
"Policy header added. This is applied to URLs after applying "
- "AdminUI.csp_include_url_prefixes.")
+ "AdminUI.csp_include_url_prefixes.",
+)
diff --git a/grr/core/grr_response_core/lib/artifact_utils.py b/grr/core/grr_response_core/lib/artifact_utils.py
index 50bc335ff6..fd09685403 100644
--- a/grr/core/grr_response_core/lib/artifact_utils.py
+++ b/grr/core/grr_response_core/lib/artifact_utils.py
@@ -6,9 +6,8 @@
"""
import re
-from typing import Iterable, Optional
+from typing import Sequence
-from grr_response_core.lib import interpolation
from grr_response_proto import knowledge_base_pb2
@@ -24,28 +23,6 @@ class ArtifactProcessingError(Error):
"""Unable to process artifact."""
-class KbInterpolationMissingAttributesError(Error):
- """An exception class for missing knowledgebase attributes."""
-
- def __init__(self, attrs: Iterable[str]) -> None:
- message = "Some attributes could not be located in the knowledgebase: {}"
- message = message.format(", ".join(attrs))
- super().__init__(message)
-
- self.attrs = list(attrs)
-
-
-class KbInterpolationUnknownAttributesError(Error):
- """An exception class for non-existing knowledgebase attributes."""
-
- def __init__(self, attrs: Iterable[str]) -> None:
- message = "Some attributes are not part of the knowledgebase: {}"
- message = message.format(", ".join(attrs))
- super().__init__(message)
-
- self.attrs = list(attrs)
-
-
class KnowledgeBaseUninitializedError(Error):
"""Attempt to process artifact without a valid Knowledge Base."""
@@ -57,110 +34,6 @@ class KnowledgeBaseAttributesMissingError(Error):
INTERPOLATED_REGEX = re.compile(r"%%([^%]+?)%%")
-def InterpolateKbAttributes(
- pattern: str,
- knowledge_base: Optional[knowledge_base_pb2.KnowledgeBase],
-) -> Iterable[str]:
- """Interpolate all knowledgebase attributes in pattern.
-
- Args:
- pattern: A string with potential interpolation markers. For example:
- "/home/%%users.username%%/Downloads/"
- knowledge_base: The knowledge_base to interpolate parameters from, if
- knowledge_base is None, then the pattern must not have placeholders.
-
- Raises:
- KbInterpolationMissingAttributesError: If any of the required pattern
- parameters is not present in the knowledgebase.
- KbInterpolationUnknownAttributesError: If any of the specified pattern
- parameters is not a valid knowledgebase attribute.
- KnowledgeBaseUninitializedError: If the pattern requires knowledgebase
- attributes, but the knowledgebase is not initialized.
-
- Returns:
- An iterator over all unique strings generated by expanding the pattern.
- """
-
- # TODO(hanuszczak): Control flow feels a bit awkward here because of error
- # handling that tries not to break any functionality. With the new utilities
- # it should be possible to improve the code, changing the behaviour to a more
- # sane one.
- interpolator = interpolation.Interpolator(pattern)
-
- if not knowledge_base:
- if interpolator.Vars() or interpolator.Scopes():
- raise KnowledgeBaseUninitializedError(
- "Knowledge base is not initialized, but the pattern requires it."
- )
- return interpolator.Interpolate()
-
- missing_attr_names = set()
- unknown_attr_names = set()
-
- for var_id in interpolator.Vars():
- var_name = interpolation.GetVarName(var_id)
-
- if var_name not in knowledge_base.DESCRIPTOR.fields_by_name:
- unknown_attr_names.add(var_name)
- continue
-
- value = getattr(knowledge_base, var_name)
- if not value:
- missing_attr_names.add(var_name)
- continue
-
- interpolator.BindVar(var_name, value) # pytype: disable=wrong-arg-types
-
- for scope_id in interpolator.Scopes():
- scope_name = interpolation.GetScopeName(scope_id)
-
- # We are currently only having one scope which is `users`. Restricting the
- # implementation to this, to not having to differentiate between nested
- # lists, protos, deeply nested scopes etc.
- if scope_name != "users":
- unknown_attr_names.add(scope_name)
- continue
-
- users = knowledge_base.users
- if not users:
- missing_attr_names.add(scope_name)
- continue
-
- scope_var_ids = interpolator.ScopeVars(scope_id)
- scope_bound = False
- scope_missing_var_names = set()
- for user in users:
- bindings = {}
- for scope_var_id in scope_var_ids:
- scope_var_name = interpolation.GetVarName(scope_var_id)
-
- if scope_var_name not in user.DESCRIPTOR.fields_by_name:
- unknown_attr_names.add(f"{scope_name}.{scope_var_name}")
- continue
-
- value = getattr(user, scope_var_name)
- if not value:
- scope_missing_var_names.add(f"{scope_name}.{scope_var_name}")
- continue
-
- bindings[scope_var_id] = value
-
- if set(bindings) == set(scope_var_ids):
- interpolator.BindScope(scope_id, bindings)
- scope_bound = True
-
- if not scope_bound:
- missing_attr_names.update(scope_missing_var_names)
-
- if unknown_attr_names:
- raise KbInterpolationUnknownAttributesError(unknown_attr_names)
-
- if missing_attr_names:
- raise KbInterpolationMissingAttributesError(missing_attr_names)
-
- return interpolator.Interpolate()
-
-
def GetWindowsEnvironmentVariablesMap(knowledge_base):
"""Return a dictionary of environment variables and their values.
@@ -223,32 +96,261 @@ def GetWindowsEnvironmentVariablesMap(knowledge_base):
return environ_vars
-def ExpandWindowsEnvironmentVariables(data_string, knowledge_base):
- r"""Take a string and expand any windows environment variables.
+def ExpandKnowledgebaseWindowsEnvVars(
+ unexpanded_kb: knowledge_base_pb2.KnowledgeBase,
+) -> knowledge_base_pb2.KnowledgeBase:
+ """Expands all Windows environment variable values in the given knowledgebase.
+
+  Unexpanded values can contain references to other environment variables, e.g.
+  `%SystemDrive%/System32`. Such references are expanded using knowledgebase
+  values recursively, e.g. the above could be expanded to `C:/System32`.
+
+ If an environment variable value contains a reference that cannot be expanded,
+  this function will not raise but rather leave it in unexpanded form (similar
+  to what the Windows shell does).
+
+ If unexpanded references form a cycle, this function will raise.
Args:
- data_string: A string, e.g. "%SystemRoot%\\LogFiles"
- knowledge_base: A knowledgebase object.
+ unexpanded_kb: A knowledgebase with environment variables to expand.
Returns:
- A string with available environment variables expanded. If we can't expand
- we just return the string with the original variables.
+ A knowledgebase in which all environment variables are expanded.
"""
- win_environ_regex = re.compile(r"%([^%]+?)%")
- components = []
- offset = 0
- for match in win_environ_regex.finditer(data_string):
- components.append(data_string[offset : match.start()])
-
- # KB environment variables are prefixed with environ_.
- kb_value = getattr(
- knowledge_base, "environ_%s" % match.group(1).lower(), None
- )
- if isinstance(kb_value, str) and kb_value:
- components.append(kb_value)
+ if unexpanded_kb.os != "Windows":
+ raise ValueError(f"Invalid system: {unexpanded_kb.os!r}")
+
+ kb = knowledge_base_pb2.KnowledgeBase(
+ environ_path="%SystemRoot%\\;%SystemRoot%\\System32\\;%SystemRoot%\\System32\\wbem\\",
+ environ_temp="%SystemRoot%\\TEMP",
+ environ_allusersappdata="%ProgramData%",
+ environ_allusersprofile="%ProgramData%",
+ environ_commonprogramfiles="%ProgramFiles%\\Common Files",
+ environ_commonprogramfilesx86="%ProgramFiles(x86)%\\Common Files",
+ environ_comspec="%SystemRoot%\\System32\\cmd.exe",
+ environ_driverdata="%SystemRoot%\\System32\\Drivers\\DriverData",
+ environ_programfiles="%SystemDrive%\\Program Files",
+ environ_programfilesx86="%SystemDrive%\\Program Files (x86)",
+ environ_programdata="%SystemDrive%\\ProgramData",
+ environ_systemdrive="C:",
+ environ_systemroot="%SystemDrive%\\Windows",
+ environ_windir="%SystemRoot%",
+ )
+ kb.MergeFrom(unexpanded_kb)
+
+ already_expanded_env_var_refs: dict[str, str] = dict()
+ currently_expanded_env_var_refs: set[str] = set()
+
+ def Expand(unexpanded: str) -> str:
+ expanded = ""
+ offset = 0
+
+ for match in re.finditer("%[^%]+?%", unexpanded):
+ env_var_ref = match.group(0).upper()
+
+ expanded += unexpanded[offset : match.start()]
+      offset = match.end()
+
+ if env_var_ref in already_expanded_env_var_refs:
+ expanded += already_expanded_env_var_refs[env_var_ref]
+ continue
+
+ if env_var_ref in currently_expanded_env_var_refs:
+ raise ValueError(f"Circular dependency involving {env_var_ref!r}")
+
+ if env_var_ref == "%PATH%":
+ value = kb.environ_path
+ elif env_var_ref == "%TEMP%":
+ value = kb.environ_temp
+ elif env_var_ref == "%ALLUSERSAPPDATA%":
+ value = kb.environ_allusersappdata
+ elif env_var_ref == "%ALLUSERSPROFILE%":
+ value = kb.environ_allusersprofile
+ elif env_var_ref == "%COMMONPROGRAMFILES%":
+ value = kb.environ_commonprogramfiles
+ elif env_var_ref == "%COMMONPROGRAMFILES(X86)%":
+ value = kb.environ_commonprogramfilesx86
+ elif env_var_ref == "%COMSPEC%":
+ value = kb.environ_comspec
+ elif env_var_ref == "%DRIVERDATA%":
+ value = kb.environ_driverdata
+ elif env_var_ref == "%PROGRAMFILES%":
+ value = kb.environ_programfiles
+ elif env_var_ref == "%PROGRAMFILES(X86)%":
+ value = kb.environ_programfilesx86
+ elif env_var_ref == "%PROGRAMDATA%":
+ value = kb.environ_programdata
+ elif env_var_ref == "%SYSTEMDRIVE%":
+ value = kb.environ_systemdrive
+ elif env_var_ref == "%SYSTEMROOT%":
+ value = kb.environ_systemroot
+ elif env_var_ref == "%WINDIR%":
+ value = kb.environ_windir
+ else:
+ # We use original match instead of `env_var_ref` as the latter was case
+ # corrected.
+ expanded += match.group(0)
+ continue
+
+ currently_expanded_env_var_refs.add(env_var_ref)
+ already_expanded_env_var_refs[env_var_ref] = Expand(value)
+ currently_expanded_env_var_refs.remove(env_var_ref)
+
+ expanded += already_expanded_env_var_refs[env_var_ref]
+
+ expanded += unexpanded[offset:]
+ return expanded
+
+ kb.environ_path = Expand(kb.environ_path)
+ kb.environ_temp = Expand(kb.environ_temp)
+ kb.environ_allusersappdata = Expand(kb.environ_allusersappdata)
+ kb.environ_allusersprofile = Expand(kb.environ_allusersprofile)
+ kb.environ_commonprogramfiles = Expand(kb.environ_commonprogramfiles)
+ kb.environ_commonprogramfilesx86 = Expand(kb.environ_commonprogramfilesx86)
+ kb.environ_comspec = Expand(kb.environ_comspec)
+ kb.environ_driverdata = Expand(kb.environ_driverdata)
+ kb.environ_profilesdirectory = Expand(kb.environ_profilesdirectory)
+ kb.environ_programfiles = Expand(kb.environ_programfiles)
+ kb.environ_programfilesx86 = Expand(kb.environ_programfilesx86)
+ kb.environ_programdata = Expand(kb.environ_programdata)
+ kb.environ_systemdrive = Expand(kb.environ_systemdrive)
+ kb.environ_systemroot = Expand(kb.environ_systemroot)
+ kb.environ_windir = Expand(kb.environ_windir)
+ return kb
+
+
+class KnowledgeBaseInterpolation:
+ """Interpolation of the given pattern with knowledgebase values.
+
+  The pattern can have placeholder variables like `%%os%%` or `%%fqdn%%` that
+  will be replaced by the corresponding concrete values from the knowledgebase.
+
+ In case of repeated knowledgebase values like `users`, every possible result
+ is returned.
+
+  Because interpolation can sometimes omit certain results or use some default
+  values, this object exposes a `logs` property with messages emitted when such
+  steps were taken. These messages can then be forwarded to the user specifying
+  the pattern to help debug issues in case the pattern is behaving unexpectedly.
+ """
+
+ def __init__(
+ self,
+ pattern: str,
+ kb: knowledge_base_pb2.KnowledgeBase,
+ ) -> None:
+ self._results: list[str] = list()
+ self._logs: list[str] = list()
+
+ user_attrs = [
+ m["attr"] for m in re.finditer(r"%%users\.(?P\w+)%%", pattern)
+ ]
+ non_user_attrs = [
+ m["attr"] for m in re.finditer(r"%%(?P\w+)%%", pattern)
+ ]
+
+ if not user_attrs:
+ # If the pattern does not contain any user attributes, loops below won't
+ # yield any results. Hence, we add the pattern as-is for further expansion
+      # to always have at least one result to work with.
+ self._results.append(pattern)
else:
- # Failed to expand, leave the variable as it was.
- components.append("%%%s%%" % match.group(1))
- offset = match.end()
- components.append(data_string[offset:]) # Append the final chunk.
- return "".join(components)
+ # We start with interpolating `users` variables for each user. Because
+ # there can be multiple users on the system and the pattern can contain
+ # both user and non-user variables we have to then combine all possible
+ # user-based interpolations with non-user-based ones.
+ for user in kb.users:
+      # There might be cases in which the username is not strictly necessary,
+      # but a scenario in which we do not have a username yet have other values
+      # is very unlikely. Assuming that users do have usernames makes the logic
+      # below much simpler.
+ if not (username := user.username):
+ self._logs.append(
+ f"user {user!r} without username",
+ )
+ continue
+
+ user_result = pattern
+
+      # `userprofile` is the base for all default values, so we precompute it
+      # ahead of time and provide various heuristics in case it is not
+      # available.
+ userprofile: str
+ if user.userprofile:
+ userprofile = user.userprofile
+ elif user.homedir:
+ userprofile = user.homedir
+ elif kb.environ_systemdrive:
+ userprofile = f"{kb.environ_systemdrive}\\Users\\{username}"
+ else:
+ userprofile = f"C:\\Users\\{username}"
+
+ for attr in user_attrs:
+ try:
+ value = getattr(user, attr)
+ except AttributeError as error:
+ raise ValueError(f"`%%users.{attr}%%` does not exist") from error
+
+ if not value:
+ try:
+ value = {
+ # pylint: disable=line-too-long
+ # pyformat: disable
+ "userprofile": userprofile,
+ "homedir": userprofile,
+ "temp": f"{userprofile}\\AppData\\Local\\Temp",
+ "desktop": f"{userprofile}\\Desktop",
+ "appdata": f"{userprofile}\\AppData\\Roaming",
+ "localappdata": f"{userprofile}\\AppData\\Local",
+ "cookies": f"{userprofile}\\AppData\\Local\\Microsoft\\Windows\\INetCookies",
+ "recent": f"{userprofile}\\AppData\\Roaming\\Microsoft\\Windows\\Recent",
+ "personal": f"{userprofile}\\Documents",
+ "startup": f"{userprofile}\\AppData\\Roaming\\Microsoft\\Windows\\Start Menu\\Programs\\Startup",
+ # pylint: enable=line-too-long
+ # pyformat: enable
+ }[attr]
+ except KeyError:
+ self._logs.append(
+ f"user {username!r} is missing {attr!r}",
+ )
+ break
+
+ self._logs.append(
+ f"using default {value!r} for {attr!r} for user {username!r}",
+ )
+
+ user_result = user_result.replace(f"%%users.{attr}%%", value)
+ else:
+ # This will run only if we successfully filled every variable. If any
+ # is missing we will break the loop and this block won't be executed.
+ self._results.append(user_result)
+
+ # At this point all results have no user variables, so there is only one way
+ # to interpolate them. We do a pass for every variable in every result to
+ # expand these.
+ for attr in non_user_attrs:
+ try:
+ value = getattr(kb, attr)
+ except AttributeError as error:
+ raise ValueError(f"`%%{attr}%%` does not exist") from error
+
+ if not value:
+ self._logs.append(
+ f"{attr!r} is missing",
+ )
+ # If the attribute value is missing in the knowledge base, the pattern
+ # cannot be interpolated and should yield no results.
+ self._results = []
+
+ # Because strings in Python are immutable, we cannot simply iterate over
+ # the elements of the list if we want to update them, so we use indices to
+ # simulate references.
+ for i in range(len(self._results)):
+ self._results[i] = self._results[i].replace(f"%%{attr}%%", value)
+
+ @property
+ def results(self) -> Sequence[str]:
+ return self._results
+
+ @property
+ def logs(self) -> Sequence[str]:
+ return self._logs
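
Two short usage sketches for the helpers added above (all values hypothetical). First, environment variable expansion: built-in defaults such as `environ_systemdrive="C:"` backfill anything the client did not report, and references are resolved recursively:

```python
from grr_response_core.lib import artifact_utils
from grr_response_proto import knowledge_base_pb2

kb = knowledge_base_pb2.KnowledgeBase()
kb.os = "Windows"
kb.environ_systemdrive = "D:"  # Hypothetical client-reported value.

expanded = artifact_utils.ExpandKnowledgebaseWindowsEnvVars(kb)
print(expanded.environ_systemroot)  # D:\Windows
print(expanded.environ_temp)        # D:\Windows\TEMP
```

Second, knowledgebase interpolation: `%%users.*%%` variables expand once per user, `userprofile`-derived defaults stand in for missing attributes, and `logs` records every default that was applied:

```python
from grr_response_core.lib import artifact_utils
from grr_response_proto import knowledge_base_pb2

kb = knowledge_base_pb2.KnowledgeBase(os="Windows")
kb.users.add(username="alice", userprofile="C:\\Users\\alice")
kb.users.add(username="bob")  # No profile data; defaults kick in.

interpolation = artifact_utils.KnowledgeBaseInterpolation(
    pattern="%%users.localappdata%%\\Temp",
    kb=kb,
)
print(interpolation.results)
# ['C:\\Users\\alice\\AppData\\Local\\Temp',
#  'C:\\Users\\bob\\AppData\\Local\\Temp']
print(interpolation.logs)  # Notes the defaults used for 'localappdata'.
```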
diff --git a/grr/core/grr_response_core/lib/parser.py b/grr/core/grr_response_core/lib/parser.py
deleted file mode 100644
index e8419b9684..0000000000
--- a/grr/core/grr_response_core/lib/parser.py
+++ /dev/null
@@ -1,123 +0,0 @@
-#!/usr/bin/env python
-"""Registry for parsers and abstract classes for basic parser functionality."""
-
-from typing import Any
-
-from grr_response_core.lib import rdfvalue
-from grr_response_core.lib.parsers import abstract
-from grr_response_core.lib.rdfvalues import client_action as rdf_client_action
-from grr_response_core.lib.rdfvalues import client_fs as rdf_client_fs
-from grr_response_core.lib.rdfvalues import file_finder as rdf_file_finder
-from grr_response_core.lib.rdfvalues import protodict as rdf_protodict
-from grr_response_core.lib.util import precondition
-
-
-# TODO(hanuszczak): Type command parsers.
-class CommandParser(abstract.SingleResponseParser[Any]):
- """Abstract parser for processing command output.
-
- Must implement the Parse function.
- """
-
- # TODO(hanuszczak): This should probably be abstract or private.
- def Parse(self, cmd, args, stdout, stderr, return_val, knowledge_base):
- """Take the output of the command run, and yield RDFValues."""
-
- def ParseResponse(self, knowledge_base, response):
- precondition.AssertType(response, rdf_client_action.ExecuteResponse)
-
- return self.Parse(
- cmd=response.request.cmd,
- args=response.request.args,
- stdout=response.stdout,
- stderr=response.stderr,
- return_val=response.exit_status,
- knowledge_base=knowledge_base,
- )
-
- def CheckReturn(self, cmd, return_val):
- """Raise if return value is bad."""
- if return_val != 0:
- message = (
- "Parsing output of command '{command}' failed, as command had "
- "{code} return code"
- )
- raise abstract.ParseError(message.format(command=cmd, code=return_val))
-
-
-# TODO(hanuszczak): Type WMI query parsers.
-class WMIQueryParser(abstract.MultiResponseParser[Any]):
- """Abstract parser for processing WMI query output."""
-
- # TODO(hanuszczak): Make this abstract.
- def ParseMultiple(self, result_dicts):
- """Take the output of the query, and yield RDFValues."""
-
- def ParseResponses(self, knowledge_base, responses):
- del knowledge_base # Unused.
- precondition.AssertIterableType(responses, rdf_protodict.Dict)
-
- return self.ParseMultiple(responses)
-
-
-# TODO(hanuszczak): Type registry value parsers.
-class RegistryValueParser(abstract.SingleResponseParser[Any]):
- """Abstract parser for processing Registry values."""
-
- # TODO(hanuszczak): Make this abstract.
- # TODO(hanuszczak): Make order of arguments consistent with other methods.
- def Parse(self, stat, knowledge_base):
- """Take the stat, and yield RDFValues."""
-
- def ParseResponse(self, knowledge_base, response):
- # TODO(hanuszczak): Why some of the registry value parsers anticipate string
- # response? This is stupid.
- precondition.AssertType(
- response, (rdf_client_fs.StatEntry, rdfvalue.RDFString)
- )
-
- return self.Parse(response, knowledge_base)
-
-
-# TODO(hanuszczak): Type registry parsers.
-class RegistryParser(abstract.SingleResponseParser[Any]):
- """Abstract parser for processing Registry values."""
-
- # TODO(hanuszczak): Make this abstract.
- # TODO(hanuszczak): Make order of arguments consistent with other methods.
- def Parse(self, stat, knowledge_base):
- """Take the stat, and yield RDFValues."""
-
- def ParseResponse(self, knowledge_base, response):
- precondition.AssertType(response, rdf_client_fs.StatEntry)
-
- return self.Parse(response, knowledge_base)
-
-
-# TODO(hanuszczak): Type registry multi-parsers.
-class RegistryMultiParser(abstract.MultiResponseParser[Any]):
- """Abstract parser for processing registry values."""
-
- # TODO(hanuszczak): Make this abstract.
- def ParseMultiple(self, stats, knowledge_base):
- raise NotImplementedError()
-
- def ParseResponses(self, knowledge_base, responses):
- precondition.AssertIterableType(responses, rdf_client_fs.StatEntry)
-
- return self.ParseMultiple(responses, knowledge_base)
-
-
-# TODO(hanuszczak): Type grep parsers.
-class GrepParser(abstract.SingleResponseParser[Any]):
- """Parser for the results of grep artifacts."""
-
- # TODO(hanuszczak): Make this abstract.
- # TODO(hanuszczak): Make order of arguments consistent with other methods.
- def Parse(self, response, knowledge_base):
- """Parse the FileFinderResult.matches."""
-
- def ParseResponse(self, knowledge_base, response):
- precondition.AssertType(response, rdf_file_finder.FileFinderResult)
-
- return self.Parse(response, knowledge_base)
diff --git a/grr/core/grr_response_core/lib/parsers/__init__.py b/grr/core/grr_response_core/lib/parsers/__init__.py
deleted file mode 100644
index 5223f06f84..0000000000
--- a/grr/core/grr_response_core/lib/parsers/__init__.py
+++ /dev/null
@@ -1,139 +0,0 @@
-#!/usr/bin/env python
-"""Generic parsers (for GRR server and client code)."""
-
-from typing import Iterator, Type, TypeVar
-
-from grr_response_core.lib import factory
-from grr_response_core.lib import rdfvalue
-from grr_response_core.lib.parsers import abstract
-from grr_response_core.lib.util import collection
-from grr_response_core.lib.util import precondition
-
-ParseError = abstract.ParseError
-
-Parser = abstract.Parser
-SingleResponseParser = abstract.SingleResponseParser
-SingleFileParser = abstract.SingleFileParser
-MultiResponseParser = abstract.MultiResponseParser
-MultiFileParser = abstract.MultiFileParser
-
-_Factory = factory.Factory
-_RDFValue = rdfvalue.RDFValue
-
-SINGLE_RESPONSE_PARSER_FACTORY: _Factory[SingleResponseParser[_RDFValue]] = (
- _Factory(SingleResponseParser[_RDFValue])
-)
-
-MULTI_RESPONSE_PARSER_FACTORY: _Factory[MultiResponseParser[_RDFValue]] = (
- _Factory(MultiResponseParser[_RDFValue])
-)
-
-SINGLE_FILE_PARSER_FACTORY: _Factory[SingleFileParser[_RDFValue]] = _Factory(
- SingleFileParser[_RDFValue]
-)
-
-MULTI_FILE_PARSER_FACTORY: _Factory[MultiFileParser[_RDFValue]] = _Factory(
- MultiFileParser[_RDFValue]
-)
-
-
-_P = TypeVar("_P", bound=Parser)
-
-
-class ArtifactParserFactory:
- """A factory wrapper class that yields parsers for specific artifact."""
-
- def __init__(self, artifact_name: str) -> None:
- """Initializes the artifact parser factory.
-
- Args:
-      artifact_name: The name of the artifact this factory is supposed to
-        provide parser instances for.
- """
- precondition.AssertType(artifact_name, str)
- self._artifact_name = artifact_name
-
- def HasParsers(self) -> bool:
- return (
- self.HasSingleResponseParsers()
- or self.HasMultiResponseParsers()
- or self.HasSingleFileParsers()
- or self.HasMultiFileParsers()
- )
-
- def HasSingleResponseParsers(self) -> bool:
- return any(self.SingleResponseParserTypes())
-
- def SingleResponseParsers(self) -> Iterator[SingleResponseParser[_RDFValue]]:
- return self._CreateSupportedParsers(SINGLE_RESPONSE_PARSER_FACTORY)
-
- def SingleResponseParserNames(self) -> Iterator[str]:
- return self._SupportedNames(SINGLE_RESPONSE_PARSER_FACTORY)
-
- def SingleResponseParserTypes(
- self,
- ) -> Iterator[Type[SingleResponseParser[_RDFValue]]]:
- return self._SupportedTypes(SINGLE_RESPONSE_PARSER_FACTORY)
-
- def HasMultiResponseParsers(self) -> bool:
- return any(self.MultiResponseParserTypes())
-
- def MultiResponseParsers(self) -> Iterator[MultiResponseParser[_RDFValue]]:
- return self._CreateSupportedParsers(MULTI_RESPONSE_PARSER_FACTORY)
-
- def MultiResponseParserNames(self) -> Iterator[str]:
- return self._SupportedNames(MULTI_RESPONSE_PARSER_FACTORY)
-
- def MultiResponseParserTypes(
- self,
- ) -> Iterator[Type[MultiResponseParser[_RDFValue]]]:
- return self._SupportedTypes(MULTI_RESPONSE_PARSER_FACTORY)
-
- def HasSingleFileParsers(self) -> bool:
- return any(self.SingleFileParserTypes())
-
- def SingleFileParsers(self) -> Iterator[SingleFileParser[_RDFValue]]:
- return self._CreateSupportedParsers(SINGLE_FILE_PARSER_FACTORY)
-
- def SingleFileParserNames(self) -> Iterator[str]:
- return self._SupportedNames(SINGLE_FILE_PARSER_FACTORY)
-
- def SingleFileParserTypes(
- self,
- ) -> Iterator[Type[SingleFileParser[_RDFValue]]]:
- return self._SupportedTypes(SINGLE_FILE_PARSER_FACTORY)
-
- def HasMultiFileParsers(self) -> bool:
- return any(self.MultiFileParserTypes())
-
- def MultiFileParsers(self) -> Iterator[MultiFileParser[_RDFValue]]:
- return self._CreateSupportedParsers(MULTI_FILE_PARSER_FACTORY)
-
- def MultiFileParserNames(self) -> Iterator[str]:
- return self._SupportedNames(MULTI_FILE_PARSER_FACTORY)
-
- def MultiFileParserTypes(self) -> Iterator[Type[MultiFileParser[_RDFValue]]]:
- return self._SupportedTypes(MULTI_FILE_PARSER_FACTORY)
-
- def AllParserTypes(self) -> Iterator[Type[Parser[_RDFValue]]]:
- """Returns all known parser types applicable for the artifact."""
- return collection.Flatten([
- self.SingleResponseParserTypes(),
- self.MultiResponseParserTypes(),
- self.SingleFileParserTypes(),
- self.MultiFileParserTypes(),
- ])
-
- def _CreateSupportedParsers(self, fac: _Factory[_P]) -> Iterator[_P]:
- for name in self._SupportedNames(fac):
- yield fac.Create(name)
-
- def _SupportedTypes(self, fac: _Factory[_P]) -> Iterator[Type[_P]]:
- for name in self._SupportedNames(fac):
- yield fac.GetType(name)
-
- def _SupportedNames(self, fac: _Factory[_P]) -> Iterator[str]:
- for name in fac.Names():
- cls = fac.GetType(name)
- if self._artifact_name in cls.supported_artifacts:
- yield name
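Before this deletion, server code discovered applicable parsers through the factory above; a sketch of that flow (the artifact name and the `knowledge_base`/`response` inputs are placeholders):

```python
from grr_response_core.lib import parsers  # package removed by this change

factory = parsers.ArtifactParserFactory("SomeArtifact")

if factory.HasSingleResponseParsers():
  for parser in factory.SingleResponseParsers():
    for value in parser.ParseResponse(knowledge_base, response):
      ...  # each value is an RDFValue produced by the parser
```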
diff --git a/grr/core/grr_response_core/lib/parsers/abstract.py b/grr/core/grr_response_core/lib/parsers/abstract.py
deleted file mode 100644
index 09303863cf..0000000000
--- a/grr/core/grr_response_core/lib/parsers/abstract.py
+++ /dev/null
@@ -1,169 +0,0 @@
-#!/usr/bin/env python
-"""Registry for parsers and abstract classes for basic parser functionality."""
-
-import abc
-from typing import Generic
-from typing import IO
-from typing import Iterable
-from typing import Iterator
-from typing import Optional
-from typing import TypeVar
-
-from grr_response_core.lib import rdfvalue
-from grr_response_core.lib.rdfvalues import client as rdf_client
-from grr_response_core.lib.rdfvalues import paths as rdf_paths
-
-
-class ParseError(Exception):
- """A class for errors raised when parsers encounter problems when parsing.
-
- Attributes:
- cause: An optional exception that caused this exception to be raised.
- """
-
- def __init__(self, message: str, cause: Optional[Exception] = None) -> None:
- """Initializes the error.
-
- Args:
- message: A message string explaining why the exception was raised.
- cause: An optional exception that caused this exception to be raised.
-
- Returns:
- Nothing.
- """
- if cause is not None:
- message = "{message}: {cause}".format(message=message, cause=cause)
-
- super().__init__(message)
- self.cause = cause
-
-
-_O = TypeVar("_O") # Type variable for parser output types.
-
-
-class Parser(Generic[_O], metaclass=abc.ABCMeta):
- """A base interface for all parsers types."""
-
- # TODO(hanuszczak): Once support for Python 2 is dropped, properties below can
- # be defined as abstract, ensuring that all subclasses really define them.
-
- # TODO(hanuszczak): It would be better if parsers identified types that they
- # can parse rather than declare supported artifacts (which are defined in a
- # completely different place, in an external repository). Then parser can have
- # well-defined types.
-
- # A list of string identifiers for artifacts that this parser can process.
- supported_artifacts = []
-
- # Any knowledgebase dependencies required by the parser. Dependencies required
- # by the artifact itself will be inferred from the artifact definition.
- knowledgebase_dependencies = []
-
-  # TODO(hanuszczak): Parsers should have well-defined types, and what they can
-  # return should be defined statically. Moreover, it is not possible to enforce
-  # that a parser really yields what `output_types` specifies, so this serves no
- # purpose other than documentation.
- #
- # There is only one parser that returns more than one type of value, so maybe
- # it should be re-evaluated whether this field actually makes sense.
-
- # The semantic types that can be produced by this parser.
- output_types = []
-
-
-class SingleResponseParser(Parser[_O]):
- """An abstract class for parsers that are able to parse individual replies."""
-
- @abc.abstractmethod
- def ParseResponse(
- self,
- knowledge_base: rdf_client.KnowledgeBase,
- response: rdfvalue.RDFValue,
- ) -> Iterator[_O]:
- """Parse a single response from the client.
-
- Args:
- knowledge_base: A knowledgebase for the client that provided the response.
- response: An RDF value representing the result of artifact collection.
-
- Raises:
- ParseError: If parser is not able to parse the response.
- """
-
-
-class SingleFileParser(Parser[_O]):
- """An interface for parsers that read file content."""
-
- # TODO(hanuszczak): Define a clear file reader interface.
-
- @abc.abstractmethod
- def ParseFile(
- self,
- knowledge_base: rdf_client.KnowledgeBase,
- pathspec: rdf_paths.PathSpec,
- filedesc: IO[bytes],
- ) -> Iterator[_O]:
- """Parses a single file from the client.
-
- Args:
- knowledge_base: A knowledgebase for the client to whom the file belongs.
- pathspec: A pathspec corresponding to the parsed file.
- filedesc: A file-like object to parse.
-
- Yields:
- RDF values with parsed data.
-
- Raises:
- ParseError: If parser is not able to parse the file.
- """
-
-
-class MultiResponseParser(Parser[_O]):
- """An interface for parsers requiring all replies in order to parse them."""
-
- @abc.abstractmethod
- def ParseResponses(
- self,
- knowledge_base: rdf_client.KnowledgeBase,
- responses: Iterable[rdfvalue.RDFValue],
- ) -> Iterator[_O]:
- """Parse responses from the client.
-
- Args:
- knowledge_base: A knowledgebase for the client that provided responses.
- responses: A list of RDF values with results of artifact collection.
-
- Raises:
- ParseError: If parser is not able to parse the responses.
- """
-
-
-class MultiFileParser(Parser[_O]):
- """An interface for parsers that need to read content of multiple files."""
-
- # TODO(hanuszczak): The file interface mentioned above should also have
- # `pathspec` property. With the current solution there is no way to enforce
- # on the type level that `pathspecs` and `filedescs` have the same length and
- # there is no clear correlation between the two. One possible solution would
- # be to use a list of pairs but this is ugly to document.
-
- @abc.abstractmethod
- def ParseFiles(
- self,
- knowledge_base: rdf_client.KnowledgeBase,
- pathspecs: Iterable[rdf_paths.PathSpec],
- filedescs: Iterable[IO[bytes]],
- ) -> Iterator[_O]:
- """Parses multiple files from the client.
-
- Args:
-      knowledge_base: A knowledgebase for the client to whom the files belong.
- pathspecs: A list of pathspecs corresponding to the parsed files.
- filedescs: A list of file-like objects to parse.
-
- Yields:
- RDF values with parsed data.
-
- Raises:
- ParseError: If parser is not able to parse the files.
- """
diff --git a/grr/core/grr_response_core/lib/parsers/all.py b/grr/core/grr_response_core/lib/parsers/all.py
deleted file mode 100644
index a295513d2c..0000000000
--- a/grr/core/grr_response_core/lib/parsers/all.py
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/usr/bin/env python
-"""A module for registering all known parsers."""
-
-
-def Register():
- """Adds all known parsers to the registry."""
- # pyformat: disable
- # pyformat: enable
diff --git a/grr/core/grr_response_core/lib/parsers/config_file.py b/grr/core/grr_response_core/lib/parsers/config_file.py
deleted file mode 100644
index e921dcce70..0000000000
--- a/grr/core/grr_response_core/lib/parsers/config_file.py
+++ /dev/null
@@ -1,357 +0,0 @@
-#!/usr/bin/env python
-"""Simple parsers for configuration files."""
-
-from collections import abc
-import logging
-import re
-
-from grr_response_core.lib import lexer
-from grr_response_core.lib.rdfvalues import config_file as rdf_config_file
-from grr_response_core.lib.util import precondition
-
-
-def AsIter(arg):
- """Encapsulates an argument in a tuple, if it's not already iterable."""
- if isinstance(arg, str):
- rslt = [arg]
- elif isinstance(arg, abc.Iterable):
- rslt = arg
- elif not arg:
- rslt = []
- else:
- rslt = [arg]
- return tuple(rslt)
-
-
-# GRR lexer implementation of an SSV parser. Considered using
-# https://github.com/Eugeny/reconfigure/blob/master/reconfigure/parsers/ssv.py
-# but it doesn't seem to actually do forward lookup.
-class FieldParser(lexer.Lexer):
- r"""A generalized field based parser that splits entries into fields.
-
- Entries refer to distinct records within the text content, for example each
- line of /etc/passwd or a ssh configuration attribute.
- Fields are elements that make up the entry, for example the individual
- parameters in /etc/passwd.
-
- The parser supports:
- - Flexible field based separators (e.g. spaces, commas, colons).
- - Identification and removal of line comments. Inline comments (e.g. /*...*/)
- are not supported.
- - Line continuation detection.
- - Multiline quotes.
-
- The parser uses the following attributes as defaults:
- - comments: #
- - cont: \ (followed by any amount of whitespace)
- - ml_quote: False (by default, quotes must close before newlines).
- - quot: Both " and ' characters.
- - sep: Whitespace
- - term: Newlines.
-
- To override default values, pass in appropriate keywords with a python
- compatible regex string.
- """
-
- def __init__(
- self,
- comments=r"#",
- cont=r"\\\s*\n",
- ml_quote=False,
- quot=(r"\"", r"'"),
- sep=r"[ \t\f\v]+",
- term=r"[\r\n]",
- verbose=0,
- ):
- r"""A generalized field-based parser.
-
- Handles whitespace, csv etc.
-
- Args:
- comments: Line comment patterns (e.g. "#").
- cont: Continuation patterns (e.g. "\\").
- ml_quote: Boolean flag to allow quoted strings to span lines.
- quot: Quotation patterns (e.g. "\\"" or "'").
- sep: Field separator patterns (e.g. "[\\s,]").
- term: Entry termination patterns (e.g. "\\n").
- verbose: Enable verbose mode for the lexer. Useful for debugging.
- """
- super().__init__()
- self.entries = []
- self.fields = []
- self.field = ""
- self.comments = AsIter(comments)
- self.cont = AsIter(cont)
- self.ml_quote = AsIter(ml_quote)
- self.quot = AsIter(quot)
- self.sep = AsIter(sep)
- self.term = AsIter(term)
- self.verbose = verbose
- self._GenStates()
-
- def Reset(self):
- super().Reset()
- self.entries = []
- self.fields = []
- self.field = ""
-
- def _GenStates(self):
- """Generate the lexer states."""
- self.GenCommentState()
- self.GenFwdState()
- self.GenQuotedState()
- self.GenCatchallState()
-
- def _AddToken(self, state_regex, regex, actions, next_state):
- self._tokens.append(lexer.Token(state_regex, regex, actions, next_state))
-
- def GenCommentState(self):
- if self.comments:
- self._AddToken("COMMENT", r"\n", "PushBack,PopState", None)
- self._AddToken("COMMENT", ".", None, None)
-
- def GenFwdState(self):
- """Generates forwarding state rules.
-
- The lexer will fast forward until there is string content. The
- string content will be returned to the string processor.
- """
- for c in self.cont:
- self._AddToken("FWD", c, None, None)
- for s in self.sep:
- self._AddToken("FWD", s, None, None)
- self._AddToken("FWD", ".", "PushBack,PopState", None)
-
- def GenQuotedState(self):
- """Generate string matching state rules."""
- for i, q in enumerate(self.quot):
- label = "%s_STRING" % i
- escaped = re.escape(q)
- self._AddToken(label, escaped, "PopState", None)
- self._AddToken(label, q, "PopState", None)
- if self.ml_quote:
- self._AddToken(label, r"\n", None, None)
- else:
- self._AddToken(label, r"\n", "BadLine", None)
- self._AddToken(label, ".", "AddToField", None)
-
- def GenCatchallState(self):
- """Generate string matching state rules.
-
- This sets up initial state handlers that cover both the 'INITIAL' state
- and the intermediate content between fields.
-
- The lexer acts on items with precedence:
- - continuation characters: use the fast forward state rules.
- - field separators: finalize processing the field.
- - quotation characters: use the quotation state rules.
- """
- for c in self.comments:
- self._AddToken(".", c, "PushState,EndField", "COMMENT")
- for c in self.cont:
- self._AddToken(".", c, "PushState", "FWD")
- for t in self.term:
- self._AddToken(".", t, "EndEntry", None)
- for s in self.sep:
- self._AddToken(".", s, "EndField", None)
- for i, q in enumerate(self.quot):
- self._AddToken(".", q, "PushState", "%s_STRING" % i)
- self._AddToken(".", ".", "AddToField", None)
-
- def EndEntry(self, **_):
- self.EndField()
- if self.fields:
- # Copy the fields into the processed entries.
- self.entries.append(self.fields[:])
- self.fields = []
-
- def AddToField(self, string="", **_):
- if string:
- self.field += string
-
- def EndField(self, **_):
- if self.field:
- self.fields.append(self.field[:])
- self.field = ""
-
- def BadLine(self, **_):
- logging.debug("Skipped bad line in file at %s", self.processed)
- self.field = ""
-
- def ParseEntries(self, data: str):
- precondition.AssertType(data, str)
-
- # Flush any old results.
- self.Reset()
- self.Feed(data)
- self.Close()
-    # Flush the last entry in case the feed didn't end with a terminator, e.g. \n
- self.EndEntry()
- return self.entries
-
-
-class KeyValueParser(FieldParser):
- """A generalized KeyValue parser that splits entries into key/value pairs.
-
- Capabilities and parameters are identical to FieldParser, with one difference.
-  The parser also accepts the parameter "kv_sep".
-  Patterns specified in kv_sep are used to demarcate key/value processing.
-
-  kv_sep defaults to "=".
- """
-
- def __init__(
- self,
- comments=r"#",
- cont=r"\\\s*\n",
- kv_sep="=",
- ml_quote=False,
- quot=(r"\"", r"'"),
- sep=r"[ \t\f\v]+",
- term=r"[\r\n]",
- verbose=0,
- ):
- """A generalized key-value parser.
-
- Handles whitespace, csv etc.
-
- Args:
- comments: Line comment patterns (e.g. "#").
- cont: Continuation patterns (e.g. "\\").
- kv_sep: Key/Value separators (e.g. "=" or ":").
- ml_quote: Boolean flag to allow quoted strings to span lines.
- quot: Quotation patterns (e.g. "\\"" or "'").
- sep: Field separator patterns (e.g. "[\\s,]").
- term: Entry termination patterns (e.g. "\\n").
- verbose: Enable verbose mode for the lexer. Useful for debugging.
- """
- self.kv_sep = AsIter(kv_sep)
- super().__init__(
- comments=comments,
- cont=cont,
- ml_quote=ml_quote,
- quot=quot,
- sep=sep,
- term=term,
- verbose=verbose,
- )
- self.key_field = ""
-
- def _GenStates(self):
- self.GenCommentState()
- self.GenFwdState()
- self.GenQuotedState()
- self.GenMatchFirstState()
- self.GenInitialState()
- self.GenKeyState()
- self.GenValueState()
- self.GenCatchallState()
-
- def GenMatchFirstState(self):
- for i, q in enumerate(self.quot):
- self._AddToken(".", q, "PushState", "%s_STRING" % i)
- for c in self.cont:
- self._AddToken(".", c, "PushState", "FWD")
-
- def GenInitialState(self):
- for c in self.comments:
- self._AddToken("INITIAL", c, "PushState,EndField", "COMMENT")
- for t in self.term:
- self._AddToken("INITIAL", t, "EndField,EndEntry", None)
- for c in self.sep:
- self._AddToken("INITIAL", c, "PushState", "FWD")
- for k in self.kv_sep:
- self._AddToken("INITIAL", k, "BadLine", None)
- self._AddToken("INITIAL", ".", "PushState,PushBack", "KEY")
-
- def GenKeyState(self):
- for c in self.comments:
- self._AddToken(
- "KEY", c, "EndKeyField,EndEntry,PopState,PushBack", "COMMENT"
- )
- for t in self.term:
- self._AddToken("KEY", t, "EndKeyField,EndEntry,PopState", None)
- for k in self.kv_sep:
- self._AddToken("KEY", k, "EndKeyField", "VALUE")
-
- def GenValueState(self):
- for c in self.comments:
- self._AddToken(
- "VALUE", c, "EndField,EndEntry,PopState,PushBack", "COMMENT"
- )
- for t in self.term:
- self._AddToken("VALUE", t, "EndField,EndEntry,PopState", None)
- for s in self.sep:
- self._AddToken("VALUE", s, "EndField", None)
-
- def GenCatchallState(self):
- self._AddToken(".", ".", "AddToField", None)
-
- def EndKeyField(self, **_):
- self.key_field = self.field
- self.field = ""
-
- def EndEntry(self, **_):
- # Finalize processing for non-terminated entries. Key first, then fields.
- if self.field and not self.key_field:
- self.EndKeyField()
- else:
- self.EndField()
- # Set up the entry.
- key_field = self.key_field.strip()
- if key_field:
- self.entries.append({key_field: self.fields})
- self.key_field = ""
- self.fields = []
-
- def ParseToOrderedDict(self, data):
- result = dict()
- for field in self.ParseEntries(data):
- result.update(field)
- return result
-
-
-class RsyslogFieldParser(FieldParser):
- """Field parser for syslog configurations."""
-
- log_rule_re = re.compile(r"([\w,\*]+)\.([\w,!=\*]+)")
- destinations = dict([
- ("TCP", re.compile(r"(?:@@)([^;]*)")),
- ("UDP", re.compile(r"(?:@)([^;]*)")),
- ("PIPE", re.compile(r"(?:\|)([^;]*)")),
- ("NONE", re.compile(r"(?:~)([^;]*)")),
- ("SCRIPT", re.compile(r"(?:\^)([^;]*)")),
- ("MODULE", re.compile(r"(?::om\w:)([^;]*)")),
- ("FILE", re.compile(r"-?(/[^;]*)")), ("WALL", re.compile(r"(\*)"))
- ]) # pyformat: disable
-
- def ParseAction(self, action):
- """Extract log configuration data from rsyslog actions.
-
- Actions have the format:
-      <target>;<template>
- e.g. *.* @@loghost.example.com.:514;RSYSLOG_ForwardFormat
-
- Actions are selected by a type definition. These include:
- "@@": TCP syslog
- "@": UDP syslog
- "|": Named pipe
- "~": Drop to /dev/null
- "^": Shell script
- ":om:": An output module
- Or a file path.
-
- Args:
- action: The action string from rsyslog.
-
- Returns:
-      an rdfvalue.LogTarget message.
- """
- rslt = rdf_config_file.LogTarget()
- for dst_str, dst_re in self.destinations.items():
- dst = dst_re.match(action)
- if dst:
- rslt.transport = dst_str
- rslt.destination = dst.group(1)
- break
- return rslt
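A short sketch of how the two lexers above were driven; the inputs are illustrative and the expected results follow the semantics exercised by the tests below:

```python
from grr_response_core.lib.parsers import config_file  # removed by this change

fields = config_file.FieldParser().ParseEntries("Port 22\nPort 2222 10222\n")
# fields == [["Port", "22"], ["Port", "2222", "10222"]]

kv = config_file.KeyValueParser(kv_sep=["=", ":"], comments=["#", ";;"])
kv.ParseToOrderedDict("key1 = a b  # comment\nkey2: c\n")
# == {"key1": ["a", "b"], "key2": ["c"]}
```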
diff --git a/grr/core/grr_response_core/lib/parsers/config_file_test.py b/grr/core/grr_response_core/lib/parsers/config_file_test.py
deleted file mode 100644
index 333c64b586..0000000000
--- a/grr/core/grr_response_core/lib/parsers/config_file_test.py
+++ /dev/null
@@ -1,106 +0,0 @@
-#!/usr/bin/env python
-"""Unit test for config files."""
-
-from absl import app
-
-from grr_response_core.lib.parsers import config_file
-from grr.test_lib import test_lib
-
-CFG = b"""
-# A comment.
-Protocol 2 # Another comment.
-Ciphers aes128-ctr,aes256-ctr,aes128-cbc,aes256-cbc
-ServerKeyBits 768
-Port 22
-Port 2222,10222
-
-# Make life easy for root. It's hard running a server.
-Match User root
- PermitRootLogin yes
-
-# Oh yeah, this is an excellent way to protect that root account.
-Match Address 192.168.3.12
- PermitRootLogin no
- Protocol 1 # Not a valid match group entry.
-"""
-
-
-class FieldParserTests(test_lib.GRRBaseTest):
- """Test the field parser."""
-
- def testParser(self):
- test_data = r"""
- each of these words:should;be \
- fields # but not these ones \n, or \ these.
- this should be another entry "with this quoted text as one field"
- 'an entry'with" only two" fields ;; and not this comment.
- """
- expected = [
- ["each", "of", "these", "words", "should", "be", "fields"],
- [
- "this",
- "should",
- "be",
- "another",
- "entry",
- "with this quoted text as one field",
- ],
- ["an entrywith only two", "fields"],
- ]
- cfg = config_file.FieldParser(
- sep=["[ \t\f\v]+", ":", ";"], comments=["#", ";;"]
- )
- results = cfg.ParseEntries(test_data)
- for i, expect in enumerate(expected):
- self.assertCountEqual(expect, results[i])
-
- def testNoFinalTerminator(self):
- test_data = "you forgot a newline"
- expected = [["you", "forgot", "a", "newline"]]
- cfg = config_file.FieldParser()
- results = cfg.ParseEntries(test_data)
- for i, expect in enumerate(expected):
- self.assertCountEqual(expect, results[i])
-
- def testWhitespaceDoesntNukeNewline(self):
- test_data = "trailing spaces \nno trailing spaces\n"
- expected = [["trailing", "spaces"], ["no", "trailing", "spaces"]]
- results = config_file.FieldParser().ParseEntries(test_data)
- for i, expect in enumerate(expected):
- self.assertCountEqual(expect, results[i])
- expected = [["trailing", "spaces", "no", "trailing", "spaces"]]
- results = config_file.FieldParser(sep=r"\s+").ParseEntries(test_data)
- for i, expect in enumerate(expected):
- self.assertCountEqual(expect, results[i])
-
-
-class KeyValueParserTests(test_lib.GRRBaseTest):
- """Test the field parser."""
-
- def testParser(self):
- test_data = r"""
- key1 = a list of \
- fields # but not \n this, or \ this.
-
- # Nothing here.
- key 2:another entry
- = # Bad line
- 'a key'with" no" value field ;; and not this comment.
- """
- expected = [
- {"key1": ["a", "list", "of", "fields"]},
- {"key 2": ["another", "entry"]},
- {"a keywith no value field": []},
- ]
- cfg = config_file.KeyValueParser(kv_sep=["=", ":"], comments=["#", ";;"])
- results = cfg.ParseEntries(test_data)
- for i, expect in enumerate(expected):
- self.assertDictEqual(expect, results[i])
-
-
-def main(args):
- test_lib.main(args)
-
-
-if __name__ == "__main__":
- app.run(main)
diff --git a/grr/core/grr_response_core/lib/parsers/linux_cmd_parser.py b/grr/core/grr_response_core/lib/parsers/linux_cmd_parser.py
deleted file mode 100644
index 298bd90025..0000000000
--- a/grr/core/grr_response_core/lib/parsers/linux_cmd_parser.py
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/usr/bin/env python
-"""Simple parsers for the output of linux commands."""
diff --git a/grr/core/grr_response_core/lib/parsers/linux_cmd_parser_test.py b/grr/core/grr_response_core/lib/parsers/linux_cmd_parser_test.py
deleted file mode 100644
index 1bf8bafdf2..0000000000
--- a/grr/core/grr_response_core/lib/parsers/linux_cmd_parser_test.py
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/usr/bin/env python
-"""Unit test for the linux cmd parser."""
-
-from absl import app
-
-from grr.test_lib import test_lib
-
-
-def main(args):
- test_lib.main(args)
-
-
-if __name__ == "__main__":
- app.run(main)
diff --git a/grr/core/grr_response_core/lib/parsers/parsers_test.py b/grr/core/grr_response_core/lib/parsers/parsers_test.py
deleted file mode 100644
index f37d6630b5..0000000000
--- a/grr/core/grr_response_core/lib/parsers/parsers_test.py
+++ /dev/null
@@ -1,297 +0,0 @@
-#!/usr/bin/env python
-from typing import IO, Iterable, Iterator
-from unittest import mock
-
-from absl.testing import absltest
-
-from grr_response_core.lib import factory
-from grr_response_core.lib import parsers
-from grr_response_core.lib import rdfvalue
-from grr_response_core.lib.rdfvalues import client as rdf_client
-from grr_response_core.lib.rdfvalues import paths as rdf_paths
-from grr.test_lib import parser_test_lib
-
-
-class ArtifactParserFactoryTest(absltest.TestCase):
-
- @mock.patch.object(
- parsers,
- "SINGLE_RESPONSE_PARSER_FACTORY",
- factory.Factory(parsers.SingleResponseParser),
- )
- def testSingleResponseParsers(self):
-
- class FooParser(parsers.SingleResponseParser[None]):
-
- supported_artifacts = ["Quux", "Norf"]
-
- def ParseResponse(
- self,
- knowledge_base: rdf_client.KnowledgeBase,
- response: rdfvalue.RDFValue,
- ) -> Iterator[None]:
- raise NotImplementedError()
-
- class BarParser(parsers.SingleResponseParser[None]):
-
- supported_artifacts = ["Norf", "Thud"]
-
- def ParseResponse(
- self,
- knowledge_base: rdf_client.KnowledgeBase,
- response: rdfvalue.RDFValue,
- ) -> Iterator[None]:
- raise NotImplementedError()
-
- class BazParser(parsers.SingleResponseParser[None]):
-
- supported_artifacts = ["Thud", "Quux"]
-
- def ParseResponse(
- self,
- knowledge_base: rdf_client.KnowledgeBase,
- response: rdfvalue.RDFValue,
- ) -> Iterator[None]:
- raise NotImplementedError()
-
- parsers.SINGLE_RESPONSE_PARSER_FACTORY.Register("Foo", FooParser)
- parsers.SINGLE_RESPONSE_PARSER_FACTORY.Register("Bar", BarParser)
- parsers.SINGLE_RESPONSE_PARSER_FACTORY.Register("Baz", BazParser)
-
- quux_factory = parsers.ArtifactParserFactory("Quux")
- quux_parsers = quux_factory.SingleResponseParsers()
- self.assertCountEqual(map(type, quux_parsers), [FooParser, BazParser])
-
- norf_factory = parsers.ArtifactParserFactory("Norf")
- norf_parsers = norf_factory.SingleResponseParsers()
- self.assertCountEqual(map(type, norf_parsers), [FooParser, BarParser])
-
- thud_factory = parsers.ArtifactParserFactory("Thud")
- thud_parsers = thud_factory.SingleResponseParsers()
- self.assertCountEqual(map(type, thud_parsers), [BarParser, BazParser])
-
- @mock.patch.object(
- parsers,
- "MULTI_RESPONSE_PARSER_FACTORY",
- factory.Factory(parsers.MultiResponseParser),
- )
- def testMultiResponseParsers(self):
-
- class FooParser(parsers.MultiResponseParser[None]):
-
- supported_artifacts = ["Foo"]
-
- def ParseResponses(
- self,
- knowledge_base: rdf_client.KnowledgeBase,
- responses: Iterable[rdfvalue.RDFValue],
- ) -> Iterator[None]:
- raise NotImplementedError()
-
- class BarParser(parsers.MultiResponseParser[None]):
-
- supported_artifacts = ["Bar"]
-
- def ParseResponses(
- self,
- knowledge_base: rdf_client.KnowledgeBase,
- responses: Iterable[rdfvalue.RDFValue],
- ) -> Iterator[None]:
- raise NotImplementedError()
-
- parsers.MULTI_RESPONSE_PARSER_FACTORY.Register("Foo", FooParser)
- parsers.MULTI_RESPONSE_PARSER_FACTORY.Register("Bar", BarParser)
-
- foo_factory = parsers.ArtifactParserFactory("Foo")
- foo_parsers = foo_factory.MultiResponseParsers()
- self.assertCountEqual(map(type, foo_parsers), [FooParser])
-
- bar_factory = parsers.ArtifactParserFactory("Bar")
- bar_parsers = bar_factory.MultiResponseParsers()
- self.assertCountEqual(map(type, bar_parsers), [BarParser])
-
- @mock.patch.object(
- parsers,
- "SINGLE_FILE_PARSER_FACTORY",
- factory.Factory(parsers.SingleFileParser),
- )
- def testSingleFileParsers(self):
-
- class FooParser(parsers.SingleFileParser):
-
- supported_artifacts = ["Bar"]
-
- def ParseFile(self, knowledge_base, pathspec, filedesc):
- raise NotImplementedError()
-
- parsers.SINGLE_FILE_PARSER_FACTORY.Register("Foo", FooParser)
-
- bar_factory = parsers.ArtifactParserFactory("Bar")
- bar_parsers = bar_factory.SingleFileParsers()
- self.assertCountEqual(map(type, bar_parsers), [FooParser])
-
- baz_factory = parsers.ArtifactParserFactory("Baz")
- baz_parsers = baz_factory.SingleFileParsers()
- self.assertCountEqual(map(type, baz_parsers), [])
-
- @mock.patch.object(
- parsers,
- "MULTI_FILE_PARSER_FACTORY",
- factory.Factory(parsers.MultiFileParser),
- )
- def testMultiFileParsers(self):
-
- class FooParser(parsers.MultiFileParser[None]):
-
- supported_artifacts = ["Quux", "Norf"]
-
- def ParseFiles(
- self,
- knowledge_base: rdf_client.KnowledgeBase,
- pathspecs: Iterable[rdf_paths.PathSpec],
- filedescs: Iterable[IO[bytes]],
- ) -> Iterator[None]:
- raise NotImplementedError()
-
- class BarParser(parsers.MultiFileParser[None]):
-
- supported_artifacts = ["Quux", "Thud"]
-
- def ParseFiles(
- self,
- knowledge_base: rdf_client.KnowledgeBase,
- pathspecs: Iterable[rdf_paths.PathSpec],
- filedescs: Iterable[IO[bytes]],
- ) -> Iterator[None]:
- raise NotImplementedError()
-
- parsers.MULTI_FILE_PARSER_FACTORY.Register("Foo", FooParser)
- parsers.MULTI_FILE_PARSER_FACTORY.Register("Bar", BarParser)
-
- quux_factory = parsers.ArtifactParserFactory("Quux")
- quux_parsers = quux_factory.MultiFileParsers()
- self.assertCountEqual(map(type, quux_parsers), [FooParser, BarParser])
-
- norf_factory = parsers.ArtifactParserFactory("Norf")
- norf_parsers = norf_factory.MultiFileParsers()
- self.assertCountEqual(map(type, norf_parsers), [FooParser])
-
- thud_factory = parsers.ArtifactParserFactory("Thud")
- thud_parsers = thud_factory.MultiFileParsers()
- self.assertCountEqual(map(type, thud_parsers), [BarParser])
-
- @mock.patch.object(
- parsers,
- "SINGLE_FILE_PARSER_FACTORY",
- factory.Factory(parsers.SingleFileParser),
- )
- @mock.patch.object(
- parsers,
- "MULTI_RESPONSE_PARSER_FACTORY",
- factory.Factory(parsers.MultiResponseParser),
- )
- def testAllParsers(self):
-
- class FooParser(parsers.SingleFileParser[None]):
-
- supported_artifacts = ["Quux"]
-
- def ParseFile(
- self,
- knowledge_base: rdf_client.KnowledgeBase,
- pathspec: rdf_paths.PathSpec,
- filedesc: IO[bytes],
- ):
- raise NotImplementedError()
-
- class BarParser(parsers.MultiResponseParser[None]):
-
- supported_artifacts = ["Quux"]
-
- def ParseResponses(
- self,
- knowledge_base: rdf_client.KnowledgeBase,
- responses: Iterable[rdfvalue.RDFValue],
- ) -> Iterator[None]:
- raise NotImplementedError()
-
- parsers.SINGLE_FILE_PARSER_FACTORY.Register("Foo", FooParser)
- parsers.MULTI_RESPONSE_PARSER_FACTORY.Register("Bar", BarParser)
-
- quux_factory = parsers.ArtifactParserFactory("Quux")
- quux_parsers = quux_factory.AllParserTypes()
- self.assertCountEqual(quux_parsers, [FooParser, BarParser])
-
- def testSingleResponseParserNames(self):
-
- class FooParser(parsers.SingleResponseParser[None]):
-
- supported_artifacts = ["Quux"]
-
- def ParseResponse(
- self,
- knowledge_base: rdf_client.KnowledgeBase,
- response: rdfvalue.RDFValue,
- ) -> Iterator[None]:
- raise NotImplementedError()
-
- with parser_test_lib._ParserContext("Foo", FooParser):
- quux_factory = parsers.ArtifactParserFactory("Quux")
- self.assertEqual(list(quux_factory.SingleResponseParserNames()), ["Foo"])
-
- def testMultiResponseParserNames(self):
-
- class FooParser(parsers.MultiResponseParser[None]):
-
- supported_artifacts = ["Quux"]
-
- def ParseResponses(
- self,
- knowledge_base: rdf_client.KnowledgeBase,
- responses: Iterable[rdfvalue.RDFValue],
- ) -> Iterator[None]:
- raise NotImplementedError()
-
- with parser_test_lib._ParserContext("Foo", FooParser):
- quux_factory = parsers.ArtifactParserFactory("Quux")
- self.assertEqual(list(quux_factory.MultiResponseParserNames()), ["Foo"])
-
- def testSingleFileParserNames(self):
-
- class FooParser(parsers.SingleFileParser[None]):
-
- supported_artifacts = ["Quux"]
-
- def ParseFile(
- self,
- knowledge_base: rdf_client.KnowledgeBase,
- pathspec: rdf_paths.PathSpec,
- filedesc: IO[bytes],
- ) -> Iterator[None]:
- raise NotImplementedError()
-
- with parser_test_lib._ParserContext("Foo", FooParser):
- quux_factory = parsers.ArtifactParserFactory("Quux")
- self.assertEqual(list(quux_factory.SingleFileParserNames()), ["Foo"])
-
- def testMultiFileParserNames(self):
-
- class FooParser(parsers.MultiFileParser[None]):
-
- supported_artifacts = ["Quux"]
-
- def ParseFiles(
- self,
- knowledge_base: rdf_client.KnowledgeBase,
- pathspecs: Iterable[rdf_paths.PathSpec],
- filedescs: Iterable[IO[bytes]],
- ) -> Iterator[None]:
- raise NotImplementedError()
-
- with parser_test_lib._ParserContext("Foo", FooParser):
- quux_factory = parsers.ArtifactParserFactory("Quux")
- self.assertEqual(list(quux_factory.MultiFileParserNames()), ["Foo"])
-
-
-if __name__ == "__main__":
- absltest.main()
diff --git a/grr/core/grr_response_core/lib/parsers/parsers_test_lib.py b/grr/core/grr_response_core/lib/parsers/parsers_test_lib.py
deleted file mode 100644
index 7a2c525bab..0000000000
--- a/grr/core/grr_response_core/lib/parsers/parsers_test_lib.py
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/env python
-"""Parser testing lib."""
-
-import io
-
-from grr_response_core.lib.rdfvalues import client_fs as rdf_client_fs
-from grr_response_core.lib.rdfvalues import paths as rdf_paths
-
-
-def GenInit(svc, desc, start=("2", "3", "4", "5"), stop="1"):
- """Generate init file."""
- insserv = r"""
- $local_fs +umountfs
- $network +networking
- $remote_fs $local_fs +umountnfs +sendsigs
- $syslog +rsyslog +sysklogd +syslog-ng +dsyslog +inetutils-syslogd
- """
- tmpl = r"""
- ### BEGIN INIT INFO
- # Provides: %s
- # Required-Start: $remote_fs $syslog
- # Required-Stop: $syslog
- # Default-Start: %s
- # Default-Stop: %s
- # Short-Description: %s
- ### END INIT INFO
- """ % (svc, " ".join(start), " ".join(stop), desc)
- return {
- "/etc/insserv.conf": insserv.encode("utf-8"),
- "/etc/init.d/%s" % svc: tmpl.encode("utf-8"),
- }
-
-
-def GenTestData(paths, data, st_mode=33188):
- stats = []
- files = []
- for path in paths:
- p = rdf_paths.PathSpec(path=path, pathtype="OS")
- stats.append(rdf_client_fs.StatEntry(pathspec=p, st_mode=st_mode))
- for val in data:
- files.append(io.BytesIO(val.encode("utf-8")))
- return stats, files
-
-
-def GenXinetd(svc="test", disable="no"):
- """Generate xinetd file."""
- defaults = r"""
- defaults
- {
- instances = 60
- log_type = SYSLOG authpriv
- log_on_success = HOST PID
- log_on_failure = HOST
- cps = 25 30
- }
- includedir /etc/xinetd.d
- """.encode("utf-8")
- tmpl = ("""
- service %s
- {
- disable = %s
- }
- """ % (svc, disable)).encode("utf-8")
- return {"/etc/xinetd.conf": defaults, "/etc/xinetd.d/%s" % svc: tmpl}
diff --git a/grr/core/grr_response_core/lib/rdfvalues/artifacts.py b/grr/core/grr_response_core/lib/rdfvalues/artifacts.py
index 8be9e770b6..c81e453e5f 100644
--- a/grr/core/grr_response_core/lib/rdfvalues/artifacts.py
+++ b/grr/core/grr_response_core/lib/rdfvalues/artifacts.py
@@ -2,11 +2,9 @@
"""rdf value representation for artifact collector parameters."""
import json
-from typing import Type
import yaml
-from grr_response_core.lib import parsers
from grr_response_core.lib import rdfvalue
from grr_response_core.lib.rdfvalues import client as rdf_client
from grr_response_core.lib.rdfvalues import protodict as rdf_protodict
@@ -97,10 +95,6 @@ class ArtifactSource(rdf_structs.RDFProtoStruct):
"required_attributes": ["paths"],
"output_type": "StatEntry",
},
- artifact_pb2.ArtifactSource.GREP: {
- "required_attributes": ["paths", "content_regex_list"],
- "output_type": "BufferReference",
- },
artifact_pb2.ArtifactSource.LIST_FILES: {
"required_attributes": ["paths"],
"output_type": "StatEntry",
@@ -308,45 +302,12 @@ def ReduceDict(in_dict):
return yaml.safe_dump(ordered_artifact_dict)
-class ArtifactProcessorDescriptor(rdf_structs.RDFProtoStruct):
- """Describes artifact processor."""
-
- protobuf = artifact_pb2.ArtifactProcessorDescriptor
-
- @classmethod
- def FromParser(
- cls, parser_cls: Type[parsers.Parser]
- ) -> "ArtifactProcessorDescriptor":
- """Creates a descriptor corresponding to the given parser.
-
- Args:
- parser_cls: A parser class for which the descriptor is to be created.
-
- Returns:
- A parser descriptor corresponding to the given parser.
- """
- # TODO(hanuszczak): Relying on a class docstring to get a description seems
-    # like a lazy hack. Let's not do that.
- if parser_cls.__doc__:
- description = parser_cls.__doc__.split("\n")[0]
- else:
- description = ""
-
- output_types = [t.__name__ for t in parser_cls.output_types]
- return cls(
- name=parser_cls.__name__,
- description=description,
- output_types=output_types,
- )
-
-
class ArtifactDescriptor(rdf_structs.RDFProtoStruct):
"""Includes artifact, its JSON source, processors and additional info."""
protobuf = artifact_pb2.ArtifactDescriptor
rdf_deps = [
Artifact,
- ArtifactProcessorDescriptor,
]
diff --git a/grr/core/grr_response_core/lib/rdfvalues/config.py b/grr/core/grr_response_core/lib/rdfvalues/config.py
index ce7e6bd6ab..cca8b98ba5 100644
--- a/grr/core/grr_response_core/lib/rdfvalues/config.py
+++ b/grr/core/grr_response_core/lib/rdfvalues/config.py
@@ -12,3 +12,7 @@ class AdminUIClientWarningRule(rdf_structs.RDFProtoStruct):
class AdminUIClientWarningsConfigOption(rdf_structs.RDFProtoStruct):
protobuf = config_pb2.AdminUIClientWarningsConfigOption
rdf_deps = [AdminUIClientWarningRule]
+
+
+class AdminUIHuntConfig(rdf_structs.RDFProtoStruct):
+ protobuf = config_pb2.AdminUIHuntConfig
diff --git a/grr/core/grr_response_core/lib/rdfvalues/crypto.py b/grr/core/grr_response_core/lib/rdfvalues/crypto.py
index 8fd061020b..737e4fdf3a 100644
--- a/grr/core/grr_response_core/lib/rdfvalues/crypto.py
+++ b/grr/core/grr_response_core/lib/rdfvalues/crypto.py
@@ -828,37 +828,40 @@ def Verify(self, message, signature):
raise VerificationError(e)
-class Password(rdf_structs.RDFProtoStruct):
- """A password stored in the database."""
+def _CalculateHash(password: bytes, salt: bytes, iteration_count: int) -> bytes:
+ kdf = pbkdf2.PBKDF2HMAC(
+ algorithm=hashes.SHA256(),
+ length=32,
+ salt=salt,
+ iterations=iteration_count,
+ backend=openssl.backend,
+ )
+ return kdf.derive(password)
- protobuf = jobs_pb2.Password
- def _CalculateHash(self, password, salt, iteration_count):
- kdf = pbkdf2.PBKDF2HMAC(
- algorithm=hashes.SHA256(),
- length=32,
- salt=salt,
- iterations=iteration_count,
- backend=openssl.backend,
- )
- return kdf.derive(password)
+def SetPassword(proto: jobs_pb2.Password, password: str) -> None:
+ """Sets the password in the proto."""
- def SetPassword(self, password):
- self.salt = b"%016x" % random.UInt64()
- self.iteration_count = 100000
+ proto.salt = b"%016x" % random.UInt64()
+ proto.iteration_count = 100000
- # prevent non-descriptive 'key_material must be bytes' error later
- if isinstance(password, str):
- password = password.encode("utf-8")
+ # prevent non-descriptive 'key_material must be bytes' error later
+ password_bytes = password.encode("utf-8")
+
+ proto.hashed_pwd = _CalculateHash(
+ password_bytes, proto.salt, proto.iteration_count
+ )
- self.hashed_pwd = self._CalculateHash(
- password, self.salt, self.iteration_count
- )
- def CheckPassword(self, password):
- # prevent non-descriptive 'key_material must be bytes' error later
- if isinstance(password, str):
- password = password.encode("utf-8")
+def CheckPassword(proto: jobs_pb2.Password, password: str) -> bool:
+ # prevent non-descriptive 'key_material must be bytes' error later
+ password_bytes = password.encode("utf-8")
- h = self._CalculateHash(password, self.salt, self.iteration_count)
- return constant_time.bytes_eq(h, self.hashed_pwd)
+ h = _CalculateHash(password_bytes, proto.salt, proto.iteration_count)
+ return constant_time.bytes_eq(h, proto.hashed_pwd)
+
+
+class Password(rdf_structs.RDFProtoStruct):
+ """A password stored in the database."""
+
+ protobuf = jobs_pb2.Password
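Usage after this refactoring, with the helpers operating on the proto directly (this mirrors the updated test below):

```python
from grr_response_core.lib.rdfvalues import crypto as rdf_crypto
from grr_response_proto import jobs_pb2

password = jobs_pb2.Password()
rdf_crypto.SetPassword(password, "correct horse battery staple")

assert rdf_crypto.CheckPassword(password, "correct horse battery staple")
assert not rdf_crypto.CheckPassword(password, "hunter2")
```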
diff --git a/grr/core/grr_response_core/lib/rdfvalues/crypto_test.py b/grr/core/grr_response_core/lib/rdfvalues/crypto_test.py
index ee19e85e61..116200651e 100644
--- a/grr/core/grr_response_core/lib/rdfvalues/crypto_test.py
+++ b/grr/core/grr_response_core/lib/rdfvalues/crypto_test.py
@@ -16,6 +16,7 @@
from grr_response_core.lib import utils
from grr_response_core.lib.rdfvalues import crypto as rdf_crypto
from grr_response_core.lib.rdfvalues import test_base as rdf_test_base
+from grr_response_proto import jobs_pb2
from grr.test_lib import test_lib
@@ -472,18 +473,19 @@ def testExpiredTestCertificate(self):
class PasswordTest(CryptoTestBase):
def testPassword(self):
- sample = rdf_crypto.Password()
+ sample = jobs_pb2.Password()
- sample.SetPassword(b"foo")
- serialized = sample.SerializeToBytes()
+ rdf_crypto.SetPassword(sample, "foo")
+ serialized = sample.SerializeToString()
self.assertNotIn(b"foo", serialized)
- read_sample = rdf_crypto.Password.FromSerializedBytes(serialized)
+ read_sample = jobs_pb2.Password()
+ read_sample.ParseFromString(serialized)
- self.assertFalse(sample.CheckPassword(b"bar"))
- self.assertFalse(read_sample.CheckPassword(b"bar"))
- self.assertTrue(sample.CheckPassword(b"foo"))
- self.assertTrue(read_sample.CheckPassword(b"foo"))
+ self.assertFalse(rdf_crypto.CheckPassword(sample, "bar"))
+ self.assertFalse(rdf_crypto.CheckPassword(read_sample, "bar"))
+ self.assertTrue(rdf_crypto.CheckPassword(sample, "foo"))
+ self.assertTrue(rdf_crypto.CheckPassword(read_sample, "foo"))
def _Tamper(string):
diff --git a/grr/core/grr_response_core/lib/rdfvalues/mig_artifacts.py b/grr/core/grr_response_core/lib/rdfvalues/mig_artifacts.py
index 7ba0ffa6c2..0c01e92d0c 100644
--- a/grr/core/grr_response_core/lib/rdfvalues/mig_artifacts.py
+++ b/grr/core/grr_response_core/lib/rdfvalues/mig_artifacts.py
@@ -28,20 +28,6 @@ def ToRDFArtifact(proto: artifact_pb2.Artifact) -> rdf_artifacts.Artifact:
return rdf_artifacts.Artifact.FromSerializedBytes(proto.SerializeToString())
-def ToProtoArtifactProcessorDescriptor(
- rdf: rdf_artifacts.ArtifactProcessorDescriptor,
-) -> artifact_pb2.ArtifactProcessorDescriptor:
- return rdf.AsPrimitiveProto()
-
-
-def ToRDFArtifactProcessorDescriptor(
- proto: artifact_pb2.ArtifactProcessorDescriptor,
-) -> rdf_artifacts.ArtifactProcessorDescriptor:
- return rdf_artifacts.ArtifactProcessorDescriptor.FromSerializedBytes(
- proto.SerializeToString()
- )
-
-
def ToProtoArtifactDescriptor(
rdf: rdf_artifacts.ArtifactDescriptor,
) -> artifact_pb2.ArtifactDescriptor:
diff --git a/grr/core/grr_response_core/lib/rdfvalues/paths.py b/grr/core/grr_response_core/lib/rdfvalues/paths.py
index 14d9a46e64..a13cee1931 100644
--- a/grr/core/grr_response_core/lib/rdfvalues/paths.py
+++ b/grr/core/grr_response_core/lib/rdfvalues/paths.py
@@ -19,7 +19,7 @@
import itertools
import posixpath
import re
-from typing import Iterable, Sequence
+from typing import Iterable, Iterator, Optional, Sequence
from grr_response_core.lib import artifact_utils
from grr_response_core.lib import rdfvalue
@@ -333,11 +333,16 @@ def Validate(self):
if len(self.RECURSION_REGEX.findall(self._value)) > 1:
raise ValueError("Only one ** is permitted per path: %s." % self._value)
- def Interpolate(self, knowledge_base=None):
- kb = knowledge_base
- patterns = artifact_utils.InterpolateKbAttributes(self._value, kb)
-
- for pattern in patterns:
+ def Interpolate(
+ self,
+ knowledge_base: Optional[knowledge_base_pb2.KnowledgeBase] = None,
+ ) -> Iterator[str]:
+ interpolation = artifact_utils.KnowledgeBaseInterpolation(
+ pattern=self._value,
+ kb=knowledge_base or knowledge_base_pb2.KnowledgeBase(),
+ )
+
+ for pattern in interpolation.results:
# Normalize the component path (this allows us to resolve ../
# sequences).
pattern = utils.NormalizePath(pattern.replace("\\", "/"))
@@ -345,7 +350,10 @@ def Interpolate(self, knowledge_base=None):
for p in self.InterpolateGrouping(pattern):
yield p
- def InterpolateGrouping(self, pattern):
+ def InterpolateGrouping(
+ self,
+ pattern: str,
+ ) -> Iterator[str]:
"""Interpolate inline globbing groups."""
components = []
offset = 0
@@ -363,11 +371,11 @@ def InterpolateGrouping(self, pattern):
for vector in itertools.product(*components):
yield "".join(vector)
- def _ReplaceRegExGrouping(self, grouping):
+ def _ReplaceRegExGrouping(self, grouping: re.Match[str]) -> str:
alternatives = grouping.group(1).split(",")
return "(" + "|".join(re.escape(s) for s in alternatives) + ")"
- def _ReplaceRegExPart(self, part):
+ def _ReplaceRegExPart(self, part: str) -> str:
if part == "**/":
return "(?:.*\\/)?"
elif part == "*":
@@ -402,14 +410,12 @@ def ExplainComponents(
# if a GlobExpression uses %%users.a%% and %%users.b%%, the underlying
# user might be different for a and b. For the sake of explaining
# possible values, this should still be enough.
- try:
- examples = artifact_utils.InterpolateKbAttributes(
- glob_part, knowledge_base
- )
- except artifact_utils.Error:
- # Interpolation can fail for many non-critical reasons, e.g. when the
- # client is missing a KB attribute.
- examples = []
+ interpolation = artifact_utils.KnowledgeBaseInterpolation(
+ pattern=glob_part,
+ kb=knowledge_base,
+ )
+
+ examples = interpolation.results
else:
examples = []
@@ -418,7 +424,7 @@ def ExplainComponents(
return components
- def AsRegEx(self):
+ def AsRegEx(self) -> rdf_standard.RegularExpression:
"""Return the current glob as a simple regex.
Note: No interpolation is performed.
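With `KnowledgeBaseInterpolation`, `%%...%%` interpolation and inline grouping still compose as before; a sketch with an illustrative glob pattern and knowledgebase:

```python
from grr_response_core.lib.rdfvalues import paths as rdf_paths
from grr_response_proto import knowledge_base_pb2

kb = knowledge_base_pb2.KnowledgeBase(
    users=[knowledge_base_pb2.User(username="foo", homedir="/home/foo")]
)

glob = rdf_paths.GlobExpression("%%users.homedir%%/.ssh/{id_rsa,id_ed25519}")
list(glob.Interpolate(knowledge_base=kb))
# -> ["/home/foo/.ssh/id_rsa", "/home/foo/.ssh/id_ed25519"]
```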
diff --git a/grr/core/grr_response_core/lib/rdfvalues/paths_test.py b/grr/core/grr_response_core/lib/rdfvalues/paths_test.py
index 0fe6de422d..eb9a0d717e 100644
--- a/grr/core/grr_response_core/lib/rdfvalues/paths_test.py
+++ b/grr/core/grr_response_core/lib/rdfvalues/paths_test.py
@@ -268,9 +268,9 @@ def testRegExIsCaseInsensitive(self):
def testGlobExpressionSplitsIntoExplainableComponents(self):
kb = knowledge_base_pb2.KnowledgeBase(
users=[
- knowledge_base_pb2.User(homedir="/home/foo"),
- knowledge_base_pb2.User(homedir="/home/bar"),
- knowledge_base_pb2.User(homedir="/home/baz"),
+ knowledge_base_pb2.User(username="foo", homedir="/home/foo"),
+ knowledge_base_pb2.User(username="bar", homedir="/home/bar"),
+ knowledge_base_pb2.User(username="baz", homedir="/home/baz"),
]
)
diff --git a/grr/core/grr_response_core/lib/rdfvalues/timeline.py b/grr/core/grr_response_core/lib/rdfvalues/timeline.py
index cb783034c3..03cf25397b 100644
--- a/grr/core/grr_response_core/lib/rdfvalues/timeline.py
+++ b/grr/core/grr_response_core/lib/rdfvalues/timeline.py
@@ -2,7 +2,7 @@
"""A module with RDF value wrappers for timeline protobufs."""
import os
-from typing import Iterator
+from typing import Iterable, Iterator
from grr_response_core.lib.rdfvalues import structs as rdf_structs
from grr_response_core.lib.util import gzchunked
@@ -73,19 +73,20 @@ def FromStatx(cls, path: bytes, stat: statx.Result) -> "TimelineEntry":
return entry
- @classmethod
- def SerializeStream(
- cls,
- entries: Iterator["TimelineEntry"],
- ) -> Iterator[bytes]:
- return gzchunked.Serialize(_.SerializeToBytes() for _ in entries)
- @classmethod
- def DeserializeStream(
- cls,
- entries: Iterator[bytes],
- ) -> Iterator["TimelineEntry"]:
- return map(cls.FromSerializedBytes, gzchunked.Deserialize(entries))
+def SerializeTimelineEntryStream(
+ entries: Iterable[timeline_pb2.TimelineEntry],
+) -> Iterator[bytes]:
+ return gzchunked.Serialize(entry.SerializeToString() for entry in entries)
+
+
+def DeserializeTimelineEntryStream(
+ entries: Iterator[bytes],
+) -> Iterator[timeline_pb2.TimelineEntry]:
+ for entry in gzchunked.Deserialize(entries):
+ parsed_entry = timeline_pb2.TimelineEntry()
+ parsed_entry.ParseFromString(entry)
+ yield parsed_entry
class TimelineProgress(rdf_structs.RDFProtoStruct):
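A round-trip sketch of the new free functions (field values are arbitrary):

```python
from grr_response_core.lib.rdfvalues import timeline as rdf_timeline
from grr_response_proto import timeline_pb2

entries = [timeline_pb2.TimelineEntry(path=b"/etc/passwd", size=1024)]

chunks = rdf_timeline.SerializeTimelineEntryStream(entries)
assert list(rdf_timeline.DeserializeTimelineEntryStream(chunks)) == entries
```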
diff --git a/grr/core/grr_response_core/lib/rdfvalues/timeline_test.py b/grr/core/grr_response_core/lib/rdfvalues/timeline_test.py
index 0bea132e79..b275a75575 100644
--- a/grr/core/grr_response_core/lib/rdfvalues/timeline_test.py
+++ b/grr/core/grr_response_core/lib/rdfvalues/timeline_test.py
@@ -10,6 +10,7 @@
from grr_response_core.lib.rdfvalues import timeline as rdf_timeline
from grr_response_core.lib.util import statx
from grr_response_core.lib.util import temp
+from grr_response_proto import timeline_pb2
class TimelineEntryTest(absltest.TestCase):
@@ -70,11 +71,11 @@ def testFromStatx(self):
self.assertEqual(entry.mtime_ns, statx_result.mtime_ns)
def testSerializeAndDeserializeStream(self):
- serialize = rdf_timeline.TimelineEntry.SerializeStream
- deserialize = rdf_timeline.TimelineEntry.DeserializeStream
+ serialize = rdf_timeline.SerializeTimelineEntryStream
+ deserialize = rdf_timeline.DeserializeTimelineEntryStream
- def RandomEntry() -> rdf_timeline.TimelineEntry:
- entry = rdf_timeline.TimelineEntry()
+ def RandomEntry() -> timeline_pb2.TimelineEntry:
+ entry = timeline_pb2.TimelineEntry()
entry.path = os.urandom(4096)
entry.mode = random.randint(0x0000, 0xFFFF - 1)
entry.size = random.randint(0, 10**9)
@@ -82,7 +83,7 @@ def RandomEntry() -> rdf_timeline.TimelineEntry:
entries = [RandomEntry() for _ in range(3000)]
- self.assertEqual(list(deserialize(serialize(iter(entries)))), entries)
+ self.assertEqual(list(deserialize(serialize(entries))), entries)
if __name__ == "__main__":
diff --git a/grr/core/grr_response_core/lib/registry.py b/grr/core/grr_response_core/lib/registry.py
index b014b22994..8ed73a2380 100644
--- a/grr/core/grr_response_core/lib/registry.py
+++ b/grr/core/grr_response_core/lib/registry.py
@@ -41,6 +41,9 @@ def IsAbstract(cls):
cls, abstract_attribute
)
+ def IsDeprecated(cls):
+ return hasattr(cls, "deprecated")
+
def __init__(cls, name, bases, env_dict):
abc.ABCMeta.__init__(cls, name, bases, env_dict)
@@ -124,18 +127,25 @@ class FlowRegistry(MetaclassRegistry):
"""A dedicated registry that only contains new style flows."""
FLOW_REGISTRY = {}
+ DEPRECATED_FLOWS = {}
def __init__(cls, name, bases, env_dict):
MetaclassRegistry.__init__(cls, name, bases, env_dict)
- if not cls.IsAbstract():
+ if cls.IsAbstract():
+ pass
+ elif cls.IsDeprecated():
+ cls.DEPRECATED_FLOWS[name] = cls
+ else:
cls.FLOW_REGISTRY[name] = cls
@classmethod
def FlowClassByName(mcs, flow_name):
flow_cls = mcs.FLOW_REGISTRY.get(flow_name)
if flow_cls is None:
- raise ValueError("Flow '%s' not known." % flow_name)
+ flow_cls = mcs.DEPRECATED_FLOWS.get(flow_name)
+ if flow_cls is None:
+ raise ValueError("Flow '%s' not known." % flow_name)
return flow_cls
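With this change, a flow class that carries a `deprecated` attribute is kept out of the main registry but remains resolvable by name; a sketch (assuming the usual `flow_base.FlowBase` base class, which registers through `FlowRegistry`):

```python
from grr_response_core.lib import registry
from grr_response_server import flow_base  # assumed FlowRegistry-backed base

class OldFlow(flow_base.FlowBase):
  deprecated = True  # mere presence of the attribute marks the class

assert "OldFlow" not in registry.FlowRegistry.FLOW_REGISTRY
assert registry.FlowRegistry.FlowClassByName("OldFlow") is OldFlow
```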
diff --git a/grr/proto/grr_response_proto/api/config.proto b/grr/proto/grr_response_proto/api/config.proto
index 75aab2379f..e40c886c59 100644
--- a/grr/proto/grr_response_proto/api/config.proto
+++ b/grr/proto/grr_response_proto/api/config.proto
@@ -85,6 +85,7 @@ message ApiGetGrrBinaryBlobArgs {
optional string path = 2 [(sem_type) = { description: "Binary path." }];
}
+// Next: 11
message ApiUiConfig {
optional string heading = 1;
optional string report_url = 2;
@@ -92,6 +93,7 @@ message ApiUiConfig {
optional string grr_version = 4;
optional string profile_image_url = 5;
optional HuntRunnerArgs default_hunt_runner_args = 6;
+ optional AdminUIHuntConfig hunt_config = 10;
optional AdminUIClientWarningsConfigOption client_warnings = 7;
optional uint64 default_access_duration_seconds = 8;
optional uint64 max_access_duration_seconds = 9;
diff --git a/grr/proto/grr_response_proto/api/flow.proto b/grr/proto/grr_response_proto/api/flow.proto
index 805f6c6b5a..f7aa91affa 100644
--- a/grr/proto/grr_response_proto/api/flow.proto
+++ b/grr/proto/grr_response_proto/api/flow.proto
@@ -125,8 +125,7 @@ message ApiFlowResult {
description: "Result payload.",
dynamic_type: "GetPayloadClass"
}];
- optional string payload_type = 2
- [(sem_type) = { description: "Type of the payload." }];
+ reserved 2;
optional uint64 timestamp = 3 [(sem_type) = {
type: "RDFDatetime",
description: "Timestamp indicating when result was written to the data "
diff --git a/grr/proto/grr_response_proto/api/hunt.proto b/grr/proto/grr_response_proto/api/hunt.proto
index ad3cd1ce38..4ce99ea1bf 100644
--- a/grr/proto/grr_response_proto/api/hunt.proto
+++ b/grr/proto/grr_response_proto/api/hunt.proto
@@ -170,8 +170,8 @@ message ApiHuntResult {
description: "Result payload.",
dynamic_type: "GetPayloadClass"
}];
- optional string payload_type = 4
- [(sem_type) = { description: "Type of the payload." }];
+ reserved 4;
+
optional uint64 timestamp = 5 [(sem_type) = {
type: "RDFDatetime",
description: "Timestamp indicating when result was written to the data "
diff --git a/grr/proto/grr_response_proto/api/root/user_management.proto b/grr/proto/grr_response_proto/api/root/user_management.proto
index de1293867c..f213cefd49 100644
--- a/grr/proto/grr_response_proto/api/root/user_management.proto
+++ b/grr/proto/grr_response_proto/api/root/user_management.proto
@@ -10,7 +10,8 @@ package grr;
message ApiCreateGrrUserArgs {
optional string username = 1;
optional ApiGrrUser.UserType user_type = 2;
- optional string password = 3;
+ optional string password = 3
+ ;
optional string email = 4;
}
@@ -21,7 +22,8 @@ message ApiDeleteGrrUserArgs {
message ApiModifyGrrUserArgs {
optional string username = 1;
optional ApiGrrUser.UserType user_type = 2;
- optional string password = 3;
+ optional string password = 3
+ ;
optional string email = 4;
}
diff --git a/grr/proto/grr_response_proto/artifact.proto b/grr/proto/grr_response_proto/artifact.proto
index eb34513ab0..2f126fdce2 100644
--- a/grr/proto/grr_response_proto/artifact.proto
+++ b/grr/proto/grr_response_proto/artifact.proto
@@ -25,7 +25,7 @@ message ArtifactSource {
GRR_CLIENT_ACTION = 40;
LIST_FILES = 41;
ARTIFACT_FILES = 42;
- GREP = 43;
+ reserved 43;
COMMAND = 45;
REKALL_PLUGIN = 46;
@@ -105,16 +105,6 @@ message Artifact {
}];
}
-message ArtifactProcessorDescriptor {
- optional string name = 1
- [(sem_type) = { description: "Processor's name as registered in GRR." }];
- optional string description = 2
- [(sem_type) = { description: "Description of this processor." }];
- repeated string output_types = 3 [(sem_type) = {
- description: "The semantic types that can be produced by the processor."
- }];
-}
-
message ArtifactDescriptor {
optional Artifact artifact = 1
[(sem_type) = { description: "Artifact itself." }];
@@ -124,9 +114,7 @@ message ArtifactDescriptor {
repeated string path_dependencies = 3 [(sem_type) = {
description: "Names of KB objects this artifact depends on."
}];
- repeated ArtifactProcessorDescriptor processors = 5 [(sem_type) = {
- description: "Processors that will process this artifact's output."
- }];
+ reserved 5;
optional bool is_custom = 6 [(sem_type) = {
description: "If True, this artifact was manually uploaded by the user."
}];
diff --git a/grr/proto/grr_response_proto/config.proto b/grr/proto/grr_response_proto/config.proto
index 96343237cf..294298e2ae 100644
--- a/grr/proto/grr_response_proto/config.proto
+++ b/grr/proto/grr_response_proto/config.proto
@@ -17,3 +17,19 @@ message AdminUIClientWarningRule {
(sem_type) = { description: "Warning message text (may contain markdown)." }
];
}
+
+message AdminUIHuntConfig {
+ repeated string default_include_labels = 1 [(sem_type) = {
+ description: "List of labels to be included in a hunt by default."
+ }];
+ repeated string default_exclude_labels = 2 [(sem_type) = {
+ description: "List of labels to be excluded from a hunt by default."
+ }];
+ optional bool make_default_exclude_labels_a_presubmit_check = 3
+ [(sem_type) = {
+ description: "Whether to make default exclude labels a presubmit check."
+ }];
+ optional string presubmit_warning_message = 4 [(sem_type) = {
+ description: "Warning message text to be shown to users when `exclude_labels` are included in a hunt (may contain markdown)."
+ }];
+}
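A sketch of populating the new message from Python; the label names and warning text are illustrative:

```python
from grr_response_proto import config_pb2

hunt_config = config_pb2.AdminUIHuntConfig(
    default_include_labels=["canary"],
    default_exclude_labels=["do-not-hunt"],
    make_default_exclude_labels_a_presubmit_check=True,
    presubmit_warning_message="Hunting `do-not-hunt` clients needs approval.",
)
```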
diff --git a/grr/proto/grr_response_proto/deprecated.proto b/grr/proto/grr_response_proto/deprecated.proto
index d210cbf538..dea847a614 100644
--- a/grr/proto/grr_response_proto/deprecated.proto
+++ b/grr/proto/grr_response_proto/deprecated.proto
@@ -1434,3 +1434,9 @@ message ApiListParsedFlowResultsResult {
// one can succeed.
repeated string errors = 2;
}
+
+message ArtifactProcessorDescriptor {
+ optional string name = 1;
+ optional string description = 2;
+ repeated string output_types = 3;
+}
diff --git a/grr/proto/grr_response_proto/flows.proto b/grr/proto/grr_response_proto/flows.proto
index 7a41155fbc..bc8e4c2d6b 100644
--- a/grr/proto/grr_response_proto/flows.proto
+++ b/grr/proto/grr_response_proto/flows.proto
@@ -618,14 +618,7 @@ message ArtifactCollectorFlowArgs {
default = false
];
- optional bool apply_parsers = 8 [
- (sem_type) = {
- description: "If True, apply any relevant parser to the collected data. "
- "If False, return the raw collected data e.g Files or Registry Keys.",
- label: ADVANCED,
- },
- default = true
- ];
+ reserved 8;
optional uint64 max_file_size = 9 [
(sem_type) = {
diff --git a/grr/proto/grr_response_proto/knowledge_base.proto b/grr/proto/grr_response_proto/knowledge_base.proto
index d9f60bc2a1..274d76cc9e 100644
--- a/grr/proto/grr_response_proto/knowledge_base.proto
+++ b/grr/proto/grr_response_proto/knowledge_base.proto
@@ -203,7 +203,7 @@ message User {
}];
}
-// Next ID: 40
+// Next ID: 41
message KnowledgeBase {
repeated User users = 32;
@@ -342,4 +342,5 @@ message KnowledgeBase {
repeated bytes DEPRECATED_users = 1;
reserved 39;
+ reserved 40;
}
diff --git a/grr/proto/grr_response_proto/large_file.proto b/grr/proto/grr_response_proto/large_file.proto
index 0416670d02..12b428ce33 100644
--- a/grr/proto/grr_response_proto/large_file.proto
+++ b/grr/proto/grr_response_proto/large_file.proto
@@ -29,7 +29,8 @@ message CollectLargeFileArgs {
// [1]: https://en.wikipedia.org/wiki/Authenticated_encryption
// [2]: https://en.wikipedia.org/wiki/Advanced_Encryption_Standard
// [3]: https://en.wikipedia.org/wiki/Galois/Counter_Mode
- optional bytes encryption_key = 3;
+ optional bytes encryption_key = 3
+ ;
}
// Output that the large file collection action returns upon initialization.
diff --git a/grr/proto/grr_response_proto/rrg.proto b/grr/proto/grr_response_proto/rrg.proto
index 5b7828005e..ce1f093bae 100644
--- a/grr/proto/grr_response_proto/rrg.proto
+++ b/grr/proto/grr_response_proto/rrg.proto
@@ -38,6 +38,8 @@ enum Action {
LIST_INTERFACES = 11;
// List filesystem mounts available on the system.
LIST_MOUNTS = 12;
+ // Get a value from the Windows Registry (Windows-only).
+ GET_WINREG_VALUE = 13;
// TODO: Define more actions that should be supported.
diff --git a/grr/proto/grr_response_proto/rrg/action/get_winreg_value.proto b/grr/proto/grr_response_proto/rrg/action/get_winreg_value.proto
new file mode 100644
index 0000000000..1ba46aa969
--- /dev/null
+++ b/grr/proto/grr_response_proto/rrg/action/get_winreg_value.proto
@@ -0,0 +1,31 @@
+// Copyright 2024 Google LLC
+//
+// Use of this source code is governed by an MIT-style license that can be found
+// in the LICENSE file or at https://opensource.org/licenses/MIT.
+syntax = "proto3";
+
+package rrg.action.get_winreg_value;
+
+import "grr_response_proto/rrg/winreg.proto";
+
+message Args {
+ // Root predefined key of the value to get.
+ rrg.winreg.PredefinedKey root = 1;
+
+ // Key relative to `root` of the value to get (e.g. `SOFTWARE\Microsoft`).
+ string key = 2;
+
+ // Name of the value to get.
+ string name = 3;
+}
+
+message Result {
+ // Root predefined key of the retrieved value.
+ rrg.winreg.PredefinedKey root = 1;
+
+ // Key relative to `root` of the retrieved value.
+ string key = 2;
+
+ // Retrieved value.
+ rrg.winreg.Value value = 3;
+}
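
A minimal sketch of building the request for the new `GET_WINREG_VALUE` action
from Python. The generated-module import paths are assumptions; only the
message and enum names come from the protos above:

    # Assumed generated-module paths for the new protos.
    from grr_response_proto.rrg import winreg_pb2
    from grr_response_proto.rrg.action import get_winreg_value_pb2

    args = get_winreg_value_pb2.Args(
        root=winreg_pb2.LOCAL_MACHINE,
        key=r"SOFTWARE\Microsoft\Windows NT\CurrentVersion",
        name="SystemRoot",
    )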
diff --git a/grr/proto/grr_response_proto/rrg/winreg.proto b/grr/proto/grr_response_proto/rrg/winreg.proto
new file mode 100644
index 0000000000..df8cd82b77
--- /dev/null
+++ b/grr/proto/grr_response_proto/rrg/winreg.proto
@@ -0,0 +1,59 @@
+// Copyright 2024 Google LLC
+//
+// Use of this source code is governed by an MIT-style license that can be found
+// in the LICENSE file or at https://opensource.org/licenses/MIT.
+syntax = "proto3";
+
+package rrg.winreg;
+
+// [Predefined key][1] of the Windows registry.
+//
+// Note that the integer representation **does not** correspond to the `HKEY_*`
+// constants as defined in the [`winreg.h`] header (they are out of the allowed
+// range for Protocol Buffer enums).
+//
+// [1]: https://learn.microsoft.com/en-us/windows/win32/sysinfo/predefined-keys
+enum PredefinedKey {
+ UNKNOWN = 0;
+ CLASSES_ROOT = 1;
+ CURRENT_USER = 2;
+ LOCAL_MACHINE = 3;
+ USERS = 4;
+ PERFORMANCE_DATA = 5;
+ CURRENT_CONFIG = 6;
+ PERFORMANCE_TEXT = 7;
+ PERFORMANCE_NLSTEXT = 8;
+ CURRENT_USER_LOCAL_SETTINGS = 9;
+}
+
+// [Value][1] of the Windows registry.
+//
+// [1]: https://learn.microsoft.com/en-us/windows/win32/sysinfo/registry-value-types
+message Value {
+ // Name of the value.
+ string name = 1;
+
+ // Data associated with the value.
+ oneof data {
+ // Byte string.
+ bytes bytes = 2;
+ // Unicode string.
+ string string = 3;
+ // Unicode string with unexpanded references to environment variables.
+ string expand_string = 4;
+ // Sequence of unicode strings.
+ StringList multi_string = 5;
+ // Symbolic link to another registry key.
+ string link = 6;
+ // 32-bit number.
+ uint32 uint32 = 7;
+ // 64-bit number.
+ uint64 uint64 = 8;
+ }
+
+  // Wrapper for a list of strings to be used in `oneof` fields.
+ message StringList {
+ // Actual list of strings.
+ repeated string values = 1;
+ }
+}
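
Since `Value` keeps its data in a `oneof`, consumers have to dispatch on the
populated variant. A small illustrative helper (not part of this change), using
the standard protobuf `WhichOneof` API:

    def winreg_value_data(value):
        """Returns whichever variant of the `data` oneof is set, if any."""
        which = value.WhichOneof("data")
        if which is None:
            return None  # No data variant was populated.
        data = getattr(value, which)
        if which == "multi_string":
            return list(data.values)  # Unwrap the StringList wrapper.
        return data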
diff --git a/grr/server/grr_response_server/artifact.py b/grr/server/grr_response_server/artifact.py
index 8bec2a2398..e2d0dcec9b 100644
--- a/grr/server/grr_response_server/artifact.py
+++ b/grr/server/grr_response_server/artifact.py
@@ -6,14 +6,8 @@
import pathlib
import re
import stat
-from typing import Iterable
-from typing import Iterator
-from typing import List
-from typing import Optional
-from typing import Sequence
from grr_response_core.lib import artifact_utils
-from grr_response_core.lib import parsers
from grr_response_core.lib import rdfvalue
from grr_response_core.lib import utils
from grr_response_core.lib.rdfvalues import client as rdf_client
@@ -21,17 +15,20 @@
from grr_response_core.lib.rdfvalues import client_fs as rdf_client_fs
from grr_response_core.lib.rdfvalues import file_finder as rdf_file_finder
from grr_response_core.lib.rdfvalues import mig_artifacts
+from grr_response_core.lib.rdfvalues import mig_client
+from grr_response_core.lib.rdfvalues import mig_client_action
+from grr_response_core.lib.rdfvalues import mig_client_fs
from grr_response_core.lib.rdfvalues import paths as rdf_paths
from grr_response_core.lib.rdfvalues import protodict as rdf_protodict
from grr_response_core.lib.rdfvalues import structs as rdf_structs
from grr_response_proto import flows_pb2
+from grr_response_proto import jobs_pb2
+from grr_response_proto import knowledge_base_pb2
from grr_response_server import artifact_registry
from grr_response_server import data_store
-from grr_response_server import file_store
from grr_response_server import flow_base
from grr_response_server import flow_responses
from grr_response_server import server_stubs
-from grr_response_server.databases import db
from grr_response_server.flows.general import distro
@@ -196,6 +193,48 @@ def Start(self):
next_state=self._ProcessWindowsTimeZoneKeyName.__name__,
)
+ args.pathspec.path = r"HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Session Manager\Environment\TEMP"
+ self.CallClient(
+ server_stubs.GetFileStat,
+ args,
+ next_state=self._ProcessWindowsEnvTemp.__name__,
+ )
+
+ args.pathspec.path = r"HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Session Manager\Environment\Path"
+ self.CallClient(
+ server_stubs.GetFileStat,
+ args,
+ next_state=self._ProcessWindowsEnvPath.__name__,
+ )
+
+ args.pathspec.path = r"HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\Control\Session Manager\Environment\ComSpec"
+ self.CallClient(
+ server_stubs.GetFileStat,
+ args,
+ next_state=self._ProcessWindowsEnvComSpec.__name__,
+ )
+
+ args.pathspec.path = r"HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Session Manager\Environment\windir"
+ self.CallClient(
+ server_stubs.GetFileStat,
+ args,
+ next_state=self._ProcessWindowsEnvWindir.__name__,
+ )
+
+ args.pathspec.path = r"HKEY_LOCAL_MACHINE\Software\Microsoft\Windows NT\CurrentVersion\ProfileList\ProfilesDirectory"
+ self.CallClient(
+ server_stubs.GetFileStat,
+ args,
+ next_state=self._ProcessWindowsProfilesDirectory.__name__,
+ )
+
+ args.pathspec.path = r"HKEY_LOCAL_MACHINE\Software\Microsoft\Windows NT\CurrentVersion\ProfileList\AllUsersProfile"
+ self.CallClient(
+ server_stubs.GetFileStat,
+ args,
+ next_state=self._ProcessWindowsEnvAllUsersProfile.__name__,
+ )
+
args = rdf_file_finder.FileFinderArgs()
# TODO: There is no dedicated action for obtaining registry
# values but `STAT` action of the file-finder will get it. This should be
@@ -295,58 +334,15 @@ def _ProcessWindowsEnvSystemRoot(
self.state.knowledge_base.environ_systemroot = system_root
self.state.knowledge_base.environ_systemdrive = system_drive
- # pylint: disable=line-too-long
- # pyformat: disable
- #
- # TODO: The following values depend on `SystemRoot` so we have
- # to schedule its collection after we have root. However, this requires
- # intrinsic knowledge and is not much better than just hardcoding them.
- # Instead, we should collect all variables as they are and then do the
- # interpolation without hardcoding the dependencies.
- #
- # TODO: There is no dedicated action for obtaining registry
- # values. The existing artifact collector uses `GetFileStat` action for
- # this which is horrible.
- args = rdf_client_action.GetFileStatRequest()
- args.pathspec.pathtype = rdf_paths.PathSpec.PathType.REGISTRY
-
- args.pathspec.path = r"HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Session Manager\Environment\TEMP"
- self.CallClient(
- server_stubs.GetFileStat,
- args,
- next_state=self._ProcessWindowsEnvTemp.__name__,
- )
-
- args.pathspec.path = r"HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Session Manager\Environment\Path"
- self.CallClient(
- server_stubs.GetFileStat,
- args,
- next_state=self._ProcessWindowsEnvPath.__name__,
- )
-
- args.pathspec.path = r"HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\Control\Session Manager\Environment\ComSpec"
+ list_users_dir_args = jobs_pb2.ListDirRequest()
+ list_users_dir_args.pathspec.pathtype = jobs_pb2.PathSpec.PathType.OS
+ list_users_dir_args.pathspec.path = f"{system_drive}\\Users"
self.CallClient(
- server_stubs.GetFileStat,
- args,
- next_state=self._ProcessWindowsEnvComSpec.__name__,
- )
-
- args.pathspec.path = r"HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Session Manager\Environment\windir"
- self.CallClient(
- server_stubs.GetFileStat,
- args,
- next_state=self._ProcessWindowsEnvWindir.__name__,
+ server_stubs.ListDirectory,
+ mig_client_action.ToRDFListDirRequest(list_users_dir_args),
+ next_state=self._ProcessWindowsListUsersDir.__name__,
)
- args.pathspec.path = r"HKEY_LOCAL_MACHINE\Software\Microsoft\Windows NT\CurrentVersion\ProfileList\ProfilesDirectory"
- self.CallClient(
- server_stubs.GetFileStat,
- args,
- next_state=self._ProcessWindowsProfilesDirectory.__name__,
- )
- # pylint: enable=line-too-long
- # pyformat: enable
-
def _ProcessWindowsEnvProgramFilesDir(
self,
responses: flow_responses.Responses[rdfvalue.RDFValue],
@@ -450,47 +446,10 @@ def _ProcessWindowsEnvProgramData(
message = f"Unexpected response type: {type(response)}"
raise flow_base.FlowError(message)
- program_data = response.registry_data.string
- # TODO: We should not hardcode the dependency on `%SystemRoot%`
- # and do an interpolation pass once all variables are there.
- program_data = artifact_utils.ExpandWindowsEnvironmentVariables(
- program_data,
- self.state.knowledge_base,
+ self.state.knowledge_base.environ_programdata = (
+ response.registry_data.string
)
- self.state.knowledge_base.environ_programdata = program_data
- # TODO: Remove once this knowledge base field is removed.
- self.state.knowledge_base.environ_allusersappdata = program_data
-
- # pylint: disable=line-too-long
- # pyformat: disable
- #
- # Interestingly, it looks like there is no such value in the registry on
- # Windows 10. But the original artifact uses this path and there are other
- # websites stating that it should be there [1, 2] we try this anyway.
- #
- # According to Wikipedia [3] this value since Windows Vista is deprecated in
- # favour of `%PRORGAMDATA%` so e fallback to that in case we cannot retrieve
- # it.
- #
- # [1]: https://renenyffenegger.ch/notes/Windows/dirs/ProgramData/index
- # [2]: https://winreg-kb.readthedocs.io/en/latest/sources/system-keys/Environment-variables.html#currentversion-profilelist-key
- # [3]: https://en.wikipedia.org/wiki/Environment_variable#ALLUSERSPROFILE
- #
- # TODO: There is no dedicated action for obtaining registry
- # values. The existing artifact collector uses `GetFileStat` action for
- # this which is horrible.
- args = rdf_client_action.GetFileStatRequest()
- args.pathspec.pathtype = rdf_paths.PathSpec.PathType.REGISTRY
- args.pathspec.path = r"HKEY_LOCAL_MACHINE\Software\Microsoft\Windows NT\CurrentVersion\ProfileList\AllUsersProfile"
- self.CallClient(
- server_stubs.GetFileStat,
- args,
- next_state=self._ProcessWindowsEnvAllUsersProfile.__name__,
- )
- # pylint: enable=line-too-long
- # pyformat: enable
-
def _ProcessWindowsEnvDriverData(
self,
responses: flow_responses.Responses[rdfvalue.RDFValue],
@@ -681,15 +640,7 @@ def _ProcessWindowsEnvTemp(
message = f"Unexpected response type: {type(response)}"
raise flow_base.FlowError(message)
- temp = response.registry_data.string
- # TODO: We should not hardcode the dependency of `TEMP` on
- # `SystemRoot` and do an interpolation pass once all variables are there.
- temp = artifact_utils.ExpandWindowsEnvironmentVariables(
- temp,
- self.state.knowledge_base,
- )
-
- self.state.knowledge_base.environ_temp = temp
+ self.state.knowledge_base.environ_temp = response.registry_data.string
def _ProcessWindowsEnvPath(
self,
@@ -708,15 +659,7 @@ def _ProcessWindowsEnvPath(
message = f"Unexpected response type: {type(response)}"
raise flow_base.FlowError(message)
- path = response.registry_data.string
- # TODO: We should not hardcode the dependency of `Path` on
- # `SystemRoot` and do an interpolation pass once all variables are there.
- path = artifact_utils.ExpandWindowsEnvironmentVariables(
- path,
- self.state.knowledge_base,
- )
-
- self.state.knowledge_base.environ_path = path
+ self.state.knowledge_base.environ_path = response.registry_data.string
def _ProcessWindowsEnvComSpec(
self,
@@ -735,15 +678,7 @@ def _ProcessWindowsEnvComSpec(
message = f"Unexpected response type: {type(response)}"
raise flow_base.FlowError(message)
- com_spec = response.registry_data.string
- # TODO: We should not hardcode the dependency of `ComSpec` on
- # `SystemRoot` and do an interpolation pass once all variables are there.
- com_spec = artifact_utils.ExpandWindowsEnvironmentVariables(
- com_spec,
- self.state.knowledge_base,
- )
-
- self.state.knowledge_base.environ_comspec = com_spec
+ self.state.knowledge_base.environ_comspec = response.registry_data.string
def _ProcessWindowsEnvWindir(
self,
@@ -762,15 +697,7 @@ def _ProcessWindowsEnvWindir(
message = f"Unexpected response type: {type(response)}"
raise flow_base.FlowError(message)
- windir = response.registry_data.string
- # TODO: We should not hardcode the dependency of `windir` on
- # `SystemRoot` and do an interpolation pass once all variables are there.
- windir = artifact_utils.ExpandWindowsEnvironmentVariables(
- windir,
- self.state.knowledge_base,
- )
-
- self.state.knowledge_base.environ_windir = windir
+ self.state.knowledge_base.environ_windir = response.registry_data.string
def _ProcessWindowsProfilesDirectory(
self,
@@ -789,47 +716,34 @@ def _ProcessWindowsProfilesDirectory(
message = f"Unexpected response type: {type(response)}"
raise flow_base.FlowError(message)
- profiles_directory = response.registry_data.string
- # TODO: We should not hardcode the dependency on `SystemDrive`
- # and do an interpolation pass once all variables are there.
- profiles_directory = artifact_utils.ExpandWindowsEnvironmentVariables(
- profiles_directory,
- self.state.knowledge_base,
+ self.state.knowledge_base.environ_profilesdirectory = (
+ response.registry_data.string
)
- self.state.knowledge_base.environ_profilesdirectory = profiles_directory
-
def _ProcessWindowsEnvAllUsersProfile(
self,
responses: flow_responses.Responses[rdfvalue.RDFValue],
) -> None:
- if responses.success:
- if len(responses) != 1:
- message = f"Unexpected number of responses: {len(responses)}"
- raise flow_base.FlowError(message)
-
- response = responses.First()
- if not isinstance(response, rdf_client_fs.StatEntry):
- message = f"Unexpected response type: {type(response)}"
- raise flow_base.FlowError(message)
-
- allusersprofile = response.registry_data.string
- else:
+ if not responses.success:
# Since Windows Vista `%PROGRAMDATA%` superseded `%ALLUSERSPROFILE%` [1],
- # so we fall back to that in case we cannot obtain it (which is expected
- # on most modern machines and thus we don't even log an error).
+      # this call is expected to fail most of the time, so we don't log
+      # anything or raise any errors.
#
# [1]: https://en.wikipedia.org/wiki/Environment_variable#ALLUSERSPROFILE
- allusersprofile = self.state.knowledge_base.environ_programdata
+ return
- # TODO: We should not hardcode dependency on `%ProgramData%`
- # and do an interpolation pass once all variables are there.
- allusersprofile = artifact_utils.ExpandWindowsEnvironmentVariables(
- allusersprofile,
- self.state.knowledge_base,
- )
+ if len(responses) != 1:
+ message = f"Unexpected number of responses: {len(responses)}"
+ raise flow_base.FlowError(message)
- self.state.knowledge_base.environ_allusersprofile = allusersprofile
+ response = responses.First()
+ if not isinstance(response, rdf_client_fs.StatEntry):
+ message = f"Unexpected response type: {type(response)}"
+ raise flow_base.FlowError(message)
+
+ self.state.knowledge_base.environ_allusersprofile = (
+ response.registry_data.string
+ )
def _ProcessWindowsProfiles(
self,
@@ -1014,10 +928,60 @@ def _ProcessWindowsWMIUserAccounts(
user.userdomain = domain
+ def _ProcessWindowsListUsersDir(
+ self,
+ responses: flow_responses.Responses[rdfvalue.RDFValue],
+ ) -> None:
+ if not responses.success:
+ self.Log("Failed to list Windows `Users` directory: %s", responses.status)
+ return
+
+ for response in responses:
+ if not isinstance(response, rdf_client_fs.StatEntry):
+ raise flow_base.FlowError(f"Unexpected response type: {type(response)}")
+
+ response = mig_client_fs.ToProtoStatEntry(response)
+
+      # There can be random files in there as well. We are interested
+      # exclusively in folders, as a file does not indicate a user profile.
+ if not stat.S_ISDIR(response.st_mode):
+ continue
+
+ # TODO: Remove once the `ListDirectory` action is fixed not
+ # to yield results with leading slashes on Windows.
+ response.pathspec.path = response.pathspec.path.removeprefix("/")
+
+ path = pathlib.PureWindowsPath(response.pathspec.path)
+
+      # Certain profiles there do not correspond to real users active on the
+      # machine, so we should not report them as such.
+ if path.name.upper() in [
+ "ADMINISTRATOR",
+ "ALL USERS",
+ "DEFAULT",
+ "DEFAULT USER",
+ "DEFAULTUSER0",
+ "PUBLIC",
+ ]:
+ continue
+
+ user = knowledge_base_pb2.User()
+ user.username = path.name
+ user.homedir = str(path)
+
+ self.state.knowledge_base.MergeOrAddUser(mig_client.ToRDFUser(user))
+
def End(self, responses):
"""Finish up."""
del responses
+ if self.client_os == "Windows":
+ self.state.knowledge_base = mig_client.ToRDFKnowledgeBase(
+ artifact_utils.ExpandKnowledgebaseWindowsEnvVars(
+ mig_client.ToProtoKnowledgeBase(self.state.knowledge_base),
+ ),
+ )
+
# TODO: `%LOCALAPPDATA%` is a very often used variable that we
# potentially not collect due to limitations of the Windows registry. For
# now, in case we did not collect it, we set it to the default Windows value
@@ -1061,194 +1025,6 @@ def InitializeKnowledgeBase(self):
pass
-class ParseResults(object):
- """A class representing results of parsing flow responses."""
-
- def __init__(self):
- self._responses: List[rdfvalue.RDFValue] = []
- self._errors: List[parsers.ParseError] = []
-
- def AddResponses(self, responses: Iterator[rdfvalue.RDFValue]) -> None:
- self._responses.extend(responses)
-
- def AddError(self, error: parsers.ParseError) -> None:
- self._errors.append(error)
-
- def Responses(self) -> Iterator[rdfvalue.RDFValue]:
- return iter(self._responses)
-
- def Errors(self) -> Iterator[parsers.ParseError]:
- return iter(self._errors)
-
-
-class ParserApplicator(object):
- """An utility class for applying many parsers to responses."""
-
- def __init__(
- self,
- factory: parsers.ArtifactParserFactory,
- client_id: str,
- knowledge_base: rdf_client.KnowledgeBase,
- timestamp: Optional[rdfvalue.RDFDatetime] = None,
- ):
- """Initializes the applicator.
-
- Args:
- factory: A parser factory that produces parsers to apply.
- client_id: An identifier of the client for which the responses were
- collected.
- knowledge_base: A knowledge base of the client from which the responses
- were collected.
- timestamp: An optional timestamp at which parsers should interpret the
- results. For example, parsers that depend on files, will receive content
- of files as it was at the given timestamp.
- """
- self._factory = factory
- self._client_id = client_id
- self._knowledge_base = knowledge_base
- self._timestamp = timestamp
- self._results = ParseResults()
-
- def Apply(self, responses: Sequence[rdfvalue.RDFValue]):
- """Applies all known parsers to the specified responses.
-
- Args:
- responses: A sequence of responses to apply the parsers to.
- """
- for response in responses:
- self._ApplySingleResponse(response)
-
- self._ApplyMultiResponse(responses)
-
- # File parsers accept only stat responses. It might be possible that an
- # artifact declares multiple sources and has multiple parsers attached (each
- # for different kind of source). Thus, artifacts are not "well typed" now
- # we must supply parsers only with something they support.
- stat_responses: List[rdf_client_fs.StatEntry] = []
- for response in responses:
- if isinstance(response, rdf_client_fs.StatEntry):
- stat_responses.append(response)
-
- has_single_file_parsers = self._factory.HasSingleFileParsers()
- has_multi_file_parsers = self._factory.HasMultiFileParsers()
-
- if has_single_file_parsers or has_multi_file_parsers:
- pathspecs = [response.pathspec for response in stat_responses]
- # It might be also the case that artifact has both regular response parser
- # and file parser attached and sources that don't collect files but yield
- # stat entries.
- #
- # TODO(hanuszczak): This is a quick workaround that works for now, but
- # can lead to spurious file being parsed if the file was collected in the
- # past and now only a stat entry response came. A proper solution would be
- # to tag responses with artifact source and then make parsers define what
- # sources they support.
- pathspecs = list(filter(self._HasFile, pathspecs))
- filedescs = [self._OpenFile(pathspec) for pathspec in pathspecs]
-
- for pathspec, filedesc in zip(pathspecs, filedescs):
- self._ApplySingleFile(pathspec, filedesc)
-
- self._ApplyMultiFile(pathspecs, filedescs)
-
- def Responses(self) -> Iterator[rdfvalue.RDFValue]:
- """Returns an iterator over all parsed responses."""
- yield from self._results.Responses()
-
- def Errors(self) -> Iterator[parsers.ParseError]:
- """Returns an iterator over errors that occurred during parsing."""
- yield from self._results.Errors()
-
- def _ApplySingleResponse(
- self,
- response: rdfvalue.RDFValue,
- ) -> None:
- """Applies all single-response parsers to the given response."""
- for parser in self._factory.SingleResponseParsers():
- try:
- results = parser.ParseResponse(self._knowledge_base, response)
- self._results.AddResponses(results)
- except parsers.ParseError as error:
- self._results.AddError(error)
-
- def _ApplyMultiResponse(
- self,
- responses: Iterable[rdfvalue.RDFValue],
- ) -> None:
- """Applies all multi-response parsers to the given responses."""
- for parser in self._factory.MultiResponseParsers():
- try:
- results = parser.ParseResponses(self._knowledge_base, responses)
- self._results.AddResponses(results)
- except parsers.ParseError as error:
- self._results.AddError(error)
-
- def _ApplySingleFile(
- self,
- pathspec: rdf_paths.PathSpec,
- filedesc: file_store.BlobStream,
- ) -> None:
- """Applies all single-file parsers to the given file."""
- for parser in self._factory.SingleFileParsers():
- try:
- results = parser.ParseFile(self._knowledge_base, pathspec, filedesc)
- self._results.AddResponses(results)
- except parsers.ParseError as error:
- self._results.AddError(error)
-
- def _ApplyMultiFile(
- self,
- pathspecs: Iterable[rdf_paths.PathSpec],
- filedescs: Iterable[file_store.BlobStream],
- ) -> None:
- """Applies all multi-file parsers to the given file."""
- for parser in self._factory.MultiFileParsers():
- try:
- results = parser.ParseFiles(self._knowledge_base, pathspecs, filedescs)
- self._results.AddResponses(results)
- except parsers.ParseError as error:
- self._results.AddError(error)
-
- def _HasFile(self, pathspec: rdf_paths.PathSpec) -> bool:
- """Checks whether any file for the given pathspec was ever collected."""
- client_path = db.ClientPath.FromPathSpec(self._client_id, pathspec)
- return file_store.GetLastCollectionPathInfo(client_path) is not None
-
- def _OpenFile(self, pathspec: rdf_paths.PathSpec) -> file_store.BlobStream:
- # TODO(amoser): This is not super efficient, AFF4 provided an api to open
- # all pathspecs at the same time, investigate if optimizing this is worth
- # it.
- client_path = db.ClientPath.FromPathSpec(self._client_id, pathspec)
- return file_store.OpenFile(client_path, max_timestamp=self._timestamp)
-
-
-def ApplyParsersToResponses(parser_factory, responses, flow_obj):
- """Parse responses with applicable parsers.
-
- Args:
- parser_factory: A parser factory for specific artifact.
- responses: A list of responses from the client.
- flow_obj: An artifact collection flow.
-
- Returns:
- A list of (possibly parsed) responses.
- """
- if not parser_factory.HasParsers():
- # If we don't have any parsers, we expect to use the unparsed responses.
- return responses
-
- knowledge_base = flow_obj.state.knowledge_base
- client_id = flow_obj.client_id
-
- applicator = ParserApplicator(parser_factory, client_id, knowledge_base)
- applicator.Apply(responses)
-
- for error in applicator.Errors():
- flow_obj.Log("Error encountered when parsing responses: %s", error)
-
- return list(applicator.Responses())
-
-
def UploadArtifactYamlFile(file_content,
overwrite=True,
overwrite_system_artifacts=False):
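
The recurring TODOs about hardcoded `%SystemRoot%`/`%SystemDrive%` dependencies
are resolved above by deferring expansion to a single pass in `End` via
`artifact_utils.ExpandKnowledgebaseWindowsEnvVars`. A standalone sketch of that
idea (not the real implementation; the helper name and regex are illustrative):

    import re

    def expand_windows_env_vars(env: dict[str, str]) -> dict[str, str]:
        """Resolves %VAR% references against other collected values."""

        def expand(value: str) -> str:
            return re.sub(
                r"%([^%]+)%",
                lambda m: env.get(m.group(1).lower(), m.group(0)),
                value,
            )

        # Two passes cover one level of nesting, e.g. TEMP -> SystemRoot.
        for _ in range(2):
            env = {name: expand(value) for name, value in env.items()}
        return env

    assert expand_windows_env_vars({
        "systemroot": "X:\\Windows",
        "temp": "%SystemRoot%\\TEMP",
    })["temp"] == "X:\\Windows\\TEMP"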
diff --git a/grr/server/grr_response_server/artifact_registry.py b/grr/server/grr_response_server/artifact_registry.py
index 85d9b89376..e4f738a9ad 100644
--- a/grr/server/grr_response_server/artifact_registry.py
+++ b/grr/server/grr_response_server/artifact_registry.py
@@ -10,7 +10,6 @@
from grr_response_core import config
from grr_response_core.lib import artifact_utils
-from grr_response_core.lib import parsers
from grr_response_core.lib import type_info
from grr_response_core.lib import utils
from grr_response_core.lib.rdfvalues import artifacts as rdf_artifacts
@@ -630,23 +629,4 @@ def GetArtifactPathDependencies(rdf_artifact):
for path in paths:
for match in artifact_utils.INTERPOLATED_REGEX.finditer(path):
deps.add(match.group()[2:-2]) # Strip off %%.
- deps.update(GetArtifactParserDependencies(rdf_artifact))
- return deps
-
-
-def GetArtifactParserDependencies(rdf_artifact):
- """Return the set of knowledgebase path dependencies required by the parser.
-
- Args:
- rdf_artifact: RDF artifact object.
-
- Returns:
- A set of strings for the required kb objects e.g.
- ["users.appdata", "systemroot"]
- """
- factory = parsers.ArtifactParserFactory(str(rdf_artifact.name))
-
- deps = set()
- for p in factory.AllParserTypes():
- deps.update(p.knowledgebase_dependencies)
return deps
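
With parser dependencies gone, `GetArtifactPathDependencies` now derives its
result solely from `%%...%%` interpolation placeholders in source paths. A
small sketch of that extraction (the regex is an assumption standing in for
`artifact_utils.INTERPOLATED_REGEX`):

    import re

    # Assumed placeholder syntax: %%attribute.name%%.
    INTERPOLATED_REGEX = re.compile(r"%%([^%]+?)%%")

    def path_dependencies(paths):
        deps = set()
        for path in paths:
            for match in INTERPOLATED_REGEX.finditer(path):
                deps.add(match.group(1))  # Name without the %% delimiters.
        return deps

    assert path_dependencies([r"%%environ_systemdrive%%\Users"]) == {
        "environ_systemdrive"
    }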
diff --git a/grr/server/grr_response_server/artifact_registry_test.py b/grr/server/grr_response_server/artifact_registry_test.py
index 99ff7adff4..2e345f686a 100644
--- a/grr/server/grr_response_server/artifact_registry_test.py
+++ b/grr/server/grr_response_server/artifact_registry_test.py
@@ -216,12 +216,11 @@ def testValidatePathIsAString(self):
def testValidateMissingRequiredAttributes(self):
source = rdf_artifacts.ArtifactSource(
- type=rdf_artifacts.ArtifactSource.SourceType.GREP,
- attributes={
- "paths": ["/etc", "/dev", "/opt"],
- })
+ type=rdf_artifacts.ArtifactSource.SourceType.PATH,
+ attributes={},
+ )
- expected = "missing required attributes: 'content_regex_list'"
+ expected = "missing required attributes: 'paths'"
with self.assertRaisesRegex(rdf_artifacts.ArtifactSourceSyntaxError,
expected):
source.Validate()
diff --git a/grr/server/grr_response_server/artifact_test.py b/grr/server/grr_response_server/artifact_test.py
index 4a0594e7e4..60416c9235 100644
--- a/grr/server/grr_response_server/artifact_test.py
+++ b/grr/server/grr_response_server/artifact_test.py
@@ -3,30 +3,26 @@
import io
import os
-import subprocess
-from typing import Collection
-from typing import IO
-from typing import Iterable
from typing import Iterator
-from unittest import mock
from absl import app
-from absl.testing import absltest
from grr_response_client import actions
from grr_response_client.client_actions import file_fingerprint
from grr_response_client.client_actions import searching
from grr_response_client.client_actions import standard
from grr_response_core import config
-from grr_response_core.lib import parser
-from grr_response_core.lib import parsers
from grr_response_core.lib import rdfvalue
-from grr_response_core.lib.rdfvalues import anomaly as rdf_anomaly
from grr_response_core.lib.rdfvalues import artifacts as rdf_artifacts
from grr_response_core.lib.rdfvalues import client as rdf_client
+from grr_response_core.lib.rdfvalues import client_action as rdf_client_action
from grr_response_core.lib.rdfvalues import client_fs as rdf_client_fs
+from grr_response_core.lib.rdfvalues import mig_client_action
+from grr_response_core.lib.rdfvalues import mig_client_fs
from grr_response_core.lib.rdfvalues import paths as rdf_paths
from grr_response_core.lib.rdfvalues import protodict as rdf_protodict
+from grr_response_proto import jobs_pb2
+from grr_response_proto import knowledge_base_pb2
from grr_response_proto import objects_pb2
from grr_response_server import action_registry
from grr_response_server import artifact
@@ -37,15 +33,10 @@
from grr_response_server.databases import db
from grr_response_server.databases import db_test_utils
from grr_response_server.flows.general import collectors
-from grr_response_server.rdfvalues import mig_objects
-from grr_response_server.rdfvalues import objects as rdf_objects
from grr.test_lib import action_mocks
from grr.test_lib import artifact_test_lib
-from grr.test_lib import client_test_lib
from grr.test_lib import flow_test_lib
-from grr.test_lib import parser_test_lib
from grr.test_lib import test_lib
-from grr.test_lib import time
from grr.test_lib import vfs_test_lib
# pylint: mode=test
@@ -81,73 +72,6 @@
]
-# TODO(hanuszczak): Rename it back to `TestCmdProcessor` once new testing
-# framework is properly set up.
-#
-# Class of this name is clashing with other `TestCmdProcessor` (declared in
-# `//grr/server/grr_response_server/gui/selenium_tests/
-# artifact_view_test.py`) and breaks the test class register. This should be
-# fixed when the test class register is gone and new test discovery (`pytest`)
-# is deployed.
-class CmdProcessor(parser.CommandParser):
-
- output_types = [rdf_client.SoftwarePackages]
- supported_artifacts = ["TestCmdArtifact"]
-
- def Parse(self, cmd, args, stdout, stderr, return_val, knowledge_base):
- _ = cmd, args, stdout, stderr, return_val, knowledge_base
- packages = []
- packages.append(
- rdf_client.SoftwarePackage.Installed(
- name="Package1",
- description="Desc1",
- version="1",
- architecture="amd64"))
- packages.append(
- rdf_client.SoftwarePackage.Installed(
- name="Package2",
- description="Desc2",
- version="1",
- architecture="i386"))
-
- yield rdf_client.SoftwarePackages(packages=packages)
-
- # Also yield something random so we can test return type filtering.
- yield rdf_client_fs.StatEntry()
-
- # Also yield an anomaly to test that.
- yield rdf_anomaly.Anomaly(
- type="PARSER_ANOMALY", symptom="could not parse gremlins.")
-
-
-class MultiProvideParser(parser.RegistryValueParser):
-
- output_types = [rdf_protodict.Dict]
- supported_artifacts = ["DepsProvidesMultiple"]
-
- def Parse(self, stat, knowledge_base):
- _ = stat, knowledge_base
- test_dict = {
- "environ_temp": rdfvalue.RDFString("tempvalue"),
- "environ_path": rdfvalue.RDFString("pathvalue")
- }
- yield rdf_protodict.Dict(test_dict)
-
-
-class RaisingParser(parsers.SingleResponseParser[None]):
-
- output_types = [None]
- supported_artifacts = ["RaisingArtifact"]
-
- def ParseResponse(
- self,
- knowledge_base: rdf_client.KnowledgeBase,
- response: rdfvalue.RDFValue,
- ) -> Iterator[None]:
- del knowledge_base, response # Unused.
- raise parsers.ParseError("It was bound to happen.")
-
-
# TODO: These should be defined next to the test they are used in
# once the metaclass registry madness is resolved.
@@ -239,7 +163,7 @@ def testUploadArtifactYamlFileAndDumpToYaml(self):
artifacts_by_name = {
artifact.name: artifact for artifact in loaded_artifacts
}
- self.assertIn("DepsWindirRegex", artifacts_by_name)
+ self.assertIn("DepsWindir", artifacts_by_name)
self.assertIn("TestFilesArtifact", artifacts_by_name)
self.assertStartsWith(
artifacts_by_name["WMIActiveScriptEventConsumer"].urls[0],
@@ -263,10 +187,9 @@ def testUploadArtifactYamlFileAndDumpToYaml(self):
def testUploadArtifactYamlFileMissingDoc(self):
content = """name: Nodoc
sources:
-- type: GREP
+- type: PATH
attributes:
paths: [/etc/blah]
- content_regex_list: ["stuff"]
supported_os: [Linux]
"""
with self.assertRaises(rdf_artifacts.ArtifactDefinitionError):
@@ -276,10 +199,9 @@ def testUploadArtifactYamlFileBadList(self):
content = """name: BadList
doc: here's the doc
sources:
-- type: GREP
+- type: PATH
attributes:
paths: /etc/blah
- content_regex_list: ["stuff"]
supported_os: [Linux]
"""
with self.assertRaises(rdf_artifacts.ArtifactDefinitionError):
@@ -392,41 +314,16 @@ def setUp(self):
"""Make sure things are initialized."""
super().setUp()
users = [
- rdf_client.User(username="gogol"),
- rdf_client.User(username="gevulot"),
- rdf_client.User(username="exomemory"),
- rdf_client.User(username="user1"),
- rdf_client.User(username="user2"),
+ knowledge_base_pb2.User(username="gogol"),
+ knowledge_base_pb2.User(username="gevulot"),
+ knowledge_base_pb2.User(username="exomemory"),
+ knowledge_base_pb2.User(username="user1"),
+ knowledge_base_pb2.User(username="user2"),
]
self.SetupClient(0, system="Linux", os_version="12.04", users=users)
self.LoadTestArtifacts()
- @parser_test_lib.WithParser("Cmd", CmdProcessor)
- def testCmdArtifact(self):
- """Check we can run command based artifacts and get anomalies."""
- client_id = test_lib.TEST_CLIENT_ID
- client_mock = self.MockClient(standard.ExecuteCommand, client_id=client_id)
- with mock.patch.object(subprocess, "Popen", client_test_lib.Popen):
- session_id = flow_test_lib.TestFlowHelper(
- collectors.ArtifactCollectorFlow.__name__,
- client_mock,
- client_id=client_id,
- use_raw_filesystem_access=False,
- artifact_list=["TestCmdArtifact"],
- creator=self.test_username)
-
- results = flow_test_lib.GetFlowResults(client_id, session_id)
- self.assertLen(results, 2)
- packages = [
- p for p in results if isinstance(p, rdf_client.SoftwarePackages)
- ]
- self.assertLen(packages, 1)
-
- anomalies = [a for a in results if isinstance(a, rdf_anomaly.Anomaly)]
- self.assertLen(anomalies, 1)
- self.assertIn("gremlin", anomalies[0].symptom)
-
def testFilesArtifact(self):
"""Check GetFiles artifacts."""
client_id = test_lib.TEST_CLIENT_ID
@@ -466,54 +363,6 @@ def testArtifactOutput(self):
if "collector returned 0 responses" not in str(context.exception):
raise RuntimeError("0 responses should have been returned")
- @parser_test_lib.WithParser("Raising", RaisingParser)
- def testFailuresAreLogged(self):
- client_id = "C.4815162342abcdef"
-
- now = rdfvalue.RDFDatetime.Now()
- data_store.REL_DB.WriteClientMetadata(client_id=client_id, last_ping=now)
-
- snapshot = objects_pb2.ClientSnapshot(client_id=client_id)
- snapshot.knowledge_base.os = "fakeos"
- data_store.REL_DB.WriteClientSnapshot(snapshot)
-
- raising_artifact_source = rdf_artifacts.ArtifactSource(
- type=rdf_artifacts.ArtifactSource.SourceType.COMMAND,
- attributes={
- "cmd": "/bin/echo",
- "args": ["1"],
- })
-
- raising_artifact = rdf_artifacts.Artifact(
- name="RaisingArtifact",
- doc="Lorem ipsum.",
- sources=[raising_artifact_source])
-
- registry = artifact_registry.ArtifactRegistry()
- with mock.patch.object(artifact_registry, "REGISTRY", registry):
- registry.RegisterArtifact(raising_artifact)
-
- flow_id = flow_test_lib.TestFlowHelper(
- collectors.ArtifactCollectorFlow.__name__,
- client_mock=action_mocks.ActionMock(standard.ExecuteCommand),
- client_id=client_id,
- artifact_list=["RaisingArtifact"],
- apply_parsers=True,
- check_flow_errors=True,
- creator=self.test_username)
-
- results = flow_test_lib.GetFlowResults(client_id=client_id, flow_id=flow_id)
- self.assertEmpty(results)
-
- logs = data_store.REL_DB.ReadFlowLogEntries(
- client_id=client_id, flow_id=flow_id, offset=0, count=1024)
-
- # Log should contain two entries. First one about successful execution of
- # the command (not interesting), the other one containing the error about
- # unsuccessful parsing.
- self.assertLen(logs, 2)
- self.assertIn("It was bound to happen.", logs[1].message)
-
class GrrKbTest(ArtifactTest):
@@ -559,7 +408,6 @@ def setUp(self):
reg_overrider.Start()
self.addCleanup(reg_overrider.Stop)
- @parser_test_lib.WithAllParsers
def testKnowledgeBaseRetrievalWindows(self):
"""Check we can retrieve a knowledge base from a client."""
kb = self._RunKBI()
@@ -587,7 +435,6 @@ def setUp(self):
super().setUp()
self.SetupClient(0, system="Linux", os_version="12.04")
- @parser_test_lib.WithAllParsers
def testKnowledgeBaseRetrievalLinux(self):
"""Check we can retrieve a Linux kb."""
@@ -655,7 +502,6 @@ def setUp(self):
super().setUp()
self.SetupClient(0, system="Darwin", os_version="10.9")
- @parser_test_lib.WithAllParsers
def testKnowledgeBaseRetrievalDarwin(self):
"""Check we can retrieve a Darwin kb."""
with vfs_test_lib.VFSOverrider(
@@ -672,448 +518,186 @@ def testKnowledgeBaseRetrievalDarwin(self):
self.assertEqual(user.homedir, "/Users/scalzi")
-class ParserApplicatorTest(absltest.TestCase):
-
- def setUp(self):
- super().setUp()
- self.client_id = db_test_utils.InitializeClient(data_store.REL_DB)
-
- def testApplySingleResponseSuccessful(self):
-
- class FooParser(parsers.SingleResponseParser):
-
- supported_artifacts = ["Foo"]
-
- def ParseResponse(
- self,
- knowledge_base: rdf_client.KnowledgeBase,
- response: rdf_client_fs.StatEntry,
- ) -> Iterable[rdfvalue.RDFString]:
- return [rdfvalue.RDFString(f"{knowledge_base.os}:{response.st_dev}")]
-
- with parser_test_lib._ParserContext("Foo", FooParser):
- factory = parsers.ArtifactParserFactory("Foo")
- client_id = self.client_id
- knowledge_base = rdf_client.KnowledgeBase(os="Redox")
-
- applicator = artifact.ParserApplicator(factory, client_id, knowledge_base)
- applicator.Apply([rdf_client_fs.StatEntry(st_dev=1337)])
-
- errors = list(applicator.Errors())
- self.assertEmpty(errors)
-
- responses = list(applicator.Responses())
- self.assertEqual(responses, ["Redox:1337"])
-
- def testApplySingleResponseError(self):
-
- class FooParseError(parsers.ParseError):
- pass
-
- class FooParser(parsers.SingleResponseParser):
-
- supported_artifacts = ["Foo"]
-
- def ParseResponse(
- self,
- knowledge_base: rdf_client.KnowledgeBase,
- response: rdf_client_fs.StatEntry,
- ) -> Iterable[rdfvalue.RDFString]:
- del knowledge_base, response # Unused.
- raise FooParseError("Lorem ipsum.")
-
- with parser_test_lib._ParserContext("Foo", FooParser):
- factory = parsers.ArtifactParserFactory("Foo")
- client_id = self.client_id
- knowledge_base = rdf_client.KnowledgeBase()
-
- applicator = artifact.ParserApplicator(factory, client_id, knowledge_base)
- applicator.Apply([rdf_client_fs.StatEntry()])
-
- errors = list(applicator.Errors())
- self.assertLen(errors, 1)
- self.assertIsInstance(errors[0], FooParseError)
-
- responses = list(applicator.Responses())
- self.assertEmpty(responses)
-
- def testApplyMultiResponseSuccess(self):
-
- class QuuxParser(parsers.MultiResponseParser[rdfvalue.RDFInteger]):
-
- supported_artifacts = ["Quux"]
-
- def ParseResponses(
- self,
- knowledge_base: rdf_client.KnowledgeBase,
- responses: Collection[rdf_client_fs.StatEntry],
- ) -> Iterable[rdfvalue.RDFInteger]:
- return [stat_entry.st_dev for stat_entry in responses]
-
- with parser_test_lib._ParserContext("Quux", QuuxParser):
- factory = parsers.ArtifactParserFactory("Quux")
- client_id = self.client_id
- knowledge_base = rdf_client.KnowledgeBase()
-
- applicator = artifact.ParserApplicator(factory, client_id, knowledge_base)
- applicator.Apply([
- rdf_client_fs.StatEntry(st_dev=42),
- rdf_client_fs.StatEntry(st_dev=1337),
- ])
-
- errors = list(applicator.Errors())
- self.assertEmpty(errors)
-
- responses = list(applicator.Responses())
- self.assertCountEqual(responses, [42, 1337])
-
- def testApplyMultipleParsersError(self):
-
- class QuuxParseError(parsers.ParseError):
- pass
-
- class QuuxParser(parsers.MultiResponseParser[rdfvalue.RDFInteger]):
+class KnowledgeBaseInitializationFlowTest(flow_test_lib.FlowTestsBaseclass):
- supported_artifacts = ["Quux"]
+ def testWindowsListUsersDir(self):
+ assert data_store.REL_DB is not None
+ rel_db: db.Database = data_store.REL_DB
- def ParseResponses(
- self,
- knowledge_base: rdf_client.KnowledgeBase,
- responses: Collection[rdf_client_fs.StatEntry],
- ) -> Iterable[rdfvalue.RDFInteger]:
- del knowledge_base, responses # Unused.
- raise QuuxParseError("Lorem ipsum.")
-
- with parser_test_lib._ParserContext("Quux", QuuxParser):
- factory = parsers.ArtifactParserFactory("Quux")
- client_id = self.client_id
- knowledge_base = rdf_client.KnowledgeBase()
-
- applicator = artifact.ParserApplicator(factory, client_id, knowledge_base)
- applicator.Apply([rdf_client_fs.StatEntry()])
-
- errors = list(applicator.Errors())
- self.assertLen(errors, 1)
- self.assertIsInstance(errors[0], QuuxParseError)
-
- responses = list(applicator.Responses())
- self.assertEmpty(responses)
+ creator = db_test_utils.InitializeUser(rel_db)
+ client_id = db_test_utils.InitializeClient(rel_db)
- def testSingleFileResponse(self):
+ snapshot = objects_pb2.ClientSnapshot()
+ snapshot.client_id = client_id
+ snapshot.knowledge_base.os = "Windows"
+ rel_db.WriteClientSnapshot(snapshot)
- class NorfParser(parsers.SingleFileParser[rdfvalue.RDFBytes]):
+ class ActionMock(action_mocks.ActionMock):
- supported_artifacts = ["Norf"]
-
- def ParseFile(
+ def GetFileStat(
self,
- knowledge_base: rdf_client.KnowledgeBase,
- pathspec: rdf_paths.PathSpec,
- filedesc: file_store.BlobStream,
- ) -> Iterable[rdfvalue.RDFBytes]:
- del knowledge_base, pathspec # Unused.
- return [rdfvalue.RDFBytes(filedesc.Read())]
-
- with parser_test_lib._ParserContext("Norf", NorfParser):
- factory = parsers.ArtifactParserFactory("Norf")
- client_id = self.client_id
- knowledge_base = rdf_client.KnowledgeBase()
-
- stat_entry = rdf_client_fs.StatEntry()
- stat_entry.pathspec.path = "foo/bar/baz"
- stat_entry.pathspec.pathtype = rdf_paths.PathSpec.PathType.OS
- self._WriteFile(stat_entry.pathspec.path, b"4815162342")
-
- applicator = artifact.ParserApplicator(factory, client_id, knowledge_base)
- applicator.Apply([stat_entry])
-
- errors = list(applicator.Errors())
- self.assertEmpty(errors)
-
- responses = list(applicator.Responses())
- self.assertLen(responses, 1)
- self.assertEqual(responses[0], b"4815162342")
-
- def testSingleFileError(self):
-
- class NorfParseError(parsers.ParseError):
- pass
-
- class NorfParser(parsers.SingleFileParser[None]):
-
- supported_artifacts = ["Norf"]
-
- def ParseFile(
+ args: rdf_client_action.GetFileStatRequest,
+ ) -> Iterator[rdf_client_fs.StatEntry]:
+ args = mig_client_action.ToProtoGetFileStatRequest(args)
+
+ # pylint: disable=line-too-long
+ # pyformat: disable
+ if (args.pathspec.pathtype != jobs_pb2.PathSpec.PathType.REGISTRY or
+ args.pathspec.path != r"HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows NT\CurrentVersion\SystemRoot"):
+ # pylint: enable=line-too-long
+ # pyformat: enable
+ raise OSError(f"Unsupported path: {args.pathspec}")
+
+ result = jobs_pb2.StatEntry()
+ result.registry_type = jobs_pb2.StatEntry.RegistryType.REG_SZ
+ result.registry_data.string = "X:\\"
+ yield mig_client_fs.ToRDFStatEntry(result)
+
+ def ListDirectory(
self,
- knowledge_base: rdf_client.KnowledgeBase,
- pathspec: rdf_paths.PathSpec,
- filedesc: file_store.BlobStream,
- ) -> Iterable[rdfvalue.RDFBytes]:
- del knowledge_base, pathspec, filedesc # Unused.
- raise NorfParseError("Lorem ipsum.")
-
- with parser_test_lib._ParserContext("Norf", NorfParser):
- factory = parsers.ArtifactParserFactory("Norf")
- client_id = self.client_id
- knowledge_base = rdf_client.KnowledgeBase()
-
- stat_entry = rdf_client_fs.StatEntry()
- stat_entry.pathspec.path = "foo/bar/baz"
- stat_entry.pathspec.pathtype = rdf_paths.PathSpec.PathType.OS
- self._WriteFile(stat_entry.pathspec.path, b"")
-
- applicator = artifact.ParserApplicator(factory, client_id, knowledge_base)
- applicator.Apply([stat_entry])
-
- errors = list(applicator.Errors())
- self.assertLen(errors, 1)
- self.assertIsInstance(errors[0], NorfParseError)
-
- responses = list(applicator.Responses())
- self.assertEmpty(responses)
-
- def testMultiFileSuccess(self):
-
- class ThudParser(parsers.MultiFileParser[rdf_protodict.Dict]):
-
- supported_artifacts = ["Thud"]
-
- def ParseFiles(
- self,
- knowledge_base: rdf_client.KnowledgeBase,
- pathspecs: Collection[rdf_paths.PathSpec],
- filedescs: Collection[file_store.BlobStream],
- ) -> Iterable[rdf_protodict.Dict]:
- results = []
- for pathspec, filedesc in zip(pathspecs, filedescs):
- result = rdf_protodict.Dict()
- result["path"] = pathspec.path
- result["content"] = filedesc.Read()
- results.append(result)
- return results
-
- with parser_test_lib._ParserContext("Thud", ThudParser):
- factory = parsers.ArtifactParserFactory("Thud")
- client_id = self.client_id
- knowledge_base = rdf_client.KnowledgeBase()
-
- stat_entry_foo = rdf_client_fs.StatEntry()
- stat_entry_foo.pathspec.path = "quux/foo"
- stat_entry_foo.pathspec.pathtype = rdf_paths.PathSpec.PathType.OS
- self._WriteFile(stat_entry_foo.pathspec.path, b"FOO")
-
- stat_entry_bar = rdf_client_fs.StatEntry()
- stat_entry_bar.pathspec.path = "quux/bar"
- stat_entry_bar.pathspec.pathtype = rdf_paths.PathSpec.PathType.OS
- self._WriteFile(stat_entry_bar.pathspec.path, b"BAR")
-
- applicator = artifact.ParserApplicator(factory, client_id, knowledge_base)
- applicator.Apply([stat_entry_foo, stat_entry_bar])
-
- errors = list(applicator.Errors())
- self.assertEmpty(errors)
-
- responses = list(applicator.Responses())
- self.assertLen(responses, 2)
- self.assertEqual(responses[0], {"path": "quux/foo", "content": b"FOO"})
- self.assertEqual(responses[1], {"path": "quux/bar", "content": b"BAR"})
-
- def testMultiFileError(self):
-
- class ThudParseError(parsers.ParseError):
- pass
-
- class ThudParser(parsers.MultiFileParser[rdf_protodict.Dict]):
-
- supported_artifacts = ["Thud"]
-
- def ParseFiles(
- self,
- knowledge_base: rdf_client.KnowledgeBase,
- pathspecs: Collection[rdf_paths.PathSpec],
- filedescs: Collection[file_store.BlobStream],
- ) -> Iterable[rdf_protodict.Dict]:
- del knowledge_base, pathspecs, filedescs # Unused.
- raise ThudParseError("Lorem ipsum.")
-
- with parser_test_lib._ParserContext("Thud", ThudParser):
- factory = parsers.ArtifactParserFactory("Thud")
- client_id = self.client_id
- knowledge_base = rdf_client.KnowledgeBase()
-
- stat_entry = rdf_client_fs.StatEntry()
- stat_entry.pathspec.path = "foo/bar/baz"
- stat_entry.pathspec.pathtype = rdf_paths.PathSpec.PathType.OS
- self._WriteFile(stat_entry.pathspec.path, b"\xff\x00\xff")
-
- applicator = artifact.ParserApplicator(factory, client_id, knowledge_base)
- applicator.Apply([stat_entry])
-
- errors = list(applicator.Errors())
- self.assertLen(errors, 1)
- self.assertIsInstance(errors[0], ThudParseError)
-
- responses = list(applicator.Responses())
- self.assertEmpty(responses)
-
- def _WriteFile(self, path: str, data: bytes) -> None:
- components = tuple(path.split("/"))
-
- blob_id = data_store.BLOBS.WriteBlobWithUnknownHash(blob_data=data)
- blob_ref = rdf_objects.BlobReference(
- offset=0, size=len(data), blob_id=bytes(blob_id)
- )
-
- path_info = rdf_objects.PathInfo.OS(components=components)
- path_info.hash_entry.sha256 = bytes(blob_id)
- data_store.REL_DB.WritePathInfos(
- self.client_id, [mig_objects.ToProtoPathInfo(path_info)]
+ args: rdf_client_action.ListDirRequest,
+ ) -> Iterator[rdf_client_fs.StatEntry]:
+ args = mig_client_action.ToProtoListDirRequest(args)
+
+ # pyformat: disable
+ if (args.pathspec.pathtype != jobs_pb2.PathSpec.PathType.OS or
+ args.pathspec.path != "X:\\Users"):
+ # pyformat: enable
+ raise OSError(f"Unsupported path: {args.pathspec}")
+
+ result = jobs_pb2.StatEntry()
+ result.pathspec.pathtype = jobs_pb2.PathSpec.PathType.OS
+
+ result.pathspec.path = "X:\\Users\\Administrator"
+ result.st_mode = 0o40777
+ yield mig_client_fs.ToRDFStatEntry(result)
+
+ result.pathspec.path = "X:\\Users\\All Users"
+ result.st_mode = 0o40777
+ yield mig_client_fs.ToRDFStatEntry(result)
+
+ result.pathspec.path = "X:\\Users\\bar"
+ result.st_mode = 0o40777
+ yield mig_client_fs.ToRDFStatEntry(result)
+
+ result.pathspec.path = "X:\\Users\\baz"
+ result.st_mode = 0o40777
+ yield mig_client_fs.ToRDFStatEntry(result)
+
+ result.pathspec.path = "X:\\Users\\Default"
+ result.st_mode = 0o40555
+ yield mig_client_fs.ToRDFStatEntry(result)
+
+ result.pathspec.path = "X:\\Users\\Default User"
+ result.st_mode = 0o40555
+ yield mig_client_fs.ToRDFStatEntry(result)
+
+ result.pathspec.path = "X:\\Users\\defaultuser0"
+ result.st_mode = 0o40777
+ yield mig_client_fs.ToRDFStatEntry(result)
+
+ result.pathspec.path = "X:\\Users\\desktop.ini"
+ result.st_mode = 0o100666
+ yield mig_client_fs.ToRDFStatEntry(result)
+
+ result.pathspec.path = "X:\\Users\\foo"
+ result.st_mode = 0o40777
+ yield mig_client_fs.ToRDFStatEntry(result)
+
+ result.pathspec.path = "X:\\Users\\Public"
+ result.st_mode = 0o40555
+ yield mig_client_fs.ToRDFStatEntry(result)
+
+ flow_id = flow_test_lib.StartAndRunFlow(
+ artifact.KnowledgeBaseInitializationFlow,
+ ActionMock(),
+ client_id=client_id,
+ creator=creator,
)
- client_path = db.ClientPath.OS(
- client_id=self.client_id, components=components)
-
- file_store.AddFileWithUnknownHash(client_path, [blob_ref])
-
- def testSingleResponseAndSingleFileParser(self):
-
- class FooParser(parsers.SingleResponseParser[rdfvalue.RDFString]):
+ results = flow_test_lib.GetFlowResults(client_id, flow_id)
- supported_artifacts = ["Quux"]
-
- def ParseResponse(
- self,
- knowledge_base: rdf_client.KnowledgeBase,
- response: rdfvalue.RDFValue,
- ) -> Iterator[rdfvalue.RDFString]:
- del knowledge_base # Unused.
-
- if not isinstance(response, rdfvalue.RDFString):
- raise TypeError(f"Unexpected response type: {type(response)}")
-
- yield rdfvalue.RDFString(f"FOO-{response}")
-
- class BarParser(parsers.SingleFileParser[rdfvalue.RDFString]):
-
- supported_artifacts = ["Quux"]
-
- def ParseFile(
- self,
- knowledge_base: rdf_client.KnowledgeBase,
- pathspec: rdf_paths.PathSpec,
- filedesc: IO[bytes],
- ) -> Iterator[rdfvalue.RDFString]:
- del knowledge_base, pathspec, filedesc # Unused.
- yield rdfvalue.RDFString("BAR")
-
- with parser_test_lib._ParserContext("Foo", FooParser):
- with parser_test_lib._ParserContext("Bar", BarParser):
- factory = parsers.ArtifactParserFactory("Quux")
- knowledge_base = rdf_client.KnowledgeBase()
-
- applicator = artifact.ParserApplicator(
- factory, client_id=self.client_id, knowledge_base=knowledge_base)
-
- applicator.Apply([
- rdfvalue.RDFString("THUD"),
- rdfvalue.RDFString("BLARGH"),
- ])
-
- responses = list(applicator.Responses())
- self.assertLen(responses, 2)
- self.assertEqual(responses[0], rdfvalue.RDFString("FOO-THUD"))
- self.assertEqual(responses[1], rdfvalue.RDFString("FOO-BLARGH"))
+ self.assertLen(results, 1)
+ self.assertLen(results[0].users, 3)
- def testSingleResponseAndSingleFileParserWithStatResponse(self):
+ users_by_username = {user.username: user for user in results[0].users}
- class FooParser(parsers.SingleResponseParser[rdfvalue.RDFString]):
+ self.assertIn("foo", users_by_username)
+ self.assertEqual(users_by_username["foo"].homedir, "X:\\Users\\foo")
- supported_artifacts = ["Quux"]
+ self.assertIn("bar", users_by_username)
+ self.assertEqual(users_by_username["bar"].homedir, "X:\\Users\\bar")
- def ParseResponse(
- self,
- knowledge_base: rdf_client.KnowledgeBase,
-          response: rdfvalue.RDFValue,
-      ) -> Iterator[rdfvalue.RDFString]:
-        del knowledge_base  # Unused.
-
-        if not isinstance(response, rdf_client_fs.StatEntry):
-          raise TypeError(f"Unexpected response type: {type(response)}")
-
-        yield rdfvalue.RDFString(f"PATH('{response.pathspec.path}')")
-
-    class BarParser(parsers.SingleFileParser[rdfvalue.RDFString]):
-
-      supported_artifacts = ["Quux"]
-
-      def ParseFile(
-          self,
-          knowledge_base: rdf_client.KnowledgeBase,
-          pathspec: rdf_paths.PathSpec,
-          filedesc: IO[bytes],
-      ) -> Iterator[rdfvalue.RDFString]:
-        raise NotImplementedError()
-
-    with parser_test_lib._ParserContext("Foo", FooParser):
-      with parser_test_lib._ParserContext("Bar", BarParser):
-        factory = parsers.ArtifactParserFactory("Quux")
-        knowledge_base = rdf_client.KnowledgeBase()
-
-        stat_entry = rdf_client_fs.StatEntry()
-        stat_entry.pathspec.path = "foo/bar/baz"
-        stat_entry.pathspec.pathtype = rdf_paths.PathSpec.PathType.OS
-
-        applicator = artifact.ParserApplicator(
-            factory, client_id=self.client_id, knowledge_base=knowledge_base)
-
-        applicator.Apply([stat_entry])
-
-        responses = list(applicator.Responses())
-        self.assertLen(responses, 1)
-        self.assertEqual(responses[0], "PATH('foo/bar/baz')")
-
-  def testTimestamp(self):
-
-    class BlarghParser(parsers.SingleFileParser[rdfvalue.RDFBytes]):
-
-      supported_artifacts = ["Blargh"]
-
-      def ParseFile(
-          self,
-          knowledge_base: rdf_client.KnowledgeBase,
-          pathspec: rdf_paths.PathSpec,
-          filedesc: file_store.BlobStream,
-      ) -> Iterable[rdfvalue.RDFBytes]:
-        del knowledge_base, pathspec  # Unused.
-        return [rdfvalue.RDFBytes(filedesc.Read())]
-
-    with parser_test_lib._ParserContext("Blargh", BlarghParser):
-      factory = parsers.ArtifactParserFactory("Blargh")
-
-      stat_entry = rdf_client_fs.StatEntry()
-      stat_entry.pathspec.path = "foo/bar/baz"
-      stat_entry.pathspec.pathtype = rdf_paths.PathSpec.PathType.OS
-
-      self._WriteFile(stat_entry.pathspec.path, b"OLD")
-
-      time.Step()
-      timestamp = rdfvalue.RDFDatetime.Now()
-
-      self._WriteFile(stat_entry.pathspec.path, b"NEW")
-
-      applicator = artifact.ParserApplicator(
-          factory,
-          client_id=self.client_id,
-          knowledge_base=rdf_client.KnowledgeBase(),
-          timestamp=timestamp)
-      applicator.Apply([stat_entry])
-
-      errors = list(applicator.Errors())
-      self.assertEmpty(errors)
-
-      responses = list(applicator.Responses())
-      self.assertLen(responses, 1)
-      self.assertEqual(responses[0], b"OLD")
+    self.assertIn("baz", users_by_username)
+    self.assertEqual(users_by_username["baz"].homedir, "X:\\Users\\baz")
+
+  # TODO: Remove once the `ListDirectory` action is fixed not
+  # to yield results with leading slashes on Windows.
+  def testWindowsListUsersDirWithForwardAndLeadingSlashes(self):
+    assert data_store.REL_DB is not None
+    rel_db: db.Database = data_store.REL_DB
+
+    creator = db_test_utils.InitializeUser(rel_db)
+    client_id = db_test_utils.InitializeClient(rel_db)
+
+    snapshot = objects_pb2.ClientSnapshot()
+    snapshot.client_id = client_id
+    snapshot.knowledge_base.os = "Windows"
+    rel_db.WriteClientSnapshot(snapshot)
+
+    class ActionMock(action_mocks.ActionMock):
+
+      def GetFileStat(
+          self,
+          args: rdf_client_action.GetFileStatRequest,
+      ) -> Iterator[rdf_client_fs.StatEntry]:
+        args = mig_client_action.ToProtoGetFileStatRequest(args)
+
+        # pylint: disable=line-too-long
+        # pyformat: disable
+        if (args.pathspec.pathtype != jobs_pb2.PathSpec.PathType.REGISTRY or
+            args.pathspec.path != r"HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows NT\CurrentVersion\SystemRoot"):
+          # pylint: enable=line-too-long
+          # pyformat: enable
+          raise OSError(f"Unsupported path: {args.pathspec}")
+
+        result = jobs_pb2.StatEntry()
+        result.registry_type = jobs_pb2.StatEntry.RegistryType.REG_SZ
+        result.registry_data.string = "X:\\"
+        yield mig_client_fs.ToRDFStatEntry(result)
+
+      def ListDirectory(
+          self,
+          args: rdf_client_action.ListDirRequest,
+      ) -> Iterator[rdf_client_fs.StatEntry]:
+        args = mig_client_action.ToProtoListDirRequest(args)
+
+        # pyformat: disable
+        if (args.pathspec.pathtype != jobs_pb2.PathSpec.PathType.OS or
+            args.pathspec.path != "X:\\Users"):
+          # pyformat: enable
+          raise OSError(f"Unsupported path: {args.pathspec}")
+
+        result = jobs_pb2.StatEntry()
+        result.pathspec.pathtype = jobs_pb2.PathSpec.PathType.OS
+
+        result.pathspec.path = "/X:/Users/foobar"
+        result.st_mode = 0o40777
+        yield mig_client_fs.ToRDFStatEntry(result)
+
+    flow_id = flow_test_lib.StartAndRunFlow(
+        artifact.KnowledgeBaseInitializationFlow,
+        ActionMock(),
+        client_id=client_id,
+        creator=creator,
+    )
+
+    results = flow_test_lib.GetFlowResults(client_id, flow_id)
+    self.assertLen(results, 1)
+    self.assertLen(results[0].users, 1)
+    self.assertEqual(results[0].users[0].username, "foobar")
+    self.assertEqual(results[0].users[0].homedir, "X:\\Users\\foobar")
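
The mock above deliberately hands the flow a `ListDirectory` result whose path uses forward and leading slashes (`/X:/Users/foobar`), and the assertions pin down that the knowledge base ends up with the native Windows form. A minimal sketch of the normalization being exercised, assuming only what the test asserts (this is illustrative, not GRR's actual helper):

```python
import ntpath


def normalize_windows_result_path(path: str) -> str:
  # "/X:/Users/foobar" -> "X:/Users/foobar" -> "X:\\Users\\foobar"
  if path.startswith("/") and len(path) > 2 and path[2] == ":":
    path = path[1:]
  return ntpath.normpath(path)


assert normalize_windows_result_path("/X:/Users/foobar") == "X:\\Users\\foobar"
```
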
def main(argv):
diff --git a/grr/server/grr_response_server/artifact_utils_test.py b/grr/server/grr_response_server/artifact_utils_test.py
index 931fe9a6e3..5f2a00ffc3 100644
--- a/grr/server/grr_response_server/artifact_utils_test.py
+++ b/grr/server/grr_response_server/artifact_utils_test.py
@@ -2,22 +2,17 @@
"""Tests for the artifact libraries."""
import os
-from typing import IO
-from typing import Iterable
-from typing import Iterator
from absl import app
+from absl.testing import absltest
from grr_response_core.lib import artifact_utils
-from grr_response_core.lib import parsers
from grr_response_core.lib.rdfvalues import artifacts as rdf_artifacts
from grr_response_core.lib.rdfvalues import client as rdf_client
-from grr_response_core.lib.rdfvalues import paths as rdf_paths
from grr_response_core.lib.rdfvalues import test_base as rdf_test_base
from grr_response_proto import knowledge_base_pb2
from grr_response_server import artifact_registry as ar
from grr.test_lib import artifact_test_lib
-from grr.test_lib import parser_test_lib
from grr.test_lib import test_lib
@@ -145,145 +140,6 @@ def testArtifactsDependencies(self, registry):
source.attributes["names"] = backup # Restore old source.
-class InterpolateKbAttributesTest(test_lib.GRRBaseTest):
-
- def assertRaisesMissingAttrs(self):
- error_cls = artifact_utils.KbInterpolationMissingAttributesError
- return self.assertRaises(error_cls)
-
- def assertRaisesUnknownAttrs(self):
- error_cls = artifact_utils.KbInterpolationUnknownAttributesError
- return self.assertRaises(error_cls)
-
- def testMultipleUsers(self):
- kb = knowledge_base_pb2.KnowledgeBase()
- kb.users.extend([
- knowledge_base_pb2.User(username="joe", uid=1),
- knowledge_base_pb2.User(username="jim", uid=2),
- ])
-
- paths = artifact_utils.InterpolateKbAttributes(
- "test%%users.username%%test", kb
- )
- paths = list(paths)
- self.assertLen(paths, 2)
- self.assertCountEqual(paths, ["testjoetest", "testjimtest"])
-
- def testSimpleVariable(self):
- kb = knowledge_base_pb2.KnowledgeBase()
- kb.environ_allusersprofile = "c:\\programdata"
-
- paths = artifact_utils.InterpolateKbAttributes(
- "%%environ_allusersprofile%%\\a", kb)
-
- self.assertEqual(list(paths), ["c:\\programdata\\a"])
-
- def testWithoutInterpolationAndKnowledgeBase(self):
- paths = artifact_utils.InterpolateKbAttributes(
- "no/interpolation", knowledge_base=None
- )
-
- self.assertEqual(list(paths), ["no/interpolation"])
-
- def testRaisesIfInterpolationWithoutKnowledgeBase(self):
- with self.assertRaises(artifact_utils.KnowledgeBaseUninitializedError):
- artifact_utils.InterpolateKbAttributes(
- "%%interpolation%%", knowledge_base=None
- )
-
- def testUnknownAttributeVar(self):
- kb = knowledge_base_pb2.KnowledgeBase()
-
- with self.assertRaisesUnknownAttrs() as context:
- artifact_utils.InterpolateKbAttributes("%%nonexistent%%\\a", kb)
-
- self.assertEqual(context.exception.attrs, ["nonexistent"])
-
- def testUnknownAttributeScope(self):
- kb = knowledge_base_pb2.KnowledgeBase()
-
- with self.assertRaisesUnknownAttrs() as context:
- artifact_utils.InterpolateKbAttributes("%%nonexistent.username%%", kb)
-
- self.assertEqual(context.exception.attrs, ["nonexistent"])
-
- def testUnknownAttributeScopeVar(self):
- kb = knowledge_base_pb2.KnowledgeBase()
- kb.users.append(knowledge_base_pb2.User())
- with self.assertRaisesUnknownAttrs() as context:
- artifact_utils.InterpolateKbAttributes("%%users.nonexistent%%", kb)
-
- self.assertEqual(context.exception.attrs, ["users.nonexistent"])
-
- def testUnknownAttributeScopeNotARealScope(self):
- kb = knowledge_base_pb2.KnowledgeBase(os="Linux")
-
- with self.assertRaisesUnknownAttrs() as context:
- artifact_utils.InterpolateKbAttributes("%%os.version%%", kb)
-
- self.assertEqual(context.exception.attrs, ["os"])
-
- def testMissingAttributeScope(self):
- kb = knowledge_base_pb2.KnowledgeBase()
-
- with self.assertRaisesMissingAttrs() as context:
- artifact_utils.InterpolateKbAttributes("test%%users.username%%test", kb)
-
- self.assertEqual(context.exception.attrs, ["users"])
-
- def testMissingAttributeScopeVar(self):
- kb = knowledge_base_pb2.KnowledgeBase()
- kb.users.append(knowledge_base_pb2.User(username="foo", uid=42))
- with self.assertRaisesMissingAttrs() as context:
- artifact_utils.InterpolateKbAttributes("test%%users.temp%%test", kb)
-
- self.assertEqual(context.exception.attrs, ["users.temp"])
-
- def testEmptyAttribute(self):
- kb = knowledge_base_pb2.KnowledgeBase(environ_allusersprofile="")
-
- with self.assertRaisesMissingAttrs() as context:
- artifact_utils.InterpolateKbAttributes("%%environ_allusersprofile%%", kb)
-
- self.assertEqual(context.exception.attrs, ["environ_allusersprofile"])
-
- def testSingleUserHasValueOthersDoNot(self):
- kb = knowledge_base_pb2.KnowledgeBase()
- kb.users.extend([
- knowledge_base_pb2.User(username="foo", uid=1, temp="C:\\Temp"),
- knowledge_base_pb2.User(username="bar", uid=2),
- knowledge_base_pb2.User(username="baz", uid=3),
- ])
-
- paths = artifact_utils.InterpolateKbAttributes(r"%%users.temp%%\abcd", kb)
- self.assertCountEqual(paths, ["C:\\Temp\\abcd"])
-
- def testMultipleUsersHaveValues(self):
- kb = knowledge_base_pb2.KnowledgeBase()
- kb.users.extend([
- knowledge_base_pb2.User(username="joe", uid=1, sid="sid1"),
- knowledge_base_pb2.User(username="jim", uid=2, sid="sid2"),
- ])
- kb.environ_allusersprofile = "c:\\programdata"
-
- paths = artifact_utils.InterpolateKbAttributes(
- "%%environ_allusersprofile%%\\%%users.sid%%\\%%users.username%%", kb)
- self.assertCountEqual(
- paths, ["c:\\programdata\\sid1\\joe", "c:\\programdata\\sid2\\jim"])
-
- def testMultipleUsersWithOneFullyApplicableValue(self):
- kb = knowledge_base_pb2.KnowledgeBase()
- kb.users.extend([
- knowledge_base_pb2.User(username="foo", uid=1, temp="C:\\Temp"),
- knowledge_base_pb2.User(username="bar", uid=2),
- knowledge_base_pb2.User(username="baz", uid=3),
- ])
-
- paths = artifact_utils.InterpolateKbAttributes(
- "%%users.temp%%\\%%users.username%%.txt", kb)
- self.assertCountEqual(paths, ["C:\\Temp\\foo.txt"])
-
-
class UserMergeTest(test_lib.GRRBaseTest):
def testUserMergeWindows(self):
@@ -345,30 +201,6 @@ def testUserMergeLinux(self):
[("desktop", u"/home/blake/Desktop", u"/home/blakey/Desktop")])
-class Parser1(parsers.SingleResponseParser):
-
- supported_artifacts = ["artifact"]
- knowledgebase_dependencies = ["appdata", "sid"]
-
- def ParseResponse(self, knowledge_base, response):
- raise NotImplementedError()
-
-
-class Parser2(parsers.MultiFileParser[None]):
-
- supported_artifacts = ["artifact"]
- knowledgebase_dependencies = ["sid", "desktop"]
-
- def ParseFiles(
- self,
- knowledge_base: rdf_client.KnowledgeBase,
- pathspecs: Iterable[rdf_paths.PathSpec],
- filedescs: Iterable[IO[bytes]],
- ) -> Iterator[None]:
- del knowledge_base, pathspecs, filedescs # Unused.
- raise NotImplementedError()
-
-
class ArtifactTests(rdf_test_base.RDFValueTestMixin, test_lib.GRRBaseTest):
"""Test the Artifact implementation."""
@@ -382,31 +214,37 @@ def GenerateSample(self, number=0):
urls="http://blah")
return result
- @parser_test_lib.WithParser("Parser1", Parser1)
- @parser_test_lib.WithParser("Parser2", Parser2)
def testGetArtifactPathDependencies(self):
- sources = [{
- "type": rdf_artifacts.ArtifactSource.SourceType.REGISTRY_KEY,
- "attributes": {
- "keys": [
- r"%%current_control_set%%\Control\Session "
- r"Manager\Environment\Path"
- ]
- }
- },
- {
- "type": rdf_artifacts.ArtifactSource.SourceType.WMI,
- "attributes": {
- "query": "SELECT * FROM Win32_UserProfile "
- "WHERE SID='%%users.sid%%'"
- }
- },
- {
- "type": rdf_artifacts.ArtifactSource.SourceType.GREP,
- "attributes": {
- "content_regex_list": ["^%%users.username%%:"]
- }
- }]
+ sources = [
+ {
+ "type": rdf_artifacts.ArtifactSource.SourceType.REGISTRY_KEY,
+ "attributes": {
+ "keys": [
+ # pylint: disable=line-too-long
+ # pyformat: disable
+ r"%%current_control_set%%\Control\Session Manager\Environment\Path",
+ # pylint: enable=line-too-long
+ # pyformat: enable
+ ],
+ },
+ },
+ {
+ "type": rdf_artifacts.ArtifactSource.SourceType.WMI,
+ "attributes": {
+ "query": """
+ SELECT *
+ FROM Win32_UserProfile
+ WHERE SID='%%users.sid%%'
+ """,
+ },
+ },
+ {
+ "type": rdf_artifacts.ArtifactSource.SourceType.PATH,
+ "attributes": {
+ "paths": ["/home/%%users.username%%"],
+ },
+ },
+ ]
artifact = rdf_artifacts.Artifact(
name="artifact",
@@ -417,13 +255,13 @@ def testGetArtifactPathDependencies(self):
self.assertCountEqual(
[x["type"] for x in artifact.ToPrimitiveDict()["sources"]],
- ["REGISTRY_KEY", "WMI", "GREP"])
+ ["REGISTRY_KEY", "WMI", "PATH"],
+ )
self.assertCountEqual(
- ar.GetArtifactPathDependencies(artifact), [
- "appdata", "sid", "desktop", "current_control_set", "users.sid",
- "users.username"
- ])
+ ar.GetArtifactPathDependencies(artifact),
+ ["current_control_set", "users.sid", "users.username"],
+ )
def testValidateSyntax(self):
sources = [{
@@ -525,6 +363,320 @@ def testKnowledgeBaseUsersAttributesExpandIntoLists(self):
})
+class ExpandKnowledgebaseWindowsEnvVars(absltest.TestCase):
+
+ def testInvalidSystem(self):
+ kb = knowledge_base_pb2.KnowledgeBase()
+ kb.os = "Linux"
+
+ with self.assertRaises(ValueError) as context:
+ artifact_utils.ExpandKnowledgebaseWindowsEnvVars(kb)
+
+ self.assertEqual(str(context.exception), "Invalid system: 'Linux'")
+
+ def testCircularDependency(self):
+ kb = knowledge_base_pb2.KnowledgeBase()
+ kb.os = "Windows"
+ kb.environ_systemdrive = "%SystemRoot%\\.."
+ kb.environ_systemroot = "%SystemDrive%\\Windows"
+
+ with self.assertRaises(ValueError) as context:
+ artifact_utils.ExpandKnowledgebaseWindowsEnvVars(kb)
+
+ self.assertStartsWith(str(context.exception), "Circular dependency")
+
+ def testDefaults(self):
+ kb = knowledge_base_pb2.KnowledgeBase()
+ kb.os = "Windows"
+
+ kb = artifact_utils.ExpandKnowledgebaseWindowsEnvVars(kb)
+
+ self.assertEqual(kb.environ_systemdrive, "C:")
+ self.assertEqual(kb.environ_systemroot, "C:\\Windows")
+ self.assertEqual(kb.environ_temp, "C:\\Windows\\TEMP")
+ self.assertEqual(kb.environ_programfiles, "C:\\Program Files")
+ self.assertEqual(kb.environ_programfilesx86, "C:\\Program Files (x86)")
+
+ def testSimpleExpansion(self):
+ kb = knowledge_base_pb2.KnowledgeBase()
+ kb.os = "Windows"
+ kb.environ_systemdrive = "X:"
+ kb.environ_temp = "%SystemDrive%\\Temporary"
+
+ kb = artifact_utils.ExpandKnowledgebaseWindowsEnvVars(kb)
+
+ self.assertEqual(kb.environ_systemdrive, "X:")
+ self.assertEqual(kb.environ_temp, "X:\\Temporary")
+
+ def testRecursiveExpansion(self):
+ kb = knowledge_base_pb2.KnowledgeBase()
+ kb.os = "Windows"
+ kb.environ_systemdrive = "X:"
+ kb.environ_systemroot = "%SystemDrive%\\W1nd0w5"
+ kb.environ_temp = "%SystemRoot%\\T3mp"
+ kb.environ_allusersappdata = "%TEMP%\\U53r5"
+
+ kb = artifact_utils.ExpandKnowledgebaseWindowsEnvVars(kb)
+
+ self.assertEqual(kb.environ_systemdrive, "X:")
+ self.assertEqual(kb.environ_systemroot, "X:\\W1nd0w5")
+ self.assertEqual(kb.environ_temp, "X:\\W1nd0w5\\T3mp")
+ self.assertEqual(kb.environ_allusersappdata, "X:\\W1nd0w5\\T3mp\\U53r5")
+
+ def testMultiExpansion(self):
+ kb = knowledge_base_pb2.KnowledgeBase()
+ kb.os = "Windows"
+ kb.environ_systemdrive = "X:"
+ kb.environ_comspec = "%SystemDrive%\\.."
+ kb.environ_temp = "%ComSpec%\\%SystemDrive%"
+
+ kb = artifact_utils.ExpandKnowledgebaseWindowsEnvVars(kb)
+
+ self.assertEqual(kb.environ_systemdrive, "X:")
+ self.assertEqual(kb.environ_comspec, "X:\\..")
+ self.assertEqual(kb.environ_temp, "X:\\..\\X:")
+
+ def testUnknownEnvVarRefs(self):
+ kb = knowledge_base_pb2.KnowledgeBase()
+ kb.os = "Windows"
+ kb.environ_systemdrive = "X:"
+ kb.environ_systemroot = "%SystemDrive%\\%SystemName%"
+
+ kb = artifact_utils.ExpandKnowledgebaseWindowsEnvVars(kb)
+
+ self.assertEqual(kb.environ_systemroot, "X:\\%SystemName%")
+
+
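
These tests fix the contract of `ExpandKnowledgebaseWindowsEnvVars`: `%Var%` references are resolved recursively against other knowledge-base fields, unset fields fall back to Windows defaults, unknown references are left verbatim, and cycles raise `ValueError`. A rough sketch of that resolution loop under exactly those assumptions (names and defaults below are illustrative, not the real implementation):

```python
import re

DEFAULTS = {"systemdrive": "C:", "systemroot": "%SystemDrive%\\Windows"}


def expand(values: dict) -> dict:
  done = {}
  in_progress = set()

  def resolve(name: str) -> str:
    if name in done:
      return done[name]
    if name in in_progress:
      raise ValueError(f"Circular dependency involving {name!r}")
    in_progress.add(name)
    raw = values.get(name) or DEFAULTS.get(name, f"%{name}%")

    def substitute(match):
      ref = match.group(1).lower()
      if ref in values or ref in DEFAULTS:
        return resolve(ref)
      return match.group(0)  # Unknown references stay verbatim.

    done[name] = re.sub(r"%(\w+)%", substitute, raw)
    in_progress.discard(name)
    return done[name]

  return {name: resolve(name) for name in set(values) | set(DEFAULTS)}


print(expand({"systemdrive": "X:", "temp": "%SystemRoot%\\T3mp"}))
# systemroot -> 'X:\\Windows', temp -> 'X:\\Windows\\T3mp'
```
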
+class KnowledgeBaseInterpolationTest(absltest.TestCase):
+
+ def testSinglePattern(self):
+ kb = knowledge_base_pb2.KnowledgeBase()
+ kb.fqdn = "foo.example.com"
+
+ interpolation = artifact_utils.KnowledgeBaseInterpolation(
+ pattern="ping %%fqdn%%",
+ kb=kb,
+ )
+
+ self.assertLen(interpolation.results, 1)
+ self.assertEqual(interpolation.results[0], "ping foo.example.com")
+
+ def testMultiplePatterns(self):
+ kb = knowledge_base_pb2.KnowledgeBase()
+ kb.os = "Linux"
+ kb.fqdn = "foo.example.com"
+
+ interpolation = artifact_utils.KnowledgeBaseInterpolation(
+ pattern="%%fqdn%% (%%os%%)",
+ kb=kb,
+ )
+
+ self.assertLen(interpolation.results, 1)
+ self.assertEqual(interpolation.results[0], "foo.example.com (Linux)")
+
+ def testSingleUserSinglePattern(self):
+ kb = knowledge_base_pb2.KnowledgeBase()
+ kb.users.add(username="user0")
+
+ interpolation = artifact_utils.KnowledgeBaseInterpolation(
+ pattern="X:\\Users\\%%users.username%%",
+ kb=kb,
+ )
+
+ self.assertLen(interpolation.results, 1)
+ self.assertEqual(interpolation.results[0], "X:\\Users\\user0")
+
+ def testSingleUserMultiplePatterns(self):
+ kb = knowledge_base_pb2.KnowledgeBase()
+ kb.environ_systemdrive = "X:"
+ kb.users.add(username="user0")
+
+ interpolation = artifact_utils.KnowledgeBaseInterpolation(
+ pattern="%%environ_systemdrive%%\\Users\\%%users.username%%",
+ kb=kb,
+ )
+
+ self.assertLen(interpolation.results, 1)
+ self.assertEqual(interpolation.results[0], "X:\\Users\\user0")
+
+ def testMultipleUsers(self):
+ kb = knowledge_base_pb2.KnowledgeBase()
+ kb.environ_systemdrive = "X:"
+ kb.users.add(username="user0", sid="S-0-X-X-770")
+ kb.users.add(username="user1", sid="S-1-X-X-771")
+ kb.users.add(username="user2", sid="S-2-X-X-772")
+
+ interpolation = artifact_utils.KnowledgeBaseInterpolation(
+ pattern="%%environ_systemdrive%%\\%%users.sid%%\\%%users.username%%",
+ kb=kb,
+ )
+
+ self.assertLen(interpolation.results, 3)
+ self.assertEqual(interpolation.results[0], "X:\\S-0-X-X-770\\user0")
+ self.assertEqual(interpolation.results[1], "X:\\S-1-X-X-771\\user1")
+ self.assertEqual(interpolation.results[2], "X:\\S-2-X-X-772\\user2")
+
+ def testMultipleUsersNoPatterns(self):
+ kb = knowledge_base_pb2.KnowledgeBase()
+ kb.users.add(username="foo")
+ kb.users.add(username="bar")
+
+ interpolation = artifact_utils.KnowledgeBaseInterpolation(
+ pattern="X:\\Users\\foo",
+ kb=kb,
+ )
+
+ self.assertLen(interpolation.results, 1)
+ self.assertEqual(interpolation.results[0], "X:\\Users\\foo")
+
+ def testNoUsers(self):
+ kb = knowledge_base_pb2.KnowledgeBase()
+
+ interpolation = artifact_utils.KnowledgeBaseInterpolation(
+ pattern="X:\\Users\\%%users.username%%",
+ kb=kb,
+ )
+ self.assertEmpty(interpolation.results)
+ self.assertEmpty(interpolation.logs)
+
+ def testNoUsersNoPatterns(self):
+ kb = knowledge_base_pb2.KnowledgeBase()
+
+ interpolation = artifact_utils.KnowledgeBaseInterpolation(
+ pattern="X:\\Users\\foo",
+ kb=kb,
+ )
+ self.assertLen(interpolation.results, 1)
+ self.assertEqual(interpolation.results[0], "X:\\Users\\foo")
+
+ def testUserWithoutUsername(self):
+ kb = knowledge_base_pb2.KnowledgeBase()
+ kb.users.add(sid="S-0-X-X-770")
+
+ interpolation = artifact_utils.KnowledgeBaseInterpolation(
+ pattern="X:\\Users\\%%users.username%%",
+ kb=kb,
+ )
+ self.assertEmpty(interpolation.results)
+ self.assertIn("without username", interpolation.logs[0])
+
+ def testSingleUserMissingAttribute(self):
+ kb = knowledge_base_pb2.KnowledgeBase()
+ kb.users.add(username="foo")
+
+ interpolation = artifact_utils.KnowledgeBaseInterpolation(
+ pattern="X:\\SID\\%%users.sid%%",
+ kb=kb,
+ )
+ self.assertEmpty(interpolation.results)
+ self.assertIn("user 'foo' is missing 'sid'", interpolation.logs)
+
+ def testMultipleUsersMissingAttribute(self):
+ kb = knowledge_base_pb2.KnowledgeBase()
+ kb.users.add(username="foo", sid="S-0-X-X-770")
+ kb.users.add(username="bar")
+
+ interpolation = artifact_utils.KnowledgeBaseInterpolation(
+ pattern="X:\\SID\\%%users.sid%%",
+ kb=kb,
+ )
+ self.assertLen(interpolation.results, 1)
+ self.assertEqual(interpolation.results[0], "X:\\SID\\S-0-X-X-770")
+ self.assertIn("user 'bar' is missing 'sid'", interpolation.logs)
+
+ def testMissingAttribute(self):
+ kb = knowledge_base_pb2.KnowledgeBase()
+ kb.fqdn = "foo.example.com"
+
+ interpolation = artifact_utils.KnowledgeBaseInterpolation(
+ pattern="%%fqdn%% (%%os%%)",
+ kb=kb,
+ )
+ self.assertEmpty(interpolation.results)
+ self.assertIn("'os' is missing", interpolation.logs)
+
+ def testUserNonExistingAttribute(self):
+ kb = knowledge_base_pb2.KnowledgeBase()
+ kb.users.add(username="foo")
+
+ with self.assertRaises(ValueError) as context:
+ artifact_utils.KnowledgeBaseInterpolation(
+ pattern="X:\\Users\\%%users.foobar%%",
+ kb=kb,
+ )
+
+ error = context.exception
+ self.assertEqual(str(error), "`%%users.foobar%%` does not exist")
+
+ def testNonExistingAttribute(self):
+ with self.assertRaises(ValueError) as context:
+ artifact_utils.KnowledgeBaseInterpolation(
+ pattern="X:\\%%foobar%%",
+ kb=knowledge_base_pb2.KnowledgeBase(),
+ )
+
+ error = context.exception
+ self.assertEqual(str(error), "`%%foobar%%` does not exist")
+
+ def testUserprofileFromUserprofile(self):
+ kb = knowledge_base_pb2.KnowledgeBase()
+ kb.users.add(username="foo", userprofile="X:\\Users\\foo")
+
+ interpolation = artifact_utils.KnowledgeBaseInterpolation(
+ pattern="%%users.userprofile%%\\file.txt",
+ kb=kb,
+ )
+ self.assertLen(interpolation.results, 1)
+ self.assertEqual(interpolation.results[0], "X:\\Users\\foo\\file.txt")
+
+ def testDefaultUserprofileFromHomedir(self):
+ kb = knowledge_base_pb2.KnowledgeBase()
+ kb.users.add(username="foo", homedir="X:\\Users\\foo")
+
+ interpolation = artifact_utils.KnowledgeBaseInterpolation(
+ pattern="%%users.userprofile%%\\file.txt",
+ kb=kb,
+ )
+ self.assertLen(interpolation.results, 1)
+ self.assertEqual(interpolation.results[0], "X:\\Users\\foo\\file.txt")
+ self.assertIn(
+ "using default 'X:\\\\Users\\\\foo' for 'userprofile' for user 'foo'",
+ interpolation.logs,
+ )
+
+ def testDefaultUserprofileFromUsernameSystemDrive(self):
+ kb = knowledge_base_pb2.KnowledgeBase()
+ kb.environ_systemdrive = "X:"
+ kb.users.add(username="foo")
+
+ interpolation = artifact_utils.KnowledgeBaseInterpolation(
+ pattern="%%users.userprofile%%\\file.txt",
+ kb=kb,
+ )
+ self.assertLen(interpolation.results, 1)
+ self.assertEqual(interpolation.results[0], "X:\\Users\\foo\\file.txt")
+ self.assertIn(
+ "using default 'X:\\\\Users\\\\foo' for 'userprofile' for user 'foo'",
+ interpolation.logs,
+ )
+
+ def testDefaultUserprofileFromUsername(self):
+ kb = knowledge_base_pb2.KnowledgeBase()
+ kb.environ_systemdrive = "C:"
+ kb.users.add(username="foo")
+
+ interpolation = artifact_utils.KnowledgeBaseInterpolation(
+ pattern="%%users.userprofile%%\\file.txt",
+ kb=kb,
+ )
+ self.assertLen(interpolation.results, 1)
+ self.assertEqual(interpolation.results[0], "C:\\Users\\foo\\file.txt")
+ self.assertIn(
+ "using default 'C:\\\\Users\\\\foo' for 'userprofile' for user 'foo'",
+ interpolation.logs,
+ )
+
+
def main(argv):
# Run the full test suite
test_lib.main(argv)
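
Taken together, `KnowledgeBaseInterpolationTest` specifies the new interpolation behaviour: plain `%%attr%%` patterns yield a single result, `%%users.attr%%` patterns yield one result per user, and users lacking an attribute are skipped with a log line rather than raising. A compact sketch of the per-user fan-out under those assumptions (illustrative only, not the real `artifact_utils.KnowledgeBaseInterpolation`):

```python
import re


def interpolate_users(pattern: str, users: list) -> tuple:
  """One result per user; users missing an attribute are logged and skipped."""
  results, logs = [], []
  attrs = re.findall(r"%%users\.(\w+)%%", pattern)
  for user in users:
    missing = [a for a in attrs if not user.get(a)]
    if missing:
      logs.append(f"user {user.get('username', '?')!r} is missing {missing[0]!r}")
      continue
    result = pattern
    for attr in attrs:
      result = result.replace(f"%%users.{attr}%%", user[attr])
    results.append(result)
  return results, logs


users = [
    {"username": "foo", "sid": "S-0-X-X-770"},
    {"username": "bar"},
]
print(interpolate_users("X:\\SID\\%%users.sid%%", users))
# (['X:\\SID\\S-0-X-X-770'], ["user 'bar' is missing 'sid'"])
```
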
diff --git a/grr/server/grr_response_server/bin/config_updater_util_test.py b/grr/server/grr_response_server/bin/config_updater_util_test.py
index 7cafa920fe..ef19a207e3 100644
--- a/grr/server/grr_response_server/bin/config_updater_util_test.py
+++ b/grr/server/grr_response_server/bin/config_updater_util_test.py
@@ -13,12 +13,12 @@
from grr_response_core import config as grr_config
from grr_response_core.lib import rdfvalue
from grr_response_core.lib import utils
+from grr_response_core.lib.rdfvalues import crypto as rdf_crypto
from grr_response_proto import objects_pb2
from grr_response_proto.api import config_pb2
from grr_response_server import data_store
from grr_response_server import signed_binary_utils
from grr_response_server.bin import config_updater_util
-from grr_response_server.rdfvalues import mig_objects
from grr.test_lib import test_lib
@@ -272,12 +272,11 @@ def testDeleteUser(self):
config_updater_util.GetUserSummary("foo_user")
def _AssertStoredUserDetailsAre(self, username, password, is_admin):
- proto_user = data_store.REL_DB.ReadGRRUser(username)
- rdf_user = mig_objects.ToRDFGRRUser(proto_user)
- self.assertTrue(rdf_user.password.CheckPassword(password))
+ user = data_store.REL_DB.ReadGRRUser(username)
+ self.assertTrue(rdf_crypto.CheckPassword(user.password, password))
if is_admin:
self.assertEqual(
- rdf_user.user_type, objects_pb2.GRRUser.UserType.USER_TYPE_ADMIN
+ user.user_type, objects_pb2.GRRUser.UserType.USER_TYPE_ADMIN
)
def testArgparseBool_CaseInsensitive(self):
diff --git a/grr/server/grr_response_server/databases/db_flows_test.py b/grr/server/grr_response_server/databases/db_flows_test.py
index 13d7150cd9..efcc76d3e2 100644
--- a/grr/server/grr_response_server/databases/db_flows_test.py
+++ b/grr/server/grr_response_server/databases/db_flows_test.py
@@ -16,7 +16,6 @@
from grr_response_proto import flows_pb2
from grr_response_proto import hunts_pb2
from grr_response_proto import jobs_pb2
-from grr_response_proto import objects_pb2
from grr_response_server import flow
from grr_response_server.databases import db
from grr_response_server.databases import db_test_utils
@@ -2635,31 +2634,6 @@ def testReadFlowResultsCorrectlyAppliesVariousCombinationsOfFilters(self):
% (tag_value, type_value, substring_value, expected, results),
)
- def testReadFlowResultsReturnsPayloadWithMissingTypeAsSpecialValue(self):
- client_id = db_test_utils.InitializeClient(self.db)
- flow_id = db_test_utils.InitializeFlow(self.db, client_id)
-
- sample_results = self._WriteFlowResults(
- self._SampleResults(client_id, flow_id), multiple_timestamps=True
- )
-
- type_name = jobs_pb2.ClientSummary.__name__
- try:
- cls = rdfvalue.RDFValue.classes.pop(type_name)
-
- results = self.db.ReadFlowResults(client_id, flow_id, 0, 100)
- finally:
- rdfvalue.RDFValue.classes[type_name] = cls
-
- self.assertLen(sample_results, len(results))
- for r in results:
- self.assertTrue(
- r.payload.Is(objects_pb2.SerializedValueOfUnrecognizedType.DESCRIPTOR)
- )
- unrecognized_value = objects_pb2.SerializedValueOfUnrecognizedType()
- r.payload.Unpack(unrecognized_value)
- self.assertEqual(unrecognized_value.type_name, type_name)
-
def testCountFlowResultsReturnsCorrectResultsCount(self):
client_id = db_test_utils.InitializeClient(self.db)
flow_id = db_test_utils.InitializeFlow(self.db, client_id)
@@ -3041,31 +3015,6 @@ def testReadFlowErrorsCorrectlyAppliesVariousCombinationsOfFilters(self):
% (tag_value, type_value, expected, errors),
)
- def testReadFlowErrorsReturnsPayloadWithMissingTypeAsSpecialValue(self):
- client_id = db_test_utils.InitializeClient(self.db)
- flow_id = db_test_utils.InitializeFlow(self.db, client_id)
-
- sample_errors = self._WriteFlowErrors(
- self._CreateErrors(client_id, flow_id), multiple_timestamps=True
- )
-
- type_name = rdf_client.ClientSummary.__name__
- try:
- cls = rdfvalue.RDFValue.classes.pop(type_name)
-
- errors = self.db.ReadFlowErrors(client_id, flow_id, 0, 100)
- finally:
- rdfvalue.RDFValue.classes[type_name] = cls
-
- self.assertLen(sample_errors, len(errors))
- for r in errors:
- self.assertTrue(
- r.payload.Is(objects_pb2.SerializedValueOfUnrecognizedType.DESCRIPTOR)
- )
- unrecognized_value = objects_pb2.SerializedValueOfUnrecognizedType()
- r.payload.Unpack(unrecognized_value)
- self.assertEqual(unrecognized_value.type_name, type_name)
-
def testCountFlowErrorsReturnsCorrectErrorsCount(self):
client_id = db_test_utils.InitializeClient(self.db)
flow_id = db_test_utils.InitializeFlow(self.db, client_id)
diff --git a/grr/server/grr_response_server/databases/db_hunts_test.py b/grr/server/grr_response_server/databases/db_hunts_test.py
index 2770ca764e..1d62d0c00e 100644
--- a/grr/server/grr_response_server/databases/db_hunts_test.py
+++ b/grr/server/grr_response_server/databases/db_hunts_test.py
@@ -13,7 +13,6 @@
from grr_response_proto import flows_pb2
from grr_response_proto import hunts_pb2
from grr_response_proto import jobs_pb2
-from grr_response_proto import objects_pb2
from grr_response_proto import output_plugin_pb2
from grr_response_server import flow
from grr_response_server.databases import db
@@ -1554,30 +1553,6 @@ def testReadHuntResultsCorrectlyAppliesVariousCombinationsOfFilters(self):
% (tag_value, type_value, substring_value, expected, results),
)
- def testReadHuntResultsReturnsPayloadWithMissingTypeAsSpecialValue(self):
- hunt_id = db_test_utils.InitializeHunt(self.db)
-
- client_id, flow_id = self._SetupHuntClientAndFlow(hunt_id=hunt_id)
- sample_results = self._SampleSingleTypeHuntResults(
- client_id=client_id, flow_id=flow_id, hunt_id=hunt_id
- )
- self._WriteHuntResults(sample_results)
-
- type_name = rdf_client.ClientSummary.__name__
- cls = rdfvalue.RDFValue.classes.pop(type_name)
-
- results = self.db.ReadHuntResults(hunt_id, 0, 100)
- rdfvalue.RDFValue.classes[type_name] = cls
-
- self.assertLen(sample_results, len(results))
- for r in results:
- self.assertTrue(
- r.payload.Is(objects_pb2.SerializedValueOfUnrecognizedType.DESCRIPTOR)
- )
- payload = objects_pb2.SerializedValueOfUnrecognizedType()
- r.payload.Unpack(payload)
- self.assertEqual(payload.type_name, type_name)
-
def testReadHuntResultsIgnoresChildFlowsResults(self):
client_id = db_test_utils.InitializeClient(self.db)
hunt_id = db_test_utils.InitializeHunt(self.db)
diff --git a/grr/server/grr_response_server/databases/db_users_test.py b/grr/server/grr_response_server/databases/db_users_test.py
index 670ea8a89f..8239f2a393 100644
--- a/grr/server/grr_response_server/databases/db_users_test.py
+++ b/grr/server/grr_response_server/databases/db_users_test.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python
from grr_response_core.lib import rdfvalue
from grr_response_core.lib.rdfvalues import crypto as rdf_crypto
-from grr_response_core.lib.rdfvalues import mig_crypto
+from grr_response_proto import jobs_pb2
from grr_response_proto import objects_pb2
from grr_response_proto import user_pb2
from grr_response_server.databases import db
@@ -36,11 +36,10 @@ def testFilledGRRUserReadWrite(self):
# TODO: Stop using `rdf_crypto.Password`
# TODO(hanuszczak): Passwords should be required to be unicode strings.
- rdf_password = rdf_crypto.Password()
- rdf_password.SetPassword(b"blah")
- proto_password = mig_crypto.ToProtoPassword(rdf_password)
+ password_proto = jobs_pb2.Password()
+ rdf_crypto.SetPassword(password_proto, "blah")
- expected.password.CopyFrom(proto_password)
+ expected.password.CopyFrom(password_proto)
d.WriteGRRUser(
EXAMPLE_NAME,
diff --git a/grr/server/grr_response_server/databases/mem_flows.py b/grr/server/grr_response_server/databases/mem_flows.py
index 8d5ccc29d6..9c522c6764 100644
--- a/grr/server/grr_response_server/databases/mem_flows.py
+++ b/grr/server/grr_response_server/databases/mem_flows.py
@@ -932,22 +932,6 @@ def _ReadFlowResultsOrErrors(
container_copy.append(x)
results = sorted(container_copy, key=lambda r: r.timestamp)
- # This is done in order to pass the tests that try to deserialize
- # value of an unrecognized type.
- for r in results:
- # TODO: for separation of concerns reasons,
- # ReadFlowResults/ReadFlowErrors shouldn't do the payload type validation,
- # they should be completely agnostic to what payloads get written/read
- # to/from the database. Keeping this logic here temporarily
- # to narrow the scope of the RDFProtoStruct->protos migration.
- cls_name = db_utils.TypeURLToRDFTypeName(r.payload.type_url)
- if cls_name not in rdfvalue.RDFValue.classes:
- unrecognized_value = objects_pb2.SerializedValueOfUnrecognizedType(
- type_name=cls_name,
- value=r.payload.value,
- )
- r.payload.Pack(unrecognized_value)
-
if with_tag is not None:
results = [i for i in results if i.tag == with_tag]
diff --git a/grr/server/grr_response_server/databases/mem_hunts.py b/grr/server/grr_response_server/databases/mem_hunts.py
index a80ebfdeba..9f6cd83120 100644
--- a/grr/server/grr_response_server/databases/mem_hunts.py
+++ b/grr/server/grr_response_server/databases/mem_hunts.py
@@ -15,6 +15,7 @@
from grr_response_proto import objects_pb2
from grr_response_proto import output_plugin_pb2
from grr_response_server.databases import db
+from grr_response_server.databases import db_utils
from grr_response_server.models import hunts
from grr_response_server.rdfvalues import hunt_objects as rdf_hunt_objects
from grr_response_server.rdfvalues import mig_flow_objects
@@ -415,8 +416,7 @@ def CountHuntResults(
def CountHuntResultsByType(self, hunt_id: str) -> Mapping[str, int]:
result = {}
for hr in self.ReadHuntResults(hunt_id, 0, sys.maxsize):
- hr = mig_flow_objects.ToRDFFlowResult(hr)
- key = hr.payload.__class__.__name__
+ key = db_utils.TypeURLToRDFTypeName(hr.payload.type_url)
result[key] = result.setdefault(key, 0) + 1
return result
diff --git a/grr/server/grr_response_server/databases/mysql_flows.py b/grr/server/grr_response_server/databases/mysql_flows.py
index 38cf640c9d..98d2f809c8 100644
--- a/grr/server/grr_response_server/databases/mysql_flows.py
+++ b/grr/server/grr_response_server/databases/mysql_flows.py
@@ -4,19 +4,7 @@
import logging
import threading
import time
-from typing import AbstractSet
-from typing import Callable
-from typing import Collection
-from typing import Dict
-from typing import Iterable
-from typing import List
-from typing import Mapping
-from typing import Optional
-from typing import Sequence
-from typing import Tuple
-from typing import Type
-from typing import TypeVar
-from typing import Union
+from typing import AbstractSet, Callable, Collection, Dict, Iterable, List, Mapping, Optional, Sequence, Tuple, Type, Union
import MySQLdb
from MySQLdb import cursors
@@ -25,7 +13,6 @@
from google.protobuf import any_pb2
from grr_response_core.lib import rdfvalue
from grr_response_core.lib import utils
-from grr_response_core.lib.rdfvalues import flows as rdf_flows
from grr_response_core.lib.util import collection
from grr_response_core.lib.util import random
from grr_response_proto import flows_pb2
@@ -35,22 +22,20 @@
from grr_response_server.databases import db_utils
from grr_response_server.databases import mysql_utils
from grr_response_server.models import hunts
-from grr_response_server.rdfvalues import flow_objects as rdf_flow_objects
-T = TypeVar("T")
-
-
-class MySQLDBFlowMixin(object):
+class MySQLDBFlowMixin:
"""MySQLDB mixin for flow handling."""
@mysql_utils.WithTransaction()
def WriteMessageHandlerRequests(
self,
requests: Iterable[objects_pb2.MessageHandlerRequest],
- cursor: Optional[MySQLdb.cursors.Cursor] = None,
+ cursor: Optional[cursors.Cursor] = None,
) -> None:
"""Writes a list of message handler requests to the database."""
+ assert cursor is not None
+
query = (
"INSERT IGNORE INTO message_handler_requests "
"(handlername, request_id, request) VALUES "
@@ -68,9 +53,10 @@ def WriteMessageHandlerRequests(
@mysql_utils.WithTransaction(readonly=True)
def ReadMessageHandlerRequests(
self,
- cursor: Optional[MySQLdb.cursors.Cursor] = None,
+ cursor: Optional[cursors.Cursor] = None,
) -> Sequence[objects_pb2.MessageHandlerRequest]:
"""Reads all message handler requests from the database."""
+ assert cursor is not None
query = (
"SELECT UNIX_TIMESTAMP(timestamp), request,"
@@ -99,9 +85,10 @@ def ReadMessageHandlerRequests(
def DeleteMessageHandlerRequests(
self,
requests: Iterable[objects_pb2.MessageHandlerRequest],
- cursor: Optional[MySQLdb.cursors.Cursor] = None,
+ cursor: Optional[cursors.Cursor] = None,
) -> None:
"""Deletes a list of message handler requests from the database."""
+ assert cursor is not None
query = "DELETE FROM message_handler_requests WHERE request_id IN ({})"
request_ids = set([r.request_id for r in requests])
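
A pattern repeated throughout this file: methods accept `cursor: Optional[cursors.Cursor] = None` and then immediately `assert cursor is not None`. The `Optional` default exists only to satisfy the signature; `mysql_utils.WithTransaction` always injects a live cursor at call time, so the assert narrows the type and documents the invariant. A simplified sketch of the idea (the pool and context-manager details are hypothetical, not the real decorator):

```python
import functools


def with_transaction(func):
  @functools.wraps(func)
  def wrapper(self, *args, **kwargs):
    connection = self.pool.get()  # Hypothetical connection pool.
    with connection:              # Commits on success, rolls back on error.
      kwargs["cursor"] = connection.cursor()
      return func(self, *args, **kwargs)
  return wrapper
```
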
@@ -162,9 +149,10 @@ def _LeaseMessageHandlerRequests(
self,
lease_time: rdfvalue.Duration,
limit: int = 1000,
- cursor: Optional[MySQLdb.cursors.Cursor] = None,
+ cursor: Optional[cursors.Cursor] = None,
) -> Iterable[objects_pb2.MessageHandlerRequest]:
"""Leases a number of message handler requests up to the indicated limit."""
+ assert cursor is not None
now = rdfvalue.RDFDatetime.Now()
now_str = mysql_utils.RDFDatetimeToTimestamp(now)
@@ -207,9 +195,10 @@ def WriteFlowObject(
self,
flow_obj: flows_pb2.Flow,
allow_update: bool = True,
- cursor: Optional[MySQLdb.cursors.Cursor] = None,
+ cursor: Optional[cursors.Cursor] = None,
) -> None:
"""Writes a flow object to the database."""
+ assert cursor is not None
query = """
INSERT INTO flows (client_id, flow_id, long_flow_id, parent_flow_id,
@@ -304,7 +293,7 @@ def _FlowObjectFromRow(self, row) -> flows_pb2.Flow:
flow_obj.flow_class_name = name
if creator is not None:
flow_obj.creator = creator
- if flow_state not in [None, rdf_flow_objects.Flow.FlowState.UNSET]:
+ if flow_state not in [None, flows_pb2.Flow.FlowState.UNSET]:
flow_obj.flow_state = flow_state
if next_request_to_process:
flow_obj.next_request_to_process = next_request_to_process
@@ -374,9 +363,11 @@ def ReadFlowObject(
self,
client_id: str,
flow_id: str,
- cursor: Optional[MySQLdb.cursors.Cursor] = None,
+ cursor: Optional[cursors.Cursor] = None,
) -> flows_pb2.Flow:
"""Reads a flow object from the database."""
+ assert cursor is not None
+
query = (
f"SELECT {self.FLOW_DB_FIELDS} "
"FROM flows WHERE client_id=%s AND flow_id=%s"
@@ -400,9 +391,11 @@ def ReadAllFlowObjects(
max_create_time: Optional[rdfvalue.RDFDatetime] = None,
include_child_flows: bool = True,
not_created_by: Optional[Iterable[str]] = None,
- cursor: Optional[MySQLdb.cursors.Cursor] = None,
+ cursor: Optional[cursors.Cursor] = None,
) -> List[flows_pb2.Flow]:
"""Returns all flow objects."""
+ assert cursor is not None
+
conditions = []
args = []
@@ -448,6 +441,8 @@ def LeaseFlowForProcessing(
cursor: Optional[cursors.Cursor] = None,
) -> flows_pb2.Flow:
"""Marks a flow as being processed on this worker and returns it."""
+ assert cursor is not None
+
query = (
f"SELECT {self.FLOW_DB_FIELDS} "
"FROM flows WHERE client_id=%s AND flow_id=%s"
@@ -536,6 +531,8 @@ def UpdateFlow(
cursor: Optional[cursors.Cursor] = None,
) -> None:
"""Updates flow objects in the database."""
+ assert cursor is not None
+
updates = []
args = []
if isinstance(flow_obj, flows_pb2.Flow):
@@ -600,6 +597,8 @@ def _WriteFlowProcessingRequests(
cursor: Optional[cursors.Cursor],
) -> None:
"""Returns a (query, args) tuple that inserts the given requests."""
+ assert cursor is not None
+
templates = []
args = []
for req in requests:
@@ -628,6 +627,8 @@ def WriteFlowRequests(
cursor: Optional[cursors.Cursor] = None,
) -> None:
"""Writes a list of flow requests to the database."""
+ assert cursor is not None
+
args = []
templates = []
flow_keys = []
@@ -714,6 +715,7 @@ def _WriteResponses(
cursor: Optional[cursors.Cursor],
) -> None:
"""Builds the writes to store the given responses in the db."""
+ assert cursor is not None
query = (
"INSERT IGNORE INTO flow_responses "
@@ -770,9 +772,10 @@ def _WriteFlowResponsesAndExpectedUpdates(
flows_pb2.FlowIterator,
]
],
- cursor: Optional[MySQLdb.cursors.Cursor] = None,
+ cursor: Optional[cursors.Cursor] = None,
) -> None:
"""Writes a flow responses and updates flow requests expected counts."""
+ assert cursor is not None
self._WriteResponses(responses, cursor)
@@ -803,6 +806,7 @@ def _ReadFlowResponseCounts(
cursor: Optional[cursors.Cursor] = None,
) -> Mapping[Tuple[str, str, str], int]:
"""Reads counts of responses for the given requests."""
+ assert cursor is not None
query = """
SELECT
@@ -850,6 +854,7 @@ def _ReadAndLockNextRequestsToProcess(
cursor: Optional[cursors.Cursor] = None,
) -> Mapping[Tuple[str, str], str]:
"""Reads and locks the next_request_to_process for a number of flows."""
+ assert cursor is not None
query = """
SELECT client_id, flow_id, next_request_to_process
@@ -885,8 +890,9 @@ def _ReadLockAndUpdateAffectedRequests(
request_keys: AbstractSet[Tuple[str, str, str]],
response_counts: Mapping[Tuple[str, str, str], int],
cursor: Optional[cursors.Cursor] = None,
- ) -> Mapping[Tuple[str, str, str], rdf_flow_objects.FlowRequest]:
+ ) -> Mapping[Tuple[str, str, str], flows_pb2.FlowRequest]:
"""Reads, locks, and updates completed requests."""
+ assert cursor is not None
condition_template = """
(flow_requests.client_id = %s AND
@@ -934,8 +940,10 @@ def _ReadLockAndUpdateAffectedRequests(
db_utils.IntToFlowID(flow_id_int),
request_id,
)
- r = rdf_flow_objects.FlowRequest.FromSerializedBytes(request)
- affected_requests[request_key] = r
+ parsed_request = flows_pb2.FlowRequest()
+ parsed_request.ParseFromString(request)
+
+ affected_requests[request_key] = parsed_request
query = """
UPDATE flow_requests
@@ -960,6 +968,7 @@ def _UpdateRequestsAndScheduleFPRs(
cursor: Optional[cursors.Cursor] = None,
) -> Sequence[flows_pb2.FlowProcessingRequest]:
"""Updates requests and writes FlowProcessingRequests if needed."""
+ assert cursor is not None
request_keys = set(
(r.client_id, r.flow_id, r.request_id) for r in responses
@@ -978,15 +987,15 @@ def _UpdateRequestsAndScheduleFPRs(
return []
fprs_to_write = []
- for request_key, r in affected_requests.items():
+ for request_key, request in affected_requests.items():
client_id, flow_id, request_id = request_key
if next_requests[(client_id, flow_id)] == request_id:
flow_processing_request = flows_pb2.FlowProcessingRequest(
- client_id=r.client_id,
- flow_id=r.flow_id,
+ client_id=request.client_id,
+ flow_id=request.flow_id,
)
- if r.start_time is not None:
- flow_processing_request.delivery_time = int(r.start_time)
+ if request.HasField("start_time"):
+ flow_processing_request.delivery_time = request.start_time
fprs_to_write.append(flow_processing_request)
@@ -1025,6 +1034,8 @@ def UpdateIncrementalFlowRequests(
cursor: Optional[cursors.Cursor] = None,
) -> None:
"""Updates next response ids of given requests."""
+ assert cursor is not None
+
if not next_response_id_updates:
return
@@ -1048,6 +1059,8 @@ def DeleteFlowRequests(
cursor: Optional[cursors.Cursor] = None,
) -> None:
"""Deletes a list of flow requests from the database."""
+ assert cursor is not None
+
if not requests:
return
@@ -1097,6 +1110,8 @@ def ReadAllFlowRequestsAndResponses(
]
]:
"""Reads all requests and responses for a given flow from the database."""
+ assert cursor is not None
+
query = (
"SELECT request, needs_processing, responses_expected, "
"callback_state, next_response_id, UNIX_TIMESTAMP(timestamp) "
@@ -1160,6 +1175,8 @@ def DeleteAllFlowRequestsAndResponses(
cursor: Optional[cursors.Cursor] = None,
) -> None:
"""Deletes all requests and responses for a given flow from the database."""
+ assert cursor is not None
+
args = [db_utils.ClientIDToInt(client_id), db_utils.FlowIDToInt(flow_id)]
res_query = "DELETE FROM flow_responses WHERE client_id=%s AND flow_id=%s"
cursor.execute(res_query, args)
@@ -1187,6 +1204,8 @@ def ReadFlowRequestsReadyForProcessing(
],
]:
"""Reads all requests for a flow that can be processed by the worker."""
+ assert cursor is not None
+
query = (
"SELECT request, needs_processing, responses_expected, "
"callback_state, next_response_id, "
@@ -1276,6 +1295,7 @@ def ReleaseProcessedFlow(
cursor: Optional[cursors.Cursor] = None,
) -> bool:
"""Releases a flow that the worker was processing to the database."""
+ assert cursor is not None
update_query = """
UPDATE flows
@@ -1347,8 +1367,10 @@ def WriteFlowProcessingRequests(
def ReadFlowProcessingRequests(
self,
cursor: Optional[cursors.Cursor] = None,
- ) -> Sequence[rdf_flows.FlowProcessingRequest]:
+ ) -> Sequence[flows_pb2.FlowProcessingRequest]:
"""Reads all flow processing requests from the database."""
+ assert cursor is not None
+
query = (
"SELECT request, UNIX_TIMESTAMP(timestamp) "
"FROM flow_processing_requests"
@@ -1370,6 +1392,8 @@ def AckFlowProcessingRequests(
cursor: Optional[cursors.Cursor] = None,
) -> None:
"""Deletes a list of flow processing requests from the database."""
+ assert cursor is not None
+
if not requests:
return
@@ -1395,6 +1419,8 @@ def DeleteAllFlowProcessingRequests(
self, cursor: Optional[cursors.Cursor] = None
) -> None:
"""Deletes all flow processing requests from the database."""
+ assert cursor is not None
+
query = "DELETE FROM flow_processing_requests WHERE true"
cursor.execute(query)
@@ -1521,21 +1547,22 @@ def UnregisterFlowProcessingHandler(
def _WriteFlowResultsOrErrors(
self,
table_name: str,
- results: Sequence[T],
+ results: Sequence[Union[flows_pb2.FlowResult, flows_pb2.FlowError]],
cursor: Optional[cursors.Cursor] = None,
):
"""Writes flow results/errors for a given flow."""
+ assert cursor is not None
- query = (
- f"INSERT INTO {table_name} "
- "(client_id, flow_id, hunt_id, timestamp, payload, type, tag) "
- "VALUES "
- )
+ query = f"""
+ INSERT INTO {table_name} (
+ client_id, flow_id, hunt_id, timestamp, payload, payload_any, type, tag
+ ) VALUES
+ """
templates = []
args = []
for r in results:
- templates.append("(%s, %s, %s, FROM_UNIXTIME(%s), %s, %s, %s)")
+ templates.append("(%s, %s, %s, FROM_UNIXTIME(%s), %s, %s, %s, %s)")
args.append(db_utils.ClientIDToInt(r.client_id))
args.append(db_utils.FlowIDToInt(r.flow_id))
if r.hunt_id:
@@ -1545,7 +1572,10 @@ def _WriteFlowResultsOrErrors(
args.append(
mysql_utils.RDFDatetimeToTimestamp(rdfvalue.RDFDatetime.Now())
)
+ # TODO: Remove writing to payload column after a transition
+ # period.
args.append(r.payload.value)
+ args.append(r.payload.SerializeToString())
args.append(db_utils.TypeURLToRDFTypeName(r.payload.type_url))
args.append(r.tag)
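
Note the dual write during the transition: each row stores both the legacy `payload` column (the bare serialized message, whose type must be tracked separately) and the new `payload_any` column (the full serialized `Any`, which carries its own type URL). A small illustration of what the two blobs contain, assuming `r.payload` is a `google.protobuf.any_pb2.Any`:

```python
from google.protobuf import any_pb2
from google.protobuf import timestamp_pb2

payload = any_pb2.Any()
payload.Pack(timestamp_pb2.Timestamp(seconds=1))  # Any sample message works.

legacy_blob = payload.value             # Bare serialized message; type is lost.
any_blob = payload.SerializeToString()  # Full `Any`: type URL plus value.

assert any_pb2.Any.FromString(any_blob).type_url == payload.type_url
```
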
@@ -1566,7 +1596,7 @@ def WriteFlowResults(self, results: Sequence[flows_pb2.FlowResult]) -> None:
def _ReadFlowResultsOrErrors(
self,
table_name: str,
- result_cls: Type[T],
+ result_cls: Union[Type[flows_pb2.FlowResult], Type[flows_pb2.FlowError]],
client_id: str,
flow_id: str,
offset: int,
@@ -1575,13 +1605,15 @@ def _ReadFlowResultsOrErrors(
with_type: Optional[str] = None,
with_substring: Optional[str] = None,
cursor: Optional[cursors.Cursor] = None,
- ) -> Sequence[T]:
+ ) -> Union[Sequence[flows_pb2.FlowResult], Sequence[flows_pb2.FlowError]]:
"""Reads flow results/errors of a given flow using given query options."""
+ assert cursor is not None
+
client_id_int = db_utils.ClientIDToInt(client_id)
flow_id_int = db_utils.FlowIDToInt(flow_id)
query = f"""
- SELECT payload, type, UNIX_TIMESTAMP(timestamp), tag, hunt_id
+ SELECT payload, payload_any, type, UNIX_TIMESTAMP(timestamp), tag, hunt_id
FROM {table_name}
FORCE INDEX ({table_name}_by_client_id_flow_id_timestamp)
WHERE client_id = %s AND flow_id = %s """
@@ -1606,13 +1638,17 @@ def _ReadFlowResultsOrErrors(
cursor.execute(query, args)
ret = []
- for serialized_payload, payload_type, ts, tag, hid in cursor.fetchall():
- # TODO: for separation of concerns reasons,
- # ReadFlowResults/ReadFlowErrors shouldn't do the payload type validation,
- # they should be completely agnostic to what payloads get written/read
- # to/from the database. Keeping this logic here temporarily
- # to narrow the scope of the RDFProtoStruct->protos migration.
- if payload_type in rdfvalue.RDFValue.classes:
+ for (
+ serialized_payload,
+ payload_any,
+ payload_type,
+ ts,
+ tag,
+ hid,
+ ) in cursor.fetchall():
+ if payload_any is not None:
+ payload = any_pb2.Any.FromString(payload_any)
+ elif payload_type in rdfvalue.RDFValue.classes:
payload = any_pb2.Any(
type_url=db_utils.RDFTypeNameToTypeURL(payload_type),
value=serialized_payload,
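
On the read side the new column wins when present, and legacy rows fall back to reassembling an `Any` from the bare value plus the stored type name, mirroring the branch above. A sketch of that fallback (the type-URL helper below is a stand-in for `db_utils.RDFTypeNameToTypeURL`):

```python
from typing import Optional

from google.protobuf import any_pb2


def rdf_type_name_to_type_url(name: str) -> str:
  # Stand-in for db_utils.RDFTypeNameToTypeURL.
  return f"type.googleapis.com/grr.{name}"


def row_payload(
    serialized_payload: bytes,
    payload_any: Optional[bytes],
    payload_type: str,
) -> any_pb2.Any:
  if payload_any is not None:
    # New rows store the complete `Any`; nothing to reconstruct.
    return any_pb2.Any.FromString(payload_any)
  # Legacy rows: rebuild the `Any` from the bare value and the type name.
  return any_pb2.Any(
      type_url=rdf_type_name_to_type_url(payload_type),
      value=serialized_payload,
  )
```
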
@@ -1676,6 +1712,8 @@ def _CountFlowResultsOrErrors(
cursor: Optional[cursors.Cursor] = None,
) -> int:
"""Counts flow results/errors of a given flow using given query options."""
+ assert cursor is not None
+
query = (
"SELECT COUNT(*) "
f"FROM {table_name} "
@@ -1720,6 +1758,8 @@ def _CountFlowResultsOrErrorsByType(
cursor: Optional[cursors.Cursor] = None,
) -> Mapping[str, int]:
"""Returns counts of flow results/errors grouped by result type."""
+ assert cursor is not None
+
query = (
f"SELECT type, COUNT(*) FROM {table_name} "
f"FORCE INDEX ({table_name}_by_client_id_flow_id_timestamp) "
@@ -1794,7 +1834,9 @@ def CountFlowErrors(
)
def CountFlowErrorsByType(
- self, client_id: str, flow_id: str
+ self,
+ client_id: str,
+ flow_id: str,
) -> Mapping[str, int]:
"""Returns counts of flow errors grouped by error type."""
# Errors are similar to results, as they represent a somewhat related
@@ -1812,6 +1854,8 @@ def WriteFlowLogEntry(
cursor: Optional[cursors.Cursor] = None,
) -> None:
"""Writes a single flow log entry to the database."""
+ assert cursor is not None
+
query = """
INSERT INTO flow_log_entries
(client_id, flow_id, hunt_id, message)
@@ -1844,6 +1888,7 @@ def ReadFlowLogEntries(
cursor: Optional[cursors.Cursor] = None,
) -> Sequence[flows_pb2.FlowLogEntry]:
"""Reads flow log entries of a given flow using given query options."""
+ assert cursor is not None
query = (
"SELECT message, UNIX_TIMESTAMP(timestamp) "
@@ -1884,6 +1929,7 @@ def CountFlowLogEntries(
cursor: Optional[cursors.Cursor] = None,
) -> int:
"""Returns number of flow log entries of a given flow."""
+ assert cursor is not None
query = (
"SELECT COUNT(*) "
@@ -1900,9 +1946,11 @@ def CountFlowLogEntries(
def WriteFlowOutputPluginLogEntry(
self,
entry: flows_pb2.FlowOutputPluginLogEntry,
- cursor: Optional[MySQLdb.cursors.Cursor] = None,
+ cursor: Optional[cursors.Cursor] = None,
) -> None:
"""Writes a single output plugin log entry to the database."""
+ assert cursor is not None
+
query = """
INSERT INTO flow_output_plugin_log_entries
(client_id, flow_id, hunt_id, output_plugin_id,
@@ -1944,6 +1992,8 @@ def ReadFlowOutputPluginLogEntries(
cursor: Optional[cursors.Cursor] = None,
) -> Sequence[flows_pb2.FlowOutputPluginLogEntry]:
"""Reads flow output plugin log entries."""
+ assert cursor is not None
+
query = (
"SELECT log_entry_type, message, UNIX_TIMESTAMP(timestamp) "
"FROM flow_output_plugin_log_entries "
@@ -1995,6 +2045,8 @@ def CountFlowOutputPluginLogEntries(
cursor: Optional[cursors.Cursor] = None,
) -> int:
"""Returns number of flow output plugin log entries of a given flow."""
+ assert cursor is not None
+
query = (
"SELECT COUNT(*) "
"FROM flow_output_plugin_log_entries "
@@ -2018,9 +2070,11 @@ def CountFlowOutputPluginLogEntries(
def WriteScheduledFlow(
self,
scheduled_flow: flows_pb2.ScheduledFlow,
- cursor: Optional[MySQLdb.cursors.Cursor] = None,
+ cursor: Optional[cursors.Cursor] = None,
) -> None:
"""See base class."""
+ assert cursor is not None
+
sf = scheduled_flow
args = {
@@ -2059,9 +2113,10 @@ def DeleteScheduledFlow(
client_id: str,
creator: str,
scheduled_flow_id: str,
- cursor: Optional[MySQLdb.cursors.Cursor] = None,
+ cursor: Optional[cursors.Cursor] = None,
) -> None:
"""See base class."""
+ assert cursor is not None
cursor.execute(
"""
@@ -2094,6 +2149,7 @@ def ListScheduledFlows(
cursor: Optional[MySQLdb.cursors.Cursor] = None,
) -> Sequence[flows_pb2.ScheduledFlow]:
"""See base class."""
+ assert cursor is not None
query = """
SELECT
diff --git a/grr/server/grr_response_server/databases/mysql_migrations/0025.sql b/grr/server/grr_response_server/databases/mysql_migrations/0025.sql
new file mode 100644
index 0000000000..a7fd76e6cb
--- /dev/null
+++ b/grr/server/grr_response_server/databases/mysql_migrations/0025.sql
@@ -0,0 +1,5 @@
+ALTER TABLE flow_results
+ADD COLUMN payload_any MEDIUMBLOB DEFAULT NULL AFTER payload;
+
+ALTER TABLE flow_errors
+ADD COLUMN payload_any MEDIUMBLOB DEFAULT NULL AFTER payload;
diff --git a/grr/server/grr_response_server/file_store.py b/grr/server/grr_response_server/file_store.py
index 97b9f18355..483e82303b 100644
--- a/grr/server/grr_response_server/file_store.py
+++ b/grr/server/grr_response_server/file_store.py
@@ -372,55 +372,6 @@ def CheckHashes(
}
-def GetLastCollectionPathInfos(
- client_paths: Collection[db.ClientPath],
- max_timestamp: Optional[rdfvalue.RDFDatetime] = None,
-) -> Dict[db.ClientPath, rdf_objects.PathInfo]:
- """Returns PathInfos corresponding to last collection times.
-
- Args:
- client_paths: An iterable of ClientPath objects.
- max_timestamp: When specified, onlys PathInfo with a timestamp lower or
- equal to max_timestamp will be returned.
-
- Returns:
- A dict of ClientPath -> PathInfo where each PathInfo corresponds to a
- collected
- file. PathInfo may be None if no collection happened (ever or with a
- timestamp
- lower or equal then max_timestamp).
- """
- proto_dict = data_store.REL_DB.ReadLatestPathInfosWithHashBlobReferences(
- client_paths, max_timestamp=max_timestamp
- )
- rdf_dict = {}
- for k, v in proto_dict.items():
- rdf_dict[k] = mig_objects.ToRDFPathInfo(v) if v is not None else None
- return rdf_dict
-
-
-def GetLastCollectionPathInfo(
- client_path: str,
- max_timestamp: Optional[rdfvalue.RDFDatetime] = None,
-) -> rdf_objects.PathInfo:
- """Returns PathInfo corresponding to the last file collection time.
-
- Args:
- client_path: A ClientPath object.
- max_timestamp: When specified, the returned PathInfo will correspond to the
- latest file collection with a timestamp lower or equal to max_timestamp.
-
- Returns:
- PathInfo object corresponding to the latest collection or None if file
- wasn't collected (ever or, when max_timestamp is specified, before
- max_timestamp).
- """
-
- return GetLastCollectionPathInfos([client_path], max_timestamp=max_timestamp)[
- client_path
- ]
-
-
def OpenFile(
client_path: db.ClientPath,
max_timestamp: Optional[rdfvalue.RDFDatetime] = None,
diff --git a/grr/server/grr_response_server/flows/general/collectors.py b/grr/server/grr_response_server/flows/general/collectors.py
index 23f042dd3f..4c4431d609 100644
--- a/grr/server/grr_response_server/flows/general/collectors.py
+++ b/grr/server/grr_response_server/flows/general/collectors.py
@@ -6,7 +6,6 @@
from grr_response_core import config
from grr_response_core.lib import artifact_utils
-from grr_response_core.lib import parsers
from grr_response_core.lib import rdfvalue
from grr_response_core.lib.rdfvalues import anomaly as rdf_anomaly
from grr_response_core.lib.rdfvalues import artifacts as rdf_artifacts
@@ -221,12 +220,6 @@ def Collect(self, artifact_obj):
max_size=self.args.max_file_size
),
)
- elif type_name == source_type.GREP:
- self.Grep(
- source,
- _GetPathType(self.args, self.client_os),
- _GetImplementationType(self.args),
- )
elif type_name == source_type.REGISTRY_KEY:
self.GetRegistryKey(source)
elif type_name == source_type.REGISTRY_VALUE:
@@ -310,60 +303,6 @@ def _CombineRegex(self, regex_list):
regex_combined = b"(%s)" % regex
return regex_combined
- def Grep(self, source, pathtype, implementation_type):
- """Grep files in paths for any matches to content_regex_list.
-
- When multiple regexes are supplied, combine
- them into a single regex as an OR match so that we check all regexes at
- once.
-
- Args:
- source: artifact source
- pathtype: pathspec path typed
- implementation_type: Pathspec implementation type to use.
- """
- path_list = self.InterpolateList(source.attributes.get("paths", []))
-
- # `content_regex_list` elements should be binary strings, but forcing
- # artifact creators to use verbose YAML syntax for binary literals would
- # be cruel. Therefore, we allow both kind of strings and we convert to bytes
- # if required.
- content_regex_list = []
- for content_regex in source.attributes.get("content_regex_list", []):
- if isinstance(content_regex, Text):
- content_regex = content_regex.encode("utf-8")
- content_regex_list.append(content_regex)
-
- content_regex_list = self.InterpolateList(content_regex_list)
-
- regex_condition = rdf_file_finder.FileFinderContentsRegexMatchCondition(
- regex=self._CombineRegex(content_regex_list),
- bytes_before=0,
- bytes_after=0,
- mode="ALL_HITS",
- )
-
- file_finder_condition = rdf_file_finder.FileFinderCondition(
- condition_type=(
- rdf_file_finder.FileFinderCondition.Type.CONTENTS_REGEX_MATCH
- ),
- contents_regex_match=regex_condition,
- )
-
- self.CallFlow(
- file_finder.FileFinder.__name__,
- paths=path_list,
- conditions=[file_finder_condition],
- action=rdf_file_finder.FileFinderAction(),
- pathtype=pathtype,
- implementation_type=implementation_type,
- request_data={
- "artifact_name": self.current_artifact_name,
- "source": source.ToPrimitiveDict(),
- },
- next_state=self.ProcessCollected.__name__,
- )
-
def GetRegistryKey(self, source):
self.CallFlow(
filesystem.Glob.__name__,
@@ -396,17 +335,21 @@ def GetRegistryValue(self, source):
# we do here.
path = kvdict["key"]
- expanded_paths = []
- try:
- expanded_paths = artifact_utils.InterpolateKbAttributes(
- path, mig_client.ToProtoKnowledgeBase(self.state.knowledge_base)
- )
- except artifact_utils.KbInterpolationMissingAttributesError as error:
- logging.error(str(error))
- if not self.args.ignore_interpolation_errors:
- raise
+ interpolation = artifact_utils.KnowledgeBaseInterpolation(
+ pattern=path,
+ kb=mig_client.ToProtoKnowledgeBase(self.state.knowledge_base),
+ )
+
+ for log in interpolation.logs:
+ self.Log("knowledgebase registry path interpolation: %s", log)
- new_paths.update(expanded_paths)
+ if (
+ not interpolation.results
+ and not self.args.ignore_interpolation_errors
+ ):
+ raise flow_base.FlowError(f"Registry path {path!r} interpolation error")
+
+ new_paths.update(interpolation.results)
if has_glob:
self.CallFlow(
@@ -443,7 +386,6 @@ def _StartSubArtifactCollector(self, artifact_list, source, next_state):
ArtifactCollectorFlow.__name__,
artifact_list=artifact_list,
use_raw_filesystem_access=self.args.use_raw_filesystem_access,
- apply_parsers=self.args.apply_parsers,
implementation_type=self.args.implementation_type,
max_file_size=self.args.max_file_size,
ignore_interpolation_errors=self.args.ignore_interpolation_errors,
@@ -577,17 +519,21 @@ def _Interpolate(
if knowledgebase is None:
knowledgebase = self.state.knowledge_base
- try:
- return artifact_utils.InterpolateKbAttributes(
- pattern, mig_client.ToProtoKnowledgeBase(knowledgebase)
- )
- except artifact_utils.KbInterpolationMissingAttributesError as error:
+ interpolation = artifact_utils.KnowledgeBaseInterpolation(
+ pattern=pattern,
+ kb=mig_client.ToProtoKnowledgeBase(knowledgebase),
+ )
+
+ for log in interpolation.logs:
+ self.Log("knowledgebase interpolation: %s", log)
+
+ if not interpolation.results:
if self.args.old_client_snapshot_fallback:
return []
- if self.args.ignore_interpolation_errors:
- logging.error(str(error))
- return []
- raise
+ if not self.args.ignore_interpolation_errors:
+ raise flow_base.FlowError(f"{pattern} interpolation error")
+
+ return interpolation.results
def RunGrrClientAction(self, source):
"""Call a GRR Client Action."""
@@ -641,7 +587,7 @@ def ProcessCollected(self, responses):
self.state.failed_count += 1
self.state.artifacts_failed.append(artifact_name)
- self._ParseResponses(list(responses), artifact_name, source)
+ self._ProcessResponses(list(responses), artifact_name, source)
def ProcessCollectedRegistryStatEntry(self, responses):
"""Create AFF4 objects for registry statentries.
@@ -737,7 +683,7 @@ def _GetArtifactReturnTypes(self, source):
if source:
return source["returned_types"]
- def _ParseResponses(self, responses, artifact_name, source):
+ def _ProcessResponses(self, responses, artifact_name, source):
"""Create a result parser sending different arguments for diff parsers.
Args:
@@ -747,19 +693,11 @@ def _ParseResponses(self, responses, artifact_name, source):
"""
artifact_return_types = self._GetArtifactReturnTypes(source)
- if self.args.apply_parsers:
- parser_factory = parsers.ArtifactParserFactory(artifact_name)
- results = artifact.ApplyParsersToResponses(
- parser_factory, responses, self
- )
- else:
- results = responses
-
# Increment artifact result count in flow progress.
progress = self._GetOrInsertArtifactProgress(artifact_name)
- progress.num_results += len(results)
+ progress.num_results += len(responses)
- for result in results:
+ for result in responses:
result_type = result.__class__.__name__
if result_type == "Anomaly":
self.SendReply(result)
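
The two hunks above replace the exception-driven `artifact_utils.InterpolateKbAttributes` with a `KnowledgeBaseInterpolation` object that reports problems through `logs` and an empty `results` list instead of raising. A minimal sketch of the new calling convention, using only the names and signatures visible in this diff (the sample knowledge base values are illustrative):

```python
from grr_response_core.lib import artifact_utils
from grr_response_proto import knowledge_base_pb2

# Illustrative knowledge base with two users.
kb = knowledge_base_pb2.KnowledgeBase()
kb.users.add(username="alice")
kb.users.add(username="bob")

interpolation = artifact_utils.KnowledgeBaseInterpolation(
    pattern=r"%%users.username%%\aa",
    kb=kb,
)

# Callers now inspect logs and results rather than catching
# KbInterpolationMissingAttributesError.
for log in interpolation.logs:
  print(f"knowledgebase interpolation: {log}")

if not interpolation.results:
  raise RuntimeError("interpolation error")  # The flows raise FlowError here.

print(interpolation.results)  # e.g. ['alice\\aa', 'bob\\aa']
```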
diff --git a/grr/server/grr_response_server/flows/general/collectors_test.py b/grr/server/grr_response_server/flows/general/collectors_test.py
index 08f55b79c6..f5f172419d 100644
--- a/grr/server/grr_response_server/flows/general/collectors_test.py
+++ b/grr/server/grr_response_server/flows/general/collectors_test.py
@@ -5,10 +5,8 @@
into collectors_*_test.py files.
"""
-import itertools
import os
import shutil
-from typing import IO
from unittest import mock
from absl import app
@@ -17,16 +15,12 @@
from grr_response_client import actions
from grr_response_client.client_actions import standard
from grr_response_core import config
-from grr_response_core.lib import factory
-from grr_response_core.lib import parser
-from grr_response_core.lib import parsers
from grr_response_core.lib import rdfvalue
from grr_response_core.lib.rdfvalues import artifacts as rdf_artifacts
from grr_response_core.lib.rdfvalues import client as rdf_client
from grr_response_core.lib.rdfvalues import client_fs as rdf_client_fs
from grr_response_core.lib.rdfvalues import mig_artifacts
from grr_response_core.lib.rdfvalues import paths as rdf_paths
-from grr_response_core.lib.rdfvalues import protodict as rdf_protodict
from grr_response_core.lib.util import temp
from grr_response_proto import knowledge_base_pb2
from grr_response_proto import objects_pb2
@@ -84,9 +78,12 @@ class TestArtifactCollectors(
"""Test the artifact collection mechanism with fake artifacts."""
def testInterpolateArgs(self):
+ client_id = db_test_utils.InitializeClient(data_store.REL_DB)
+ flow_id = db_test_utils.InitializeFlow(data_store.REL_DB, client_id)
+
args = rdf_artifacts.ArtifactCollectorFlowArgs()
collect_flow = collectors.ArtifactCollectorFlow(
- rdf_flow_objects.Flow(args=args)
+ rdf_flow_objects.Flow(client_id=client_id, flow_id=flow_id, args=args)
)
kb = rdf_client.KnowledgeBase()
@@ -129,13 +126,13 @@ def testInterpolateArgs(self):
# Ignore the failure in users.desktop, report the others.
collect_flow.args.ignore_interpolation_errors = True
list_args = collect_flow.InterpolateList(
- ["%%users.desktop%%", r"%%users.username%%\aa"]
+ ["%%users.uid%%", r"%%users.username%%\aa"]
)
self.assertCountEqual(list_args, [r"test1\aa", r"test2\aa"])
# Both fail.
list_args = collect_flow.InterpolateList(
- [r"%%users.desktop%%\aa", r"%%users.sid%%\aa"]
+ [r"%%users.uid%%\aa", r"%%users.sid%%\aa"]
)
self.assertCountEqual(list_args, [])
@@ -155,44 +152,6 @@ def testGrepRegexCombination(self):
b"(a|b)|([^_]b)|(c|d)",
)
- def testGrep(self):
- class MockCallFlow(object):
-
- def CallFlow(self, *args, **kwargs):
- self.args = args
- self.kwargs = kwargs
-
- mock_call_flow = MockCallFlow()
- with mock.patch.object(
- collectors.ArtifactCollectorFlow, "CallFlow", mock_call_flow.CallFlow
- ):
- args = mock.Mock()
- args.ignore_interpolation_errors = False
-
- collect_flow = collectors.ArtifactCollectorFlow(
- rdf_flow_objects.Flow(args=args)
- )
- kb = rdf_client.KnowledgeBase()
- kb.MergeOrAddUser(rdf_client.User(username="test1"))
- kb.MergeOrAddUser(rdf_client.User(username="test2"))
- collect_flow.state["knowledge_base"] = kb
- collect_flow.current_artifact_name = "blah"
-
- collector = rdf_artifacts.ArtifactSource(
- type=rdf_artifacts.ArtifactSource.SourceType.GREP,
- attributes={
- "paths": ["/etc/passwd"],
- "content_regex_list": [b"^a%%users.username%%b$"],
- },
- )
- collect_flow.Grep(collector, rdf_paths.PathSpec.PathType.TSK, None)
-
- conditions = mock_call_flow.kwargs["conditions"]
- self.assertLen(conditions, 1)
- regexes = conditions[0].contents_regex_match.regex.AsBytes()
- self.assertCountEqual(regexes.split(b"|"), [b"(^atest1b$)", b"(^atest2b$)"])
- self.assertEqual(mock_call_flow.kwargs["paths"], ["/etc/passwd"])
-
def testGetArtifact(self):
"""Test we can get a basic artifact."""
# Dynamically add an ArtifactSource specifying the base path.
@@ -424,35 +383,6 @@ def _RunClientActionArtifact(
return flow_test_lib.GetFlowResults(client_id, flow_id)
- @mock.patch.object(
- parsers,
- "SINGLE_RESPONSE_PARSER_FACTORY",
- factory.Factory(parsers.SingleResponseParser),
- )
- def testParsingFailure(self):
- """Test a command artifact where parsing the response fails."""
-
- filesystem_test_lib.Command("/bin/echo", args=["1"])
-
- InitGRRWithTestArtifacts(self)
-
- client_id = self.SetupClient(0, system="Linux")
-
- parsers.SINGLE_RESPONSE_PARSER_FACTORY.Register(
- "TestCmd", TestCmdNullParser
- )
- artifact_list = ["TestUntypedEchoArtifact"]
-
- flow_id = flow_test_lib.StartAndRunFlow(
- collectors.ArtifactCollectorFlow,
- action_mocks.ActionMock(standard.ExecuteCommand),
- artifact_list=artifact_list,
- apply_parsers=True,
- client_id=client_id,
- )
- results = flow_test_lib.GetFlowResults(client_id, flow_id)
- self.assertEmpty(results)
-
def testFlowProgressHasEntryForArtifactWithoutResults(self):
client_id = self.SetupClient(0, system="Linux")
with mock.patch.object(psutil, "process_iter", lambda: iter([])):
@@ -589,7 +519,6 @@ def Run(self, args: any) -> None:
action_mocks.ActionMock(),
artifact_list=["Planta"],
client_id=client_id,
- apply_parsers=False,
use_raw_filesystem_access=True,
implementation_type=rdf_paths.PathSpec.ImplementationType.DIRECT,
max_file_size=1,
@@ -599,7 +528,6 @@ def Run(self, args: any) -> None:
child_flows = data_store.REL_DB.ReadChildFlowObjects(client_id, flow_id)
self.assertLen(child_flows, 1)
args = mig_flow_objects.ToRDFFlow(child_flows[0]).args
- self.assertEqual(args.apply_parsers, False)
self.assertEqual(args.use_raw_filesystem_access, True)
self.assertEqual(
args.implementation_type,
@@ -608,29 +536,6 @@ def Run(self, args: any) -> None:
self.assertEqual(args.max_file_size, 1)
self.assertEqual(args.ignore_interpolation_errors, True)
- def testGrep2(self):
- client_id = self.SetupClient(0, system="Linux")
- client_mock = action_mocks.ClientFileFinderClientMock()
- with temp.AutoTempFilePath() as temp_file_path:
- with open(temp_file_path, "w") as f:
- f.write("foo")
- coll1 = rdf_artifacts.ArtifactSource(
- type=rdf_artifacts.ArtifactSource.SourceType.GREP,
- attributes={
- "paths": [temp_file_path],
- "content_regex_list": ["f|o+"],
- },
- )
- self.fakeartifact.sources.append(coll1)
- results = self._RunClientActionArtifact(
- client_id, client_mock, ["FakeArtifact"]
- )
- matches = itertools.chain.from_iterable(
- [m.data for m in r.matches] for r in results
- )
- expected_matches = [b"f", b"oo"]
- self.assertCountEqual(matches, expected_matches)
-
def testDirectory(self):
client_id = self.SetupClient(0, system="Linux")
client_mock = action_mocks.FileFinderClientMock()
@@ -869,56 +774,6 @@ def testSourceMeetsOSConditions(self):
self.assertTrue(collectors.MeetsOSConditions(knowledge_base, source))
-class TestCmdParser(parser.CommandParser):
- output_types = [rdf_client.SoftwarePackages]
- supported_artifacts = ["TestEchoArtifact"]
-
- def Parse(self, cmd, args, stdout, stderr, return_val, knowledge_base):
- del cmd, args, stderr, return_val, knowledge_base # Unused
- yield rdf_client.SoftwarePackages(
- packages=[
- rdf_client.SoftwarePackage.Installed(
- name="Package",
- description=stdout,
- version="1",
- architecture="amd64",
- ),
- ]
- )
-
-
-class TestCmdNullParser(parser.CommandParser):
- output_types = [rdf_client.SoftwarePackages]
- supported_artifacts = ["TestUntypedEchoArtifact"]
-
- def Parse(self, cmd, args, stdout, stderr, return_val, knowledge_base):
- del cmd, args, stderr, return_val, knowledge_base # Unused
- # This parser tests flow behavior when the input can't be parsed.
- return []
-
-
-class TestFileParser(parsers.SingleFileParser[rdf_protodict.AttributedDict]):
- output_types = [rdf_protodict.AttributedDict]
- supported_artifacts = ["TestFileArtifact"]
-
- def ParseFile(
- self,
- knowledge_base: rdf_client.KnowledgeBase,
- pathspec: rdf_paths.PathSpec,
- filedesc: IO[bytes],
- ):
- del knowledge_base # Unused.
-
- lines = set([l.strip() for l in filedesc.read().splitlines()])
-
- users = list(filter(None, lines))
-
- filename = pathspec.path
- cfg = {"filename": filename, "users": users}
-
- yield rdf_protodict.AttributedDict(**cfg)
-
-
def InitGRRWithTestArtifacts(self):
artifact_registry.REGISTRY.ClearSources()
artifact_registry.REGISTRY.ClearRegistry()
diff --git a/grr/server/grr_response_server/flows/general/deprecated.py b/grr/server/grr_response_server/flows/general/deprecated.py
new file mode 100644
index 0000000000..f0c85fd206
--- /dev/null
+++ b/grr/server/grr_response_server/flows/general/deprecated.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+"""Deprecated flows."""
+
+import logging
+from grr_response_server import flow_base
+
+
+class AbstractDeprecatedFlow(flow_base.FlowBase):
+ """Extend this class to mark a flow as deprecated."""
+
+ deprecated = True
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ logging.warning("Deprecated flow %s was called", self.__class__.__name__)
+
+
+class GetExecutables(AbstractDeprecatedFlow):
+ """Stub for deprecated GetExecutables flow."""
diff --git a/grr/server/grr_response_server/flows/general/deprecated_test.py b/grr/server/grr_response_server/flows/general/deprecated_test.py
new file mode 100644
index 0000000000..a82bd9cc6c
--- /dev/null
+++ b/grr/server/grr_response_server/flows/general/deprecated_test.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+"""Tests for deprecated flows."""
+
+from absl import app
+
+from grr_response_core.lib import registry
+from grr_response_server import flow_base
+from grr_response_server.flows.general import deprecated
+from grr.test_lib import flow_test_lib
+from grr.test_lib import test_lib
+
+
+class DeprecatedFlowsTest(flow_test_lib.FlowTestsBaseclass):
+
+ class DeprecatedFlow(deprecated.AbstractDeprecatedFlow):
+ pass
+
+ class ValidFlow(flow_base.FlowBase):
+ pass
+
+ def testRegistryGetDeprecatedFlow(self):
+ fetched_deprecated_flow = registry.FlowRegistry.FlowClassByName(
+ self.DeprecatedFlow.__name__
+ )
+ self.assertEqual(fetched_deprecated_flow, self.DeprecatedFlow)
+ self.assertIn(
+ deprecated.AbstractDeprecatedFlow, fetched_deprecated_flow.__bases__
+ )
+
+ def testGetNotDeprecatedFlow(self):
+ fetched_valid_flow = registry.FlowRegistry.FlowClassByName(
+ self.ValidFlow.__name__
+ )
+ self.assertEqual(fetched_valid_flow, self.ValidFlow)
+ self.assertNotIn(
+ deprecated.AbstractDeprecatedFlow, fetched_valid_flow.__bases__
+ )
+
+ def testRegistryStoresFlowsAccordingToTheirDeprecationStatus(self):
+ self.assertNotIn(
+ self.DeprecatedFlow.__name__,
+ registry.FlowRegistry.FLOW_REGISTRY,
+ )
+ self.assertIn(
+ self.DeprecatedFlow.__name__,
+ registry.FlowRegistry.DEPRECATED_FLOWS,
+ )
+ self.assertIn(
+ self.ValidFlow.__name__,
+ registry.FlowRegistry.FLOW_REGISTRY,
+ )
+ self.assertNotIn(
+ self.ValidFlow.__name__,
+ registry.FlowRegistry.DEPRECATED_FLOWS,
+ )
+
+
+def main(argv):
+ # Run the full test suite.
+ test_lib.main(argv)
+
+
+if __name__ == "__main__":
+ app.run(main)
diff --git a/grr/server/grr_response_server/flows/general/discovery_test.py b/grr/server/grr_response_server/flows/general/discovery_test.py
index db4fd7794a..31237436c9 100644
--- a/grr/server/grr_response_server/flows/general/discovery_test.py
+++ b/grr/server/grr_response_server/flows/general/discovery_test.py
@@ -45,7 +45,6 @@
from grr.test_lib import db_test_lib
from grr.test_lib import flow_test_lib
from grr.test_lib import notification_test_lib
-from grr.test_lib import parser_test_lib
from grr.test_lib import stats_test_lib
from grr.test_lib import test_lib
from grr.test_lib import vfs_test_lib
@@ -230,7 +229,6 @@ def testInterrogateCloudMetadataLinux(self):
client = self._OpenClient(client_id)
self._CheckCloudMetadata(client)
- @parser_test_lib.WithAllParsers
def testInterrogateCloudMetadataWindows(self):
"""Check google cloud metadata on windows."""
client_id = self._SetupMinimalClient()
@@ -251,7 +249,6 @@ def testInterrogateCloudMetadataWindows(self):
client = self._OpenClient(client_id)
self._CheckCloudMetadata(client)
- @parser_test_lib.WithAllParsers
def testInterrogateLinux(self):
"""Test the Interrogate flow."""
client_id = self._SetupMinimalClient()
@@ -292,7 +289,6 @@ def testInterrogateLinux(self):
self._CheckClientLibraries(client)
self._CheckMemory(client)
- @parser_test_lib.WithAllParsers
def testInterrogateWindows(self):
"""Test the Interrogate flow."""
client_id = self._SetupMinimalClient()
@@ -454,7 +450,6 @@ def StatFS(
self.assertLen(snapshot.volumes, 1)
self.assertEqual(snapshot.volumes[0].unixvolume.mount_point, "/")
- @parser_test_lib.WithAllParsers
@mock.patch.object(fleetspeak_utils, "GetLabelsFromFleetspeak")
def testFleetspeakClient(self, mock_labels_fn):
mock_labels_fn.return_value = ["foo", "bar"]
@@ -492,7 +487,6 @@ def testFleetspeakClient(self, mock_labels_fn):
self.assertEqual(fs_validation_tags[0].key, "IP")
self.assertEqual(fs_validation_tags[0].value, "12.34.56.78")
- @parser_test_lib.WithAllParsers
@mock.patch.object(fleetspeak_utils, "GetLabelsFromFleetspeak")
def testFleetspeakClient_OnlyGRRLabels(self, mock_labels_fn):
mock_labels_fn.return_value = []
@@ -570,7 +564,6 @@ def ExecuteCommand(
self.assertEqual(client_snapshot.edr_agents[0].name, "CrowdStrike")
self.assertEqual(client_snapshot.edr_agents[0].agent_id, agent_id)
- @parser_test_lib.WithAllParsers
def testSourceFlowIdIsSet(self):
client_id = self._SetupMinimalClient()
client_mock = action_mocks.InterrogatedClient()
@@ -850,7 +843,6 @@ def testProcessPasswdCacheUsers(
self.assertEqual(users[1].full_name, "Fó Fózyńczak")
self.assertEqual(users[1].shell, "/bin/bash")
- @parser_test_lib.WithAllParsers
def testForemanTimeIsResetOnClientSnapshotWrite(self):
client_id = self._SetupMinimalClient()
data_store.REL_DB.WriteClientMetadata(
diff --git a/grr/server/grr_response_server/flows/general/file_finder.py b/grr/server/grr_response_server/flows/general/file_finder.py
index aec9ec348b..e730d6c829 100644
--- a/grr/server/grr_response_server/flows/general/file_finder.py
+++ b/grr/server/grr_response_server/flows/general/file_finder.py
@@ -5,7 +5,6 @@
from typing import Collection, Optional, Sequence, Set, Tuple
from grr_response_core.lib import artifact_utils
-from grr_response_core.lib import interpolation
from grr_response_core.lib import rdfvalue
from grr_response_core.lib.rdfvalues import client_action as rdf_client_action
from grr_response_core.lib.rdfvalues import client_fs as rdf_client_fs
@@ -502,41 +501,22 @@ def Start(self):
self.state.num_blob_waits = 0
def _InterpolatePaths(self, globs: Sequence[str]) -> Optional[Sequence[str]]:
- kb: Optional[knowledge_base_pb2.ClientKnowledgeBase] = (
- self.client_knowledge_base
+ kb: knowledge_base_pb2.KnowledgeBase = (
+ self.client_knowledge_base or knowledge_base_pb2.KnowledgeBase()
)
paths = list()
- missing_attrs = list()
- unknown_attrs = list()
for glob in globs:
- # Only fail hard on missing knowledge base if there's actual
- # interpolation to be done.
- if kb is None:
- interpolator = interpolation.Interpolator(str(glob))
- if interpolator.Vars() or interpolator.Scopes():
- self.Log(
- f"Skipping glob '{glob}': can't interpolate with an "
- "empty knowledge base"
- )
- continue
+ interpolation = artifact_utils.KnowledgeBaseInterpolation(
+ pattern=str(glob),
+ kb=kb,
+ )
- try:
- paths.extend(artifact_utils.InterpolateKbAttributes(str(glob), kb))
- except artifact_utils.KbInterpolationMissingAttributesError as error:
- missing_attrs.extend(error.attrs)
- self.Log("Missing knowledgebase attributes: %s", error.attrs)
- except artifact_utils.KbInterpolationUnknownAttributesError as error:
- unknown_attrs.extend(error.attrs)
- self.Log("Unknown knowledgebase attributes: %s", error.attrs)
-
- if missing_attrs:
- self.Error(f"Missing knowledgebase attributes: {missing_attrs}")
- return None
- if unknown_attrs:
- self.Error(f"Unknown knowledgebase attributes: {unknown_attrs}")
- return None
+ for log in interpolation.logs:
+ self.Log("knowledgebase interpolation: %s", log)
+
+ paths.extend(interpolation.results)
if not paths:
self.Error(
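
`_InterpolatePaths` no longer special-cases a missing knowledge base: interpolating against an empty `KnowledgeBase()` simply produces no results plus a log line such as `'os' is missing` (see the test update below). A standalone sketch of the fallback pattern, where `interpolate_globs` is a hypothetical analogue of the flow method:

```python
from typing import Optional, Sequence

from grr_response_core.lib import artifact_utils
from grr_response_proto import knowledge_base_pb2


def interpolate_globs(
    globs: Sequence[str],
    client_kb: Optional[knowledge_base_pb2.KnowledgeBase] = None,
) -> list[str]:
  # A client that was never interrogated has no knowledge base; an empty
  # proto yields zero interpolation results instead of an exception.
  kb = client_kb or knowledge_base_pb2.KnowledgeBase()

  paths: list[str] = []
  for glob in globs:
    interpolation = artifact_utils.KnowledgeBaseInterpolation(
        pattern=str(glob), kb=kb
    )
    for log in interpolation.logs:
      print(f"knowledgebase interpolation: {log}")
    paths.extend(interpolation.results)
  return paths
```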
diff --git a/grr/server/grr_response_server/flows/general/file_finder_test.py b/grr/server/grr_response_server/flows/general/file_finder_test.py
index 15a1f52cb4..be56b54ef2 100644
--- a/grr/server/grr_response_server/flows/general/file_finder_test.py
+++ b/grr/server/grr_response_server/flows/general/file_finder_test.py
@@ -17,12 +17,12 @@
from grr_response_client import vfs
from grr_response_client.client_actions.file_finder_utils import uploading
from grr_response_core.lib import rdfvalue
-from grr_response_core.lib.rdfvalues import client as rdf_client
from grr_response_core.lib.rdfvalues import client_fs as rdf_client_fs
from grr_response_core.lib.rdfvalues import file_finder as rdf_file_finder
from grr_response_core.lib.rdfvalues import paths as rdf_paths
from grr_response_core.lib.util import temp
from grr_response_proto import flows_pb2
+from grr_response_proto import knowledge_base_pb2
from grr_response_proto import objects_pb2
from grr_response_server import data_store
from grr_response_server import file_store
@@ -1332,8 +1332,8 @@ def testClientFileFinderUnicodeLiteral(self):
self.assertCountEqual(relpaths, [u"厨房/卫浴洁.txt"])
def testPathInterpolation(self):
- bar = rdf_client.User(username="bar")
- baz = rdf_client.User(username="baz")
+ bar = knowledge_base_pb2.User(username="bar")
+ baz = knowledge_base_pb2.User(username="baz")
self.client_id = self.SetupClient(
0, system="foo", fqdn="norf", users=[bar, baz])
@@ -1438,7 +1438,6 @@ def testInterpolationMissingAttributes(self):
flow_obj = data_store.REL_DB.ReadFlowObject(client_id, flow_id)
self.assertEqual(flow_obj.flow_state, flows_pb2.Flow.FlowState.ERROR)
- self.assertIn("Missing knowledgebase attributes", flow_obj.error_message)
log_entries = data_store.REL_DB.ReadFlowLogEntries(
client_id=client_id, flow_id=flow_id, offset=0, count=1024)
@@ -1468,13 +1467,7 @@ def testInterpolationUnknownAttributes(self):
flow_obj = data_store.REL_DB.ReadFlowObject(client_id, flow_id)
self.assertEqual(flow_obj.flow_state, flows_pb2.Flow.FlowState.ERROR)
- self.assertIn("Unknown knowledgebase attributes", flow_obj.error_message)
-
- log_entries = data_store.REL_DB.ReadFlowLogEntries(
- client_id=client_id, flow_id=flow_id, offset=0, count=1024)
- self.assertLen(log_entries, 2)
- self.assertIn("foo", log_entries[0].message)
- self.assertIn("bar", log_entries[1].message)
+ self.assertEqual("`%%foo%%` does not exist", flow_obj.error_message)
def testSkipsGlobsWithInterpolationWhenNoKnowledgeBase(self):
creator = db_test_utils.InitializeUser(data_store.REL_DB)
@@ -1500,8 +1493,7 @@ def testSkipsGlobsWithInterpolationWhenNoKnowledgeBase(self):
)
self.assertLen(log_entries, 1)
self.assertIn(
- "Skipping glob '%%os%%': can't interpolate with an empty knowledge"
- " base",
+ "knowledgebase interpolation: 'os' is missing",
log_entries[0].message,
)
diff --git a/grr/server/grr_response_server/flows/general/filesystem_test.py b/grr/server/grr_response_server/flows/general/filesystem_test.py
index d3d7d0abf5..0cf15da5ac 100644
--- a/grr/server/grr_response_server/flows/general/filesystem_test.py
+++ b/grr/server/grr_response_server/flows/general/filesystem_test.py
@@ -11,11 +11,11 @@
from absl.testing import absltest
from grr_response_core.lib import utils
-from grr_response_core.lib.rdfvalues import client as rdf_client
from grr_response_core.lib.rdfvalues import file_finder as rdf_file_finder
from grr_response_core.lib.rdfvalues import paths as rdf_paths
from grr_response_core.lib.rdfvalues import structs as rdf_structs
from grr_response_core.lib.util import temp
+from grr_response_proto import knowledge_base_pb2
from grr_response_server import data_store
from grr_response_server import file_store
from grr_response_server import flow_responses
@@ -224,8 +224,8 @@ def testRecursiveListDirectoryDeep(self):
def testGlob(self):
"""Test that glob works properly."""
users = [
- rdf_client.User(username="test"),
- rdf_client.User(username="syslog")
+ knowledge_base_pb2.User(username="test"),
+ knowledge_base_pb2.User(username="syslog"),
]
client_id = self.SetupClient(0, users=users)
@@ -233,8 +233,8 @@ def testGlob(self):
# This glob selects all files which start with the username on this system.
paths = [
- os.path.join(self.base_path, "%%Users.username%%*"),
- os.path.join(self.base_path, "VFSFixture/var/*/wtmp")
+ os.path.join(self.base_path, "%%users.username%%*"),
+ os.path.join(self.base_path, "VFSFixture/var/*/wtmp"),
]
flow_test_lib.TestFlowHelper(
@@ -279,8 +279,8 @@ def _RunGlob(self, paths):
def testGlobWithStarStarRootPath(self):
"""Test ** expressions with root_path."""
users = [
- rdf_client.User(username="test"),
- rdf_client.User(username="syslog")
+ knowledge_base_pb2.User(username="test"),
+ knowledge_base_pb2.User(username="syslog"),
]
self.client_id = self.SetupClient(0, users=users)
@@ -516,9 +516,9 @@ def testGlobWildcardOnImage(self):
def testGlobDirectory(self):
"""Test that glob expands directories."""
users = [
- rdf_client.User(username="test", appdata="test_data/index.dat"),
- rdf_client.User(username="test2", appdata="test_data/History"),
- rdf_client.User(username="test3", appdata="%%PATH%%"),
+ knowledge_base_pb2.User(username="test", appdata="test_data/index.dat"),
+ knowledge_base_pb2.User(username="test2", appdata="test_data/History"),
+ knowledge_base_pb2.User(username="test3", appdata="%%PATH%%"),
]
self.client_id = self.SetupClient(0, users=users)
@@ -580,9 +580,8 @@ def testIllegalGlob(self):
flow_obj = data_store.REL_DB.ReadFlowObject(self.client_id, flow_id)
self.assertEqual(
flow_obj.error_message,
- "Some attributes are not part of the knowledgebase: "
- "weird_illegal_attribute")
- self.assertIn("KbInterpolationUnknownAttributesError", flow_obj.backtrace)
+ "`%%Weird_illegal_attribute%%` does not exist",
+ )
def testGlobRoundtrips(self):
"""Tests that glob doesn't use too many client round trips."""
diff --git a/grr/server/grr_response_server/flows/general/osquery.py b/grr/server/grr_response_server/flows/general/osquery.py
index 269083a6d9..d38b5b89ca 100644
--- a/grr/server/grr_response_server/flows/general/osquery.py
+++ b/grr/server/grr_response_server/flows/general/osquery.py
@@ -258,6 +258,7 @@ def Process(
) -> None:
if not responses.success:
status = responses.status
+ assert status is not None, "Failed response status must be set."
message = f"{status.error_message}: {status.backtrace}"
self._UpdateProgressWithError(message)
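
The added assertion narrows `status` from `Optional` for the type checker and fails loudly if a failed response ever arrives without one. A generic sketch of the pattern:

```python
from typing import Optional


def format_failure(error_message: Optional[str]) -> str:
  # After this assert, type checkers treat error_message as str, and a
  # violated invariant fails immediately with a clear message.
  assert error_message is not None, "Failed response status must be set."
  return f"error: {error_message}"
```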
diff --git a/grr/server/grr_response_server/flows/general/registry_test.py b/grr/server/grr_response_server/flows/general/registry_test.py
index cea5e8bf60..bdfb647ea7 100644
--- a/grr/server/grr_response_server/flows/general/registry_test.py
+++ b/grr/server/grr_response_server/flows/general/registry_test.py
@@ -6,10 +6,10 @@
from absl import app
from grr_response_core.lib import rdfvalue
-from grr_response_core.lib.rdfvalues import client as rdf_client
from grr_response_core.lib.rdfvalues import file_finder as rdf_file_finder
from grr_response_core.lib.rdfvalues import mig_client
from grr_response_core.lib.rdfvalues import paths as rdf_paths
+from grr_response_proto import knowledge_base_pb2
from grr_response_server import artifact
from grr_response_server import data_store
from grr_response_server.flows.general import registry
@@ -17,7 +17,6 @@
from grr_response_server.flows.general import transfer
from grr.test_lib import action_mocks
from grr.test_lib import flow_test_lib
-from grr.test_lib import parser_test_lib
from grr.test_lib import test_lib
from grr.test_lib import vfs_test_lib
@@ -99,7 +98,7 @@ def testFindsKeysWithTwoGlobsWithoutConditions(self):
self.assertCountEqual(basenames, ["Sidebar", "MctAdmin"])
def testFindsKeyWithInterpolatedGlobWithoutConditions(self):
- user = rdf_client.User(sid="S-1-5-20")
+ user = knowledge_base_pb2.User(username="foo", sid="S-1-5-20")
client_id = self.SetupClient(0, users=[user])
session_id = self.RunFlow(client_id, [
@@ -322,7 +321,6 @@ def testSizeCondition(self):
class TestRegistryFlows(RegistryFlowTest):
"""Test the Run Key registry flows."""
- @parser_test_lib.WithAllParsers
def testCollectRunKeyBinaries(self):
"""Read Run key from the client_fixtures to test parsing and storage."""
client_id = self.SetupClient(0, system="Windows", os_version="6.2")
diff --git a/grr/server/grr_response_server/flows/general/software_test.py b/grr/server/grr_response_server/flows/general/software_test.py
index b1ac42cd9e..52723dca23 100644
--- a/grr/server/grr_response_server/flows/general/software_test.py
+++ b/grr/server/grr_response_server/flows/general/software_test.py
@@ -539,7 +539,7 @@ def WmiQuery(
def testMacos(self) -> None:
assert data_store.REL_DB is not None
- db = abstract_db.Database = data_store.REL_DB
+ db: abstract_db.Database = data_store.REL_DB
creator = db_test_utils.InitializeUser(db)
client_id = db_test_utils.InitializeClient(db)
diff --git a/grr/server/grr_response_server/flows/general/webhistory.py b/grr/server/grr_response_server/flows/general/webhistory.py
index 9b974be890..ae4ddecddd 100644
--- a/grr/server/grr_response_server/flows/general/webhistory.py
+++ b/grr/server/grr_response_server/flows/general/webhistory.py
@@ -122,7 +122,6 @@ def Start(self):
flow_id = self.CallFlow(
collectors.ArtifactCollectorFlow.__name__,
artifact_list=self.BROWSER_TO_ARTIFACTS_MAP[browser],
- apply_parsers=False,
request_data={"browser": browser},
next_state=self.ProcessArtifactResponses.__name__)
self.state.progress.browsers.append(
diff --git a/grr/server/grr_response_server/gui/api_call_router_with_approval_checks.py b/grr/server/grr_response_server/gui/api_call_router_with_approval_checks.py
index 429573ef19..2c7838d209 100644
--- a/grr/server/grr_response_server/gui/api_call_router_with_approval_checks.py
+++ b/grr/server/grr_response_server/gui/api_call_router_with_approval_checks.py
@@ -10,6 +10,7 @@
from grr_response_core.stats import metrics
from grr_response_proto import api_call_router_pb2
from grr_response_proto import objects_pb2
+from grr_response_proto.api import user_pb2 as api_user_pb2
from grr_response_server import access_control
from grr_response_server import data_store
from grr_response_server.authorization import groups
@@ -84,15 +85,15 @@ def _CheckAccess(self, username, subject_id, approval_type):
except KeyError:
APPROVAL_SEARCHES.Increment(fields=["-", "reldb"])
- proto_approvals = data_store.REL_DB.ReadApprovalRequests(
+ approvals = data_store.REL_DB.ReadApprovalRequests(
username, approval_type, subject_id=subject_id, include_expired=False
)
- approvals = [mig_objects.ToRDFApprovalRequest(r) for r in proto_approvals]
errors = []
for approval in approvals:
try:
approval_checks.CheckApprovalRequest(approval)
+ approval = mig_objects.ToRDFApprovalRequest(approval)
self.acl_cache.Put(cache_key, approval)
return approval
except access_control.UnauthorizedAccess as e:
@@ -926,7 +927,25 @@ def ListAndResetUserNotifications(self, args, context=None):
def GetGrrUser(self, args, context=None):
# Everybody can get their own user settings.
- interface_traits = api_user.ApiGrrUserInterfaceTraits().EnableAll()
+ interface_traits = api_user_pb2.ApiGrrUserInterfaceTraits(
+ cron_jobs_nav_item_enabled=True,
+ create_cron_job_action_enabled=True,
+ hunt_manager_nav_item_enabled=True,
+ create_hunt_action_enabled=True,
+ show_statistics_nav_item_enabled=True,
+ server_load_nav_item_enabled=True,
+ manage_binaries_nav_item_enabled=True,
+ upload_binary_action_enabled=True,
+ settings_nav_item_enabled=True,
+ artifact_manager_nav_item_enabled=True,
+ upload_artifact_action_enabled=True,
+ search_clients_action_enabled=True,
+ browse_virtual_file_system_nav_item_enabled=True,
+ start_client_flow_nav_item_enabled=True,
+ manage_client_flows_nav_item_enabled=True,
+ modify_client_labels_action_enabled=True,
+ hunt_approval_required=True,
+ )
try:
# Without access to restricted flows, one can not launch Python hacks and
# binaries. Hence, we don't display the "Manage binaries" page.
diff --git a/grr/server/grr_response_server/gui/api_call_router_with_approval_checks_test.py b/grr/server/grr_response_server/gui/api_call_router_with_approval_checks_test.py
index e5aee27635..ebe0d35d60 100644
--- a/grr/server/grr_response_server/gui/api_call_router_with_approval_checks_test.py
+++ b/grr/server/grr_response_server/gui/api_call_router_with_approval_checks_test.py
@@ -6,6 +6,7 @@
from absl import app
from grr_response_proto import objects_pb2
+from grr_response_proto.api import user_pb2 as api_user_pb2
from grr_response_server import access_control
from grr_response_server import data_store
from grr_response_server import flow
@@ -20,7 +21,6 @@
from grr_response_server.gui.api_plugins import hunt as api_hunt
from grr_response_server.gui.api_plugins import osquery as api_osquery
from grr_response_server.gui.api_plugins import timeline as api_timeline
-from grr_response_server.gui.api_plugins import user as api_user
from grr_response_server.gui.api_plugins import vfs as api_vfs
from grr.test_lib import flow_test_lib
from grr.test_lib import hunt_test_lib
@@ -628,7 +628,25 @@ def testGetGrrUserReturnsFullTraitsForWhenWithRestrictedFlowsAccess(self):
self.assertEqual(
handler.interface_traits,
- api_user.ApiGrrUserInterfaceTraits().EnableAll(),
+ api_user_pb2.ApiGrrUserInterfaceTraits(
+ cron_jobs_nav_item_enabled=True,
+ create_cron_job_action_enabled=True,
+ hunt_manager_nav_item_enabled=True,
+ create_hunt_action_enabled=True,
+ show_statistics_nav_item_enabled=True,
+ server_load_nav_item_enabled=True,
+ manage_binaries_nav_item_enabled=True,
+ upload_binary_action_enabled=True,
+ settings_nav_item_enabled=True,
+ artifact_manager_nav_item_enabled=True,
+ upload_artifact_action_enabled=True,
+ search_clients_action_enabled=True,
+ browse_virtual_file_system_nav_item_enabled=True,
+ start_client_flow_nav_item_enabled=True,
+ manage_client_flows_nav_item_enabled=True,
+ modify_client_labels_action_enabled=True,
+ hunt_approval_required=True,
+ ),
)
def testGetGrrUserReturnsRestrictedTraitsWhenWithoutRestrictedFlowsAccess(
@@ -642,7 +660,25 @@ def testGetGrrUserReturnsRestrictedTraitsWhenWithoutRestrictedFlowsAccess(
self.assertNotEqual(
handler.interface_traits,
- api_user.ApiGrrUserInterfaceTraits().EnableAll(),
+ api_user_pb2.ApiGrrUserInterfaceTraits(
+ cron_jobs_nav_item_enabled=True,
+ create_cron_job_action_enabled=True,
+ hunt_manager_nav_item_enabled=True,
+ create_hunt_action_enabled=True,
+ show_statistics_nav_item_enabled=True,
+ server_load_nav_item_enabled=True,
+ manage_binaries_nav_item_enabled=True,
+ upload_binary_action_enabled=True,
+ settings_nav_item_enabled=True,
+ artifact_manager_nav_item_enabled=True,
+ upload_artifact_action_enabled=True,
+ search_clients_action_enabled=True,
+ browse_virtual_file_system_nav_item_enabled=True,
+ start_client_flow_nav_item_enabled=True,
+ manage_client_flows_nav_item_enabled=True,
+ modify_client_labels_action_enabled=True,
+ hunt_approval_required=True,
+ ),
)
def testAllOtherMethodsAreNotAccessChecked(self):
diff --git a/grr/server/grr_response_server/gui/api_call_router_without_checks.py b/grr/server/grr_response_server/gui/api_call_router_without_checks.py
index c8c10c177d..2d7a709568 100644
--- a/grr/server/grr_response_server/gui/api_call_router_without_checks.py
+++ b/grr/server/grr_response_server/gui/api_call_router_without_checks.py
@@ -3,6 +3,7 @@
from typing import Optional
+from grr_response_proto.api import user_pb2 as api_user_pb2
from grr_response_server.gui import api_call_context
from grr_response_server.gui import api_call_router
from grr_response_server.gui.api_plugins import artifact as api_artifact
@@ -437,8 +438,25 @@ def ListAndResetUserNotifications(self, args, context=None):
return api_user.ApiListAndResetUserNotificationsHandler()
def GetGrrUser(self, args, context=None):
- interface_traits = api_user.ApiGrrUserInterfaceTraits().EnableAll()
- interface_traits.hunt_approval_required = False
+ interface_traits = api_user_pb2.ApiGrrUserInterfaceTraits(
+ cron_jobs_nav_item_enabled=True,
+ create_cron_job_action_enabled=True,
+ hunt_manager_nav_item_enabled=True,
+ create_hunt_action_enabled=True,
+ show_statistics_nav_item_enabled=True,
+ server_load_nav_item_enabled=True,
+ manage_binaries_nav_item_enabled=True,
+ upload_binary_action_enabled=True,
+ settings_nav_item_enabled=True,
+ artifact_manager_nav_item_enabled=True,
+ upload_artifact_action_enabled=True,
+ search_clients_action_enabled=True,
+ browse_virtual_file_system_nav_item_enabled=True,
+ start_client_flow_nav_item_enabled=True,
+ manage_client_flows_nav_item_enabled=True,
+ modify_client_labels_action_enabled=True,
+ hunt_approval_required=False,
+ )
return api_user.ApiGetOwnGrrUserHandler(interface_traits=interface_traits)
def UpdateGrrUser(self, args, context=None):
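
The same seventeen keyword arguments now appear verbatim in both routers and in the tests; a small local helper could remove the duplication. A sketch, assuming only the proto fields listed in the hunks above (the `_AllEnabledTraits` helper itself is hypothetical):

```python
from grr_response_proto.api import user_pb2 as api_user_pb2


def _AllEnabledTraits(**overrides) -> api_user_pb2.ApiGrrUserInterfaceTraits:
  """Builds traits with every flag from this diff enabled, minus overrides."""
  fields = dict(
      cron_jobs_nav_item_enabled=True,
      create_cron_job_action_enabled=True,
      hunt_manager_nav_item_enabled=True,
      create_hunt_action_enabled=True,
      show_statistics_nav_item_enabled=True,
      server_load_nav_item_enabled=True,
      manage_binaries_nav_item_enabled=True,
      upload_binary_action_enabled=True,
      settings_nav_item_enabled=True,
      artifact_manager_nav_item_enabled=True,
      upload_artifact_action_enabled=True,
      search_clients_action_enabled=True,
      browse_virtual_file_system_nav_item_enabled=True,
      start_client_flow_nav_item_enabled=True,
      manage_client_flows_nav_item_enabled=True,
      modify_client_labels_action_enabled=True,
      hunt_approval_required=True,
  )
  fields.update(overrides)
  return api_user_pb2.ApiGrrUserInterfaceTraits(**fields)


# Usage matching the two routers above:
with_checks = _AllEnabledTraits()
without_checks = _AllEnabledTraits(hunt_approval_required=False)
```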
diff --git a/grr/server/grr_response_server/gui/api_integration_tests/approvals_test.py b/grr/server/grr_response_server/gui/api_integration_tests/approvals_test.py
index 48003f1009..9779ee59f3 100644
--- a/grr/server/grr_response_server/gui/api_integration_tests/approvals_test.py
+++ b/grr/server/grr_response_server/gui/api_integration_tests/approvals_test.py
@@ -8,7 +8,6 @@
from absl import app
from grr_response_core import config
-from grr_response_core.lib import rdfvalue
from grr_response_server.gui import api_auth_manager
from grr_response_server.gui import api_call_router_with_approval_checks
from grr_response_server.gui import api_integration_test_lib
@@ -38,14 +37,14 @@ def setUp(self):
api_auth_manager.InitializeApiAuthManager()
def testCreateClientApproval(self):
- with mock.patch.object(rdfvalue.RDFDatetime, "Now") as mock_now:
- oneday_s = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(24 * 60 * 60)
+ with mock.patch.object(time, "time") as mock_now:
+ oneday_s = 24 * 60 * 60
mock_now.return_value = oneday_s # 'Now' is 1 day past epoch
# 'Now' is one day past epoch, plus default expiration duration
twentyninedays_us = (
config.CONFIG["ACL.token_expiry"] * 1000000
- ) + oneday_s.AsMicrosecondsSinceEpoch()
+ ) + oneday_s * 1e6
client_id = self.SetupClient(0)
self.CreateUser("foo")
@@ -64,11 +63,9 @@ def testCreateClientApproval(self):
def testCreateClientApprovalNonDefaultExpiration(self):
"""Tests requesting approval with a non-default expiration duration."""
- with mock.patch.object(rdfvalue.RDFDatetime, "Now") as mock_now:
- mock_now.return_value = ( # 'Now' is 1 day past epoch
- rdfvalue.RDFDatetime.FromSecondsSinceEpoch(24 * 60 * 60)
- )
- # 'Now' is one day past epoch, plus 120 days
+ with mock.patch.object(time, "time") as mock_now:
+ mock_now.return_value = 24 * 60 * 60 # 'time.time' is 1 day past epoch
+
onetwentydays = 120
onetwentyonedays_us = 121 * 24 * 60 * 60 * 1000000
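
These tests now pin `time.time` directly instead of `RDFDatetime.Now`, which keeps the expiration arithmetic in plain seconds and microseconds. A minimal sketch of the mocking pattern and the arithmetic it makes deterministic (the 120-day duration mirrors the test above):

```python
import time
from unittest import mock

with mock.patch.object(time, "time") as mock_now:
  mock_now.return_value = 24 * 60 * 60  # 'now' is one day past epoch.

  # Code under test that derives an expiration from time.time() now
  # produces a deterministic value, e.g. 120 days from the mocked now:
  expiration_us = int((time.time() + 120 * 24 * 3600) * 1e6)
  assert expiration_us == 121 * 24 * 60 * 60 * 1000000
```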
diff --git a/grr/server/grr_response_server/gui/api_integration_tests/hunt_test.py b/grr/server/grr_response_server/gui/api_integration_tests/hunt_test.py
index 4e75ccac5e..3ad8aae466 100644
--- a/grr/server/grr_response_server/gui/api_integration_tests/hunt_test.py
+++ b/grr/server/grr_response_server/gui/api_integration_tests/hunt_test.py
@@ -12,10 +12,12 @@
from grr_response_core.lib.rdfvalues import timeline as rdf_timeline
from grr_response_core.lib.util import chunked
from grr_response_proto import flows_pb2
+from grr_response_proto import hunts_pb2
from grr_response_proto import jobs_pb2
from grr_response_proto import objects_pb2
+from grr_response_proto import timeline_pb2
from grr_response_proto.api import hunt_pb2
-from grr_response_proto.api import timeline_pb2
+from grr_response_proto.api import timeline_pb2 as api_timeline_pb2
from grr_response_server import data_store
from grr_response_server import flow
from grr_response_server.databases import db
@@ -24,10 +26,6 @@
from grr_response_server.flows.general import timeline
from grr_response_server.gui import api_integration_test_lib
from grr_response_server.output_plugins import csv_plugin
-from grr_response_server.rdfvalues import flow_objects as rdf_flow_objects
-from grr_response_server.rdfvalues import hunt_objects as rdf_hunt_objects
-from grr_response_server.rdfvalues import mig_flow_objects
-from grr_response_server.rdfvalues import mig_hunt_objects
from grr.test_lib import action_mocks
from grr.test_lib import flow_test_lib
from grr.test_lib import hunt_test_lib
@@ -281,21 +279,20 @@ def testGetCollectedTimelinesBody(self):
hunt_id = "B1C2E3D4"
- hunt_obj = rdf_hunt_objects.Hunt()
+ hunt_obj = hunts_pb2.Hunt()
hunt_obj.hunt_id = hunt_id
hunt_obj.args.standard.flow_name = timeline.TimelineFlow.__name__
- hunt_obj.hunt_state = rdf_hunt_objects.Hunt.HuntState.PAUSED
- hunt_obj = mig_hunt_objects.ToProtoHunt(hunt_obj)
+ hunt_obj.hunt_state = hunts_pb2.Hunt.HuntState.PAUSED
data_store.REL_DB.WriteHuntObject(hunt_obj)
- flow_obj = rdf_flow_objects.Flow()
+ flow_obj = flows_pb2.Flow()
flow_obj.client_id = client_id
flow_obj.flow_id = hunt_id
flow_obj.flow_class_name = timeline.TimelineFlow.__name__
flow_obj.parent_hunt_id = hunt_id
- data_store.REL_DB.WriteFlowObject(mig_flow_objects.ToProtoFlow(flow_obj))
+ data_store.REL_DB.WriteFlowObject(flow_obj)
- entry_1 = rdf_timeline.TimelineEntry()
+ entry_1 = timeline_pb2.TimelineEntry()
entry_1.path = "/bar/baz/quux".encode("utf-8")
entry_1.ino = 5926273453
entry_1.size = 13373
@@ -304,7 +301,7 @@ def testGetCollectedTimelinesBody(self):
entry_1.ctime_ns = 333 * 10**9
entry_1.mode = 0o664
- entry_2 = rdf_timeline.TimelineEntry()
+ entry_2 = timeline_pb2.TimelineEntry()
entry_2.path = "/bar/baz/quuz".encode("utf-8")
entry_2.ino = 6037384564
entry_2.size = 13374
@@ -314,25 +311,23 @@ def testGetCollectedTimelinesBody(self):
entry_2.mode = 0o777
entries = [entry_1, entry_2]
- blobs = list(rdf_timeline.TimelineEntry.SerializeStream(iter(entries)))
+ blobs = list(rdf_timeline.SerializeTimelineEntryStream(entries))
blob_ids = data_store.BLOBS.WriteBlobsWithUnknownHashes(blobs)
- result = rdf_timeline.TimelineResult()
- result.entry_batch_blob_ids = list(map(bytes, blob_ids))
+ result = timeline_pb2.TimelineResult()
+ result.entry_batch_blob_ids.extend(list(map(bytes, blob_ids)))
- flow_result = rdf_flow_objects.FlowResult()
+ flow_result = flows_pb2.FlowResult()
flow_result.client_id = client_id
flow_result.flow_id = hunt_id
flow_result.hunt_id = hunt_id
- flow_result.payload = result
+ flow_result.payload.Pack(result)
- data_store.REL_DB.WriteFlowResults(
- [mig_flow_objects.ToProtoFlowResult(flow_result)]
- )
+ data_store.REL_DB.WriteFlowResults([flow_result])
buffer = io.BytesIO()
self.api.Hunt(hunt_id).GetCollectedTimelines(
- timeline_pb2.ApiGetCollectedTimelineArgs.Format.BODY
+ api_timeline_pb2.ApiGetCollectedTimelineArgs.Format.BODY
).WriteToStream(buffer)
with zipfile.ZipFile(buffer, mode="r") as archive:
@@ -369,21 +364,20 @@ def testGetCollectedTimelinesGzchunked(self):
hunt_id = "A0B1D2C3"
- hunt_obj = rdf_hunt_objects.Hunt()
+ hunt_obj = hunts_pb2.Hunt()
hunt_obj.hunt_id = hunt_id
hunt_obj.args.standard.flow_name = timeline.TimelineFlow.__name__
- hunt_obj.hunt_state = rdf_hunt_objects.Hunt.HuntState.PAUSED
- hunt_obj = mig_hunt_objects.ToProtoHunt(hunt_obj)
+ hunt_obj.hunt_state = hunts_pb2.Hunt.HuntState.PAUSED
data_store.REL_DB.WriteHuntObject(hunt_obj)
- flow_obj = rdf_flow_objects.Flow()
+ flow_obj = flows_pb2.Flow()
flow_obj.client_id = client_id
flow_obj.flow_id = hunt_id
flow_obj.flow_class_name = timeline.TimelineFlow.__name__
flow_obj.parent_hunt_id = hunt_id
- data_store.REL_DB.WriteFlowObject(mig_flow_objects.ToProtoFlow(flow_obj))
+ data_store.REL_DB.WriteFlowObject(flow_obj)
- entry_1 = rdf_timeline.TimelineEntry()
+ entry_1 = timeline_pb2.TimelineEntry()
entry_1.path = "/foo/bar".encode("utf-8")
entry_1.ino = 7890178901
entry_1.size = 4815162342
@@ -392,40 +386,38 @@ def testGetCollectedTimelinesGzchunked(self):
entry_1.ctime_ns = 567 * 10**9
entry_1.mode = 0o654
- entry_2 = rdf_timeline.TimelineEntry()
+ entry_2 = timeline_pb2.TimelineEntry()
entry_2.path = "/foo/baz".encode("utf-8")
- entry_1.ino = 8765487654
+ entry_2.ino = 8765487654
entry_2.size = 1337
- entry_1.atime_ns = 987 * 10**9
- entry_1.mtime_ns = 876 * 10**9
- entry_1.ctime_ns = 765 * 10**9
+ entry_2.atime_ns = 987 * 10**9
+ entry_2.mtime_ns = 876 * 10**9
+ entry_2.ctime_ns = 765 * 10**9
entry_2.mode = 0o757
entries = [entry_1, entry_2]
- blobs = list(rdf_timeline.TimelineEntry.SerializeStream(iter(entries)))
+ blobs = list(rdf_timeline.SerializeTimelineEntryStream(entries))
blob_ids = data_store.BLOBS.WriteBlobsWithUnknownHashes(blobs)
- result = rdf_timeline.TimelineResult()
- result.entry_batch_blob_ids = list(map(bytes, blob_ids))
+ result = timeline_pb2.TimelineResult()
+ result.entry_batch_blob_ids.extend(list(map(bytes, blob_ids)))
- flow_result = rdf_flow_objects.FlowResult()
+ flow_result = flows_pb2.FlowResult()
flow_result.client_id = client_id
flow_result.flow_id = hunt_id
- flow_result.payload = result
+ flow_result.payload.Pack(result)
- data_store.REL_DB.WriteFlowResults(
- [mig_flow_objects.ToProtoFlowResult(flow_result)]
- )
+ data_store.REL_DB.WriteFlowResults([flow_result])
buffer = io.BytesIO()
- fmt = timeline_pb2.ApiGetCollectedTimelineArgs.Format.RAW_GZCHUNKED
+ fmt = api_timeline_pb2.ApiGetCollectedTimelineArgs.Format.RAW_GZCHUNKED
self.api.Hunt(hunt_id).GetCollectedTimelines(fmt).WriteToStream(buffer)
with zipfile.ZipFile(buffer, mode="r") as archive:
with archive.open(f"{client_id}_{fqdn}.gzchunked", mode="r") as file:
chunks = chunked.ReadAll(file)
- entries = list(rdf_timeline.TimelineEntry.DeserializeStream(chunks))
+ entries = list(rdf_timeline.DeserializeTimelineEntryStream(chunks))
self.assertEqual(entries, [entry_1, entry_2])
def testCreatePerClientFileCollectionHunt(self):
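
The hunt tests now build `flows_pb2.FlowResult` protos directly, so the typed timeline payload is packed into the `google.protobuf.Any` field rather than assigned as an RDF value. A minimal sketch of that step, assuming the proto definitions referenced above (the IDs and blob id are illustrative):

```python
from grr_response_proto import flows_pb2
from grr_response_proto import timeline_pb2

result = timeline_pb2.TimelineResult()
result.entry_batch_blob_ids.extend([b"\x00" * 32])  # Illustrative blob id.

flow_result = flows_pb2.FlowResult()
flow_result.client_id = "C.0000000000000000"
flow_result.flow_id = "B1C2E3D4"

# payload is a google.protobuf.Any, so the typed message is packed in
# rather than assigned directly.
flow_result.payload.Pack(result)

# Unpacking at read time restores the typed message.
unpacked = timeline_pb2.TimelineResult()
assert flow_result.payload.Unpack(unpacked)
assert unpacked.entry_batch_blob_ids == [b"\x00" * 32]
```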
diff --git a/grr/server/grr_response_server/gui/api_integration_tests/root_api_user_management_test.py b/grr/server/grr_response_server/gui/api_integration_tests/root_api_user_management_test.py
index 103b342848..44682c970b 100644
--- a/grr/server/grr_response_server/gui/api_integration_tests/root_api_user_management_test.py
+++ b/grr/server/grr_response_server/gui/api_integration_tests/root_api_user_management_test.py
@@ -1,13 +1,16 @@
#!/usr/bin/env python
"""Tests for root API user management calls."""
+from typing import Optional
+
from absl import app
from grr_api_client import errors as grr_api_errors
from grr_api_client import root as grr_api_root
+from grr_response_core.lib.rdfvalues import crypto as rdf_crypto
+from grr_response_proto import jobs_pb2
from grr_response_server import data_store
from grr_response_server.gui import api_integration_test_lib
-from grr_response_server.rdfvalues import mig_objects
from grr.test_lib import test_lib
@@ -16,10 +19,9 @@ class RootApiUserManagementTest(
):
"""E2E test for root API user management calls."""
- def _GetPassword(self, username):
- proto_user = data_store.REL_DB.ReadGRRUser(username)
- rdf_user = mig_objects.ToRDFGRRUser(proto_user)
- return rdf_user.password if rdf_user.HasField("password") else None
+ def _GetPassword(self, username: str) -> Optional[jobs_pb2.Password]:
+ user = data_store.REL_DB.ReadGRRUser(username)
+ return user.password if user.HasField("password") else None
def testStandardUserIsCorrectlyAdded(self):
user = self.api.root.CreateGrrUser(username="user_foo")
@@ -44,7 +46,7 @@ def testStandardUserWithPasswordIsCorrectlyAdded(self):
self.assertEqual(user.data.user_type, user.USER_TYPE_STANDARD)
password = self._GetPassword("user_foo")
- self.assertTrue(password.CheckPassword("blah"))
+ self.assertTrue(rdf_crypto.CheckPassword(password, "blah"))
def testUserModificationWorksCorrectly(self):
user = self.api.root.CreateGrrUser(username="user_foo")
@@ -60,12 +62,12 @@ def testUserPasswordCanBeModified(self):
user = self.api.root.CreateGrrUser(username="user_foo", password="blah")
password = self._GetPassword("user_foo")
- self.assertTrue(password.CheckPassword("blah"))
+ self.assertTrue(rdf_crypto.CheckPassword(password, "blah"))
user.Modify(password="ohno")
password = self._GetPassword("user_foo")
- self.assertTrue(password.CheckPassword("ohno"))
+ self.assertTrue(rdf_crypto.CheckPassword(password, "ohno"))
def testUsersAreCorrectlyListed(self):
for i in range(10):
diff --git a/grr/server/grr_response_server/gui/api_integration_tests/timeline_test.py b/grr/server/grr_response_server/gui/api_integration_tests/timeline_test.py
index 5b876aab3b..2a4b2d4c1a 100644
--- a/grr/server/grr_response_server/gui/api_integration_tests/timeline_test.py
+++ b/grr/server/grr_response_server/gui/api_integration_tests/timeline_test.py
@@ -5,7 +5,7 @@
from absl import app
-from grr_response_core.lib.rdfvalues import timeline as rdf_timeline
+from grr_response_proto import timeline_pb2
from grr_response_server import data_store
from grr_response_server.databases import db_test_utils
from grr_response_server.gui import api_integration_test_lib
@@ -16,7 +16,7 @@
class TimelineTest(api_integration_test_lib.ApiIntegrationTest):
def testGetCollectedTimelineBody(self):
- entry = rdf_timeline.TimelineEntry()
+ entry = timeline_pb2.TimelineEntry()
entry.path = "/foo/bar/baz".encode("utf-8")
client_id = db_test_utils.InitializeClient(data_store.REL_DB)
@@ -31,7 +31,7 @@ def testGetCollectedTimelineBody(self):
self.assertIn("|/foo/bar/baz|", content)
def testGetCollectedTimelineBodyBackslashEscape(self):
- entry = rdf_timeline.TimelineEntry()
+ entry = timeline_pb2.TimelineEntry()
entry.path = "C:\\Windows\\system32\\notepad.exe".encode("utf-8")
client_id = db_test_utils.InitializeClient(data_store.REL_DB)
@@ -46,7 +46,7 @@ def testGetCollectedTimelineBodyBackslashEscape(self):
self.assertIn("|C:\\\\Windows\\\\system32\\\\notepad.exe|", content)
def testGetCollectedTimelineBodyCarriageReturnEscape(self):
- entry = rdf_timeline.TimelineEntry()
+ entry = timeline_pb2.TimelineEntry()
entry.path = "/foo\rbar/baz\r\r\rquux".encode("utf-8")
client_id = db_test_utils.InitializeClient(data_store.REL_DB)
@@ -62,7 +62,7 @@ def testGetCollectedTimelineBodyCarriageReturnEscape(self):
self.assertIn("|/foo\\rbar/baz\\r\\r\\rquux|", content)
def testGetCollectedTimelineBodyNonPrintableEscape(self):
- entry = rdf_timeline.TimelineEntry()
+ entry = timeline_pb2.TimelineEntry()
entry.path = b"/f\x00b\x0ar\x1baz"
client_id = db_test_utils.InitializeClient(data_store.REL_DB)
diff --git a/grr/server/grr_response_server/gui/api_plugins/artifact.py b/grr/server/grr_response_server/gui/api_plugins/artifact.py
index 3338432ac6..335fea9c37 100644
--- a/grr/server/grr_response_server/gui/api_plugins/artifact.py
+++ b/grr/server/grr_response_server/gui/api_plugins/artifact.py
@@ -3,7 +3,6 @@
from typing import Optional
-from grr_response_core.lib import parsers
from grr_response_core.lib.rdfvalues import artifacts as rdf_artifacts
from grr_response_core.lib.rdfvalues import mig_artifacts
from grr_response_core.lib.rdfvalues import structs as rdf_structs
@@ -55,16 +54,6 @@ def BuildArtifactDescriptors(
),
)
- # TODO: Remove this once there are no more parsers.
- factory = parsers.ArtifactParserFactory(str(proto_artifact.name))
- for parser_cls in factory.AllParserTypes():
- rdf_desc = rdf_artifacts.ArtifactProcessorDescriptor.FromParser(
- parser_cls
- )
- descriptor.processors.append(
- mig_artifacts.ToProtoArtifactProcessorDescriptor(rdf_desc)
- )
-
result.append(descriptor)
return result
@@ -105,8 +94,13 @@ class ApiUploadArtifactHandler(api_call_handler_base.ApiCallHandler):
"""Handles artifact upload."""
args_type = ApiUploadArtifactArgs
+ proto_args_type = api_artifact_pb2.ApiUploadArtifactArgs
- def Handle(self, args, context=None):
+ def Handle(
+ self,
+ args: api_artifact_pb2.ApiUploadArtifactArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> None:
artifact.UploadArtifactYamlFile(
args.artifact, overwrite=True, overwrite_system_artifacts=False
)
@@ -120,6 +114,11 @@ class ApiDeleteArtifactsHandler(api_call_handler_base.ApiCallHandler):
"""Handles artifact deletion."""
args_type = ApiDeleteArtifactsArgs
+ proto_args_type = api_artifact_pb2.ApiDeleteArtifactsArgs
- def Handle(self, args, context=None):
+ def Handle(
+ self,
+ args: api_artifact_pb2.ApiDeleteArtifactsArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> None:
artifact_registry.DeleteArtifactsFromDatastore(set(args.names))
diff --git a/grr/server/grr_response_server/gui/api_plugins/artifact_test.py b/grr/server/grr_response_server/gui/api_plugins/artifact_test.py
index 88ec47ac4c..4fb41159e2 100644
--- a/grr/server/grr_response_server/gui/api_plugins/artifact_test.py
+++ b/grr/server/grr_response_server/gui/api_plugins/artifact_test.py
@@ -79,7 +79,9 @@ def testUpload(self, registry):
config.CONFIG["Test.data_dir"], "artifacts", "test_artifact.json"
)
with open(test_artifacts_file, "rb") as fd:
- args = self.handler.args_type(artifact=fd.read())
+ args = api_artifact_pb2.ApiUploadArtifactArgs(
+ artifact=fd.read().decode("utf-8")
+ )
with self.assertRaises(rdf_artifacts.ArtifactNotRegisteredError):
registry.GetArtifact("TestDrivers")
@@ -108,7 +110,7 @@ def testDeletesArtifactsWithSpecifiedNames(self, registry):
self.UploadTestArtifacts()
count = len(registry.GetArtifacts(reload_datastore_artifacts=True))
- args = self.handler.args_type(
+ args = api_artifact_pb2.ApiDeleteArtifactsArgs(
names=["TestFilesArtifact", "WMIActiveScriptEventConsumer"]
)
self.handler.Handle(args, context=self.context)
@@ -120,13 +122,17 @@ def testDeletesArtifactsWithSpecifiedNames(self, registry):
def testDeleteDependency(self, registry):
self.UploadTestArtifacts()
- args = self.handler.args_type(names=["TestAggregationArtifact"])
+ args = api_artifact_pb2.ApiDeleteArtifactsArgs(
+ names=["TestAggregationArtifact"]
+ )
with self.assertRaises(ValueError):
self.handler.Handle(args, context=self.context)
def testDeleteNonExistentArtifact(self, registry):
self.UploadTestArtifacts()
- args = self.handler.args_type(names=["NonExistentArtifact"])
+ args = api_artifact_pb2.ApiDeleteArtifactsArgs(
+ names=["NonExistentArtifact"]
+ )
e = self.assertRaises(ValueError)
with e:
self.handler.Handle(args, context=self.context)
diff --git a/grr/server/grr_response_server/gui/api_plugins/client.py b/grr/server/grr_response_server/gui/api_plugins/client.py
index fd798754ae..5f2522b49e 100644
--- a/grr/server/grr_response_server/gui/api_plugins/client.py
+++ b/grr/server/grr_response_server/gui/api_plugins/client.py
@@ -111,117 +111,6 @@ class ApiClient(rdf_structs.RDFProtoStruct):
rdf_client_fs.Volume,
]
- def InitFromClientObject(
- self, client_obj: rdf_objects.ClientSnapshot
- ) -> "ApiClient":
-
- # TODO(amoser): Deprecate all urns.
- self.urn = client_obj.client_id
-
- self.client_id = client_obj.client_id
-
- if client_obj.metadata and client_obj.metadata.source_flow_id:
- self.source_flow_id = client_obj.metadata.source_flow_id
-
- self.agent_info = client_obj.startup_info.client_info
- self.hardware_info = client_obj.hardware_info
-
- os_info = rdf_client.Uname()
- if client_obj.os_version:
- os_info.version = client_obj.os_version
- if client_obj.os_release:
- os_info.release = client_obj.os_release
- if client_obj.kernel:
- os_info.kernel = client_obj.kernel
- if client_obj.arch:
- os_info.machine = client_obj.arch
- if client_obj.install_time:
- os_info.install_date = client_obj.install_time
-
- kb = client_obj.knowledge_base
- if kb:
- self.knowledge_base = kb
- if kb.os:
- os_info.system = kb.os
- if kb.fqdn:
- os_info.fqdn = kb.fqdn
-
- # TODO(amoser): Deprecate this field in favor of the kb.
- if kb.users:
- self.users = sorted(kb.users, key=lambda user: user.username)
-
- self.os_info = os_info
-
- if client_obj.interfaces:
- self.interfaces = client_obj.interfaces
- if client_obj.volumes:
- self.volumes = client_obj.volumes
- if client_obj.cloud_instance:
- self.cloud_instance = client_obj.cloud_instance
-
- self.age = client_obj.timestamp
-
- if client_obj.memory_size:
- self.memory_size = client_obj.memory_size
- if client_obj.startup_info.boot_time:
- self.last_booted_at = client_obj.startup_info.boot_time
-
- return self
-
- def InitFromClientInfo(
- self,
- client_id: str,
- client_info: rdf_objects.ClientFullInfo,
- ) -> "ApiClient":
- self.client_id = client_id
-
- if client_info.HasField("last_snapshot"):
- # Just a sanity check to ensure that the object has correct client id.
- if client_info.last_snapshot.client_id != client_id:
- raise ValueError(
- "Invalid last snapshot client id: "
- f"{client_id} expected but "
- f"{client_info.last_snapshot.client_id} found"
- )
-
- self.InitFromClientObject(client_info.last_snapshot)
- else:
- # Every returned object should have `age` specified. If we cannot get this
- # information from the snapshot (because there is none), we just use the
- # time of the first observation of the client.
- if not client_info.last_snapshot.timestamp:
- self.age = client_info.metadata.first_seen
-
- # If we have it, use the boot_time / agent info from the startup
- # info which might be more recent than the interrogation
- # results. At some point we should have a dedicated API for
- # startup information instead of packing it into the API client
- # object.
- if client_info.last_startup_info.boot_time:
- self.last_booted_at = client_info.last_startup_info.boot_time
- if client_info.last_startup_info.client_info:
- self.agent_info = client_info.last_startup_info.client_info
-
- if client_info.HasField("last_rrg_startup"):
- version = client_info.last_rrg_startup.metadata.version
- self.rrg_version = f"{version.major}.{version.minor}.{version.patch}"
- self.rrg_args = client_info.last_rrg_startup.args
-
- md = client_info.metadata
- if md:
- if md.first_seen:
- self.first_seen_at = md.first_seen
- if md.ping:
- self.last_seen_at = md.ping
- if md.clock:
- self.last_clock = md.clock
- if md.last_crash_timestamp:
- self.last_crash_at = md.last_crash_timestamp
-
- self.labels = client_info.labels
-
- return self
-
def ObjectReference(self):
return rdf_objects.ObjectReference(
reference_type=rdf_objects.ObjectReference.Type.CLIENT,
diff --git a/grr/server/grr_response_server/gui/api_plugins/client_test.py b/grr/server/grr_response_server/gui/api_plugins/client_test.py
index 3b3f722e1a..c672e3693e 100644
--- a/grr/server/grr_response_server/gui/api_plugins/client_test.py
+++ b/grr/server/grr_response_server/gui/api_plugins/client_test.py
@@ -5,15 +5,10 @@
from unittest import mock
from absl import app
-from absl.testing import absltest
from google.protobuf import timestamp_pb2
from google.protobuf import text_format
from grr_response_core.lib import rdfvalue
-from grr_response_core.lib.rdfvalues import client as rdf_client
-from grr_response_core.lib.rdfvalues import client_fs as rdf_client_fs
-from grr_response_core.lib.rdfvalues import client_network as rdf_client_network
-from grr_response_core.lib.rdfvalues import cloud as rdf_cloud
from grr_response_core.lib.rdfvalues import test_base as rdf_test_base
from grr_response_proto import objects_pb2
from grr_response_proto.api import client_pb2
@@ -23,272 +18,11 @@
from grr_response_server import fleetspeak_utils
from grr_response_server.gui import api_test_lib
from grr_response_server.gui.api_plugins import client as client_plugin
-from grr_response_server.rdfvalues import objects as rdf_objects
from grr.test_lib import db_test_lib
from grr.test_lib import test_lib
from fleetspeak.src.server.proto.fleetspeak_server import admin_pb2
-class ApiClientTest(absltest.TestCase):
-
- def _CreateFilledOutClientSnapshot(self):
- snapshot = rdf_objects.ClientSnapshot(client_id="C.0000000000000000")
- snapshot.metadata.source_flow_id = "ABCDEF"
- client_information = rdf_client.ClientInformation(
- client_name="GRR Monitor",
- client_version="1234",
- client_description="some client description",
- build_time="1980-01-01T12:00:00.000000+00:00",
- labels=["label1", "label2"],
- )
- snapshot.startup_info.client_info = client_information
- snapshot.startup_info.boot_time = rdfvalue.RDFDatetime.FromHumanReadable(
- "2024-01-01"
- )
- hardware_info = rdf_client.HardwareInfo(
- system_manufacturer="System-Manufacturer-123",
- bios_version="Bios-Version-123",
- serial_number="123abc",
- system_uuid="a-b-c-1-2-3",
- )
- snapshot.hardware_info = hardware_info
- snapshot.os_release = "Windows"
- snapshot.os_version = "14.4"
- snapshot.kernel = "4.0.0"
- snapshot.arch = "x86_64"
- users = [
- rdf_client.User(username="fred", full_name="Ok Guy Fred"),
- rdf_client.User(username="joe", full_name="Good Guy Joe"),
- ]
- knowledge_base = rdf_client.KnowledgeBase(
- os="Linux",
- os_release="RedHat Linux",
- os_major_version=4,
- os_minor_version=2,
- fqdn="test123.examples.com",
- users=users,
- )
- snapshot.knowledge_base = knowledge_base
- interfaces = [
- rdf_client_network.Interface(
- ifname="if0",
- addresses=[
- rdf_client_network.NetworkAddress(
- human_readable_address="192.168.0.123"
- ),
- rdf_client_network.NetworkAddress(
- human_readable_address="2001:abcd::123"
- ),
- ],
- ),
- rdf_client_network.Interface(
- ifname="if1",
- mac_address=rdf_client_network.MacAddress.FromHumanReadableAddress(
- "aabbccddee%02x" % 123
- ),
- ),
- rdf_client_network.Interface(
- ifname="if2",
- mac_address=rdf_client_network.MacAddress.FromHumanReadableAddress(
- "bbccddeeff%02x" % 123
- ),
- ),
- ]
- snapshot.interfaces = interfaces
- volumes = [
- rdf_client_fs.Volume(
- windowsvolume=rdf_client_fs.WindowsVolume(drive_letter="C:"),
- bytes_per_sector=4096,
- sectors_per_allocation_unit=1,
- actual_available_allocation_units=50,
- total_allocation_units=100,
- ),
- rdf_client_fs.Volume(
- unixvolume=rdf_client_fs.UnixVolume(mount_point="/"),
- bytes_per_sector=4096,
- sectors_per_allocation_unit=1,
- actual_available_allocation_units=10,
- total_allocation_units=100,
- ),
- ]
- snapshot.volumes = volumes
- cloud_instance = rdf_cloud.CloudInstance(
- cloud_type=rdf_cloud.CloudInstance.InstanceType.GOOGLE,
- google=rdf_cloud.GoogleCloudInstance(
- unique_id="us-central1-a/myproject/1771384456894610289"
- ),
- )
- snapshot.cloud_instance = cloud_instance
- timestamp = rdfvalue.RDFDatetime.FromHumanReadable("2024-01-01")
- snapshot.timestamp = timestamp
- snapshot.edr_agents.append(rdf_client.EdrAgent(name="foo", agent_id="1337"))
- snapshot.edr_agents.append(rdf_client.EdrAgent(name="bar", agent_id="108"))
- snapshot.memory_size = 123456
-
- return snapshot
-
- def testInitFromClientObject(self):
- snapshot = self._CreateFilledOutClientSnapshot()
-
- want_client = client_plugin.ApiClient(
- client_id="C.0000000000000000",
- urn="aff4:/C.0000000000000000",
- source_flow_id="ABCDEF",
- agent_info=snapshot.startup_info.client_info,
- hardware_info=snapshot.hardware_info,
- os_info=rdf_client.Uname(
- fqdn="test123.examples.com",
- kernel="4.0.0",
- machine="x86_64",
- release="Windows",
- system="Linux",
- version="14.4",
- ),
- knowledge_base=snapshot.knowledge_base,
- cloud_instance=snapshot.cloud_instance,
- volumes=snapshot.volumes,
- age=snapshot.timestamp,
- interfaces=snapshot.interfaces,
- last_booted_at=snapshot.startup_info.boot_time,
- memory_size=snapshot.memory_size,
- users=snapshot.knowledge_base.users,
- )
-
- result = client_plugin.ApiClient().InitFromClientObject(snapshot)
-
- self.assertEqual(result, want_client)
-
- def testInitFromClientInfo_WithSnapshot(self):
- snapshot = self._CreateFilledOutClientSnapshot()
- client_info = rdf_objects.ClientFullInfo(last_snapshot=snapshot)
-
- want_client = client_plugin.ApiClient(
- client_id="C.0000000000000000",
- urn="aff4:/C.0000000000000000",
- source_flow_id="ABCDEF",
- agent_info=snapshot.startup_info.client_info,
- hardware_info=snapshot.hardware_info,
- os_info=rdf_client.Uname(
- fqdn="test123.examples.com",
- kernel="4.0.0",
- machine="x86_64",
- release="Windows",
- system="Linux",
- version="14.4",
- ),
- knowledge_base=snapshot.knowledge_base,
- cloud_instance=snapshot.cloud_instance,
- volumes=snapshot.volumes,
- age=snapshot.timestamp,
- interfaces=snapshot.interfaces,
- last_booted_at=snapshot.startup_info.boot_time,
- memory_size=snapshot.memory_size,
- users=snapshot.knowledge_base.users,
- )
-
- result = client_plugin.ApiClient().InitFromClientInfo(
- "C.0000000000000000", client_info
- )
-
- self.assertEqual(result, want_client)
-
- def testInitFromClientInfo_WithSnapshot_BadId(self):
- snapshot = self._CreateFilledOutClientSnapshot()
- client_info = rdf_objects.ClientFullInfo(last_snapshot=snapshot)
-
- with self.assertRaises(ValueError):
- client_plugin.ApiClient().InitFromClientInfo(
- "C.1111111111111111", client_info
- )
-
- def testInitFromClientInfo_WithoutSnapshot(self):
- first_seen_time = rdfvalue.RDFDatetime.FromHumanReadable("2022-01-01")
- boot_time = rdfvalue.RDFDatetime.FromHumanReadable("2022-02-02")
- ping_time = rdfvalue.RDFDatetime.FromHumanReadable("2022-03-03")
- crash_time = rdfvalue.RDFDatetime.FromHumanReadable("2022-04-04")
- metadata = rdf_objects.ClientMetadata(
- first_seen=first_seen_time,
- ping=ping_time,
- last_crash_timestamp=crash_time,
- )
- labels = [
- rdf_objects.ClientLabel(name="label3"),
- rdf_objects.ClientLabel(name="label4"),
- ]
- client_info = rdf_objects.ClientFullInfo(metadata=metadata, labels=labels)
- client_info.last_startup_info.boot_time = boot_time
- client_information = rdf_client.ClientInformation(
- client_name="GRR Monitor",
- client_version="1234",
- client_description="some client description",
- build_time="1980-01-01T12:00:00.000000+00:00",
- labels=["label1", "label2"],
- )
- client_info.last_startup_info.client_info = client_information
- client_info.last_rrg_startup.metadata.version.major = 1
- client_info.last_rrg_startup.metadata.version.minor = 2
- client_info.last_rrg_startup.metadata.version.patch = 3
- rrg_args = "some args --were passed"
- client_info.last_rrg_startup.args = rrg_args
-
- want_client = client_plugin.ApiClient(
- client_id="C.0000000000000000",
- agent_info=client_information,
- age=first_seen_time,
- first_seen_at=first_seen_time,
- last_booted_at=boot_time,
- last_seen_at=ping_time,
- last_crash_at=crash_time,
- labels=labels,
- rrg_args=rrg_args,
- rrg_version="1.2.3",
- )
-
- result = client_plugin.ApiClient().InitFromClientInfo(
- "C.0000000000000000", client_info
- )
-
- self.assertEqual(result, want_client)
-
- def testInitFromClientInfoAgeWithSnapshot(self):
- first_seen_time = rdfvalue.RDFDatetime.FromHumanReadable("2022-01-01")
- last_snapshot_time = rdfvalue.RDFDatetime.FromHumanReadable("2022-02-02")
-
- info = rdf_objects.ClientFullInfo()
- info.metadata.first_seen = first_seen_time
- info.last_snapshot.client_id = "C.1122334455667788"
- info.last_snapshot.timestamp = last_snapshot_time
-
- client = client_plugin.ApiClient()
- client.InitFromClientInfo("C.1122334455667788", info)
-
- self.assertEqual(client.age, last_snapshot_time)
-
- def testInitFromClientInfoWithoutSnapshot(self):
- first_seen_time = rdfvalue.RDFDatetime.FromHumanReadable("2022-01-01")
-
- info = rdf_objects.ClientFullInfo()
- info.metadata.first_seen = first_seen_time
-
- client = client_plugin.ApiClient()
- client.InitFromClientInfo("C.1122334455667788", info)
-
- self.assertEqual(client.age, first_seen_time)
-
- def testInitFromClientInfoRRG(self):
- info = rdf_objects.ClientFullInfo()
- info.last_rrg_startup.args = ["--foo", "--bar", "--baz"]
- info.last_rrg_startup.metadata.version.major = 1
- info.last_rrg_startup.metadata.version.minor = 2
- info.last_rrg_startup.metadata.version.patch = 3
-
- api_client = client_plugin.ApiClient()
- api_client.InitFromClientInfo("C.0123456789ABCDEF", info)
-
- self.assertEqual(api_client.rrg_version, "1.2.3")
- self.assertEqual(api_client.rrg_args, ["--foo", "--bar", "--baz"])
-
-
class ApiClientIdTest(rdf_test_base.RDFValueTestMixin, test_lib.GRRBaseTest):
"""Test for ApiClientId."""
diff --git a/grr/server/grr_response_server/gui/api_plugins/config.py b/grr/server/grr_response_server/gui/api_plugins/config.py
index 2beb712e03..cfb4778bfa 100644
--- a/grr/server/grr_response_server/gui/api_plugins/config.py
+++ b/grr/server/grr_response_server/gui/api_plugins/config.py
@@ -11,9 +11,9 @@
from grr_response_core.lib.rdfvalues import crypto as rdf_crypto
from grr_response_core.lib.rdfvalues import structs as rdf_structs
from grr_response_proto.api import config_pb2
+from grr_response_server import foreman_rules
from grr_response_server import signed_binary_utils
from grr_response_server.gui import api_call_handler_base
-from grr_response_server.gui import api_call_handler_utils
from grr_response_server.rdfvalues import hunts as rdf_hunts
# TODO(user): sensitivity of config options and sections should
@@ -31,6 +31,31 @@
REDACTED_SECTIONS = ["PrivateKeys", "Users"]
+def _IsSupportedValueType(value: Any) -> bool:
+ """Returns whether the given config value type is supported in the UI.
+
+ Args:
+ value: value to validate.
+
+ Returns:
+ True if the value is supported in the UI, False otherwise.
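+
+ For example (a sketch of the rules below): 42 and 42.0 are supported
+ (42.0 is an integral float), while 42.5 and list values such as
+ ["a", "b"] are not.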
+ """
+ if isinstance(value, float) and not value.is_integer():
+ return False
+ elif rdfvalue.RDFInteger.IsNumeric(value):
+ return True
+ elif isinstance(value, str):
+ return True
+ elif isinstance(value, bytes):
+ return True
+ elif isinstance(value, bool):
+ return True
+ elif isinstance(value, rdfvalue.RDFValue):
+ return True
+ else:
+ return False
+
+
class ApiConfigOption(rdf_structs.RDFProtoStruct):
protobuf = config_pb2.ApiConfigOption
@@ -58,17 +83,26 @@ def InitFromConfigOption(self, name):
return self
if config_value is not None:
- # TODO(user): this is a bit of a hack as we're reusing the logic
- # from ApiDataObjectKeyValuePair. We should probably abstract this
- # away into a separate function, so that we don't have to create
- # an ApiDataObjectKeyValuePair object.
- kv_pair = api_call_handler_utils.ApiDataObjectKeyValuePair()
- kv_pair.InitFromKeyValue(name, config_value)
-
- self.is_invalid = kv_pair.invalid
- if not self.is_invalid:
- self.type = kv_pair.type
- self.value = kv_pair.value
+ self.is_invalid = not _IsSupportedValueType(config_value)
+
+ if self.is_invalid:
+ return self
+
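+ # Note: bools reach the numeric branch below first (bool is an int
+ # subclass), so they are reported as RDFInteger rather than "bool"; the
+ # handler tests call this out as a known bug.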
+ if rdfvalue.RDFInteger.IsNumeric(config_value):
+ self.type = rdfvalue.RDFInteger.__name__
+ self.value = rdfvalue.RDFInteger(config_value)
+ elif isinstance(config_value, str):
+ self.type = rdfvalue.RDFString.__name__
+ self.value = rdfvalue.RDFString(config_value)
+ elif isinstance(config_value, bytes):
+ self.type = rdfvalue.RDFBytes.__name__
+ self.value = rdfvalue.RDFBytes(config_value)
+ elif isinstance(config_value, bool):
+ self.type = "bool"
+ self.value = config_value
+ elif isinstance(config_value, rdfvalue.RDFValue):
+ self.type = config_value.__class__.__name__
+ self.value = config_value
return self
@@ -281,6 +315,7 @@ class ApiUiConfig(rdf_structs.RDFProtoStruct):
rdf_deps = [
rdf_hunts.HuntRunnerArgs,
rdf_config.AdminUIClientWarningsConfigOption,
+ rdf_config.AdminUIHuntConfig,
]
@@ -292,13 +327,45 @@ class ApiGetUiConfigHandler(api_call_handler_base.ApiCallHandler):
def Handle(self, args, context=None):
del args, context # Unused.
+ default_hunt_runner_args = rdf_hunts.HuntRunnerArgs()
+ hunt_config = config.CONFIG["AdminUI.hunt_config"]
+ if hunt_config and (
+ hunt_config.default_include_labels or hunt_config.default_exclude_labels
+ ):
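+ # Build the defaults under MATCH_ALL: include labels become a MATCH_ANY
+ # LABEL rule and exclude labels a DOES_NOT_MATCH_ANY LABEL rule.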
+ default_hunt_runner_args.client_rule_set = (
+ foreman_rules.ForemanClientRuleSet(
+ match_mode=foreman_rules.ForemanClientRuleSet.MatchMode.MATCH_ALL,
+ )
+ )
+ if hunt_config.default_include_labels:
+ default_hunt_runner_args.client_rule_set.rules.append(
+ foreman_rules.ForemanClientRule(
+ rule_type=foreman_rules.ForemanClientRule.Type.LABEL,
+ label=foreman_rules.ForemanLabelClientRule(
+ match_mode=foreman_rules.ForemanLabelClientRule.MatchMode.MATCH_ANY,
+ label_names=hunt_config.default_include_labels,
+ ),
+ )
+ )
+ if hunt_config.default_exclude_labels:
+ default_hunt_runner_args.client_rule_set.rules.append(
+ foreman_rules.ForemanClientRule(
+ rule_type=foreman_rules.ForemanClientRule.Type.LABEL,
+ label=foreman_rules.ForemanLabelClientRule(
+ match_mode=foreman_rules.ForemanLabelClientRule.MatchMode.DOES_NOT_MATCH_ANY,
+ label_names=hunt_config.default_exclude_labels,
+ ),
+ )
+ )
+
return ApiUiConfig(
heading=config.CONFIG["AdminUI.heading"],
report_url=config.CONFIG["AdminUI.report_url"],
help_url=config.CONFIG["AdminUI.help_url"],
grr_version=config.CONFIG["Source.version_string"],
profile_image_url=config.CONFIG["AdminUI.profile_image_url"],
- default_hunt_runner_args=rdf_hunts.HuntRunnerArgs(),
+ default_hunt_runner_args=default_hunt_runner_args,
+ hunt_config=config.CONFIG["AdminUI.hunt_config"],
client_warnings=config.CONFIG["AdminUI.client_warnings"],
default_access_duration_seconds=config.CONFIG["ACL.token_expiry"],
max_access_duration_seconds=config.CONFIG["ACL.token_max_expiry"],
diff --git a/grr/server/grr_response_server/gui/api_plugins/config_regression_test.py b/grr/server/grr_response_server/gui/api_plugins/config_regression_test.py
index 9080686bc4..35557dca59 100644
--- a/grr/server/grr_response_server/gui/api_plugins/config_regression_test.py
+++ b/grr/server/grr_response_server/gui/api_plugins/config_regression_test.py
@@ -3,9 +3,11 @@
from absl import app
+from grr_response_core.lib.rdfvalues import config as rdf_config
from grr_response_server.gui import api_regression_test_lib
from grr_response_server.gui.api_plugins import config as config_plugin
from grr_response_server.gui.api_plugins import config_test as config_plugin_test
+from grr.test_lib import test_lib
class ApiListGrrBinariesHandlerRegressionTest(
@@ -70,6 +72,122 @@ def Run(self):
)
+class ApiGetConfigOptionHandlerRegressionTest(
+ api_regression_test_lib.ApiRegressionTest,
+):
+ api_method = "GetConfigOption"
+ handler = config_plugin.ApiGetConfigOptionHandler
+
+ def Run(self):
+ # Test config values of different types. Not all of them are supported in
+ # the UI.
+ with test_lib.ConfigOverrider({
+ "Client.company_name": "Monstros S.A.",
+ "AdminUI.hunt_config": rdf_config.AdminUIHuntConfig(
+ default_exclude_labels=["oh-oh"],
+ ),
+ "Source.version_major": 42,
+ "Hunt.default_client_rate": 42.0,
+ "Email.enable_custom_email_address": True,
+ "Cron.disabled_cron_jobs": ["Job1", "Job2"],
+ "Server.fleetspeak_last_ping_threshold": "1h",
+ "Server.raw_filesystem_access_pathtype": "TSK",
+ "ClientBuilder.build_type": "Debug",
+ "ClientBuilder.target_platforms": [
+ "darwin_amd64_dmg",
+ "linux_amd64_deb",
+ ],
+ "ClientRepacker.output_filename": (
+ "%(ClientRepacker.output_basename)%(ClientBuilder.output_extension)"
+ ),
+ "Mysql.password": "top-secret",
+ }):
+ self.Check(
+ "GetConfigOption",
+ args=config_plugin.ApiGetConfigOptionArgs(name="Client.company_name"),
+ )
+ self.Check(
+ "GetConfigOption",
+ args=config_plugin.ApiGetConfigOptionArgs(name="AdminUI.hunt_config"),
+ )
+ self.Check(
+ "GetConfigOption",
+ args=config_plugin.ApiGetConfigOptionArgs(
+ name="Source.version_major"
+ ),
+ )
+ self.Check(
+ "GetConfigOption",
+ args=config_plugin.ApiGetConfigOptionArgs(
+ name="Hunt.default_client_rate"
+ ),
+ )
+ self.Check(
+ "GetConfigOption",
+ args=config_plugin.ApiGetConfigOptionArgs(
+ name="Email.enable_custom_email_address"
+ ),
+ )
+ self.Check(
+ "GetConfigOption",
+ args=config_plugin.ApiGetConfigOptionArgs(
+ name="Cron.disabled_cron_jobs"
+ ),
+ )
+ self.Check(
+ "GetConfigOption",
+ args=config_plugin.ApiGetConfigOptionArgs(
+ name="Server.fleetspeak_last_ping_threshold"
+ ),
+ )
+ self.Check(
+ "GetConfigOption",
+ args=config_plugin.ApiGetConfigOptionArgs(
+ name="Server.raw_filesystem_access_pathtype"
+ ),
+ )
+ self.Check(
+ "GetConfigOption",
+ args=config_plugin.ApiGetConfigOptionArgs(
+ name="ClientBuilder.build_type"
+ ),
+ )
+ self.Check(
+ "GetConfigOption",
+ args=config_plugin.ApiGetConfigOptionArgs(
+ name="ClientBuilder.target_platforms"
+ ),
+ )
+ self.Check(
+ "GetConfigOption",
+ args=config_plugin.ApiGetConfigOptionArgs(
+ name="ClientRepacker.output_filename"
+ ),
+ )
+ self.Check(
+ "GetConfigOption",
+ args=config_plugin.ApiGetConfigOptionArgs(name="Mysql.password"),
+ )
+
+
def main(argv):
api_regression_test_lib.main(argv)
diff --git a/grr/server/grr_response_server/gui/api_plugins/config_test.py b/grr/server/grr_response_server/gui/api_plugins/config_test.py
index 9182913cbf..d0c0b7895d 100644
--- a/grr/server/grr_response_server/gui/api_plugins/config_test.py
+++ b/grr/server/grr_response_server/gui/api_plugins/config_test.py
@@ -6,7 +6,10 @@
from absl import app
from grr_response_core import config
+from grr_response_core.lib import rdfvalue
from grr_response_core.lib import utils
+from grr_response_core.lib.rdfvalues import config as rdf_config
+from grr_response_core.lib.rdfvalues import paths as rdf_paths
from grr_response_server import maintenance_utils
from grr_response_server import signed_binary_utils
from grr_response_server.gui import api_test_lib
@@ -158,6 +161,151 @@ def testRendersRedacted(self):
self.assertEqual(result.name, "Mysql.password")
self.assertTrue(result.is_redacted)
+ def testRendersRDFStruct(self):
+ hunt_cfg = rdf_config.AdminUIHuntConfig(
+ default_include_labels=["include"],
+ make_default_exclude_labels_a_presubmit_check=True,
+ )
+ with test_lib.ConfigOverrider({"AdminUI.hunt_config": hunt_cfg}):
+ result = self.handler.Handle(
+ config_plugin.ApiGetConfigOptionArgs(name="AdminUI.hunt_config")
+ )
+ self.assertEqual(result.name, "AdminUI.hunt_config")
+ self.assertEqual(result.type, "AdminUIHuntConfig")
+ self.assertEqual(result.value.default_include_labels, ["include"])
+ self.assertTrue(result.value.make_default_exclude_labels_a_presubmit_check)
+
+ def testRendersRDFString(self):
+ with test_lib.ConfigOverrider({"Logging.domain": "localhost"}):
+ result = self.handler.Handle(
+ config_plugin.ApiGetConfigOptionArgs(name="Logging.domain")
+ )
+ self.assertEqual(result.name, "Logging.domain")
+ self.assertEqual(result.type, "RDFString")
+ self.assertEqual(result.value, "localhost")
+
+ def testRendersRDFStringFakeList(self):
+ with test_lib.ConfigOverrider(
+ {"AdminUI.new_flow_form.default_output_plugins": "Dummy1,Dummy2"}
+ ):
+ result = self.handler.Handle(
+ config_plugin.ApiGetConfigOptionArgs(
+ name="AdminUI.new_flow_form.default_output_plugins"
+ )
+ )
+ self.assertEqual(
+ result.name, "AdminUI.new_flow_form.default_output_plugins"
+ )
+ self.assertEqual(result.type, "RDFString")
+ self.assertEqual(result.value, "Dummy1,Dummy2")
+
+ def testRendersInt(self):
+ with test_lib.ConfigOverrider({"Source.version_major": 42}):
+ result = self.handler.Handle(
+ config_plugin.ApiGetConfigOptionArgs(name="Source.version_major")
+ )
+ self.assertEqual(result.name, "Source.version_major")
+ self.assertEqual(result.type, "RDFInteger")
+ self.assertEqual(result.value, 42)
+
+ def testRendersFakeFloat(self):
+ with test_lib.ConfigOverrider({"Hunt.default_client_rate": 42.0}):
+ result = self.handler.Handle(
+ config_plugin.ApiGetConfigOptionArgs(name="Hunt.default_client_rate")
+ )
+ self.assertEqual(result.name, "Hunt.default_client_rate")
+ self.assertEqual(result.type, "RDFInteger")
+ self.assertEqual(result.value, 42)
+
+ def testRendersBool(self):
+ with test_lib.ConfigOverrider({"Email.enable_custom_email_address": True}):
+ result = self.handler.Handle(
+ config_plugin.ApiGetConfigOptionArgs(
+ name="Email.enable_custom_email_address"
+ )
+ )
+ self.assertEqual(result.name, "Email.enable_custom_email_address")
+ # This is a bug, the type should be "bool".
+ self.assertEqual(result.type, "RDFInteger")
+ self.assertTrue(result.value)
+
+ def testRendersList(self):
+ with test_lib.ConfigOverrider(
+ {"Cron.disabled_cron_jobs": ["Job1", "Job2"]}
+ ):
+ result = self.handler.Handle(
+ config_plugin.ApiGetConfigOptionArgs(name="Cron.disabled_cron_jobs")
+ )
+ self.assertEqual(result.name, "Cron.disabled_cron_jobs")
+ # We don't support lists in the API.
+ self.assertTrue(result.is_invalid)
+
+ def testRendersRDFDuration(self):
+ with test_lib.ConfigOverrider(
+ {"Server.fleetspeak_last_ping_threshold": "1h"}
+ ):
+ result = self.handler.Handle(
+ config_plugin.ApiGetConfigOptionArgs(
+ name="Server.fleetspeak_last_ping_threshold"
+ )
+ )
+ self.assertEqual(result.name, "Server.fleetspeak_last_ping_threshold")
+ self.assertEqual(result.type, "Duration")
+ self.assertEqual(result.value, rdfvalue.Duration("1h"))
+
+ def testRendersRDFEnum(self):
+ with test_lib.ConfigOverrider(
+ {"Server.raw_filesystem_access_pathtype": "TSK"}
+ ):
+ result = self.handler.Handle(
+ config_plugin.ApiGetConfigOptionArgs(
+ name="Server.raw_filesystem_access_pathtype"
+ )
+ )
+ self.assertEqual(result.name, "Server.raw_filesystem_access_pathtype")
+ self.assertEqual(result.type, "EnumNamedValue")
+ self.assertEqual(result.value, rdf_paths.PathSpec.PathType.TSK)
+
+ def testRendersChoice(self):
+ with test_lib.ConfigOverrider({"ClientBuilder.build_type": "Debug"}):
+ result = self.handler.Handle(
+ config_plugin.ApiGetConfigOptionArgs(name="ClientBuilder.build_type")
+ )
+ self.assertEqual(result.name, "ClientBuilder.build_type")
+ self.assertEqual(result.type, "RDFString")
+ self.assertEqual(result.value, "Debug")
+
+ def testRendersMultiChoice(self):
+ with test_lib.ConfigOverrider({
+ "ClientBuilder.target_platforms": [
+ "darwin_amd64_dmg",
+ "linux_amd64_deb",
+ ]
+ }):
+ result = self.handler.Handle(
+ config_plugin.ApiGetConfigOptionArgs(
+ name="ClientBuilder.target_platforms"
+ )
+ )
+ self.assertEqual(result.name, "ClientBuilder.target_platforms")
+ # We don't support lists in the API.
+ self.assertTrue(result.is_invalid)
+
+ def testRendersOption(self):
+ with test_lib.ConfigOverrider({
+ "ClientRepacker.output_filename": (
+ "%(ClientRepacker.output_basename)%(ClientBuilder.output_extension)"
+ )
+ }):
+ result = self.handler.Handle(
+ config_plugin.ApiGetConfigOptionArgs(
+ name="ClientRepacker.output_filename"
+ )
+ )
+ self.assertEqual(result.name, "ClientRepacker.output_filename")
+ self.assertEqual(result.type, "RDFString")
+ self.assertEqual(result.value, "GRR_0.0.0.0_")
+
class ApiGrrBinaryTestMixin(object):
"""Mixing providing GRR binaries test setup routine."""
diff --git a/grr/server/grr_response_server/gui/api_plugins/cron.py b/grr/server/grr_response_server/gui/api_plugins/cron.py
index 4917d91e25..b286af8432 100644
--- a/grr/server/grr_response_server/gui/api_plugins/cron.py
+++ b/grr/server/grr_response_server/gui/api_plugins/cron.py
@@ -1,20 +1,65 @@
#!/usr/bin/env python
"""API handlers for dealing with cron jobs."""
+from typing import Optional
+
from grr_response_core.lib import rdfvalue
from grr_response_core.lib import registry
+from grr_response_core.lib.rdfvalues import mig_protodict
from grr_response_core.lib.rdfvalues import structs as rdf_structs
+from grr_response_proto import flows_pb2
from grr_response_proto.api import cron_pb2
from grr_response_server import cronjobs
from grr_response_server.databases import db
+from grr_response_server.gui import api_call_context
from grr_response_server.gui import api_call_handler_base
from grr_response_server.gui import api_call_handler_utils
+from grr_response_server.gui import mig_api_call_handler_utils
from grr_response_server.gui.api_plugins import flow as api_plugins_flow
+from grr_response_server.models import protobuf_utils
from grr_response_server.rdfvalues import cronjobs as rdf_cronjobs
from grr_response_server.rdfvalues import hunts as rdf_hunts
+from grr_response_server.rdfvalues import mig_cronjobs
from grr_response_server.rdfvalues import objects as rdf_objects
+def InitApiCronJobFromCronJob(
+ cron_job: flows_pb2.CronJob,
+) -> cron_pb2.ApiCronJob:
+ """Initializes ApiCronJob from CronJob."""
+
+ api_cron_job = cron_pb2.ApiCronJob()
+ protobuf_utils.CopyAttr(cron_job, api_cron_job, "cron_job_id")
+ if cron_job.HasField("args"):
+ api_cron_job.args.CopyFrom(cron_job.args)
+ protobuf_utils.CopyAttr(cron_job, api_cron_job, "current_run_id")
+ protobuf_utils.CopyAttr(cron_job, api_cron_job, "description")
+ protobuf_utils.CopyAttr(cron_job, api_cron_job, "enabled")
+ protobuf_utils.CopyAttr(cron_job, api_cron_job, "last_run_status")
+ protobuf_utils.CopyAttr(cron_job, api_cron_job, "last_run_time")
+ protobuf_utils.CopyAttr(cron_job, api_cron_job, "frequency")
+ protobuf_utils.CopyAttr(cron_job, api_cron_job, "lifetime")
+ protobuf_utils.CopyAttr(cron_job, api_cron_job, "allow_overruns")
+
+ api_cron_job.is_failing = cron_job.last_run_status in [
+ flows_pb2.CronJobRun.CronJobRunStatus.ERROR,
+ flows_pb2.CronJobRun.CronJobRunStatus.LIFETIME_EXCEEDED,
+ ]
+
+ if cron_job.forced_run_requested:
+ api_cron_job.forced_run_requested = True
+
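+ # The job state is persisted as an AttributedDict proto; convert it to
+ # the ApiDataObject representation used by the API via the migration
+ # helpers.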
+ rdf_state = mig_protodict.ToRDFAttributedDict(cron_job.state)
+ state_dict = rdf_state.ToDict()
+ if state_dict:
+ state = api_call_handler_utils.ApiDataObject()
+ state.InitFromDataObject(state_dict)
+ api_cron_job.state.CopyFrom(
+ mig_api_call_handler_utils.ToProtoApiDataObject(state)
+ )
+ return api_cron_job
+
+
class CronJobNotFoundError(api_call_handler_base.ResourceNotFoundError):
"""Raised when a cron job could not be found."""
@@ -81,46 +126,6 @@ def _StatusFromCronJobRunStatus(self, status):
return self.status_map[status.status]
- @classmethod
- def _IsCronJobObjectFailing(cls, cron_job):
- status = cron_job.last_run_status
- if status is None:
- return False
- return status in [
- rdf_cronjobs.CronJobRun.CronJobRunStatus.ERROR,
- rdf_cronjobs.CronJobRun.CronJobRunStatus.LIFETIME_EXCEEDED,
- ]
-
- @classmethod
- def InitFromObject(cls, cron_job):
- api_cron_job = ApiCronJob(
- cron_job_id=cron_job.cron_job_id,
- args=cron_job.args,
- # TODO(amoser): AFF4 does not keep this data. Enable once we don't have
- # aff4 to support anymore.
- # created_at=cron_job.created_at,
- current_run_id=cron_job.current_run_id or None,
- description=cron_job.description,
- enabled=cron_job.enabled,
- last_run_status=cron_job.last_run_status or None,
- last_run_time=cron_job.last_run_time,
- frequency=cron_job.frequency,
- lifetime=cron_job.lifetime or None,
- allow_overruns=cron_job.allow_overruns,
- is_failing=cls._IsCronJobObjectFailing(cron_job),
- )
-
- if cron_job.forced_run_requested:
- api_cron_job.forced_run_requested = True
-
- state_dict = cron_job.state.ToDict()
- if state_dict:
- state = api_call_handler_utils.ApiDataObject()
- state.InitFromDataObject(state_dict)
- api_cron_job.state = state
-
- return api_cron_job
-
def ObjectReference(self):
return rdf_objects.ObjectReference(
reference_type=rdf_objects.ObjectReference.Type.CRON_JOB,
@@ -200,8 +205,14 @@ class ApiListCronJobsHandler(api_call_handler_base.ApiCallHandler):
args_type = ApiListCronJobsArgs
result_type = ApiListCronJobsResult
-
- def Handle(self, args, context=None):
+ proto_args_type = cron_pb2.ApiListCronJobsArgs
+ proto_result_type = cron_pb2.ApiListCronJobsResult
+
+ def Handle(
+ self,
+ args: cron_pb2.ApiListCronJobsArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> cron_pb2.ApiListCronJobsResult:
if not args.count:
stop = None
else:
@@ -212,11 +223,14 @@ def Handle(self, args, context=None):
all_jobs.sort(
key=lambda job: (getattr(job, "cron_job_id", None) or job.urn)
)
+ all_jobs = [mig_cronjobs.ToProtoCronJob(job) for job in all_jobs]
cron_jobs = all_jobs[args.offset : stop]
- items = [ApiCronJob.InitFromObject(cron_job) for cron_job in cron_jobs]
+ items = [InitApiCronJobFromCronJob(cron_job) for cron_job in cron_jobs]
- return ApiListCronJobsResult(items=items, total_count=len(all_jobs))
+ return cron_pb2.ApiListCronJobsResult(
+ items=items, total_count=len(all_jobs)
+ )
class ApiGetCronJobArgs(rdf_structs.RDFProtoStruct):
@@ -231,12 +245,18 @@ class ApiGetCronJobHandler(api_call_handler_base.ApiCallHandler):
args_type = ApiGetCronJobArgs
result_type = ApiCronJob
-
- def Handle(self, args, context=None):
+ proto_args_type = cron_pb2.ApiGetCronJobArgs
+ proto_result_type = cron_pb2.ApiCronJob
+
+ def Handle(
+ self,
+ args: cron_pb2.ApiGetCronJobArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> cron_pb2.ApiCronJob:
try:
cron_job = cronjobs.CronManager().ReadJob(str(args.cron_job_id))
-
- return ApiCronJob.InitFromObject(cron_job)
+ cron_job = mig_cronjobs.ToProtoCronJob(cron_job)
+ return InitApiCronJobFromCronJob(cron_job)
except db.UnknownCronJobError as e:
raise CronJobNotFoundError(
"Cron job with id %s could not be found" % args.cron_job_id
@@ -325,12 +345,20 @@ class ApiCreateCronJobHandler(api_call_handler_base.ApiCallHandler):
args_type = ApiCreateCronJobArgs
result_type = ApiCronJob
-
- def Handle(self, source_args, context=None):
+ proto_args_type = cron_pb2.ApiCreateCronJobArgs
+ proto_result_type = cron_pb2.ApiCronJob
+
+ def Handle(
+ self,
+ source_args: cron_pb2.ApiCreateCronJobArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> cron_pb2.ApiCronJob:
# Make sure we don't modify source arguments.
- args = source_args.Copy()
+ args = cron_pb2.ApiCreateCronJobArgs()
+ args.CopyFrom(source_args)
# Clear all fields marked with HIDDEN.
+ args = ToRDFApiCreateCronJobArgs(args)
args.flow_args.ClearFieldsWithLabel(
rdf_structs.SemanticDescriptor.Labels.HIDDEN
)
@@ -344,14 +372,14 @@ def Handle(self, source_args, context=None):
rdf_structs.SemanticDescriptor.Labels.HIDDEN,
exceptions="output_plugins",
)
- cron_manager = cronjobs.CronManager()
+ cron_manager = cronjobs.CronManager()
cron_args = rdf_cronjobs.CreateCronJobArgs.FromApiCreateCronJobArgs(args)
cron_job_id = cron_manager.CreateJob(cron_args=cron_args, enabled=False)
cron_obj = cron_manager.ReadJob(cron_job_id)
-
- return ApiCronJob.InitFromObject(cron_obj)
+ cron_obj = mig_cronjobs.ToProtoCronJob(cron_obj)
+ return InitApiCronJobFromCronJob(cron_obj)
class ApiForceRunCronJobArgs(rdf_structs.RDFProtoStruct):
@@ -383,16 +411,23 @@ class ApiModifyCronJobHandler(api_call_handler_base.ApiCallHandler):
args_type = ApiModifyCronJobArgs
result_type = ApiCronJob
-
- def Handle(self, args, context=None):
- cron_id = str(args.cron_job_id)
+ proto_args_type = cron_pb2.ApiModifyCronJobArgs
+ proto_result_type = cron_pb2.ApiCronJob
+
+ def Handle(
+ self,
+ args: cron_pb2.ApiModifyCronJobArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> cron_pb2.ApiCronJob:
+ cron_id = args.cron_job_id
if args.enabled:
cronjobs.CronManager().EnableJob(cron_id)
else:
cronjobs.CronManager().DisableJob(cron_id)
cron_job_obj = cronjobs.CronManager().ReadJob(cron_id)
- return ApiCronJob.InitFromObject(cron_job_obj)
+ cron_job_obj = mig_cronjobs.ToProtoCronJob(cron_job_obj)
+ return InitApiCronJobFromCronJob(cron_job_obj)
class ApiDeleteCronJobArgs(rdf_structs.RDFProtoStruct):
@@ -409,3 +444,10 @@ class ApiDeleteCronJobHandler(api_call_handler_base.ApiCallHandler):
def Handle(self, args, context=None):
cronjobs.CronManager().DeleteJob(str(args.cron_job_id))
+
+
+# Copy of migration function to avoid circular dependency.
+def ToRDFApiCreateCronJobArgs(
+ proto: cron_pb2.ApiCreateCronJobArgs,
+) -> ApiCreateCronJobArgs:
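+ # The round trip through serialized bytes works because the RDF struct
+ # wraps the same underlying proto message.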
+ return ApiCreateCronJobArgs.FromSerializedBytes(proto.SerializeToString())
diff --git a/grr/server/grr_response_server/gui/api_plugins/cron_test.py b/grr/server/grr_response_server/gui/api_plugins/cron_test.py
index 867e801e55..1e2214b2e2 100644
--- a/grr/server/grr_response_server/gui/api_plugins/cron_test.py
+++ b/grr/server/grr_response_server/gui/api_plugins/cron_test.py
@@ -6,14 +6,15 @@
from grr_response_core.lib import rdfvalue
from grr_response_core.lib.rdfvalues import mig_protodict
from grr_response_core.lib.rdfvalues import protodict as rdf_protodict
+from grr_response_proto import flows_pb2
+from grr_response_proto.api import cron_pb2
from grr_response_server import cronjobs
from grr_response_server import data_store
from grr_response_server.flows.general import file_finder
from grr_response_server.gui import api_test_lib
+from grr_response_server.gui import mig_api_call_handler_utils
from grr_response_server.gui.api_plugins import cron as cron_plugin
from grr_response_server.rdfvalues import cronjobs as rdf_cronjobs
-from grr_response_server.rdfvalues import hunts as rdf_hunts
-from grr_response_server.rdfvalues import mig_cronjobs
from grr.test_lib import flow_test_lib
from grr.test_lib import test_lib
@@ -27,37 +28,53 @@ def testInitFromCronObject(self):
state["quux"] = "norf"
state["thud"] = "blargh"
- cron_job = rdf_cronjobs.CronJob(
- created_at=rdfvalue.RDFDatetime.Now(),
+ cron_job = flows_pb2.CronJob(
+ created_at=int(rdfvalue.RDFDatetime.Now()),
)
cron_job.cron_job_id = "foo"
cron_job.current_run_id = "bar"
- cron_job.last_run_time = self._DATETIME("2001-01-01")
- cron_job.last_run_status = "FINISHED"
- cron_job.frequency = rdfvalue.Duration.From(1, rdfvalue.DAYS)
- cron_job.lifetime = rdfvalue.Duration.From(30, rdfvalue.DAYS)
+ cron_job.last_run_time = int(self._DATETIME("2001-01-01"))
+ cron_job.last_run_status = flows_pb2.CronJobRun.CronJobRunStatus.FINISHED
+ cron_job.frequency = rdfvalue.Duration.From(1, rdfvalue.DAYS).ToInt(
+ timeunit=rdfvalue.MICROSECONDS
+ )
+ cron_job.lifetime = rdfvalue.Duration.From(30, rdfvalue.DAYS).ToInt(
+ timeunit=rdfvalue.MICROSECONDS
+ )
cron_job.enabled = False
cron_job.forced_run_requested = True
- cron_job.state = state
+ cron_job.state.CopyFrom(mig_protodict.ToProtoAttributedDict(state))
cron_job.description = "testdescription"
- api_cron_job = cron_plugin.ApiCronJob.InitFromObject(cron_job)
+ api_cron_job = cron_plugin.InitApiCronJobFromCronJob(cron_job)
self.assertEqual(api_cron_job.cron_job_id, "foo")
self.assertEqual(api_cron_job.current_run_id, "bar")
self.assertEqual(api_cron_job.description, "testdescription")
- self.assertEqual(api_cron_job.last_run_time, self._DATETIME("2001-01-01"))
- self.assertEqual(api_cron_job.last_run_status, "FINISHED")
self.assertEqual(
- api_cron_job.frequency, rdfvalue.Duration.From(1, rdfvalue.DAYS)
+ api_cron_job.last_run_time, int(self._DATETIME("2001-01-01"))
)
self.assertEqual(
- api_cron_job.lifetime, rdfvalue.Duration.From(30, rdfvalue.DAYS)
+ api_cron_job.last_run_status,
+ flows_pb2.CronJobRun.CronJobRunStatus.FINISHED,
+ )
+ self.assertEqual(
+ api_cron_job.frequency,
+ rdfvalue.Duration.From(1, rdfvalue.DAYS).ToInt(
+ timeunit=rdfvalue.MICROSECONDS
+ ),
+ )
+ self.assertEqual(
+ api_cron_job.lifetime,
+ rdfvalue.Duration.From(30, rdfvalue.DAYS).ToInt(
+ timeunit=rdfvalue.MICROSECONDS
+ ),
)
self.assertFalse(api_cron_job.enabled)
self.assertTrue(api_cron_job.forced_run_requested)
- api_state_items = {_.key: _.value for _ in api_cron_job.state.items}
+ state = mig_api_call_handler_utils.ToRDFApiDataObject(api_cron_job.state)
+ api_state_items = {_.key: _.value for _ in state.items}
self.assertEqual(api_state_items, {"quux": "norf", "thud": "blargh"})
@@ -88,12 +105,9 @@ def setUp(self):
self.handler = cron_plugin.ApiCreateCronJobHandler()
def testAddForemanRulesHuntRunnerArgumentIsNotRespected(self):
- args = cron_plugin.ApiCreateCronJobArgs(
+ args = cron_pb2.ApiCreateCronJobArgs(
flow_name=flow_test_lib.FlowWithOneNestedFlow.__name__,
- hunt_runner_args=rdf_hunts.HuntRunnerArgs(
- # Default is True.
- add_foreman_rules=False
- ),
+ hunt_runner_args=flows_pb2.HuntRunnerArgs(add_foreman_rules=False),
)
result = self.handler.Handle(args, context=self.context)
self.assertTrue(
@@ -136,42 +150,46 @@ def setUp(self):
def testHandler(self):
now = rdfvalue.RDFDatetime.Now()
with test_lib.FakeTime(now):
- rdf_job = rdf_cronjobs.CronJob(
+ job = flows_pb2.CronJob(
cron_job_id="job_id",
enabled=True,
- last_run_status="FINISHED",
- frequency=rdfvalue.Duration.From(7, rdfvalue.DAYS),
- lifetime=rdfvalue.Duration.From(1, rdfvalue.HOURS),
+ last_run_status=flows_pb2.CronJobRun.CronJobRunStatus.FINISHED,
+ frequency=rdfvalue.Duration.From(7, rdfvalue.DAYS).ToInt(
+ timeunit=rdfvalue.MICROSECONDS
+ ),
+ lifetime=rdfvalue.Duration.From(1, rdfvalue.HOURS).ToInt(
+ timeunit=rdfvalue.MICROSECONDS
+ ),
allow_overruns=True,
- created_at=rdfvalue.RDFDatetime.Now(),
+ created_at=int(rdfvalue.RDFDatetime.Now()),
)
- proto_job = mig_cronjobs.ToProtoCronJob(rdf_job)
- data_store.REL_DB.WriteCronJob(proto_job)
+ data_store.REL_DB.WriteCronJob(job)
state = rdf_protodict.AttributedDict()
state["item"] = "key"
data_store.REL_DB.UpdateCronJob(
- rdf_job.cron_job_id,
+ job.cron_job_id,
current_run_id="ABCD1234",
state=mig_protodict.ToProtoAttributedDict(state),
forced_run_requested=True,
)
- args = cron_plugin.ApiGetCronJobArgs(cron_job_id=rdf_job.cron_job_id)
+ args = cron_pb2.ApiGetCronJobArgs(cron_job_id=job.cron_job_id)
result = self.handler.Handle(args)
- self.assertEqual(result.cron_job_id, rdf_job.cron_job_id)
+ self.assertEqual(result.cron_job_id, job.cron_job_id)
# TODO(amoser): The aff4 implementation does not store the create time so we
# can't return it yet.
# self.assertEqual(result.created_at, now)
- self.assertEqual(result.enabled, rdf_job.enabled)
+ self.assertEqual(result.enabled, job.enabled)
self.assertEqual(result.current_run_id, "ABCD1234")
self.assertEqual(result.forced_run_requested, True)
- self.assertEqual(result.frequency, rdf_job.frequency)
+ self.assertEqual(result.frequency, job.frequency)
self.assertEqual(result.is_failing, False)
- self.assertEqual(result.last_run_status, rdf_job.last_run_status)
- self.assertEqual(result.lifetime, rdf_job.lifetime)
- state_entries = list(result.state.items)
+ self.assertEqual(result.last_run_status, job.last_run_status)
+ self.assertEqual(result.lifetime, job.lifetime)
+ state = mig_api_call_handler_utils.ToRDFApiDataObject(result.state)
+ state_entries = list(state.items)
self.assertLen(state_entries, 1)
state_entry = state_entries[0]
self.assertEqual(state_entry.key, "item")
diff --git a/grr/server/grr_response_server/gui/api_plugins/flow.py b/grr/server/grr_response_server/gui/api_plugins/flow.py
index 9d21bffaf1..470fa22d3e 100644
--- a/grr/server/grr_response_server/gui/api_plugins/flow.py
+++ b/grr/server/grr_response_server/gui/api_plugins/flow.py
@@ -5,20 +5,23 @@
import itertools
import logging
import re
-from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, Sequence, Tuple, Type
+from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, Sequence, Tuple, Type, Union
from grr_response_core import config
from grr_response_core.lib import rdfvalue
from grr_response_core.lib import registry
from grr_response_core.lib import utils
from grr_response_core.lib.rdfvalues import flows as rdf_flows
+from grr_response_core.lib.rdfvalues import mig_flows
from grr_response_core.lib.rdfvalues import mig_protodict
from grr_response_core.lib.rdfvalues import mig_structs
from grr_response_core.lib.rdfvalues import paths as rdf_paths
from grr_response_core.lib.rdfvalues import structs as rdf_structs
from grr_response_proto import flows_pb2
+from grr_response_proto import jobs_pb2
from grr_response_proto import output_plugin_pb2
from grr_response_proto.api import flow_pb2
+from grr_response_proto.api import output_plugin_pb2 as api_output_plugin_pb2
from grr_response_server import access_control
from grr_response_server import data_store
from grr_response_server import data_store_utils
@@ -345,6 +348,18 @@ def _GetFlowClass(
logging.warning("Failed to get flow class for %s: %s", flow_name, e)
+def InitApiFlowResultFromFlowResult(
+ result: flows_pb2.FlowResult,
+) -> flow_pb2.ApiFlowResult:
+ """Creates an ApiFlowResult from a FlowResult."""
+ api_flow_result = flow_pb2.ApiFlowResult()
+ if result.HasField("payload"):
+ api_flow_result.payload.CopyFrom(result.payload)
+ protobuf_utils.CopyAttr(result, api_flow_result, "timestamp")
+ protobuf_utils.CopyAttr(result, api_flow_result, "tag")
+ return api_flow_result
+
+
class ApiFlowId(rdfvalue.RDFString):
"""Class encapsulating flows ids."""
@@ -445,19 +460,6 @@ class ApiFlowResult(rdf_structs.RDFProtoStruct):
rdfvalue.RDFDatetime,
]
- def GetPayloadClass(self):
- return rdfvalue.RDFValue.classes[self.payload_type]
-
- def InitFromFlowResult(self, result):
- p = result.payload
- self.payload_type = p.__class__.__name__
- self.payload = p
- self.timestamp = result.timestamp
- if result.tag:
- self.tag = result.tag
-
- return self
-
class ApiFlowLog(rdf_structs.RDFProtoStruct):
protobuf = flow_pb2.ApiFlowLog
@@ -515,28 +517,38 @@ class ApiListFlowRequestsHandler(api_call_handler_base.ApiCallHandler):
args_type = ApiListFlowRequestsArgs
result_type = ApiListFlowRequestsResult
+ proto_args_type = flow_pb2.ApiListFlowRequestsArgs
+ proto_result_type = flow_pb2.ApiListFlowRequestsResult
+
+ def Handle(
+ self,
+ args: flow_pb2.ApiListFlowRequestsArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> flow_pb2.ApiListFlowRequestsResult:
- def Handle(self, args, context=None):
- client_id = args.client_id.ToString()
requests_and_responses = data_store.REL_DB.ReadAllFlowRequestsAndResponses(
- client_id, str(args.flow_id)
+ args.client_id, args.flow_id
)
- result = ApiListFlowRequestsResult()
+ result = flow_pb2.ApiListFlowRequestsResult()
stop = None
- if args.count:
+ if args.HasField("count"):
stop = args.offset + args.count
for request, response_dict in itertools.islice(
requests_and_responses, args.offset, stop
):
- request_state = rdf_flow_runner.RequestState(
- client_id=client_id,
+ request_state = jobs_pb2.RequestState(
+ client_id=str(rdfvalue.RDFURN(args.client_id)),
id=request.request_id,
next_state=request.next_state,
- session_id="{}/flows/{}".format(client_id, str(request.flow_id)),
+ session_id=str(
+ rdfvalue.SessionID(
+ "{}/flows/{}".format(args.client_id, request.flow_id)
+ )
+ ),
)
- api_request = ApiFlowRequest(
+ api_request = flow_pb2.ApiFlowRequest(
request_id=str(request.request_id), request_state=request_state
)
@@ -549,12 +561,15 @@ def Handle(self, args, context=None):
response = mig_flow_objects.ToRDFFlowStatus(response)
if isinstance(response, flows_pb2.FlowIterator):
response = mig_flow_objects.ToRDFFlowIterator(response)
- responses.append(response.AsLegacyGrrMessage())
+ responses.append(
+ mig_flows.ToProtoGrrMessage(response.AsLegacyGrrMessage())
+ )
for r in responses:
- r.ClearPayload()
+ r.ClearField("args_rdf_name")
+ r.ClearField("args")
- api_request.responses = responses
+ api_request.responses.extend(responses)
result.items.append(api_request)
@@ -581,18 +596,23 @@ class ApiListFlowResultsHandler(api_call_handler_base.ApiCallHandler):
args_type = ApiListFlowResultsArgs
result_type = ApiListFlowResultsResult
+ proto_args_type = flow_pb2.ApiListFlowResultsArgs
+ proto_result_type = flow_pb2.ApiListFlowResultsResult
- def Handle(self, args, context=None):
+ def Handle(
+ self,
+ args: flow_pb2.ApiListFlowResultsArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> flow_pb2.ApiListFlowResultsResult:
results = data_store.REL_DB.ReadFlowResults(
- str(args.client_id),
- str(args.flow_id),
+ args.client_id,
+ args.flow_id,
args.offset,
args.count or db.MAX_COUNT,
with_substring=args.filter or None,
with_tag=args.with_tag or None,
with_type=args.with_type or None,
)
- results = [mig_flow_objects.ToRDFFlowResult(r) for r in results]
if args.filter:
# TODO: with_substring is implemented in a hacky way,
@@ -603,16 +623,16 @@ def Handle(self, args, context=None):
total_count = None
else:
total_count = data_store.REL_DB.CountFlowResults(
- str(args.client_id),
- str(args.flow_id),
+ args.client_id,
+ args.flow_id,
# TODO: Add with_substring to CountFlowResults().
with_tag=args.with_tag or None,
with_type=args.with_type or None,
)
- wrapped_items = [ApiFlowResult().InitFromFlowResult(r) for r in results]
+ wrapped_items = [InitApiFlowResultFromFlowResult(r) for r in results]
- return ApiListFlowResultsResult(
+ return flow_pb2.ApiListFlowResultsResult(
items=wrapped_items, total_count=total_count
)
@@ -977,24 +997,30 @@ class ApiListFlowOutputPluginsHandler(api_call_handler_base.ApiCallHandler):
args_type = ApiListFlowOutputPluginsArgs
result_type = ApiListFlowOutputPluginsResult
+ proto_args_type = flow_pb2.ApiListFlowOutputPluginsArgs
+ proto_result_type = flow_pb2.ApiListFlowOutputPluginsResult
- def Handle(self, args, context=None):
- flow_obj = data_store.REL_DB.ReadFlowObject(
- str(args.client_id), str(args.flow_id)
- )
- flow_obj = mig_flow_objects.ToRDFFlow(flow_obj)
- output_plugins_states = flow_obj.output_plugins_states
+ def Handle(
+ self,
+ args: flow_pb2.ApiListFlowOutputPluginsArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> flow_pb2.ApiListFlowOutputPluginsResult:
+ flow_obj = data_store.REL_DB.ReadFlowObject(args.client_id, args.flow_id)
+
+ plugin_results: List[api_output_plugin_pb2.ApiOutputPlugin] = []
type_indices = {}
- result = []
- for output_plugin_state in output_plugins_states:
- plugin_state = output_plugin_state.plugin_state.Copy()
- if "source_urn" in plugin_state:
- del plugin_state["source_urn"]
- if "token" in plugin_state:
- del plugin_state["token"]
+ for output_plugin_state in flow_obj.output_plugins_states:
+ plugin_state = jobs_pb2.AttributedDict()
+ plugin_state.CopyFrom(output_plugin_state.plugin_state)
+
+ # Iterate over indices in reverse so that deleting an entry does not
+ # shift the positions of entries still to be examined.
+ for index in reversed(range(len(plugin_state.dat))):
+ key = plugin_state.dat[index].k.string
+ if key in ("token", "source_urn"):
+ del plugin_state.dat[index]
plugin_descriptor = output_plugin_state.plugin_descriptor
+
type_index = type_indices.setdefault(plugin_descriptor.plugin_name, 0)
type_indices[plugin_descriptor.plugin_name] += 1
@@ -1004,14 +1030,14 @@ def Handle(self, args, context=None):
# TODO(user): store output plugins states in the same way for flows
# and hunts. Until this is done, we can emulate the same interface in
# the HTTP API.
- api_plugin = api_output_plugin.ApiOutputPlugin(
- id=plugin_descriptor.plugin_name + "_%d" % type_index,
- plugin_descriptor=plugin_descriptor,
- state=plugin_state,
- )
- result.append(api_plugin)
+ api_plugin = api_output_plugin_pb2.ApiOutputPlugin()
+ api_plugin.id = f"{plugin_descriptor.plugin_name}_{type_index}"
+ api_plugin.plugin_descriptor.CopyFrom(plugin_descriptor)
+ api_plugin.state.Pack(plugin_state)
+
+ plugin_results.append(api_plugin)
- return ApiListFlowOutputPluginsResult(items=result)
+ return flow_pb2.ApiListFlowOutputPluginsResult(items=plugin_results)
def GetOutputPluginIndex(
@@ -1066,34 +1092,40 @@ class ApiListFlowOutputPluginLogsHandlerBase(
log_entry_type = None
- def Handle(self, args, context=None):
- flow_obj = data_store.REL_DB.ReadFlowObject(
- str(args.client_id), str(args.flow_id)
- )
+ def Handle(
+ self,
+ args: Union[
+ flow_pb2.ApiListFlowOutputPluginLogsArgs,
+ flow_pb2.ApiListFlowOutputPluginErrorsArgs,
+ ],
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> Union[
+ flow_pb2.ApiListFlowOutputPluginLogsResult,
+ flow_pb2.ApiListFlowOutputPluginErrorsResult,
+ ]:
+ flow_obj = data_store.REL_DB.ReadFlowObject(args.client_id, args.flow_id)
index = GetOutputPluginIndex(flow_obj.output_plugins, args.plugin_id)
output_plugin_id = "%d" % index
logs = data_store.REL_DB.ReadFlowOutputPluginLogEntries(
- str(args.client_id),
- str(args.flow_id),
+ args.client_id,
+ args.flow_id,
output_plugin_id,
args.offset,
args.count or db.MAX_COUNT,
with_type=self.__class__.log_entry_type,
)
total_count = data_store.REL_DB.CountFlowOutputPluginLogEntries(
- str(args.client_id),
- str(args.flow_id),
+ args.client_id,
+ args.flow_id,
output_plugin_id,
with_type=self.__class__.log_entry_type,
)
- return self.result_type(
+ return self.proto_result_type(
total_count=total_count,
items=[
- mig_flow_objects.ToRDFFlowOutputPluginLogEntry(
- l
- ).ToOutputPluginBatchProcessingStatus()
+ rdf_flow_objects.ToOutputPluginBatchProcessingStatus(l)
for l in logs
],
)
@@ -1123,6 +1155,8 @@ class ApiListFlowOutputPluginLogsHandler(
args_type = ApiListFlowOutputPluginLogsArgs
result_type = ApiListFlowOutputPluginLogsResult
+ proto_args_type = flow_pb2.ApiListFlowOutputPluginLogsArgs
+ proto_result_type = flow_pb2.ApiListFlowOutputPluginLogsResult
class ApiListFlowOutputPluginErrorsArgs(rdf_structs.RDFProtoStruct):
@@ -1149,6 +1183,8 @@ class ApiListFlowOutputPluginErrorsHandler(
args_type = ApiListFlowOutputPluginErrorsArgs
result_type = ApiListFlowOutputPluginErrorsResult
+ proto_args_type = flow_pb2.ApiListFlowOutputPluginErrorsArgs
+ proto_result_type = flow_pb2.ApiListFlowOutputPluginErrorsResult
class ApiListFlowsArgs(rdf_structs.RDFProtoStruct):
@@ -1171,12 +1207,26 @@ class ApiListFlowsHandler(api_call_handler_base.ApiCallHandler):
args_type = ApiListFlowsArgs
result_type = ApiListFlowsResult
+ proto_args_type = flow_pb2.ApiListFlowsArgs
+ proto_result_type = flow_pb2.ApiListFlowsResult
- def _HandleTopFlowsOnly(self, args):
+ def _HandleTopFlowsOnly(
+ self,
+ args: flow_pb2.ApiListFlowsArgs,
+ ) -> flow_pb2.ApiListFlowsResult:
+ min_started_at, max_started_at = None, None
+ if args.HasField("min_started_at"):
+ min_started_at = rdfvalue.RDFDatetime.FromMicrosecondsSinceEpoch(
+ args.min_started_at
+ )
+ if args.HasField("max_started_at"):
+ max_started_at = rdfvalue.RDFDatetime.FromMicrosecondsSinceEpoch(
+ args.max_started_at
+ )
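+ # The proto fields carry microseconds since epoch; the DB layer expects
+ # RDFDatetime values, hence the conversion above.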
top_flows = data_store.REL_DB.ReadAllFlowObjects(
- client_id=str(args.client_id),
- min_create_time=args.min_started_at,
- max_create_time=args.max_started_at,
+ client_id=args.client_id,
+ min_create_time=min_started_at,
+ max_create_time=max_started_at,
include_child_flows=False,
not_created_by=access_control.SYSTEM_USERS
if args.human_flows_only
@@ -1190,20 +1240,32 @@ def _HandleTopFlowsOnly(self, args):
)
for f_data in top_flows
]
- result = [ToRDFApiFlow(f) for f in result]
# TODO(hanuszczak): Consult with the team what should we do in case of flows
# with missing information.
+ # TODO: Refactor sorting and filtering of flows to the DB layer.
result.sort(key=lambda f: f.started_at or 0, reverse=True)
result = result[args.offset :]
- if args.count:
+ if args.HasField("count"):
result = result[: args.count]
- return ApiListFlowsResult(items=result)
+ return flow_pb2.ApiListFlowsResult(items=result)
- def _HandleAllFlows(self, args):
+ def _HandleAllFlows(
+ self,
+ args: flow_pb2.ApiListFlowsArgs,
+ ) -> flow_pb2.ApiListFlowsResult:
+ min_started_at, max_started_at = None, None
+ if args.HasField("min_started_at"):
+ min_started_at = rdfvalue.RDFDatetime.FromMicrosecondsSinceEpoch(
+ args.min_started_at
+ )
+ if args.HasField("max_started_at"):
+ max_started_at = rdfvalue.RDFDatetime.FromMicrosecondsSinceEpoch(
+ args.max_started_at
+ )
all_flows = data_store.REL_DB.ReadAllFlowObjects(
- client_id=str(args.client_id),
- min_create_time=args.min_started_at,
- max_create_time=args.max_started_at,
+ client_id=args.client_id,
+ min_create_time=min_started_at,
+ max_create_time=max_started_at,
include_child_flows=True,
not_created_by=access_control.SYSTEM_USERS
if args.human_flows_only
@@ -1234,14 +1296,18 @@ def _HandleAllFlows(self, args):
]
# TODO(hanuszczak): Consult with the team what should we do in case of flows
# with missing information.
+ # TODO: Refactor sorting and filtering of flows to the DB layer.
result.sort(key=lambda f: f.started_at or 0, reverse=True)
result = result[args.offset :]
- if args.count:
+ if args.HasField("count"):
result = result[: args.count]
- result = [ToRDFApiFlow(f) for f in result]
- return ApiListFlowsResult(items=result)
+ return flow_pb2.ApiListFlowsResult(items=result)
- def Handle(self, args, context=None):
+ def Handle(
+ self,
+ args: flow_pb2.ApiListFlowsArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> flow_pb2.ApiListFlowsResult:
if args.top_flows_only:
return self._HandleTopFlowsOnly(args)
else:
@@ -1280,11 +1346,11 @@ def _SanitizeApiCreateFlowArgs(
#
# TODO(user): Refactor the code to remove the HIDDEN label from
# FlowRunnerArgs.output_plugins.
- for field_name, descriptor in runner_args.DESCRIPTOR.fields_by_name.items():
- if field_name == "output_plugins":
- continue
- if descriptor.label == "HIDDEN":
- runner_args.ClearField(field_name)
+ runner_args = mig_flow_runner.ToRDFFlowRunnerArgs(runner_args)
+ runner_args.ClearFieldsWithLabel(
+ rdf_structs.SemanticDescriptor.Labels.HIDDEN, exceptions="output_plugins"
+ )
+ runner_args = mig_flow_runner.ToProtoFlowRunnerArgs(runner_args)
if args.HasField("original_flow"):
runner_args.original_flow.flow_id = args.original_flow.flow_id
diff --git a/grr/server/grr_response_server/gui/api_plugins/flow_test.py b/grr/server/grr_response_server/gui/api_plugins/flow_test.py
index 1536ff267c..e3fd635400 100644
--- a/grr/server/grr_response_server/gui/api_plugins/flow_test.py
+++ b/grr/server/grr_response_server/gui/api_plugins/flow_test.py
@@ -453,7 +453,7 @@ def setUp(self):
def testReturnsTagsInResultsList(self):
result = self.handler.Handle(
- flow_plugin.ApiListFlowResultsArgs(
+ flow_pb2.ApiListFlowResultsArgs(
client_id=self.client_id, flow_id=self.flow_id
)
)
@@ -464,7 +464,7 @@ def testReturnsTagsInResultsList(self):
def testCorrectlyFiltersByTag(self):
foo_result = self.handler.Handle(
- flow_plugin.ApiListFlowResultsArgs(
+ flow_pb2.ApiListFlowResultsArgs(
client_id=self.client_id, flow_id=self.flow_id, with_tag="tag:foo"
)
)
@@ -473,7 +473,7 @@ def testCorrectlyFiltersByTag(self):
self.assertEqual(foo_result.items[0].tag, "tag:foo")
bar_result = self.handler.Handle(
- flow_plugin.ApiListFlowResultsArgs(
+ flow_pb2.ApiListFlowResultsArgs(
client_id=self.client_id, flow_id=self.flow_id, with_tag="tag:bar"
)
)
@@ -483,7 +483,7 @@ def testCorrectlyFiltersByTag(self):
def testCorrectlyFiltersByType(self):
foo_result = self.handler.Handle(
- flow_plugin.ApiListFlowResultsArgs(
+ flow_pb2.ApiListFlowResultsArgs(
client_id=self.client_id,
flow_id=self.flow_id,
with_type=rdfvalue.RDFString.__name__,
@@ -494,7 +494,7 @@ def testCorrectlyFiltersByType(self):
self.assertEqual(foo_result.items[0].tag, "tag:foo")
bar_result = self.handler.Handle(
- flow_plugin.ApiListFlowResultsArgs(
+ flow_pb2.ApiListFlowResultsArgs(
client_id=self.client_id,
flow_id=self.flow_id,
with_type=rdfvalue.RDFInteger.__name__,
@@ -506,7 +506,7 @@ def testCorrectlyFiltersByType(self):
def testCorrectlyFiltersBySubstring(self):
foo_result = self.handler.Handle(
- flow_plugin.ApiListFlowResultsArgs(
+ flow_pb2.ApiListFlowResultsArgs(
client_id=self.client_id, flow_id=self.flow_id, filter="foo"
)
)
@@ -517,7 +517,7 @@ def testCorrectlyFiltersBySubstring(self):
# payload protobufs in their serialized protobuf form, meaning that integers
# are going to be serialized as varints and not as unicode strings.
bar_result = self.handler.Handle(
- flow_plugin.ApiListFlowResultsArgs(
+ flow_pb2.ApiListFlowResultsArgs(
client_id=self.client_id, flow_id=self.flow_id, filter="42"
)
)
@@ -525,7 +525,7 @@ def testCorrectlyFiltersBySubstring(self):
def testReturnsNothingWhenFilteringByNonExistingTag(self):
result = self.handler.Handle(
- flow_plugin.ApiListFlowResultsArgs(
+ flow_pb2.ApiListFlowResultsArgs(
client_id=self.client_id,
flow_id=self.flow_id,
with_tag="non-existing",
@@ -550,7 +550,7 @@ def testHandlerUsesKnowledgeBase(self, db: abstract_db.Database):
snapshot = objects_pb2.ClientSnapshot()
snapshot.client_id = client_id
- snapshot.knowledge_base.users.add(homedir="/home/foo")
+ snapshot.knowledge_base.users.add(username="foo", homedir="/home/foo")
db.WriteClientSnapshot(snapshot)
handler = flow_plugin.ApiExplainGlobExpressionHandler()
diff --git a/grr/server/grr_response_server/gui/api_plugins/hunt.py b/grr/server/grr_response_server/gui/api_plugins/hunt.py
index b54aa4b73e..b8c9f842b9 100644
--- a/grr/server/grr_response_server/gui/api_plugins/hunt.py
+++ b/grr/server/grr_response_server/gui/api_plugins/hunt.py
@@ -8,6 +8,7 @@
from typing import Iterable
from typing import Iterator
from typing import Optional
+from typing import Sequence
from typing import Tuple
from typing import Union
@@ -56,6 +57,8 @@
HUNTS_ROOT_PATH = rdfvalue.RDFURN("aff4:/hunts")
+FORCE_HUNT_TAG = "FORCE"
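+# Hunts whose description contains this tag bypass the exclude-labels
+# presubmit check (see ApiCreateHuntHandler below).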
+
# pyformat: disable
CANCELLED_BY_USER = "Cancelled by user"
@@ -331,6 +334,18 @@ def InitApiHuntErrorFromFlowErrorInfo(
return hunt_error
+def InitApiHuntResultFromFlowResult(
+ flow_result: flows_pb2.FlowResult,
+) -> hunt_pb2.ApiHuntResult:
+ """Init ApiFlowResult from FlowResult."""
+ api_flow_result = hunt_pb2.ApiHuntResult()
+ api_flow_result.payload.CopyFrom(flow_result.payload)
+ protobuf_utils.CopyAttr(flow_result, api_flow_result, "client_id")
+ protobuf_utils.CopyAttr(flow_result, api_flow_result, "timestamp")
+
+ return api_flow_result
+
+
class Bucket:
"""A bucket for counts of timestamps."""
@@ -504,19 +519,6 @@ class ApiHuntResult(rdf_structs.RDFProtoStruct):
rdfvalue.RDFDatetime,
]
- def GetPayloadClass(self):
- return rdfvalue.RDFValue.classes[self.payload_type]
-
- def InitFromFlowResult(self, flow_result):
- """Init from rdf_flow_objects.FlowResult."""
-
- self.payload_type = flow_result.payload.__class__.__name__
- self.payload = flow_result.payload
- self.client_id = flow_result.client_id
- self.timestamp = flow_result.timestamp
-
- return self
-
class ApiHuntClient(rdf_structs.RDFProtoStruct):
protobuf = hunt_pb2.ApiHuntClient
@@ -838,10 +840,16 @@ class ApiListHuntResultsHandler(api_call_handler_base.ApiCallHandler):
args_type = ApiListHuntResultsArgs
result_type = ApiListHuntResultsResult
+ proto_args_type = hunt_pb2.ApiListHuntResultsArgs
+ proto_result_type = hunt_pb2.ApiListHuntResultsResult
- def Handle(self, args, context=None):
+ def Handle(
+ self,
+ args: hunt_pb2.ApiListHuntResultsArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> hunt_pb2.ApiListHuntResultsResult:
results = data_store.REL_DB.ReadHuntResults(
- str(args.hunt_id),
+ args.hunt_id,
args.offset,
args.count or db.MAX_COUNT,
with_substring=args.filter or None,
@@ -849,12 +857,11 @@ def Handle(self, args, context=None):
)
total_count = data_store.REL_DB.CountHuntResults(
- str(args.hunt_id), with_type=args.with_type or None
+ args.hunt_id, with_type=args.with_type or None
)
- results = [mig_flow_objects.ToRDFFlowResult(r) for r in results]
- return ApiListHuntResultsResult(
- items=[ApiHuntResult().InitFromFlowResult(r) for r in results],
+ return hunt_pb2.ApiListHuntResultsResult(
+ items=[InitApiHuntResultFromFlowResult(r) for r in results],
total_count=total_count,
)
@@ -1667,6 +1674,10 @@ def GetFlowArgsClass(self):
return flow_cls.args_type
+class HuntPresubmitError(Error):
+ """Raised when there is a hunt presubmit error."""
+
+
class ApiCreateHuntHandler(api_call_handler_base.ApiCallHandler):
"""Handles hunt creation request."""
@@ -1675,6 +1686,51 @@ class ApiCreateHuntHandler(api_call_handler_base.ApiCallHandler):
proto_args_type = hunt_pb2.ApiCreateHuntArgs
proto_result_type = hunt_pb2.ApiHunt
+ def _HuntPresubmitCheck(
+ self,
+ client_rule_set: jobs_pb2.ForemanClientRuleSet,
+ expected_labels: Sequence[str],
+ ) -> bool:
+ """Very simple presubmit check for exclude labels rule.
+
+ Requires that the rule set has `MATCH_ALL` mode and it has the
+ `exclude_labels` list as a LABEL rule within the set.
+
+ This could be extended to be a more generic/complex check, but for now this
+ simple version should be enough for our needs.
+
+ Args:
+ client_rule_set: The rule set to check.
+ expected_labels: The labels that should be excluded.
+
+ Returns:
+ True if the presubmit check passes, False otherwise.
+ """
+ if (
+ client_rule_set.match_mode
+ != jobs_pb2.ForemanClientRuleSet.MatchMode.MATCH_ALL
+ ):
+ return False
+
+ for rule in client_rule_set.rules:
+ if rule.rule_type != jobs_pb2.ForemanClientRule.Type.LABEL:
+ continue
+ if not rule.label:
+ continue
+ if (
+ rule.label.match_mode
+ != jobs_pb2.ForemanLabelClientRule.MatchMode.DOES_NOT_MATCH_ANY
+ ):
+ continue
+ if len(rule.label.label_names) < len(expected_labels):
+ continue
+
+      if set(expected_labels).issubset(rule.label.label_names):
+        return True
+
+ return False
+
def Handle(
self,
args: hunt_pb2.ApiCreateHuntArgs,
@@ -1702,6 +1758,25 @@ def Handle(
hunt_obj.args.standard.flow_args.CopyFrom(args.flow_args)
hunt_obj.creator = context.username
+ hunt_cfg = config.CONFIG["AdminUI.hunt_config"]
+ presubmit_on = bool(
+ hunt_cfg and hunt_cfg.make_default_exclude_labels_a_presubmit_check
+ )
+ if presubmit_on and FORCE_HUNT_TAG not in hra.description:
+ passes = self._HuntPresubmitCheck(
+ hra.client_rule_set, hunt_cfg.default_exclude_labels
+ )
+ if not passes:
+ message = hunt_cfg.presubmit_warning_message + (
+ "\nHunt creation failed because the presubmit check failed. Please"
+ " check exclude the following labels"
+ f" {hunt_cfg.default_exclude_labels} or add a '{FORCE_HUNT_TAG}='"
+ " tag to the hunt description."
+ )
+ raise HuntPresubmitError(message)
+
+ # At this point, either the presubmit is off, the FORCE tag is set,
+ # or the presubmit passed, so we can set the client_rule_set.
if hra.HasField("client_rule_set"):
hunt_obj.client_rule_set.CopyFrom(hra.client_rule_set)
@@ -1742,6 +1817,7 @@ def Handle(
hunt_obj.output_plugins.extend(hra.output_plugins)
+ # Effectively writes the hunt to the DB.
hunt.CreateHunt(hunt_obj)
return InitApiHuntFromHuntObject(hunt_obj, with_full_summary=True)
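
The presubmit helper added above inspects only the rule set, so inputs for it are easy to construct. A minimal sketch, using just the proto types from this change, of a rule set that `_HuntPresubmitCheck` accepts:

from typing import Sequence

from grr_response_proto import jobs_pb2


def MakeExcludeLabelsRuleSet(
    labels: Sequence[str],
) -> jobs_pb2.ForemanClientRuleSet:
  # MATCH_ALL mode with a LABEL rule that excludes all of the given labels,
  # which is exactly the shape the presubmit check looks for.
  rule_set = jobs_pb2.ForemanClientRuleSet(
      match_mode=jobs_pb2.ForemanClientRuleSet.MatchMode.MATCH_ALL
  )
  rule = rule_set.rules.add(rule_type=jobs_pb2.ForemanClientRule.Type.LABEL)
  rule.label.match_mode = (
      jobs_pb2.ForemanLabelClientRule.MatchMode.DOES_NOT_MATCH_ANY
  )
  rule.label.label_names.extend(labels)
  return rule_set
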
diff --git a/grr/server/grr_response_server/gui/api_plugins/hunt_regression_test.py b/grr/server/grr_response_server/gui/api_plugins/hunt_regression_test.py
index 9665c28250..59eb1b951a 100644
--- a/grr/server/grr_response_server/gui/api_plugins/hunt_regression_test.py
+++ b/grr/server/grr_response_server/gui/api_plugins/hunt_regression_test.py
@@ -3,20 +3,23 @@
from absl import app
+from google.protobuf import any_pb2
from grr_response_core.lib import rdfvalue
+from grr_response_proto import dummy_pb2
from grr_response_proto import flows_pb2
from grr_response_server import access_control
from grr_response_server import data_store
from grr_response_server import flow
from grr_response_server import hunt
from grr_response_server.databases import db
+# TODO: Import needed because the Dummy proto is packed into Any and then
+# converted to an RDF value, which would otherwise be unknown.
+from grr_response_server.flows.general import dummy # pylint: disable=unused-import
from grr_response_server.flows.general import processes as flows_processes
from grr_response_server.gui import api_regression_test_lib
from grr_response_server.gui.api_plugins import hunt as hunt_plugin
from grr_response_server.output_plugins import test_plugins
-from grr_response_server.rdfvalues import flow_objects as rdf_flow_objects
from grr_response_server.rdfvalues import hunts as rdf_hunts
-from grr_response_server.rdfvalues import mig_flow_objects
from grr_response_server.rdfvalues import objects as rdf_objects
from grr_response_server.rdfvalues import output_plugin as rdf_output_plugin
from grr.test_lib import flow_test_lib
@@ -120,26 +123,26 @@ def Run(self):
)
with test_lib.FakeTime(rdfvalue.RDFDatetime.FromSecondsSinceEpoch(2)):
+ payload = any_pb2.Any()
+ payload.Pack(dummy_pb2.DummyFlowResult(flow_output="blah1"))
data_store.REL_DB.WriteFlowResults([
- mig_flow_objects.ToProtoFlowResult(
- rdf_flow_objects.FlowResult(
- client_id=client_id,
- flow_id=flow_id,
- hunt_id=hunt_id,
- payload=rdfvalue.RDFString("blah1"),
- )
+ flows_pb2.FlowResult(
+ client_id=client_id,
+ flow_id=flow_id,
+ hunt_id=hunt_id,
+ payload=payload,
)
])
with test_lib.FakeTime(rdfvalue.RDFDatetime.FromSecondsSinceEpoch(43)):
+ payload = any_pb2.Any()
+ payload.Pack(dummy_pb2.DummyFlowResult(flow_output="blah2-foo"))
data_store.REL_DB.WriteFlowResults([
- mig_flow_objects.ToProtoFlowResult(
- rdf_flow_objects.FlowResult(
- client_id=client_id,
- flow_id=flow_id,
- hunt_id=hunt_id,
- payload=rdfvalue.RDFString("blah2-foo"),
- )
+ flows_pb2.FlowResult(
+ client_id=client_id,
+ flow_id=flow_id,
+ hunt_id=hunt_id,
+ payload=payload,
)
])
@@ -185,15 +188,14 @@ def Run(self):
client_id=client_id,
parent=flow.FlowParent.FromHuntID(hunt_id),
)
-
+ payload = any_pb2.Any()
+ payload.Pack(dummy_pb2.DummyFlowResult(flow_output="blah1"))
data_store.REL_DB.WriteFlowResults([
- mig_flow_objects.ToProtoFlowResult(
- rdf_flow_objects.FlowResult(
- client_id=client_id,
- flow_id=flow_id,
- hunt_id=hunt_id,
- payload=rdfvalue.RDFString("blah1"),
- )
+ flows_pb2.FlowResult(
+ client_id=client_id,
+ flow_id=flow_id,
+ hunt_id=hunt_id,
+ payload=payload,
)
])
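
These tests lean on the standard protobuf `Any` round trip rather than anything GRR-specific. A short, self-contained illustration of the pattern:

from google.protobuf import any_pb2
from grr_response_proto import dummy_pb2

payload = any_pb2.Any()
payload.Pack(dummy_pb2.DummyFlowResult(flow_output="blah1"))

# The type URL records the payload's fully qualified message name...
assert payload.TypeName() == dummy_pb2.DummyFlowResult.DESCRIPTOR.full_name

# ...and Unpack restores the typed message on the consumer side.
result = dummy_pb2.DummyFlowResult()
assert payload.Unpack(result)
assert result.flow_output == "blah1"
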
diff --git a/grr/server/grr_response_server/gui/api_plugins/hunt_test.py b/grr/server/grr_response_server/gui/api_plugins/hunt_test.py
index d238628501..4d67f7069f 100644
--- a/grr/server/grr_response_server/gui/api_plugins/hunt_test.py
+++ b/grr/server/grr_response_server/gui/api_plugins/hunt_test.py
@@ -12,6 +12,7 @@
from grr_response_core.lib import rdfvalue
from grr_response_core.lib import utils
+from grr_response_core.lib.rdfvalues import config as rdf_config
from grr_response_core.lib.rdfvalues import file_finder as rdf_file_finder
from grr_response_core.lib.rdfvalues import flows as rdf_flows
from grr_response_core.lib.rdfvalues import test_base as rdf_test_base
@@ -111,6 +112,139 @@ def testCollectLargeFileBlocksHuntCreationRespected(self):
ValueError, self.handler.Handle, args, context=self.context
)
+  def testPresubmit_HasPresubmitRule(self):
+ hunt_cfg = rdf_config.AdminUIHuntConfig(
+ default_exclude_labels=["no-no"],
+ make_default_exclude_labels_a_presubmit_check=True,
+ presubmit_warning_message="not cool",
+ )
+ with test_lib.ConfigOverrider({"AdminUI.hunt_config": hunt_cfg}):
+ args = hunt_pb2.ApiCreateHuntArgs(
+ flow_name=file_finder.ClientFileFinder.__name__,
+ hunt_runner_args=flows_pb2.HuntRunnerArgs(
+ client_rule_set=jobs_pb2.ForemanClientRuleSet(
+ match_mode=jobs_pb2.ForemanClientRuleSet.MatchMode.MATCH_ALL,
+ rules=[
+ jobs_pb2.ForemanClientRule(
+ rule_type=jobs_pb2.ForemanClientRule.Type.LABEL,
+ label=jobs_pb2.ForemanLabelClientRule(
+ match_mode=jobs_pb2.ForemanLabelClientRule.MatchMode.DOES_NOT_MATCH_ANY,
+ label_names=["irrelevant"],
+ ),
+ ),
+ jobs_pb2.ForemanClientRule(
+ rule_type=jobs_pb2.ForemanClientRule.Type.LABEL,
+ label=jobs_pb2.ForemanLabelClientRule(
+ match_mode=jobs_pb2.ForemanLabelClientRule.MatchMode.DOES_NOT_MATCH_ANY,
+ label_names=["no-no"],
+ ),
+ ),
+ ],
+ )
+ ),
+ )
+ # Should not raise.
+ self.handler.Handle(args, context=self.context)
+
+  def testPresubmit_HasPresubmitRuleWithExtraLabels(self):
+ hunt_cfg = rdf_config.AdminUIHuntConfig(
+ default_exclude_labels=["no-no"],
+ make_default_exclude_labels_a_presubmit_check=True,
+ presubmit_warning_message="not cool",
+ )
+ with test_lib.ConfigOverrider({"AdminUI.hunt_config": hunt_cfg}):
+ args = hunt_pb2.ApiCreateHuntArgs(
+ flow_name=file_finder.ClientFileFinder.__name__,
+ hunt_runner_args=flows_pb2.HuntRunnerArgs(
+ client_rule_set=jobs_pb2.ForemanClientRuleSet(
+ match_mode=jobs_pb2.ForemanClientRuleSet.MatchMode.MATCH_ALL,
+ rules=[
+ jobs_pb2.ForemanClientRule(
+ rule_type=jobs_pb2.ForemanClientRule.Type.LABEL,
+ label=jobs_pb2.ForemanLabelClientRule(
+ match_mode=jobs_pb2.ForemanLabelClientRule.MatchMode.DOES_NOT_MATCH_ANY,
+ label_names=["no-no", "irrelevant"],
+ ),
+ ),
+ ],
+ )
+ ),
+ )
+ # Should not raise.
+ self.handler.Handle(args, context=self.context)
+
+ def testPresubmit_NoLabelRule(self):
+ hunt_cfg = rdf_config.AdminUIHuntConfig(
+ default_exclude_labels=["no-no"],
+ make_default_exclude_labels_a_presubmit_check=True,
+ presubmit_warning_message="not cool",
+ )
+ with test_lib.ConfigOverrider({"AdminUI.hunt_config": hunt_cfg}):
+ args = hunt_pb2.ApiCreateHuntArgs(
+ flow_name=file_finder.ClientFileFinder.__name__
+ )
+ self.assertRaises(
+ hunt_plugin.HuntPresubmitError,
+ self.handler.Handle,
+ args,
+ context=self.context,
+ )
+
+ def testPresubmit_WrongLabelRule(self):
+ hunt_cfg = rdf_config.AdminUIHuntConfig(
+ default_exclude_labels=["no-no"],
+ make_default_exclude_labels_a_presubmit_check=True,
+ presubmit_warning_message="not cool",
+ )
+ with test_lib.ConfigOverrider({"AdminUI.hunt_config": hunt_cfg}):
+ args = hunt_pb2.ApiCreateHuntArgs(
+ flow_name=file_finder.ClientFileFinder.__name__,
+ hunt_runner_args=flows_pb2.HuntRunnerArgs(
+ client_rule_set=jobs_pb2.ForemanClientRuleSet(
+ match_mode=jobs_pb2.ForemanClientRuleSet.MatchMode.MATCH_ALL,
+ rules=[
+ jobs_pb2.ForemanClientRule(
+ rule_type=jobs_pb2.ForemanClientRule.Type.LABEL,
+ label=jobs_pb2.ForemanLabelClientRule(
+ match_mode=jobs_pb2.ForemanLabelClientRule.MatchMode.MATCH_ALL,
+ label_names=["irrelevant"],
+ ),
+ ),
+ # Rule uses `MATCH_ALL` instead of `DOES_NOT_MATCH_ANY`.
+ jobs_pb2.ForemanClientRule(
+ rule_type=jobs_pb2.ForemanClientRule.Type.LABEL,
+ label=jobs_pb2.ForemanLabelClientRule(
+ match_mode=jobs_pb2.ForemanLabelClientRule.MatchMode.MATCH_ALL,
+ label_names=["no-no"],
+ ),
+ ),
+ ],
+ )
+ ),
+ )
+ self.assertRaises(
+ hunt_plugin.HuntPresubmitError,
+ self.handler.Handle,
+ args,
+ context=self.context,
+ )
+
+ def testPresubmit_ForceSubmit(self):
+ hunt_cfg = rdf_config.AdminUIHuntConfig(
+ default_exclude_labels=["no-no"],
+ make_default_exclude_labels_a_presubmit_check=True,
+ presubmit_warning_message="not cool",
+ )
+ with test_lib.ConfigOverrider({"AdminUI.hunt_config": hunt_cfg}):
+ args = hunt_pb2.ApiCreateHuntArgs(
+ flow_name=file_finder.ClientFileFinder.__name__,
+ hunt_runner_args=flows_pb2.HuntRunnerArgs(
+ description="something something FORCE=submit"
+ ),
+ )
+ # Should not raise.
+ self.handler.Handle(args, context=self.context)
+
class ApiListHuntCrashesHandlerTest(
api_test_lib.ApiCallHandlerTest,
@@ -669,15 +803,14 @@ def testReturnsAllResultsOfAllTypes(self):
],
)
result = self.handler.Handle(
- hunt_plugin.ApiListHuntResultsArgs(hunt_id=hunt_id),
+ hunt_pb2.ApiListHuntResultsArgs(hunt_id=hunt_id),
context=self.context,
)
-
self.assertCountEqual(
- [r.payload_type for r in result.items],
+ [r.payload.TypeName() for r in result.items],
[
- rdf_file_finder.CollectFilesByKnownPathResult.__name__,
- rdf_file_finder.FileFinderResult.__name__,
+ flows_pb2.CollectFilesByKnownPathResult.DESCRIPTOR.full_name,
+ flows_pb2.FileFinderResult.DESCRIPTOR.full_name,
]
* 5,
)
@@ -692,7 +825,7 @@ def testCountsAllResultsWithAllTypes(self):
],
)
result = self.handler.Handle(
- hunt_plugin.ApiListHuntResultsArgs(hunt_id=hunt_id, count=3),
+ hunt_pb2.ApiListHuntResultsArgs(hunt_id=hunt_id, count=3),
context=self.context,
)
@@ -708,15 +841,15 @@ def testReturnsAllResultsOfFilteredType(self):
],
)
result = self.handler.Handle(
- hunt_plugin.ApiListHuntResultsArgs(
+ hunt_pb2.ApiListHuntResultsArgs(
hunt_id=hunt_id, with_type=rdf_file_finder.FileFinderResult.__name__
),
context=self.context,
)
self.assertCountEqual(
- [r.payload_type for r in result.items],
- [rdf_file_finder.FileFinderResult.__name__] * 5,
+ [r.payload.TypeName() for r in result.items],
+ [flows_pb2.FileFinderResult.DESCRIPTOR.full_name] * 5,
)
self.assertEqual(result.total_count, 5)
@@ -729,17 +862,16 @@ def testCountsAllResultsWithType(self):
],
)
result = self.handler.Handle(
- hunt_plugin.ApiListHuntResultsArgs(
+ hunt_pb2.ApiListHuntResultsArgs(
hunt_id=hunt_id,
count=3,
with_type=rdf_file_finder.FileFinderResult.__name__,
),
context=self.context,
)
-
self.assertCountEqual(
- [r.payload_type for r in result.items],
- [rdf_file_finder.FileFinderResult.__name__] * 3,
+ [r.payload.TypeName() for r in result.items],
+ [flows_pb2.FileFinderResult.DESCRIPTOR.full_name] * 3,
)
self.assertEqual(result.total_count, 5)
diff --git a/grr/server/grr_response_server/gui/api_plugins/report_plugins/report_plugin_base.py b/grr/server/grr_response_server/gui/api_plugins/report_plugins/report_plugin_base.py
index 9f38cfca2b..cfaba0b225 100644
--- a/grr/server/grr_response_server/gui/api_plugins/report_plugins/report_plugin_base.py
+++ b/grr/server/grr_response_server/gui/api_plugins/report_plugins/report_plugin_base.py
@@ -4,7 +4,7 @@
Each report plugin is a subclass of ReportPluginBase.
"""
-from grr_response_server.gui.api_plugins.report_plugins import rdf_report_plugins
+from grr_response_proto.api import stats_pb2
class ReportPluginBase(object):
@@ -25,7 +25,7 @@ class ReportPluginBase(object):
REQUIRES_TIME_RANGE = False
@classmethod
- def GetReportDescriptor(cls):
+ def GetReportDescriptor(cls) -> stats_pb2.ApiReportDescriptor:
"""Returns plugins' metadata in ApiReportDescriptor."""
if cls.TYPE is None:
raise ValueError("%s.TYPE is uninitialized." % cls)
@@ -36,7 +36,7 @@ def GetReportDescriptor(cls):
if cls.SUMMARY is None:
raise ValueError("%s.SUMMARY is uninitialized." % cls)
- return rdf_report_plugins.ApiReportDescriptor(
+ return stats_pb2.ApiReportDescriptor(
type=cls.TYPE,
name=cls.__name__,
title=cls.TITLE,
@@ -44,7 +44,9 @@ def GetReportDescriptor(cls):
requires_time_range=cls.REQUIRES_TIME_RANGE,
)
- def GetReportData(self, get_report_args):
+ def GetReportData(
+ self, get_report_args: stats_pb2.ApiGetReportArgs
+ ) -> stats_pb2.ApiReportData:
"""Generates the data to be displayed in the report.
Args:
diff --git a/grr/server/grr_response_server/gui/api_plugins/report_plugins/report_plugins.py b/grr/server/grr_response_server/gui/api_plugins/report_plugins/report_plugins.py
index 8ad292224b..82d73f72fb 100644
--- a/grr/server/grr_response_server/gui/api_plugins/report_plugins/report_plugins.py
+++ b/grr/server/grr_response_server/gui/api_plugins/report_plugins/report_plugins.py
@@ -1,17 +1,20 @@
#!/usr/bin/env python
"""UI report plugins server-side interface."""
+from typing import Dict, List, Type
+
+from grr_response_server.gui.api_plugins.report_plugins import report_plugin_base
from grr_response_server.gui.api_plugins.report_plugins import server_report_plugins
-def GetAvailableReportPlugins():
+def GetAvailableReportPlugins() -> (
+    List[Type[report_plugin_base.ReportPluginBase]]
+):
"""Lists the registered report plugins."""
return sorted(
REGISTRY.GetRegisteredPlugins().values(), key=lambda cls: cls.__name__
)
-def GetReportByName(name):
+def GetReportByName(name: str) -> report_plugin_base.ReportPluginBase:
"""Maps report plugin names to report objects.
Args:
@@ -35,12 +38,16 @@ class _Registry(object):
"""
def __init__(self):
- self.plugins = {}
+    self.plugins: Dict[str, Type[report_plugin_base.ReportPluginBase]] = {}
- def GetRegisteredPlugins(self):
+ def GetRegisteredPlugins(
+ self,
+  ) -> Dict[str, Type[report_plugin_base.ReportPluginBase]]:
return self.plugins
- def RegisterPlugin(self, report_plugin_cls):
+ def RegisterPlugin(
+      self, report_plugin_cls: Type[report_plugin_base.ReportPluginBase]
+ ) -> None:
"""Registers a report plugin for use in the GRR UI."""
name = report_plugin_cls.__name__
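
With the annotations above, the registry maps class names to plugin classes, while `GetReportByName` is assumed to hand back an instance (its callers invoke the instance method `GetReportData`). A usage sketch with a hypothetical plugin:

from grr_response_proto.api import stats_pb2
from grr_response_server.gui.api_plugins.report_plugins import report_plugin_base
from grr_response_server.gui.api_plugins.report_plugins import report_plugins


class QuuxReportPlugin(report_plugin_base.ReportPluginBase):
  """Hypothetical plugin, present only to illustrate registration."""

  TYPE = stats_pb2.ApiReportDescriptor.ReportType.SERVER
  TITLE = "Quux"
  SUMMARY = "Reports nothing of consequence."

  def GetReportData(
      self, get_report_args: stats_pb2.ApiGetReportArgs
  ) -> stats_pb2.ApiReportData:
    return stats_pb2.ApiReportData()


report_plugins.REGISTRY.RegisterPlugin(QuuxReportPlugin)
report = report_plugins.GetReportByName("QuuxReportPlugin")
assert isinstance(report, QuuxReportPlugin)
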
diff --git a/grr/server/grr_response_server/gui/api_plugins/report_plugins/report_plugins_test.py b/grr/server/grr_response_server/gui/api_plugins/report_plugins/report_plugins_test.py
index 68cebe2c94..281a91a777 100644
--- a/grr/server/grr_response_server/gui/api_plugins/report_plugins/report_plugins_test.py
+++ b/grr/server/grr_response_server/gui/api_plugins/report_plugins/report_plugins_test.py
@@ -4,18 +4,17 @@
from absl import app
from grr_response_core.lib import rdfvalue
-from grr_response_core.lib.rdfvalues import events as rdf_events
+from grr_response_proto import jobs_pb2
from grr_response_proto import objects_pb2
+from grr_response_proto.api import stats_pb2
from grr_response_server import data_store
-from grr_response_server.gui.api_plugins import stats as stats_api
-from grr_response_server.gui.api_plugins.report_plugins import rdf_report_plugins
from grr_response_server.gui.api_plugins.report_plugins import report_plugins
from grr_response_server.gui.api_plugins.report_plugins import report_plugins_test_mocks
from grr_response_server.gui.api_plugins.report_plugins import server_report_plugins
from grr.test_lib import test_lib
-RepresentationType = rdf_report_plugins.ApiReportData.RepresentationType
-Action = rdf_events.AuditEvent.Action
+RepresentationType = stats_pb2.ApiReportData.RepresentationType
+Action = jobs_pb2.AuditEvent.Action
class ReportPluginsTest(test_lib.GRRBaseTest):
@@ -47,9 +46,7 @@ def testGetReportDescriptor(self):
desc = report_plugins_test_mocks.BarReportPlugin.GetReportDescriptor()
- self.assertEqual(
- desc.type, rdf_report_plugins.ApiReportDescriptor.ReportType.SERVER
- )
+ self.assertEqual(desc.type, stats_pb2.ApiReportDescriptor.ReportType.SERVER)
self.assertEqual(desc.title, "Bar Activity")
self.assertEqual(
desc.summary, "Reports bars' activity in the given time range."
@@ -102,10 +99,10 @@ def testClientApprovalsReportPlugin(self):
month_duration = rdfvalue.Duration.From(30, rdfvalue.DAYS)
api_report_data = report.GetReportData(
- stats_api.ApiGetReportArgs(
+ stats_pb2.ApiGetReportArgs(
name=report.__class__.__name__,
- start_time=start,
- duration=month_duration,
+ start_time=int(start),
+ duration=month_duration.ToInt(timeunit=rdfvalue.MICROSECONDS),
)
)
@@ -145,18 +142,18 @@ def testClientApprovalsReportPluginWithNoActivityToReport(self):
month_duration = rdfvalue.Duration.From(30, rdfvalue.DAYS)
api_report_data = report.GetReportData(
- stats_api.ApiGetReportArgs(
+ stats_pb2.ApiGetReportArgs(
name=report.__class__.__name__,
- start_time=now - month_duration,
- duration=month_duration,
+ start_time=int(now - month_duration),
+ duration=month_duration.ToInt(timeunit=rdfvalue.MICROSECONDS),
)
)
self.assertEqual(
api_report_data,
- rdf_report_plugins.ApiReportData(
+ stats_pb2.ApiReportData(
representation_type=RepresentationType.AUDIT_CHART,
- audit_chart=rdf_report_plugins.ApiAuditChartReportData(
+ audit_chart=stats_pb2.ApiAuditChartReportData(
used_fields=["action", "client", "timestamp", "user"], rows=[]
),
),
@@ -184,10 +181,10 @@ def testHuntActionsReportPlugin(self):
month_duration = rdfvalue.Duration.From(30, rdfvalue.DAYS)
api_report_data = report.GetReportData(
- stats_api.ApiGetReportArgs(
+ stats_pb2.ApiGetReportArgs(
name=report.__class__.__name__,
- start_time=start,
- duration=month_duration,
+ start_time=int(start),
+ duration=month_duration.ToInt(timeunit=rdfvalue.MICROSECONDS),
)
)
@@ -201,7 +198,13 @@ def testHuntActionsReportPlugin(self):
self.assertEqual(
[
- (row.action, row.timestamp.Format("%Y/%m/%d"), row.user)
+ (
+ row.action,
+ rdfvalue.RDFDatetime.FromMicrosecondsSinceEpoch(
+ row.timestamp
+ ).Format("%Y/%m/%d"),
+ row.user,
+ )
for row in api_report_data.audit_chart.rows
],
[
@@ -227,18 +230,18 @@ def testHuntActionsReportPluginWithNoActivityToReport(self):
month_duration = rdfvalue.Duration.From(30, rdfvalue.DAYS)
api_report_data = report.GetReportData(
- stats_api.ApiGetReportArgs(
+ stats_pb2.ApiGetReportArgs(
name=report.__class__.__name__,
- start_time=now - month_duration,
- duration=month_duration,
+ start_time=int(now - month_duration),
+ duration=month_duration.ToInt(timeunit=rdfvalue.MICROSECONDS),
)
)
self.assertEqual(
api_report_data,
- rdf_report_plugins.ApiReportData(
+ stats_pb2.ApiReportData(
representation_type=RepresentationType.AUDIT_CHART,
- audit_chart=rdf_report_plugins.ApiAuditChartReportData(
+ audit_chart=stats_pb2.ApiAuditChartReportData(
used_fields=["action", "timestamp", "user"], rows=[]
),
),
@@ -280,16 +283,16 @@ def testHuntApprovalsReportPlugin(self):
month_duration = rdfvalue.Duration.From(30, rdfvalue.DAYS)
api_report_data = report.GetReportData(
- stats_api.ApiGetReportArgs(
+ stats_pb2.ApiGetReportArgs(
name=report.__class__.__name__,
- start_time=start,
- duration=month_duration,
+ start_time=int(start),
+ duration=month_duration.ToInt(timeunit=rdfvalue.MICROSECONDS),
)
)
self.assertEqual(
api_report_data.representation_type,
- rdf_report_plugins.ApiReportData.RepresentationType.AUDIT_CHART,
+ stats_pb2.ApiReportData.RepresentationType.AUDIT_CHART,
)
self.assertCountEqual(
@@ -368,7 +371,9 @@ def testHuntApprovalsReportPlugin(self):
[
(
row.action,
- row.timestamp.Format("%Y/%m/%d"),
+ rdfvalue.RDFDatetime.FromMicrosecondsSinceEpoch(
+ row.timestamp
+ ).Format("%Y/%m/%d"),
row.user,
str(row.urn),
)
@@ -386,10 +391,10 @@ def testHuntApprovalsReportPluginWithNoActivityToReport(self):
month_duration = rdfvalue.Duration.From(30, rdfvalue.DAYS)
api_report_data = report.GetReportData(
- stats_api.ApiGetReportArgs(
+ stats_pb2.ApiGetReportArgs(
name=report.__class__.__name__,
- start_time=now - month_duration,
- duration=month_duration,
+ start_time=int(now - month_duration),
+ duration=month_duration.ToInt(timeunit=rdfvalue.MICROSECONDS),
)
)
@@ -438,16 +443,16 @@ def testCronApprovalsReportPlugin(self):
month_duration = rdfvalue.Duration.From(30, rdfvalue.DAYS)
api_report_data = report.GetReportData(
- stats_api.ApiGetReportArgs(
+ stats_pb2.ApiGetReportArgs(
name=report.__class__.__name__,
- start_time=start,
- duration=month_duration,
+ start_time=int(start),
+ duration=month_duration.ToInt(timeunit=rdfvalue.MICROSECONDS),
)
)
self.assertEqual(
api_report_data.representation_type,
- rdf_report_plugins.ApiReportData.RepresentationType.AUDIT_CHART,
+ stats_pb2.ApiReportData.RepresentationType.AUDIT_CHART,
)
self.assertCountEqual(
@@ -473,7 +478,9 @@ def testCronApprovalsReportPlugin(self):
[
(
row.action,
- row.timestamp.Format("%Y/%m/%d"),
+ rdfvalue.RDFDatetime.FromMicrosecondsSinceEpoch(
+ row.timestamp
+ ).Format("%Y/%m/%d"),
row.user,
str(row.urn),
)
@@ -491,10 +498,10 @@ def testCronApprovalsReportPluginWithNoActivityToReport(self):
month_duration = rdfvalue.Duration.From(30, rdfvalue.DAYS)
api_report_data = report.GetReportData(
- stats_api.ApiGetReportArgs(
+ stats_pb2.ApiGetReportArgs(
name=report.__class__.__name__,
- start_time=now - month_duration,
- duration=month_duration,
+ start_time=int(now - month_duration),
+ duration=month_duration.ToInt(timeunit=rdfvalue.MICROSECONDS),
)
)
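
The explicit conversions these tests now perform are the ones visible in the hunks above; collected in one place for reference:

from grr_response_core.lib import rdfvalue

now = rdfvalue.RDFDatetime.Now()
month = rdfvalue.Duration.From(30, rdfvalue.DAYS)

# RDFDatetime -> int microseconds since epoch (what the proto fields hold).
start_time_us = int(now - month)
duration_us = month.ToInt(timeunit=rdfvalue.MICROSECONDS)

# And back again, e.g. when formatting audit rows for assertions.
ts = rdfvalue.RDFDatetime.FromMicrosecondsSinceEpoch(start_time_us)
print(ts.Format("%Y/%m/%d"))
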
diff --git a/grr/server/grr_response_server/gui/api_plugins/report_plugins/report_plugins_test_mocks.py b/grr/server/grr_response_server/gui/api_plugins/report_plugins/report_plugins_test_mocks.py
index 369b4d2618..4cd2e2abc2 100644
--- a/grr/server/grr_response_server/gui/api_plugins/report_plugins/report_plugins_test_mocks.py
+++ b/grr/server/grr_response_server/gui/api_plugins/report_plugins/report_plugins_test_mocks.py
@@ -4,8 +4,8 @@
from unittest import mock
from grr_response_core.lib import rdfvalue
-from grr_response_core.lib.rdfvalues import events as rdf_events
-from grr_response_server.gui.api_plugins.report_plugins import rdf_report_plugins
+from grr_response_proto import jobs_pb2
+from grr_response_proto.api import stats_pb2
from grr_response_server.gui.api_plugins.report_plugins import report_plugin_base
from grr_response_server.gui.api_plugins.report_plugins import report_plugins
@@ -13,7 +13,7 @@
class FooReportPlugin(report_plugin_base.ReportPluginBase):
"""Stub report plugin."""
- TYPE = rdf_report_plugins.ApiReportDescriptor.ReportType.CLIENT
+ TYPE = stats_pb2.ApiReportDescriptor.ReportType.CLIENT
TITLE = "Foo"
SUMMARY = "Reports all foos."
@@ -21,22 +21,24 @@ class FooReportPlugin(report_plugin_base.ReportPluginBase):
class BarReportPlugin(report_plugin_base.ReportPluginBase):
"""Stub report plugin."""
- TYPE = rdf_report_plugins.ApiReportDescriptor.ReportType.SERVER
+ TYPE = stats_pb2.ApiReportDescriptor.ReportType.SERVER
TITLE = "Bar Activity"
SUMMARY = "Reports bars' activity in the given time range."
REQUIRES_TIME_RANGE = True
- def GetReportData(self, get_report_args):
- ret = rdf_report_plugins.ApiReportData(
- representation_type=rdf_report_plugins.ApiReportData.RepresentationType.AUDIT_CHART,
- audit_chart=rdf_report_plugins.ApiAuditChartReportData(
+ def GetReportData(
+ self, get_report_args: stats_pb2.ApiGetReportArgs
+ ) -> stats_pb2.ApiReportData:
+ return stats_pb2.ApiReportData(
+ representation_type=stats_pb2.ApiReportData.RepresentationType.AUDIT_CHART,
+ audit_chart=stats_pb2.ApiAuditChartReportData(
used_fields=["action", "client", "timestamp", "user"],
rows=[
- rdf_events.AuditEvent(
+ jobs_pb2.AuditEvent(
user="user",
- action=rdf_events.AuditEvent.Action.USER_ADD,
- timestamp=rdfvalue.RDFDatetime.FromHumanReadable(
- "2018/12/14"
+ action=jobs_pb2.AuditEvent.Action.USER_ADD,
+ timestamp=int(
+ rdfvalue.RDFDatetime.FromHumanReadable("2018/12/14")
),
id=42,
)
@@ -44,8 +46,6 @@ def GetReportData(self, get_report_args):
),
)
- return ret
-
class MockedReportPlugins(object):
"""A context manager that swaps available reports with the mocked reports."""
diff --git a/grr/server/grr_response_server/gui/api_plugins/report_plugins/server_report_plugins.py b/grr/server/grr_response_server/gui/api_plugins/report_plugins/server_report_plugins.py
index 1f3006e5a6..6f3ade76b6 100644
--- a/grr/server/grr_response_server/gui/api_plugins/report_plugins/server_report_plugins.py
+++ b/grr/server/grr_response_server/gui/api_plugins/report_plugins/server_report_plugins.py
@@ -1,24 +1,36 @@
#!/usr/bin/env python
"""UI server report handling classes."""
+from collections.abc import Callable
import re
+from typing import Dict, List, Optional
-from grr_response_core.lib.rdfvalues import events as rdf_events
+from grr_response_core.lib import rdfvalue
+from grr_response_proto import jobs_pb2
+from grr_response_proto import objects_pb2
+from grr_response_proto.api import stats_pb2
from grr_response_server import data_store
-from grr_response_server.gui.api_plugins.report_plugins import rdf_report_plugins
from grr_response_server.gui.api_plugins.report_plugins import report_plugin_base
-RepresentationType = rdf_report_plugins.ApiReportData.RepresentationType
-
-def _LoadAuditEvents(handlers, get_report_args, transformers=None):
+def _LoadAuditEvents(
+ handlers: Dict[str, "jobs_pb2.AuditEvent.Action"],
+ get_report_args: stats_pb2.ApiGetReportArgs,
+ transformers: Optional[
+        List[Callable[[objects_pb2.APIAuditEntry, jobs_pb2.AuditEvent], None]]
+ ] = None,
+) -> List[jobs_pb2.AuditEvent]:
"""Returns AuditEvents for given handlers, actions, and timerange."""
if transformers is None:
transformers = {}
entries = data_store.REL_DB.ReadAPIAuditEntries(
- min_timestamp=get_report_args.start_time,
- max_timestamp=get_report_args.start_time + get_report_args.duration,
+ min_timestamp=rdfvalue.RDFDatetime.FromMicrosecondsSinceEpoch(
+ get_report_args.start_time
+ ),
+ max_timestamp=rdfvalue.RDFDatetime.FromMicrosecondsSinceEpoch(
+ get_report_args.start_time + get_report_args.duration
+ ),
router_method_names=list(handlers.keys()),
)
rows = [_EntryToEvent(entry, handlers, transformers) for entry in entries]
@@ -26,9 +38,15 @@ def _LoadAuditEvents(handlers, get_report_args, transformers=None):
return rows
-def _EntryToEvent(entry, handlers, transformers):
+def _EntryToEvent(
+ entry: objects_pb2.APIAuditEntry,
+ handlers: Dict[str, "jobs_pb2.AuditEvent.Action"],
+ transformers: Optional[
+        List[Callable[[objects_pb2.APIAuditEntry, jobs_pb2.AuditEvent], None]]
+ ],
+) -> jobs_pb2.AuditEvent:
"""Converts an APIAuditEntry to a legacy AuditEvent."""
- event = rdf_events.AuditEvent(
+ event = jobs_pb2.AuditEvent(
timestamp=entry.timestamp,
user=entry.username,
action=handlers[entry.router_method_name],
@@ -40,7 +58,10 @@ def _EntryToEvent(entry, handlers, transformers):
return event
-def _ExtractClientIdFromPath(entry, event):
+def _ExtractClientIdFromPath(
+ entry: objects_pb2.APIAuditEntry,
+ event: jobs_pb2.AuditEvent,
+) -> None:
"""Extracts a Client ID from an APIAuditEntry's HTTP request path."""
match = re.match(r".*(C\.[0-9a-fA-F]{16}).*", entry.http_request_path)
if match:
@@ -48,14 +69,20 @@ def _ExtractClientIdFromPath(entry, event):
# TODO: Remove AFF4 URNs from the API data format.
-def _ExtractCronJobIdFromPath(entry, event):
+def _ExtractCronJobIdFromPath(
+ entry: objects_pb2.APIAuditEntry,
+ event: jobs_pb2.AuditEvent,
+) -> None:
"""Extracts a CronJob ID from an APIAuditEntry's HTTP request path."""
match = re.match(r".*cron-job/([^/]+).*", entry.http_request_path)
if match:
event.urn = "aff4:/cron/{}".format(match.group(1))
-def _ExtractHuntIdFromPath(entry, event):
+def _ExtractHuntIdFromPath(
+ entry: objects_pb2.APIAuditEntry,
+ event: jobs_pb2.AuditEvent,
+) -> None:
"""Extracts a Hunt ID from an APIAuditEntry's HTTP request path."""
match = re.match(r".*hunt/([^/]+).*", entry.http_request_path)
if match:
@@ -65,7 +92,7 @@ def _ExtractHuntIdFromPath(entry, event):
class ClientApprovalsReportPlugin(report_plugin_base.ReportPluginBase):
"""Given timerange's client approvals."""
- TYPE = rdf_report_plugins.ApiReportDescriptor.ReportType.SERVER
+ TYPE = stats_pb2.ApiReportDescriptor.ReportType.SERVER
TITLE = "Client Approvals"
SUMMARY = "Client approval requests and grants for the given timerange."
REQUIRES_TIME_RANGE = True
@@ -74,23 +101,29 @@ class ClientApprovalsReportPlugin(report_plugin_base.ReportPluginBase):
# TODO: Rework API data format, to remove need for legacy
# AuditEvent.Action.
HANDLERS = {
- "GrantClientApproval": rdf_events.AuditEvent.Action.CLIENT_APPROVAL_GRANT,
+ "GrantClientApproval": jobs_pb2.AuditEvent.Action.CLIENT_APPROVAL_GRANT,
"CreateClientApproval": (
- rdf_events.AuditEvent.Action.CLIENT_APPROVAL_REQUEST
+ jobs_pb2.AuditEvent.Action.CLIENT_APPROVAL_REQUEST
),
}
- def GetReportData(self, get_report_args=None):
+ def GetReportData(
+ self, get_report_args: stats_pb2.ApiGetReportArgs
+ ) -> stats_pb2.ApiReportData:
"""Filter the cron job approvals in the given timerange."""
- ret = rdf_report_plugins.ApiReportData(
- representation_type=RepresentationType.AUDIT_CHART,
- audit_chart=rdf_report_plugins.ApiAuditChartReportData(
- used_fields=self.USED_FIELDS
- ),
+ ret = stats_pb2.ApiReportData()
+ ret.representation_type = (
+ stats_pb2.ApiReportData.RepresentationType.AUDIT_CHART
)
-
- ret.audit_chart.rows = _LoadAuditEvents(
- self.HANDLERS, get_report_args, transformers=[_ExtractClientIdFromPath]
+ ret.audit_chart.CopyFrom(
+ stats_pb2.ApiAuditChartReportData(used_fields=self.USED_FIELDS),
+ )
+ ret.audit_chart.rows.extend(
+ _LoadAuditEvents(
+ self.HANDLERS,
+ get_report_args,
+ transformers=[_ExtractClientIdFromPath],
+ )
)
return ret
@@ -98,30 +131,32 @@ def GetReportData(self, get_report_args=None):
class CronApprovalsReportPlugin(report_plugin_base.ReportPluginBase):
"""Given timerange's cron job approvals."""
- TYPE = rdf_report_plugins.ApiReportDescriptor.ReportType.SERVER
+ TYPE = stats_pb2.ApiReportDescriptor.ReportType.SERVER
TITLE = "Cron Job Approvals"
SUMMARY = "Cron job approval requests and grants for the given timerange."
REQUIRES_TIME_RANGE = True
USED_FIELDS = ["action", "timestamp", "user", "urn"]
HANDLERS = {
- "GrantCronJobApproval": rdf_events.AuditEvent.Action.CRON_APPROVAL_GRANT,
- "CreateCronJobApproval": (
- rdf_events.AuditEvent.Action.CRON_APPROVAL_REQUEST
- ),
+ "GrantCronJobApproval": jobs_pb2.AuditEvent.Action.CRON_APPROVAL_GRANT,
+ "CreateCronJobApproval": jobs_pb2.AuditEvent.Action.CRON_APPROVAL_REQUEST,
}
def GetReportData(self, get_report_args):
"""Filter the cron job approvals in the given timerange."""
- ret = rdf_report_plugins.ApiReportData(
- representation_type=RepresentationType.AUDIT_CHART,
- audit_chart=rdf_report_plugins.ApiAuditChartReportData(
- used_fields=self.USED_FIELDS
- ),
+ ret = stats_pb2.ApiReportData()
+ ret.representation_type = (
+ stats_pb2.ApiReportData.RepresentationType.AUDIT_CHART
)
-
- ret.audit_chart.rows = _LoadAuditEvents(
- self.HANDLERS, get_report_args, transformers=[_ExtractCronJobIdFromPath]
+ ret.audit_chart.CopyFrom(
+ stats_pb2.ApiAuditChartReportData(used_fields=self.USED_FIELDS),
+ )
+ ret.audit_chart.rows.extend(
+ _LoadAuditEvents(
+ self.HANDLERS,
+ get_report_args,
+ transformers=[_ExtractCronJobIdFromPath],
+ )
)
return ret
@@ -130,61 +165,70 @@ def GetReportData(self, get_report_args):
class HuntActionsReportPlugin(report_plugin_base.ReportPluginBase):
"""Hunt actions in the given timerange."""
- TYPE = rdf_report_plugins.ApiReportDescriptor.ReportType.SERVER
+ TYPE = stats_pb2.ApiReportDescriptor.ReportType.SERVER
TITLE = "Hunts"
SUMMARY = "Hunt management actions for the given timerange."
REQUIRES_TIME_RANGE = True
USED_FIELDS = ["action", "timestamp", "user"]
TYPES = [
- rdf_events.AuditEvent.Action.HUNT_CREATED,
- rdf_events.AuditEvent.Action.HUNT_MODIFIED,
- rdf_events.AuditEvent.Action.HUNT_PAUSED,
- rdf_events.AuditEvent.Action.HUNT_STARTED,
- rdf_events.AuditEvent.Action.HUNT_STOPPED,
+ jobs_pb2.AuditEvent.Action.HUNT_CREATED,
+ jobs_pb2.AuditEvent.Action.HUNT_MODIFIED,
+ jobs_pb2.AuditEvent.Action.HUNT_PAUSED,
+ jobs_pb2.AuditEvent.Action.HUNT_STARTED,
+ jobs_pb2.AuditEvent.Action.HUNT_STOPPED,
]
HANDLERS = {
- "CreateHunt": rdf_events.AuditEvent.Action.HUNT_CREATED,
- "ModifyHunt": rdf_events.AuditEvent.Action.HUNT_MODIFIED,
+ "CreateHunt": jobs_pb2.AuditEvent.Action.HUNT_CREATED,
+ "ModifyHunt": jobs_pb2.AuditEvent.Action.HUNT_MODIFIED,
}
def GetReportData(self, get_report_args):
"""Filter the hunt actions in the given timerange."""
- ret = rdf_report_plugins.ApiReportData(
- representation_type=RepresentationType.AUDIT_CHART,
- audit_chart=rdf_report_plugins.ApiAuditChartReportData(
- used_fields=self.USED_FIELDS
- ),
+ ret = stats_pb2.ApiReportData()
+ ret.representation_type = (
+ stats_pb2.ApiReportData.RepresentationType.AUDIT_CHART
+ )
+ ret.audit_chart.CopyFrom(
+ stats_pb2.ApiAuditChartReportData(used_fields=self.USED_FIELDS)
+ )
+ ret.audit_chart.rows.extend(
+ _LoadAuditEvents(self.HANDLERS, get_report_args)
)
-
- ret.audit_chart.rows = _LoadAuditEvents(self.HANDLERS, get_report_args)
return ret
class HuntApprovalsReportPlugin(report_plugin_base.ReportPluginBase):
"""Given timerange's hunt approvals."""
- TYPE = rdf_report_plugins.ApiReportDescriptor.ReportType.SERVER
+ TYPE = stats_pb2.ApiReportDescriptor.ReportType.SERVER
TITLE = "Hunt Approvals"
SUMMARY = "Hunt approval requests and grants for the given timerange."
REQUIRES_TIME_RANGE = True
USED_FIELDS = ["action", "timestamp", "user", "urn"]
HANDLERS = {
- "GrantHuntApproval": rdf_events.AuditEvent.Action.HUNT_APPROVAL_GRANT,
- "CreateHuntApproval": rdf_events.AuditEvent.Action.HUNT_APPROVAL_REQUEST,
+ "GrantHuntApproval": jobs_pb2.AuditEvent.Action.HUNT_APPROVAL_GRANT,
+ "CreateHuntApproval": jobs_pb2.AuditEvent.Action.HUNT_APPROVAL_REQUEST,
}
- def GetReportData(self, get_report_args):
+ def GetReportData(
+ self,
+ get_report_args: stats_pb2.ApiGetReportArgs,
+ ) -> stats_pb2.ApiReportData:
"""Filter the hunt approvals in the given timerange."""
- ret = rdf_report_plugins.ApiReportData(
- representation_type=RepresentationType.AUDIT_CHART,
- audit_chart=rdf_report_plugins.ApiAuditChartReportData(
- used_fields=self.USED_FIELDS
- ),
+ ret = stats_pb2.ApiReportData()
+ ret.representation_type = (
+ stats_pb2.ApiReportData.RepresentationType.AUDIT_CHART
)
-
- ret.audit_chart.rows = _LoadAuditEvents(
- self.HANDLERS, get_report_args, transformers=[_ExtractHuntIdFromPath]
+ ret.audit_chart.CopyFrom(
+ stats_pb2.ApiAuditChartReportData(used_fields=self.USED_FIELDS)
+ )
+ ret.audit_chart.rows.extend(
+ _LoadAuditEvents(
+ self.HANDLERS,
+ get_report_args,
+ transformers=[_ExtractHuntIdFromPath],
+ )
)
return ret
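
The shape of these rewrites follows from protobuf Python semantics rather than anything report-specific: scalar and enum fields allow plain assignment, but message fields must be mutated via `CopyFrom` and repeated fields via `extend`/`append`. A minimal illustration with the same types:

from grr_response_proto.api import stats_pb2

ret = stats_pb2.ApiReportData()

# Enum fields support direct assignment.
ret.representation_type = (
    stats_pb2.ApiReportData.RepresentationType.AUDIT_CHART
)

# Message fields must be copied into or mutated in place...
ret.audit_chart.CopyFrom(
    stats_pb2.ApiAuditChartReportData(used_fields=["action", "user"])
)

# ...and repeated fields are extended, never assigned.
ret.audit_chart.rows.extend([])  # e.g. the output of _LoadAuditEvents(...).

# Both of these would raise AttributeError:
# ret.audit_chart = stats_pb2.ApiAuditChartReportData()
# ret.audit_chart.rows = []
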
diff --git a/grr/server/grr_response_server/gui/api_plugins/stats.py b/grr/server/grr_response_server/gui/api_plugins/stats.py
index 9d83f60d1c..e14ecdc2d4 100644
--- a/grr/server/grr_response_server/gui/api_plugins/stats.py
+++ b/grr/server/grr_response_server/gui/api_plugins/stats.py
@@ -1,11 +1,14 @@
#!/usr/bin/env python
"""API handlers for stats."""
+from typing import Optional
+
from grr_response_core.lib import rdfvalue
from grr_response_core.lib.rdfvalues import structs as rdf_structs
from grr_response_core.stats import stats_collector_instance
from grr_response_proto.api import stats_pb2
from grr_response_server.gui import admin_ui_metrics
+from grr_response_server.gui import api_call_context
from grr_response_server.gui import api_call_handler_base
from grr_response_server.gui.api_plugins.report_plugins import rdf_report_plugins
from grr_response_server.gui.api_plugins.report_plugins import report_plugins
@@ -22,14 +25,17 @@ class ApiListReportsHandler(api_call_handler_base.ApiCallHandler):
"""Lists the reports."""
result_type = ApiListReportsResult
-
- def Handle(self, args, context):
- return ApiListReportsResult(
+ proto_result_type = stats_pb2.ApiListReportsResult
+
+ def Handle(
+ self,
+      args: None = None,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> stats_pb2.ApiListReportsResult:
+ return stats_pb2.ApiListReportsResult(
reports=sorted(
(
- rdf_report_plugins.ApiReport(
- desc=report_cls.GetReportDescriptor(), data=None
- )
+ stats_pb2.ApiReport(desc=report_cls.GetReportDescriptor())
for report_cls in report_plugins.GetAvailableReportPlugins()
),
key=lambda report: (report.desc.type, report.desc.title),
@@ -50,14 +56,20 @@ class ApiGetReportHandler(api_call_handler_base.ApiCallHandler):
args_type = ApiGetReportArgs
result_type = rdf_report_plugins.ApiReport
-
- def Handle(self, args, context):
+ proto_args_type = stats_pb2.ApiGetReportArgs
+ proto_result_type = stats_pb2.ApiReport
+
+ def Handle(
+ self,
+ args: stats_pb2.ApiGetReportArgs,
+ context: Optional[api_call_context.ApiCallContext],
+ ) -> stats_pb2.ApiReport:
report = report_plugins.GetReportByName(args.name)
if not args.client_label:
args.client_label = "All"
- return rdf_report_plugins.ApiReport(
+ return stats_pb2.ApiReport(
desc=report.GetReportDescriptor(), data=report.GetReportData(args)
)
@@ -82,8 +94,14 @@ class ApiIncrementCounterMetricHandler(api_call_handler_base.ApiCallHandler):
args_type = ApiIncrementCounterMetricArgs
result_type = ApiIncrementCounterMetricResult
-
- def Handle(self, args, context):
+ proto_args_type = stats_pb2.ApiIncrementCounterMetricArgs
+ proto_result_type = stats_pb2.ApiIncrementCounterMetricResult
+
+ def Handle(
+ self,
+ args: stats_pb2.ApiIncrementCounterMetricArgs,
+ context: Optional[api_call_context.ApiCallContext],
+ ) -> stats_pb2.ApiIncrementCounterMetricResult:
if not args.metric_name:
raise ValueError("Missing `metric_name` input (must be provided).")
@@ -109,4 +127,4 @@ def Handle(self, args, context):
args.metric_name, fields=fields
)
- return ApiIncrementCounterMetricResult()
+ return stats_pb2.ApiIncrementCounterMetricResult()
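
The stats handlers above all follow the same migration pattern: keep the RDF `args_type`/`result_type` declarations for now, add `proto_args_type`/`proto_result_type`, and make `Handle` consume and produce protos directly. A stripped-down sketch of the pattern (the handler name and behavior are hypothetical):

from typing import Optional

from grr_response_proto.api import stats_pb2
from grr_response_server.gui import api_call_context
from grr_response_server.gui import api_call_handler_base


class NoopReportsHandler(api_call_handler_base.ApiCallHandler):
  """Hypothetical no-args handler using the proto-first Handle signature."""

  proto_result_type = stats_pb2.ApiListReportsResult

  def Handle(
      self,
      args: None = None,
      context: Optional[api_call_context.ApiCallContext] = None,
  ) -> stats_pb2.ApiListReportsResult:
    return stats_pb2.ApiListReportsResult()
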
diff --git a/grr/server/grr_response_server/gui/api_plugins/stats_test.py b/grr/server/grr_response_server/gui/api_plugins/stats_test.py
index af9eebbc3b..87c0dae077 100644
--- a/grr/server/grr_response_server/gui/api_plugins/stats_test.py
+++ b/grr/server/grr_response_server/gui/api_plugins/stats_test.py
@@ -40,13 +40,13 @@ def testIncreasesExistingMetric(self):
"bananas_de_pijamas_counter", fields=[("name", str), ("number", int)]
)
- args = stats.ApiIncrementCounterMetricArgs(
+ args = stats_pb2.ApiIncrementCounterMetricArgs(
metric_name="bananas_de_pijamas_counter",
field_values=[
- stats.FieldValue(
+ stats_pb2.FieldValue(
field_type=stats_pb2.FieldValue.STRING, string_value="b"
),
- stats.FieldValue(
+ stats_pb2.FieldValue(
field_type=stats_pb2.FieldValue.NUMBER, number_value=2
),
],
@@ -73,13 +73,13 @@ def testRaisesNotAllowlisted(self):
"bananas_de_pijamas_counter", fields=[("name", str), ("number", int)]
)
- args = stats.ApiIncrementCounterMetricArgs(
+ args = stats_pb2.ApiIncrementCounterMetricArgs(
metric_name="invalid_counter_does_not_exist",
field_values=[
- stats.FieldValue(
+ stats_pb2.FieldValue(
field_type=stats_pb2.FieldValue.STRING, string_value="b"
),
- stats.FieldValue(
+ stats_pb2.FieldValue(
field_type=stats_pb2.FieldValue.NUMBER, number_value=2
),
],
@@ -103,13 +103,13 @@ def testRaisesWithInvalidMetric(self):
"bananas_de_pijamas_counter", fields=[("name", str), ("number", int)]
)
- args = stats.ApiIncrementCounterMetricArgs(
+ args = stats_pb2.ApiIncrementCounterMetricArgs(
metric_name="invalid_counter_does_not_exist",
field_values=[
- stats.FieldValue(
+ stats_pb2.FieldValue(
field_type=stats_pb2.FieldValue.STRING, string_value="b"
),
- stats.FieldValue(
+ stats_pb2.FieldValue(
field_type=stats_pb2.FieldValue.NUMBER, number_value=2
),
],
diff --git a/grr/server/grr_response_server/gui/api_plugins/timeline.py b/grr/server/grr_response_server/gui/api_plugins/timeline.py
index 46908fb7ac..b45dcfb869 100644
--- a/grr/server/grr_response_server/gui/api_plugins/timeline.py
+++ b/grr/server/grr_response_server/gui/api_plugins/timeline.py
@@ -49,15 +49,16 @@ class ApiGetCollectedTimelineHandler(api_call_handler_base.ApiCallHandler):
"""An API handler for the timeline exporter."""
args_type = ApiGetCollectedTimelineArgs
+ proto_args_type = timeline_pb2.ApiGetCollectedTimelineArgs
def Handle(
self,
- args: ApiGetCollectedTimelineArgs,
+ args: timeline_pb2.ApiGetCollectedTimelineArgs,
context: Optional[api_call_context.ApiCallContext] = None,
) -> api_call_handler_base.ApiBinaryStream:
"""Handles requests for the timeline export API call."""
- client_id = str(args.client_id)
- flow_id = str(args.flow_id)
+ client_id = args.client_id
+ flow_id = args.flow_id
flow_obj = data_store.REL_DB.ReadFlowObject(client_id, flow_id)
if flow_obj.flow_class_name != timeline.TimelineFlow.__name__:
@@ -74,10 +75,10 @@ def Handle(
def _StreamBody(
self,
- args: ApiGetCollectedTimelineArgs,
+ args: timeline_pb2.ApiGetCollectedTimelineArgs,
) -> api_call_handler_base.ApiBinaryStream:
- client_id = str(args.client_id)
- flow_id = str(args.flow_id)
+ client_id = args.client_id
+ flow_id = args.flow_id
opts = body.Opts()
opts.timestamp_subsecond_precision = (
@@ -119,6 +120,7 @@ class ApiGetCollectedHuntTimelinesHandler(api_call_handler_base.ApiCallHandler):
"""An API handler for the hunt timelines exporter."""
args_type = ApiGetCollectedHuntTimelinesArgs
+ proto_args_type = timeline_pb2.ApiGetCollectedHuntTimelinesArgs
def __init__(self):
super().__init__()
@@ -126,11 +128,11 @@ def __init__(self):
def Handle(
self,
- args: ApiGetCollectedHuntTimelinesArgs,
+ args: timeline_pb2.ApiGetCollectedHuntTimelinesArgs,
context: Optional[api_call_context.ApiCallContext] = None,
) -> api_call_handler_base.ApiBinaryStream:
"""Handles requests for the hunt timelines export API call."""
- hunt_id = str(args.hunt_id)
+ hunt_id = args.hunt_id
hunt_obj = data_store.REL_DB.ReadHuntObject(hunt_id)
if hunt_obj.args.standard.flow_name != timeline.TimelineFlow.__name__:
@@ -151,7 +153,7 @@ def Handle(
def _GenerateArchive(
self,
- args: ApiGetCollectedHuntTimelinesArgs,
+ args: timeline_pb2.ApiGetCollectedHuntTimelinesArgs,
) -> Iterator[bytes]:
zipgen = utils.StreamingZipGenerator()
yield from self._GenerateHuntTimelines(args, zipgen)
@@ -159,14 +161,15 @@ def _GenerateArchive(
def _GenerateHuntTimelines(
self,
- args: ApiGetCollectedHuntTimelinesArgs,
+ args: timeline_pb2.ApiGetCollectedHuntTimelinesArgs,
zipgen: utils.StreamingZipGenerator,
) -> Iterator[bytes]:
- hunt_id = str(args.hunt_id)
offset = 0
while True:
- flows = data_store.REL_DB.ReadHuntFlows(hunt_id, offset, _FLOW_BATCH_SIZE)
+ flows = data_store.REL_DB.ReadHuntFlows(
+ args.hunt_id, offset, _FLOW_BATCH_SIZE
+ )
client_ids = [flow.client_id for flow in flows]
client_snapshots = data_store.REL_DB.MultiReadClientSnapshot(client_ids)
@@ -175,11 +178,11 @@ def _GenerateHuntTimelines(
snapshot = client_snapshots[flow.client_id]
filename = _GetHuntTimelineFilename(snapshot, args.format)
- subargs = ApiGetCollectedTimelineArgs()
+ subargs = timeline_pb2.ApiGetCollectedTimelineArgs()
subargs.client_id = flow.client_id
subargs.flow_id = flow.flow_id
subargs.format = args.format
- subargs.body_opts = args.body_opts
+ subargs.body_opts.CopyFrom(args.body_opts)
yield zipgen.WriteFileHeader(filename)
yield from map(zipgen.WriteFileChunk, self._GenerateTimeline(subargs))
@@ -190,7 +193,7 @@ def _GenerateHuntTimelines(
def _GenerateTimeline(
self,
- args: ApiGetCollectedTimelineArgs,
+ args: timeline_pb2.ApiGetCollectedTimelineArgs,
) -> Iterator[bytes]:
return self._handler.Handle(args).GenerateContent()
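
`_GenerateHuntTimelines` above reads hunt flows with a conventional offset/batch loop. Stripped of the zip plumbing, the pattern looks roughly like this (a sketch: the exit condition is assumed, since this hunk does not show it, and the batch constant is hypothetical):

from grr_response_server import data_store

_FLOW_BATCH_SIZE = 1024  # Hypothetical; the real constant lives in timeline.py.


def IterHuntFlows(hunt_id: str):
  """Yields all flows of a hunt, one database page at a time."""
  offset = 0
  while True:
    flows = data_store.REL_DB.ReadHuntFlows(hunt_id, offset, _FLOW_BATCH_SIZE)
    if not flows:
      return  # Assumed: an empty page terminates the scan.
    yield from flows
    offset += len(flows)
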
diff --git a/grr/server/grr_response_server/gui/api_plugins/timeline_test.py b/grr/server/grr_response_server/gui/api_plugins/timeline_test.py
index 7f24dd5919..4fe199eced 100644
--- a/grr/server/grr_response_server/gui/api_plugins/timeline_test.py
+++ b/grr/server/grr_response_server/gui/api_plugins/timeline_test.py
@@ -9,16 +9,16 @@
from grr_response_core.lib.rdfvalues import timeline as rdf_timeline
from grr_response_core.lib.util import chunked
+from grr_response_proto import flows_pb2
+from grr_response_proto import hunts_pb2
from grr_response_proto import objects_pb2
+from grr_response_proto import timeline_pb2
+from grr_response_proto.api import timeline_pb2 as api_timeline_pb2
from grr_response_server import data_store
from grr_response_server.databases import db_test_utils
from grr_response_server.flows.general import timeline
from grr_response_server.gui import api_test_lib
from grr_response_server.gui.api_plugins import timeline as api_timeline
-from grr_response_server.rdfvalues import flow_objects as rdf_flow_objects
-from grr_response_server.rdfvalues import hunt_objects as rdf_hunt_objects
-from grr_response_server.rdfvalues import mig_flow_objects
-from grr_response_server.rdfvalues import mig_hunt_objects
from grr.test_lib import testing_startup
from grr.test_lib import timeline_test_lib
@@ -38,16 +38,16 @@ def testRaisesOnIncorrectFlowType(self):
client_id = db_test_utils.InitializeClient(data_store.REL_DB)
flow_id = "A1B3C5D7E"
- flow_obj = rdf_flow_objects.Flow()
+ flow_obj = flows_pb2.Flow()
flow_obj.client_id = client_id
flow_obj.flow_id = flow_id
flow_obj.flow_class_name = "NotTimelineFlow"
- data_store.REL_DB.WriteFlowObject(mig_flow_objects.ToProtoFlow(flow_obj))
+ data_store.REL_DB.WriteFlowObject(flow_obj)
- args = api_timeline.ApiGetCollectedTimelineArgs()
+ args = api_timeline_pb2.ApiGetCollectedTimelineArgs()
args.client_id = client_id
args.flow_id = flow_id
- args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY
+ args.format = api_timeline_pb2.ApiGetCollectedTimelineArgs.Format.BODY
with self.assertRaises(ValueError):
self.handler.Handle(args)
@@ -56,10 +56,12 @@ def testRaisesOnIncorrectFormat(self):
client_id = db_test_utils.InitializeClient(data_store.REL_DB)
flow_id = timeline_test_lib.WriteTimeline(client_id, [])
- args = api_timeline.ApiGetCollectedTimelineArgs()
+ args = api_timeline_pb2.ApiGetCollectedTimelineArgs()
args.client_id = client_id
args.flow_id = flow_id
- args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.UNSPECIFIED
+ args.format = (
+ api_timeline_pb2.ApiGetCollectedTimelineArgs.Format.UNSPECIFIED
+ )
with self.assertRaises(ValueError):
self.handler.Handle(args)
@@ -68,10 +70,10 @@ def testBodyNoEntries(self):
client_id = db_test_utils.InitializeClient(data_store.REL_DB)
flow_id = timeline_test_lib.WriteTimeline(client_id, [])
- args = api_timeline.ApiGetCollectedTimelineArgs()
+ args = api_timeline_pb2.ApiGetCollectedTimelineArgs()
args.client_id = client_id
args.flow_id = flow_id
- args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY
+ args.format = api_timeline_pb2.ApiGetCollectedTimelineArgs.Format.BODY
result = self.handler.Handle(args)
content = b"".join(result.GenerateContent()).decode("utf-8")
@@ -80,7 +82,7 @@ def testBodyNoEntries(self):
self.assertLen(rows, 0)
def testBodySingleEntry(self):
- entry = rdf_timeline.TimelineEntry()
+ entry = timeline_pb2.TimelineEntry()
entry.path = "/foo/bar/baz".encode("utf-8")
entry.ino = 4815162342
entry.size = 42
@@ -91,10 +93,10 @@ def testBodySingleEntry(self):
client_id = db_test_utils.InitializeClient(data_store.REL_DB)
flow_id = timeline_test_lib.WriteTimeline(client_id, [entry])
- args = api_timeline.ApiGetCollectedTimelineArgs()
+ args = api_timeline_pb2.ApiGetCollectedTimelineArgs()
args.client_id = client_id
args.flow_id = flow_id
- args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY
+ args.format = api_timeline_pb2.ApiGetCollectedTimelineArgs.Format.BODY
result = self.handler.Handle(args)
content = b"".join(result.GenerateContent()).decode("utf-8")
@@ -112,7 +114,7 @@ def testBodyMultipleEntries(self):
entries = []
for idx in range(1024):
- entry = rdf_timeline.TimelineEntry()
+ entry = timeline_pb2.TimelineEntry()
entry.path = "/foo/bar/baz/quux/norf/thud{}".format(idx).encode("utf-8")
entry.size = random.randint(0, 1024)
entries.append(entry)
@@ -120,10 +122,10 @@ def testBodyMultipleEntries(self):
client_id = db_test_utils.InitializeClient(data_store.REL_DB)
flow_id = timeline_test_lib.WriteTimeline(client_id, entries)
- args = api_timeline.ApiGetCollectedTimelineArgs()
+ args = api_timeline_pb2.ApiGetCollectedTimelineArgs()
args.client_id = client_id
args.flow_id = flow_id
- args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY
+ args.format = api_timeline_pb2.ApiGetCollectedTimelineArgs.Format.BODY
result = self.handler.Handle(args)
content = b"".join(result.GenerateContent()).decode("utf-8")
@@ -136,17 +138,17 @@ def testBodyMultipleEntries(self):
self.assertEqual(int(row[6]), entries[idx].size)
def testBodySubsecondPrecision(self):
- entry = rdf_timeline.TimelineEntry()
+ entry = timeline_pb2.TimelineEntry()
entry.path = "/foo/bar/baz".encode("utf-8")
entry.atime_ns = int(3.14 * 10**9)
client_id = db_test_utils.InitializeClient(data_store.REL_DB)
flow_id = timeline_test_lib.WriteTimeline(client_id, [entry])
- args = api_timeline.ApiGetCollectedTimelineArgs()
+ args = api_timeline_pb2.ApiGetCollectedTimelineArgs()
args.client_id = client_id
args.flow_id = flow_id
- args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY
+ args.format = api_timeline_pb2.ApiGetCollectedTimelineArgs.Format.BODY
args.body_opts.timestamp_subsecond_precision = True
result = self.handler.Handle(args)
@@ -158,17 +160,17 @@ def testBodySubsecondPrecision(self):
self.assertEqual(rows[0][7], "3.14")
def testNtfsFileReferenceFormat(self):
- entry = rdf_timeline.TimelineEntry()
+ entry = timeline_pb2.TimelineEntry()
entry.path = "/foo/bar/baz".encode("utf-8")
entry.ino = 1688849860339456
client_id = db_test_utils.InitializeClient(data_store.REL_DB)
flow_id = timeline_test_lib.WriteTimeline(client_id, [entry])
- args = api_timeline.ApiGetCollectedTimelineArgs()
+ args = api_timeline_pb2.ApiGetCollectedTimelineArgs()
args.client_id = client_id
args.flow_id = flow_id
- args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY
+ args.format = api_timeline_pb2.ApiGetCollectedTimelineArgs.Format.BODY
args.body_opts.inode_ntfs_file_reference_format = True
result = self.handler.Handle(args)
@@ -180,38 +182,36 @@ def testNtfsFileReferenceFormat(self):
self.assertEqual(rows[0][2], "75520-6")
def testNtfsFileReferenceFormatInference(self):
- entry = rdf_timeline.TimelineEntry()
+ entry = timeline_pb2.TimelineEntry()
entry.path = "/foo/bar/baz".encode("utf-8")
entry.ino = 1688849860339456
client_id = db_test_utils.InitializeClient(data_store.REL_DB)
flow_id = "F00BA542"
- flow_obj = rdf_flow_objects.Flow()
+ flow_obj = flows_pb2.Flow()
flow_obj.client_id = client_id
flow_obj.flow_id = flow_id
flow_obj.flow_class_name = timeline.TimelineFlow.__name__
- data_store.REL_DB.WriteFlowObject(mig_flow_objects.ToProtoFlow(flow_obj))
+ data_store.REL_DB.WriteFlowObject(flow_obj)
- blobs = list(rdf_timeline.TimelineEntry.SerializeStream(iter([entry])))
+ blobs = list(rdf_timeline.SerializeTimelineEntryStream([entry]))
blob_ids = data_store.BLOBS.WriteBlobsWithUnknownHashes(blobs)
- result = rdf_timeline.TimelineResult()
- result.entry_batch_blob_ids = list(map(bytes, blob_ids))
+ result = timeline_pb2.TimelineResult()
+ result.entry_batch_blob_ids.extend(list(map(bytes, blob_ids)))
result.filesystem_type = "NTFS"
- flow_result = rdf_flow_objects.FlowResult()
+ flow_result = flows_pb2.FlowResult()
flow_result.client_id = client_id
flow_result.flow_id = flow_id
- flow_result.payload = result
- data_store.REL_DB.WriteFlowResults(
- [mig_flow_objects.ToProtoFlowResult(flow_result)]
- )
+ flow_result.payload.Pack(result)
+ data_store.REL_DB.WriteFlowResults([flow_result])
- args = api_timeline.ApiGetCollectedTimelineArgs()
+ args = api_timeline_pb2.ApiGetCollectedTimelineArgs()
args.client_id = client_id
args.flow_id = flow_id
- args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY
+ args.format = api_timeline_pb2.ApiGetCollectedTimelineArgs.Format.BODY
result = self.handler.Handle(args)
content = b"".join(result.GenerateContent()).decode("utf-8")
@@ -222,16 +222,16 @@ def testNtfsFileReferenceFormatInference(self):
self.assertEqual(rows[0][2], "75520-6")
def testBackslashEscape(self):
- entry = rdf_timeline.TimelineEntry()
+ entry = timeline_pb2.TimelineEntry()
entry.path = "C:\\Windows\\system32\\notepad.exe".encode("utf-8")
client_id = db_test_utils.InitializeClient(data_store.REL_DB)
flow_id = timeline_test_lib.WriteTimeline(client_id, [entry])
- args = api_timeline.ApiGetCollectedTimelineArgs()
+ args = api_timeline_pb2.ApiGetCollectedTimelineArgs()
args.client_id = client_id
args.flow_id = flow_id
- args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY
+ args.format = api_timeline_pb2.ApiGetCollectedTimelineArgs.Format.BODY
args.body_opts.backslash_escape = True
result = self.handler.Handle(args)
@@ -240,16 +240,16 @@ def testBackslashEscape(self):
self.assertIn("|C:\\\\Windows\\\\system32\\\\notepad.exe|", content)
def testCarriageReturnEscape(self):
- entry = rdf_timeline.TimelineEntry()
+ entry = timeline_pb2.TimelineEntry()
entry.path = "/foo/bar\r\rbaz/quux\rnorf".encode("utf-8")
client_id = db_test_utils.InitializeClient(data_store.REL_DB)
flow_id = timeline_test_lib.WriteTimeline(client_id, [entry])
- args = api_timeline.ApiGetCollectedTimelineArgs()
+ args = api_timeline_pb2.ApiGetCollectedTimelineArgs()
args.client_id = client_id
args.flow_id = flow_id
- args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY
+ args.format = api_timeline_pb2.ApiGetCollectedTimelineArgs.Format.BODY
args.body_opts.carriage_return_escape = True
result = self.handler.Handle(args)
@@ -258,16 +258,16 @@ def testCarriageReturnEscape(self):
self.assertIn("|/foo/bar\\r\\rbaz/quux\\rnorf|", content)
def testNonPrintableEscape(self):
- entry = rdf_timeline.TimelineEntry()
+ entry = timeline_pb2.TimelineEntry()
entry.path = b"/f\x00b\x0ar\x1baz"
client_id = db_test_utils.InitializeClient(data_store.REL_DB)
flow_id = timeline_test_lib.WriteTimeline(client_id, [entry])
- args = api_timeline.ApiGetCollectedTimelineArgs()
+ args = api_timeline_pb2.ApiGetCollectedTimelineArgs()
args.client_id = client_id
args.flow_id = flow_id
- args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY
+ args.format = api_timeline_pb2.ApiGetCollectedTimelineArgs.Format.BODY
args.body_opts.non_printable_escape = True
result = self.handler.Handle(args)
@@ -279,49 +279,46 @@ def testBodyMultipleResults(self):
client_id = db_test_utils.InitializeClient(data_store.REL_DB)
flow_id = "ABCDEF42"
- flow_obj = rdf_flow_objects.Flow()
+ flow_obj = flows_pb2.Flow()
flow_obj.client_id = client_id
flow_obj.flow_id = flow_id
flow_obj.flow_class_name = timeline.TimelineFlow.__name__
- data_store.REL_DB.WriteFlowObject(mig_flow_objects.ToProtoFlow(flow_obj))
+ data_store.REL_DB.WriteFlowObject(flow_obj)
- entry_1 = rdf_timeline.TimelineEntry()
+ entry_1 = timeline_pb2.TimelineEntry()
entry_1.path = "/foo".encode("utf-8")
- blobs_1 = list(rdf_timeline.TimelineEntry.SerializeStream(iter([entry_1])))
+ blobs_1 = list(rdf_timeline.SerializeTimelineEntryStream([entry_1]))
(blob_id_1,) = data_store.BLOBS.WriteBlobsWithUnknownHashes(blobs_1)
- result_1 = rdf_timeline.TimelineResult()
- result_1.entry_batch_blob_ids = [bytes(blob_id_1)]
+ result_1 = timeline_pb2.TimelineResult()
+ result_1.entry_batch_blob_ids.append(bytes(blob_id_1))
- entry_2 = rdf_timeline.TimelineEntry()
+ entry_2 = timeline_pb2.TimelineEntry()
entry_2.path = "/bar".encode("utf-8")
- blobs_2 = list(rdf_timeline.TimelineEntry.SerializeStream(iter([entry_2])))
+ blobs_2 = list(rdf_timeline.SerializeTimelineEntryStream([entry_2]))
(blob_id_2,) = data_store.BLOBS.WriteBlobsWithUnknownHashes(blobs_2)
- result_2 = rdf_timeline.TimelineResult()
- result_2.entry_batch_blob_ids = [bytes(blob_id_2)]
+ result_2 = timeline_pb2.TimelineResult()
+ result_2.entry_batch_blob_ids.extend([bytes(blob_id_2)])
- flow_result_1 = rdf_flow_objects.FlowResult()
+ flow_result_1 = flows_pb2.FlowResult()
flow_result_1.client_id = client_id
flow_result_1.flow_id = flow_id
- flow_result_1.payload = result_1
+ flow_result_1.payload.Pack(result_1)
- flow_result_2 = rdf_flow_objects.FlowResult()
+ flow_result_2 = flows_pb2.FlowResult()
flow_result_2.client_id = client_id
flow_result_2.flow_id = flow_id
- flow_result_2.payload = result_2
+ flow_result_2.payload.Pack(result_2)
- data_store.REL_DB.WriteFlowResults([
- mig_flow_objects.ToProtoFlowResult(flow_result_1),
- mig_flow_objects.ToProtoFlowResult(flow_result_2),
- ])
+ data_store.REL_DB.WriteFlowResults([flow_result_1, flow_result_2])
- args = api_timeline.ApiGetCollectedTimelineArgs()
+ args = api_timeline_pb2.ApiGetCollectedTimelineArgs()
args.client_id = client_id
args.flow_id = flow_id
- args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY
+ args.format = api_timeline_pb2.ApiGetCollectedTimelineArgs.Format.BODY
result = self.handler.Handle(args)
content = b"".join(result.GenerateContent()).decode("utf-8")
@@ -333,21 +330,23 @@ def testRawGzchunkedEmpty(self):
client_id = db_test_utils.InitializeClient(data_store.REL_DB)
flow_id = timeline_test_lib.WriteTimeline(client_id, [])
- args = api_timeline.ApiGetCollectedTimelineArgs()
+ args = api_timeline_pb2.ApiGetCollectedTimelineArgs()
args.client_id = client_id
args.flow_id = flow_id
- args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.RAW_GZCHUNKED
+ args.format = (
+ api_timeline_pb2.ApiGetCollectedTimelineArgs.Format.RAW_GZCHUNKED
+ )
content = b"".join(self.handler.Handle(args).GenerateContent())
buf = io.BytesIO(content)
self.assertIsNone(chunked.Read(buf))
- def testRawGzchunkedMulipleEntries(self):
+ def testRawGzchunkedMultipleEntries(self):
entries = []
for idx in range(1024):
- entry = rdf_timeline.TimelineEntry()
+ entry = timeline_pb2.TimelineEntry()
entry.path = "/quux/thud/bar/baz/foo{}".format(idx).encode("utf-8")
entry.size = random.randint(0, 1024)
entries.append(entry)
@@ -355,16 +354,20 @@ def testRawGzchunkedMulipleEntries(self):
client_id = db_test_utils.InitializeClient(data_store.REL_DB)
flow_id = timeline_test_lib.WriteTimeline(client_id, entries)
- args = api_timeline.ApiGetCollectedTimelineArgs()
+ args = api_timeline_pb2.ApiGetCollectedTimelineArgs()
args.client_id = client_id
args.flow_id = flow_id
- args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.RAW_GZCHUNKED
+ args.format = (
+ api_timeline_pb2.ApiGetCollectedTimelineArgs.Format.RAW_GZCHUNKED
+ )
content = b"".join(self.handler.Handle(args).GenerateContent())
buf = io.BytesIO(content)
chunks = chunked.ReadAll(buf)
- deserialized = list(rdf_timeline.TimelineEntry.DeserializeStream(chunks))
+ deserialized = list(
+ rdf_timeline.DeserializeTimelineEntryStream(iter(chunks))
+ )
self.assertEqual(entries, deserialized)
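The former `TimelineEntry.SerializeStream`/`DeserializeStream` class methods give way to the module-level `SerializeTimelineEntryStream` and `DeserializeTimelineEntryStream`, which operate on `timeline_pb2.TimelineEntry` protos. A round-trip sketch, assuming the two functions are inverses as the tests suggest:

```python
from grr_response_proto import timeline_pb2

# `rdf_timeline` refers to the module imported by the test file above;
# its exact import path is not shown in this diff.
entry = timeline_pb2.TimelineEntry()
entry.path = "/foo/bar".encode("utf-8")
entry.size = 42

# Serialization yields an iterable of serialized entry batches...
blobs = list(rdf_timeline.SerializeTimelineEntryStream([entry]))

# ...which deserialization consumes again as an iterator of chunks.
entries = list(rdf_timeline.DeserializeTimelineEntryStream(iter(blobs)))
assert entries == [entry]
```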
@@ -383,17 +386,16 @@ def setUp(self):
def testRaisesOnIncorrectFlowType(self):
hunt_id = "".join(random.choice("ABCDEF") for _ in range(8))
- hunt_obj = rdf_hunt_objects.Hunt()
+ hunt_obj = hunts_pb2.Hunt()
hunt_obj.hunt_id = hunt_id
hunt_obj.args.standard.flow_name = "NotTimelineFlow"
- hunt_obj.hunt_state = rdf_hunt_objects.Hunt.HuntState.PAUSED
+ hunt_obj.hunt_state = hunts_pb2.Hunt.HuntState.PAUSED
- hunt_obj = mig_hunt_objects.ToProtoHunt(hunt_obj)
data_store.REL_DB.WriteHuntObject(hunt_obj)
- args = api_timeline.ApiGetCollectedHuntTimelinesArgs()
+ args = api_timeline_pb2.ApiGetCollectedHuntTimelinesArgs()
args.hunt_id = hunt_id
- args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY
+ args.format = api_timeline_pb2.ApiGetCollectedTimelineArgs.Format.BODY
with self.assertRaises(ValueError):
self.handler.Handle(args)
@@ -401,17 +403,18 @@ def testRaisesOnIncorrectFlowType(self):
def testRaisesOnIncorrectFormat(self):
hunt_id = "B1C2E3D4"
- hunt_obj = rdf_hunt_objects.Hunt()
+ hunt_obj = hunts_pb2.Hunt()
hunt_obj.hunt_id = hunt_id
hunt_obj.args.standard.flow_name = timeline.TimelineFlow.__name__
- hunt_obj.hunt_state = rdf_hunt_objects.Hunt.HuntState.PAUSED
+ hunt_obj.hunt_state = hunts_pb2.Hunt.HuntState.PAUSED
- hunt_obj = mig_hunt_objects.ToProtoHunt(hunt_obj)
data_store.REL_DB.WriteHuntObject(hunt_obj)
- args = api_timeline.ApiGetCollectedHuntTimelinesArgs()
+ args = api_timeline_pb2.ApiGetCollectedHuntTimelinesArgs()
args.hunt_id = hunt_id
- args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.UNSPECIFIED
+ args.format = (
+ api_timeline_pb2.ApiGetCollectedTimelineArgs.Format.UNSPECIFIED
+ )
with self.assertRaises(ValueError):
self.handler.Handle(args)
@@ -432,15 +435,14 @@ def testBodyMultipleClients(self):
hunt_id = "B1C2E3D4"
- hunt_obj = rdf_hunt_objects.Hunt()
+ hunt_obj = hunts_pb2.Hunt()
hunt_obj.hunt_id = hunt_id
hunt_obj.args.standard.flow_name = timeline.TimelineFlow.__name__
- hunt_obj.hunt_state = rdf_hunt_objects.Hunt.HuntState.PAUSED
+ hunt_obj.hunt_state = hunts_pb2.Hunt.HuntState.PAUSED
- hunt_obj = mig_hunt_objects.ToProtoHunt(hunt_obj)
data_store.REL_DB.WriteHuntObject(hunt_obj)
- entry_1 = rdf_timeline.TimelineEntry()
+ entry_1 = timeline_pb2.TimelineEntry()
entry_1.path = "/bar/baz/quux".encode("utf-8")
entry_1.ino = 5926273453
entry_1.size = 13373
@@ -449,7 +451,7 @@ def testBodyMultipleClients(self):
entry_1.ctime_ns = 333 * 10**9
entry_1.mode = 0o664
- entry_2 = rdf_timeline.TimelineEntry()
+ entry_2 = timeline_pb2.TimelineEntry()
entry_2.path = "/bar/baz/quuz".encode("utf-8")
entry_2.ino = 6037384564
entry_2.size = 13374
@@ -461,9 +463,9 @@ def testBodyMultipleClients(self):
timeline_test_lib.WriteTimeline(client_id_1, [entry_1], hunt_id=hunt_id)
timeline_test_lib.WriteTimeline(client_id_2, [entry_2], hunt_id=hunt_id)
- args = api_timeline.ApiGetCollectedHuntTimelinesArgs()
+ args = api_timeline_pb2.ApiGetCollectedHuntTimelinesArgs()
args.hunt_id = hunt_id
- args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY
+ args.format = api_timeline_pb2.ApiGetCollectedTimelineArgs.Format.BODY
content = b"".join(self.handler.Handle(args).GenerateContent())
buffer = io.BytesIO(content)
@@ -513,15 +515,14 @@ def testRawGzchunkedMultipleClients(self):
hunt_id = "A0B1D2C3"
- hunt_obj = rdf_hunt_objects.Hunt()
+ hunt_obj = hunts_pb2.Hunt()
hunt_obj.hunt_id = hunt_id
hunt_obj.args.standard.flow_name = timeline.TimelineFlow.__name__
- hunt_obj.hunt_state = rdf_hunt_objects.Hunt.HuntState.PAUSED
+ hunt_obj.hunt_state = hunts_pb2.Hunt.HuntState.PAUSED
- hunt_obj = mig_hunt_objects.ToProtoHunt(hunt_obj)
data_store.REL_DB.WriteHuntObject(hunt_obj)
- entry_1 = rdf_timeline.TimelineEntry()
+ entry_1 = timeline_pb2.TimelineEntry()
entry_1.path = "foo_1".encode("utf-8")
entry_1.ino = 5432154321
entry_1.size = 13371
@@ -530,21 +531,23 @@ def testRawGzchunkedMultipleClients(self):
entry_1.ctime_ns = 344 * 10**9
entry_1.mode = 0o663
- entry_2 = rdf_timeline.TimelineEntry()
+ entry_2 = timeline_pb2.TimelineEntry()
entry_2.path = "foo_2".encode("utf-8")
- entry_1.ino = 7654376543
+ entry_2.ino = 7654376543
entry_2.size = 13372
- entry_1.atime_ns = 788 * 10**9
- entry_1.mtime_ns = 899 * 10**9
- entry_1.ctime_ns = 900 * 10**9
- entry_1.mode = 0o763
+ entry_2.atime_ns = 788 * 10**9
+ entry_2.mtime_ns = 899 * 10**9
+ entry_2.ctime_ns = 900 * 10**9
+ entry_2.mode = 0o763
timeline_test_lib.WriteTimeline(client_id_1, [entry_1], hunt_id=hunt_id)
timeline_test_lib.WriteTimeline(client_id_2, [entry_2], hunt_id=hunt_id)
- args = api_timeline.ApiGetCollectedHuntTimelinesArgs()
+ args = api_timeline_pb2.ApiGetCollectedHuntTimelinesArgs()
args.hunt_id = hunt_id
- args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.RAW_GZCHUNKED
+ args.format = (
+ api_timeline_pb2.ApiGetCollectedTimelineArgs.Format.RAW_GZCHUNKED
+ )
content = b"".join(self.handler.Handle(args).GenerateContent())
buffer = io.BytesIO(content)
@@ -553,13 +556,17 @@ def testRawGzchunkedMultipleClients(self):
client_filename_1 = f"{client_id_1}_foo.quux.com.gzchunked"
with archive.open(client_filename_1, mode="r") as file:
chunks = chunked.ReadAll(file)
- entries = list(rdf_timeline.TimelineEntry.DeserializeStream(chunks))
+ entries = list(
+ rdf_timeline.DeserializeTimelineEntryStream(iter(chunks))
+ )
self.assertEqual(entries, [entry_1])
client_filename_2 = f"{client_id_2}_foo.norf.com.gzchunked"
with archive.open(client_filename_2, mode="r") as file:
chunks = chunked.ReadAll(file)
- entries = list(rdf_timeline.TimelineEntry.DeserializeStream(chunks))
+ entries = list(
+ rdf_timeline.DeserializeTimelineEntryStream(iter(chunks))
+ )
self.assertEqual(entries, [entry_2])
def testBodySubsecondPrecision(self):
@@ -571,22 +578,21 @@ def testBodySubsecondPrecision(self):
snapshot.knowledge_base.fqdn = "foo.bar.baz"
data_store.REL_DB.WriteClientSnapshot(snapshot)
- hunt_obj = rdf_hunt_objects.Hunt()
+ hunt_obj = hunts_pb2.Hunt()
hunt_obj.hunt_id = hunt_id
hunt_obj.args.standard.flow_name = timeline.TimelineFlow.__name__
- hunt_obj.hunt_state = rdf_hunt_objects.Hunt.HuntState.PAUSED
- hunt_obj = mig_hunt_objects.ToProtoHunt(hunt_obj)
+ hunt_obj.hunt_state = hunts_pb2.Hunt.HuntState.PAUSED
data_store.REL_DB.WriteHuntObject(hunt_obj)
- entry = rdf_timeline.TimelineEntry()
+ entry = timeline_pb2.TimelineEntry()
entry.path = "/foo/bar/baz".encode("utf-8")
entry.btime_ns = int(1337.42 * 10**9)
timeline_test_lib.WriteTimeline(client_id, [entry], hunt_id=hunt_id)
- args = api_timeline.ApiGetCollectedHuntTimelinesArgs()
+ args = api_timeline_pb2.ApiGetCollectedHuntTimelinesArgs()
args.hunt_id = hunt_id
- args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY
+ args.format = api_timeline_pb2.ApiGetCollectedTimelineArgs.Format.BODY
args.body_opts.timestamp_subsecond_precision = True
content = b"".join(self.handler.Handle(args).GenerateContent())
diff --git a/grr/server/grr_response_server/gui/api_plugins/user.py b/grr/server/grr_response_server/gui/api_plugins/user.py
index 78362d518f..7ec975056e 100644
--- a/grr/server/grr_response_server/gui/api_plugins/user.py
+++ b/grr/server/grr_response_server/gui/api_plugins/user.py
@@ -5,17 +5,17 @@
import email
import itertools
import logging
+from typing import Iterable, Optional, Sequence, Union
import jinja2
from grr_response_core import config
from grr_response_core.lib import rdfvalue
-from grr_response_core.lib.rdfvalues import client as rdf_client
-from grr_response_core.lib.rdfvalues import paths as rdf_paths
from grr_response_core.lib.rdfvalues import structs as rdf_structs
from grr_response_core.lib.util import collection
from grr_response_proto import objects_pb2
from grr_response_proto import user_pb2
+from grr_response_proto.api import hunt_pb2
from grr_response_proto.api import user_pb2 as api_user_pb2
from grr_response_server import access_control
from grr_response_server import cronjobs
@@ -24,17 +24,17 @@
from grr_response_server import flow
from grr_response_server import notification as notification_lib
from grr_response_server.databases import db
+from grr_response_server.gui import api_call_context
from grr_response_server.gui import api_call_handler_base
from grr_response_server.gui import approval_checks
from grr_response_server.gui.api_plugins import client as api_client
from grr_response_server.gui.api_plugins import cron as api_cron
from grr_response_server.gui.api_plugins import flow as api_flow
from grr_response_server.gui.api_plugins import hunt as api_hunt
-from grr_response_server.gui.api_plugins import mig_flow
-from grr_response_server.gui.api_plugins import mig_hunt
+from grr_response_server.models import clients
+from grr_response_server.models import protobuf_utils
from grr_response_server.models import users
-from grr_response_server.rdfvalues import mig_hunt_objects
-from grr_response_server.rdfvalues import mig_objects
+from grr_response_server.rdfvalues import mig_cronjobs
from grr_response_server.rdfvalues import objects as rdf_objects
@@ -126,70 +126,88 @@ class ApiNotificationReference(rdf_structs.RDFProtoStruct):
ApiNotificationVfsReference,
]
- def InitFromObjectReference(self, ref):
- if ref.reference_type == ref.Type.UNSET:
- self.type = self.Type.UNSET
- elif ref.reference_type == ref.Type.CLIENT:
- self.type = self.Type.CLIENT
- self.client.client_id = ref.client.client_id
+def InitApiNotificationReferenceFromObjectReference(
+ reference: objects_pb2.ObjectReference,
+) -> api_user_pb2.ApiNotificationReference:
+ """Initializes ApiNotificationReference from an ObjectReference."""
- elif ref.reference_type == ref.Type.HUNT:
- self.type = self.Type.HUNT
- self.hunt.hunt_id = ref.hunt.hunt_id
+ api_reference = api_user_pb2.ApiNotificationReference()
- elif ref.reference_type == ref.Type.FLOW:
- self.type = self.Type.FLOW
- self.flow.client_id = ref.flow.client_id
- self.flow.flow_id = ref.flow.flow_id
+ if reference.reference_type == objects_pb2.ObjectReference.Type.UNSET:
+ api_reference.type = api_reference.Type.UNSET
- elif ref.reference_type == ref.Type.CRON_JOB:
- self.type = self.Type.CRON
- self.cron.cron_job_id = ref.cron_job.cron_job_id
+ elif reference.reference_type == objects_pb2.ObjectReference.CLIENT:
+ api_reference.type = api_reference.Type.CLIENT
+ api_reference.client.client_id = reference.client.client_id
- elif ref.reference_type == ref.Type.VFS_FILE:
- self.type = self.Type.VFS
- self.vfs.client_id = ref.vfs_file.client_id
+ elif reference.reference_type == objects_pb2.ObjectReference.HUNT:
+ api_reference.type = api_reference.Type.HUNT
+ api_reference.hunt.hunt_id = reference.hunt.hunt_id
- if ref.vfs_file.path_type == rdf_objects.PathInfo.PathType.UNSET:
- raise ValueError(
- "Can't init from VFS_FILE object reference with unset path_type."
- )
+ elif reference.reference_type == objects_pb2.ObjectReference.FLOW:
+ api_reference.type = api_reference.Type.FLOW
+ api_reference.flow.client_id = reference.flow.client_id
+ api_reference.flow.flow_id = reference.flow.flow_id
- self.vfs.vfs_path = ref.vfs_file.ToPath()
+ elif reference.reference_type == objects_pb2.ObjectReference.CRON_JOB:
+ api_reference.type = api_reference.Type.CRON
+ api_reference.cron.cron_job_id = reference.cron_job.cron_job_id
- elif ref.reference_type == ref.Type.APPROVAL_REQUEST:
- ref_ar = ref.approval_request
+ elif reference.reference_type == objects_pb2.ObjectReference.VFS_FILE:
+ api_reference.type = api_reference.Type.VFS
+ api_reference.vfs.client_id = reference.vfs_file.client_id
- if ref_ar.approval_type == ref_ar.ApprovalType.APPROVAL_TYPE_NONE:
- raise ValueError(
- "Can't init from APPROVAL_REQUEST object reference "
- "with unset approval_type."
- )
- elif ref_ar.approval_type == ref_ar.ApprovalType.APPROVAL_TYPE_CLIENT:
- self.type = self.Type.CLIENT_APPROVAL
- self.client_approval.approval_id = ref_ar.approval_id
- self.client_approval.username = ref_ar.requestor_username
- self.client_approval.client_id = ref_ar.subject_id
- elif ref_ar.approval_type == ref_ar.ApprovalType.APPROVAL_TYPE_HUNT:
- self.type = self.Type.HUNT_APPROVAL
- self.hunt_approval.approval_id = ref_ar.approval_id
- self.hunt_approval.username = ref_ar.requestor_username
- self.hunt_approval.hunt_id = ref_ar.subject_id
- elif ref_ar.approval_type == ref_ar.ApprovalType.APPROVAL_TYPE_CRON_JOB:
- self.type = self.Type.CRON_JOB_APPROVAL
- self.cron_job_approval.approval_id = ref_ar.approval_id
- self.cron_job_approval.username = ref_ar.requestor_username
- self.cron_job_approval.cron_job_id = ref_ar.subject_id
- else:
- raise ValueError(
- "Unexpected APPROVAL_REQUEST object reference type value: %d"
- % ref_ar.approval_type
- )
+ if reference.vfs_file.path_type == objects_pb2.PathInfo.PathType.UNSET:
+ raise ValueError(
+ "Can't init from VFS_FILE object reference with unset path_type."
+ )
+
+ api_reference.vfs.vfs_path = rdf_objects.VfsFileReferenceToPath(
+ reference.vfs_file
+ )
+
+ elif reference.reference_type == objects_pb2.ObjectReference.APPROVAL_REQUEST:
+ ref_ar = reference.approval_request
+
+ if (
+ ref_ar.approval_type
+ == objects_pb2.ApprovalRequest.ApprovalType.APPROVAL_TYPE_NONE
+ ):
+ raise ValueError(
+ "Can't init from APPROVAL_REQUEST object reference "
+ "with unset approval_type."
+ )
+ elif (
+ ref_ar.approval_type
+ == objects_pb2.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CLIENT
+ ):
+ api_reference.type = api_reference.Type.CLIENT_APPROVAL
+ api_reference.client_approval.approval_id = ref_ar.approval_id
+ api_reference.client_approval.username = ref_ar.requestor_username
+ api_reference.client_approval.client_id = ref_ar.subject_id
+ elif (
+ ref_ar.approval_type
+ == objects_pb2.ApprovalRequest.ApprovalType.APPROVAL_TYPE_HUNT
+ ):
+ api_reference.type = api_reference.Type.HUNT_APPROVAL
+ api_reference.hunt_approval.approval_id = ref_ar.approval_id
+ api_reference.hunt_approval.username = ref_ar.requestor_username
+ api_reference.hunt_approval.hunt_id = ref_ar.subject_id
+ elif (
+ ref_ar.approval_type
+ == objects_pb2.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CRON_JOB
+ ):
+ api_reference.type = api_reference.Type.CRON_JOB_APPROVAL
+ api_reference.cron_job_approval.approval_id = ref_ar.approval_id
+ api_reference.cron_job_approval.username = ref_ar.requestor_username
+ api_reference.cron_job_approval.cron_job_id = ref_ar.subject_id
else:
- raise ValueError("Unexpected reference type: %d" % ref.type)
+ raise ValueError("Unexpected approval type: %d" % ref_ar.approval_type)
+ else:
+ raise ValueError("Unexpected reference type: %d" % reference.reference_type)
- return self
+ return api_reference
class ApiNotification(rdf_structs.RDFProtoStruct):
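The hunk above is representative of the whole migration: `InitFrom*` instance methods on RDF wrappers become free functions that accept and return plain protos. Typical usage of the new converter, for illustration:

```python
from grr_response_proto import objects_pb2

ref = objects_pb2.ObjectReference()
ref.reference_type = objects_pb2.ObjectReference.Type.CLIENT
ref.client.client_id = "C.0123456789abcdef"  # Illustrative client ID.

api_ref = InitApiNotificationReferenceFromObjectReference(ref)
assert api_ref.type == api_ref.Type.CLIENT
assert api_ref.client.client_id == "C.0123456789abcdef"
```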
@@ -201,151 +219,62 @@ class ApiNotification(rdf_structs.RDFProtoStruct):
rdfvalue.RDFDatetime,
]
- def _GetUrnComponents(self, notification):
- # Still display if subject doesn't get set, this will appear in the GUI with
- # a target of "None"
- urn = "/"
- if notification.subject is not None:
- urn = notification.subject
-
- path = rdfvalue.RDFURN(urn)
- return path.Path().split("/")[1:]
-
- def InitFromNotification(self, notification, is_pending=False):
- """Initializes this object from an existing notification.
-
- Args:
- notification: A rdfvalues.flows.Notification object.
- is_pending: Indicates whether the user has already seen this notification
- or not.
-
- Returns:
- The current instance.
- """
- self.timestamp = notification.timestamp
- self.message = notification.message
- self.subject = str(notification.subject)
- self.is_pending = is_pending
-
- reference_type_enum = ApiNotificationReference.Type
-
- # Please see the comments to notification.Notify implementation
- # for the details of notification.type format. Short summary:
- # notification.type may be one of legacy values (i.e. "ViewObject") or
- # have a format of "[legacy value]:[new-style notification type]", i.e.
- # "ViewObject:TYPE_CLIENT_INTERROGATED".
- if ":" in notification.type:
- legacy_type, new_type = notification.type.split(":", 2)
- self.notification_type = new_type
- else:
- legacy_type = notification.type
-
- # TODO(user): refactor notifications, so that we send a meaningful
- # notification from the start, so that we don't have to do the
- # bridging/conversion/guessing here.
- components = self._GetUrnComponents(notification)
- if legacy_type == "Discovery":
- self.reference.type = reference_type_enum.CLIENT
- self.reference.client = ApiNotificationClientReference(
- client_id=components[0]
- )
- elif legacy_type == "ViewObject":
- if len(components) >= 2 and components[0] == "hunts":
- self.reference.type = reference_type_enum.HUNT
- self.reference.hunt.hunt_id = components[1]
- elif len(components) >= 2 and components[0] == "cron":
- self.reference.type = reference_type_enum.CRON
- self.reference.cron.cron_job_id = components[1]
- elif len(components) >= 3 and components[1] == "flows":
- self.reference.type = reference_type_enum.FLOW
- self.reference.flow.flow_id = components[2]
- self.reference.flow.client_id = components[0]
- elif len(components) == 1 and rdf_client.ClientURN.Validate(
- components[0]
- ):
- self.reference.type = reference_type_enum.CLIENT
- self.reference.client.client_id = components[0]
- else:
- if notification.subject:
- path = notification.subject.Path()
- for prefix in rdf_paths.PathSpec.AFF4_PREFIXES.values():
- part = "/%s%s" % (components[0], prefix)
- if path.startswith(part):
- self.reference.type = reference_type_enum.VFS
- self.reference.vfs.client_id = components[0]
- self.reference.vfs.vfs_path = (prefix + path[len(part) :]).lstrip(
- "/"
- )
- break
-
- if self.reference.type != reference_type_enum.VFS:
- self.reference.type = reference_type_enum.UNKNOWN
- self.reference.unknown.subject_urn = notification.subject
-
- elif legacy_type == "FlowStatus":
- if not components or not rdf_client.ClientURN.Validate(components[0]):
- self.reference.type = reference_type_enum.UNKNOWN
- self.reference.unknown.subject_urn = notification.subject
- else:
- self.reference.type = reference_type_enum.FLOW
- self.reference.flow.flow_id = notification.source.Basename()
- self.reference.flow.client_id = components[0]
-
- # TODO(user): refactor GrantAccess notification so that we don't have
- # to infer approval type from the URN.
- elif legacy_type == "GrantAccess":
- if rdf_client.ClientURN.Validate(components[1]):
- self.reference.type = reference_type_enum.CLIENT_APPROVAL
- self.reference.client_approval.client_id = components[1]
- self.reference.client_approval.approval_id = components[-1]
- self.reference.client_approval.username = components[-2]
- elif components[1] == "hunts":
- self.reference.type = reference_type_enum.HUNT_APPROVAL
- self.reference.hunt_approval.hunt_id = components[2]
- self.reference.hunt_approval.approval_id = components[-1]
- self.reference.hunt_approval.username = components[-2]
- elif components[1] == "cron":
- self.reference.type = reference_type_enum.CRON_JOB_APPROVAL
- self.reference.cron_job_approval.cron_job_id = components[2]
- self.reference.cron_job_approval.approval_id = components[-1]
- self.reference.cron_job_approval.username = components[-2]
- else:
- self.reference.type = reference_type_enum.UNKNOWN
- self.reference.unknown.subject_urn = notification.subject
- self.reference.unknown.source_urn = notification.source
+def InitApiNotificationFromUserNotification(
+ notification: objects_pb2.UserNotification,
+) -> api_user_pb2.ApiNotification:
+ """Initializes an ApiNotification from a UserNotification."""
- return self
+ api_notification = api_user_pb2.ApiNotification()
+ protobuf_utils.CopyAttr(notification, api_notification, "timestamp")
+ protobuf_utils.CopyAttr(notification, api_notification, "notification_type")
+ protobuf_utils.CopyAttr(notification, api_notification, "message")
+ api_notification.is_pending = (
+ notification.state == objects_pb2.UserNotification.State.STATE_PENDING
+ )
+ try:
+ api_notification.reference.CopyFrom(
+ InitApiNotificationReferenceFromObjectReference(notification.reference)
+ )
+ except ValueError as e:
+ logging.exception(
+ "Can't initialize notification from an object reference: %s", e
+ )
+ # In case of any initialization issue, simply create an empty reference.
+ api_notification.reference.CopyFrom(
+ api_user_pb2.ApiNotificationReference(
+ type=api_user_pb2.ApiNotificationReference.Type.UNSET
+ )
+ )
- def InitFromUserNotification(self, notification):
- self.timestamp = notification.timestamp
- self.notification_type = notification.notification_type
- self.message = notification.message
- self.is_pending = notification.state == notification.State.STATE_PENDING
- try:
- self.reference = ApiNotificationReference().InitFromObjectReference(
- notification.reference
- )
- except ValueError as e:
- logging.exception(
- "Can't initialize notification from an object reference: %s", e
- )
- # In case of any initialization issue, simply create an empty reference.
- self.reference = ApiNotificationReference(
- type=ApiNotificationReference.Type.UNSET
- )
+ return api_notification
- return self
+def InitApiGrrUserFromGrrUser(
+ user: objects_pb2.GRRUser,
+) -> api_user_pb2.ApiGrrUser:
+ """Initializes ApiGrrUser from a GRRUser."""
-class ApiGrrUserInterfaceTraits(rdf_structs.RDFProtoStruct):
- protobuf = api_user_pb2.ApiGrrUserInterfaceTraits
+ api_user = api_user_pb2.ApiGrrUser()
+ api_user.username = user.username
+
+ if user.user_type == objects_pb2.GRRUser.UserType.USER_TYPE_ADMIN:
+ api_user.user_type = api_user.UserType.USER_TYPE_ADMIN
+ else:
+ api_user.user_type = api_user.UserType.USER_TYPE_STANDARD
+
+ # Intentionally set default values if the user has no settings set.
+ api_user.settings.mode = user.ui_mode
+ api_user.settings.canary_mode = user.canary_mode
- def EnableAll(self):
- for type_descriptor in self.type_infos:
- self.Set(type_descriptor.name, True)
+ if config.CONFIG.Get("Email.enable_custom_email_address") and user.email:
+ api_user.email = user.email
- return self
+ return api_user
+
+
+class ApiGrrUserInterfaceTraits(rdf_structs.RDFProtoStruct):
+ protobuf = api_user_pb2.ApiGrrUserInterfaceTraits
class ApiGrrUser(rdf_structs.RDFProtoStruct):
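`protobuf_utils.CopyAttr`, used in `InitApiNotificationFromUserNotification` above and in `_FillApiApprovalFromApprovalRequest` below, copies a field from one proto to another, with an optional destination field name. A hypothetical reimplementation capturing the assumed semantics; this is not GRR's actual code:

```python
def CopyAttr(src, dst, field_name, dst_field_name=None):
  # Assumed semantics: copy a field only when it is set on the source
  # message (proto2-style presence via HasField()).
  if src.HasField(field_name):
    setattr(dst, dst_field_name or field_name, getattr(src, field_name))
```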
@@ -357,41 +286,136 @@ class ApiGrrUser(rdf_structs.RDFProtoStruct):
GUISettings,
]
- def InitFromDatabaseObject(self, db_obj):
- self.username = db_obj.username
- if db_obj.user_type == db_obj.UserType.USER_TYPE_ADMIN:
- self.user_type = self.UserType.USER_TYPE_ADMIN
- else:
- self.user_type = self.UserType.USER_TYPE_STANDARD
+def InitApiHuntApprovalFromApprovalRequest(
+ approval_request: objects_pb2.ApprovalRequest,
+) -> api_user_pb2.ApiHuntApproval:
+ """Initializes ApiHuntApproval from an ApprovalRequest."""
+ api_hunt_approval = api_user_pb2.ApiHuntApproval()
+
+ _FillApiApprovalFromApprovalRequest(api_hunt_approval, approval_request)
+
+ approval_subject_obj = data_store.REL_DB.ReadHuntObject(
+ approval_request.subject_id
+ )
+
+ approval_subject_counters = data_store.REL_DB.ReadHuntCounters(
+ approval_request.subject_id
+ )
+ api_hunt_approval.subject.CopyFrom(
+ api_hunt.InitApiHuntFromHuntObject(
+ approval_subject_obj,
+ hunt_counters=approval_subject_counters,
+ with_full_summary=True,
+ )
+ )
+ original_object = approval_subject_obj.original_object
+
+ if (
+ original_object.object_type
+ == hunt_pb2.ApiFlowLikeObjectReference.ObjectType.FLOW_REFERENCE
+ ):
+ original_flow = data_store.REL_DB.ReadFlowObject(
+ original_object.flow_reference.client_id,
+ original_object.flow_reference.flow_id,
+ )
+ api_hunt_approval.copied_from_flow.CopyFrom(
+ api_flow.InitApiFlowFromFlowObject(original_flow)
+ )
+
+ elif (
+ original_object.object_type
+ == hunt_pb2.ApiFlowLikeObjectReference.ObjectType.HUNT_REFERENCE
+ ):
+ original_hunt = data_store.REL_DB.ReadHuntObject(
+ original_object.hunt_reference.hunt_id
+ )
+ original_hunt_counters = data_store.REL_DB.ReadHuntCounters(
+ original_object.hunt_reference.hunt_id
+ )
+ api_hunt_approval.copied_from_hunt.CopyFrom(
+ api_hunt.InitApiHuntFromHuntObject(
+ original_hunt,
+ hunt_counters=original_hunt_counters,
+ with_full_summary=True,
+ )
+ )
- self.settings.mode = db_obj.ui_mode
- self.settings.canary_mode = db_obj.canary_mode
- if config.CONFIG.Get("Email.enable_custom_email_address") and db_obj.email:
- self.email = db_obj.email
+ return api_hunt_approval
- return self
+def InitApiClientApprovalFromApprovalRequest(
+ approval_request: objects_pb2.ApprovalRequest,
+) -> api_user_pb2.ApiClientApproval:
+ """Initializes ApiClientApproval from an ApprovalRequest."""
-def _InitApiApprovalFromDatabaseObject(api_approval, db_obj):
- """Initializes Api(Client|Hunt|CronJob)Approval from the database object."""
+ api_client_approval = api_user_pb2.ApiClientApproval()
- api_approval.id = db_obj.approval_id
- api_approval.requestor = db_obj.requestor_username
- api_approval.reason = db_obj.reason
+ client_full_info = data_store.REL_DB.ReadClientFullInfo(
+ approval_request.subject_id
+ )
+ api_client_approval.subject.CopyFrom(
+ clients.ApiClientFromClientFullInfo(
+ approval_request.subject_id, client_full_info
+ )
+ )
- api_approval.notified_users = sorted(db_obj.notified_users)
- api_approval.email_cc_addresses = sorted(db_obj.email_cc_addresses)
- api_approval.email_message_id = db_obj.email_message_id
+ _FillApiApprovalFromApprovalRequest(api_client_approval, approval_request)
+ return api_client_approval
- # raise ValueError("INIT:", db_obj)
- api_approval.approvers = sorted([g.grantor_username for g in db_obj.grants])
+def InitApiCronJobApprovalFromApprovalRequest(
+ approval_request: objects_pb2.ApprovalRequest,
+) -> api_user_pb2.ApiCronJobApproval:
+ """Initializes ApiCronJobApproval from an ApprovalRequest."""
- api_approval.expiration_time_us = db_obj.expiration_time
+ api_cron_job_approval = api_user_pb2.ApiCronJobApproval()
+ _FillApiApprovalFromApprovalRequest(api_cron_job_approval, approval_request)
+
+ approval_subject_obj = cronjobs.CronManager().ReadJob(
+ approval_request.subject_id
+ )
+ approval_subject_obj = mig_cronjobs.ToProtoCronJob(approval_subject_obj)
+ api_cron_job_approval.subject.CopyFrom(
+ api_cron.InitApiCronJobFromCronJob(approval_subject_obj)
+ )
+
+ return api_cron_job_approval
+
+
+def _FillApiApprovalFromApprovalRequest(
+ api_approval: Union[
+ api_user_pb2.ApiClientApproval,
+ api_user_pb2.ApiHuntApproval,
+ api_user_pb2.ApiCronJobApproval,
+ ],
+ approval_request: objects_pb2.ApprovalRequest,
+):
+ """Fills a given Api(Client|Hunt|CronJob)Approval with data from an ApprovalRequest."""
+
+ protobuf_utils.CopyAttr(approval_request, api_approval, "approval_id", "id")
+ protobuf_utils.CopyAttr(
+ approval_request, api_approval, "requestor_username", "requestor"
+ )
+ protobuf_utils.CopyAttr(approval_request, api_approval, "reason", "reason")
+ protobuf_utils.CopyAttr(approval_request, api_approval, "email_message_id")
+
+ api_approval.notified_users.extend(sorted(approval_request.notified_users))
+ api_approval.email_cc_addresses.extend(
+ sorted(approval_request.email_cc_addresses)
+ )
+
+ api_approval.approvers.extend(
+ sorted([g.grantor_username for g in approval_request.grants])
+ )
+ # TODO(user): Remove this once Cron jobs are removed.
+ if not isinstance(api_approval, api_user_pb2.ApiCronJobApproval):
+ protobuf_utils.CopyAttr(
+ approval_request, api_approval, "expiration_time", "expiration_time_us"
+ )
try:
- approval_checks.CheckApprovalRequest(db_obj)
+ approval_checks.CheckApprovalRequest(approval_request)
api_approval.is_valid = True
except access_control.UnauthorizedAccess as e:
api_approval.is_valid_message = str(e)
@@ -400,6 +424,30 @@ def _InitApiApprovalFromDatabaseObject(api_approval, db_obj):
return api_approval
+def GetSubjectTitleForHuntApproval(
+ approval: api_user_pb2.ApiHuntApproval,
+) -> str:
+ """Returns a human-readable title for a hunt approval."""
+ return f"hunt {approval.subject.hunt_id}"
+
+
+def GetSubjectTitleForCronJobApproval(
+ approval: api_user_pb2.ApiCronJobApproval,
+) -> str:
+ """Returns a human-readable title for a cron job approval."""
+ return f"a cron job {approval.subject.cron_job_id}"
+
+
+def GetSubjectTitleForClientApproval(
+ approval: api_user_pb2.ApiClientApproval,
+) -> str:
+ """Returns a human-readable title for a client approval."""
+ return (
+ f"GRR client {approval.subject.client_id} "
+ f"({approval.subject.knowledge_base.fqdn})"
+ )
+
+
class ApiClientApproval(rdf_structs.RDFProtoStruct):
"""API client approval object."""
@@ -409,49 +457,6 @@ class ApiClientApproval(rdf_structs.RDFProtoStruct):
rdfvalue.RDFDatetime,
]
- def InitFromDatabaseObject(self, db_obj, approval_subject_obj=None):
- if not approval_subject_obj:
- approval_subject_obj = data_store.REL_DB.ReadClientFullInfo(
- db_obj.subject_id
- )
- approval_subject_obj = mig_objects.ToRDFClientFullInfo(
- approval_subject_obj
- )
- self.subject = api_client.ApiClient().InitFromClientInfo(
- db_obj.subject_id, approval_subject_obj
- )
-
- return _InitApiApprovalFromDatabaseObject(self, db_obj)
-
- @property
- def subject_title(self):
- return "GRR client %s (%s)" % (
- self.subject.client_id,
- self.subject.knowledge_base.fqdn,
- )
-
- @property
- def review_url_path(self):
- return (
- f"/v2/clients/{self.subject.client_id}/users/{self.requestor}"
- f"/approvals/{self.id}"
- )
-
- @property
- def review_url_path_legacy(self):
- return (
- f"/#/users/{self.requestor}/approvals/client/"
- f"{self.subject.client_id}/{self.id}"
- )
-
- @property
- def subject_url_path(self):
- return f"/v2/clients/{self.subject.client_id}"
-
- @property
- def subject_url_path_legacy(self):
- return f"#/clients/{self.subject.client_id}"
-
def ObjectReference(self):
at = rdf_objects.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CLIENT
return rdf_objects.ObjectReference(
@@ -475,75 +480,6 @@ class ApiHuntApproval(rdf_structs.RDFProtoStruct):
rdfvalue.RDFDatetime,
]
- def InitFromDatabaseObject(self, db_obj, approval_subject_obj=None):
- _InitApiApprovalFromDatabaseObject(self, db_obj)
-
- if not approval_subject_obj:
- approval_subject_obj = data_store.REL_DB.ReadHuntObject(db_obj.subject_id)
-
- approval_subject_counters = data_store.REL_DB.ReadHuntCounters(
- db_obj.subject_id
- )
- self.subject = mig_hunt.ToRDFApiHunt(
- api_hunt.InitApiHuntFromHuntObject(
- approval_subject_obj,
- hunt_counters=approval_subject_counters,
- with_full_summary=True,
- )
- )
- approval_subject_obj = mig_hunt_objects.ToRDFHunt(approval_subject_obj)
- original_object = approval_subject_obj.original_object
-
- if original_object.object_type == "FLOW_REFERENCE":
- original_flow = data_store.REL_DB.ReadFlowObject(
- original_object.flow_reference.client_id,
- original_object.flow_reference.flow_id,
- )
- copied_from_flow = api_flow.InitApiFlowFromFlowObject(original_flow)
- self.copied_from_flow = mig_flow.ToRDFApiFlow(copied_from_flow)
- elif original_object.object_type == "HUNT_REFERENCE":
- original_hunt = data_store.REL_DB.ReadHuntObject(
- original_object.hunt_reference.hunt_id
- )
- original_hunt_counters = data_store.REL_DB.ReadHuntCounters(
- original_object.hunt_reference.hunt_id
- )
- self.copied_from_hunt = mig_hunt.ToRDFApiHunt(
- api_hunt.InitApiHuntFromHuntObject(
- original_hunt,
- hunt_counters=original_hunt_counters,
- with_full_summary=True,
- )
- )
-
- return self
-
- @property
- def subject_title(self):
- return "hunt %s" % (self.subject.hunt_id)
-
- @property
- def review_url_path(self):
- return (
- f"/v2/hunts/{self.subject.hunt_id}/users/{self.requestor}/"
- f"approvals/{self.id}"
- )
-
- @property
- def review_url_path_legacy(self):
- return (
- f"/#/users/{self.requestor}/approvals/hunt/{self.subject.hunt_id}/"
- f"{self.id}"
- )
-
- @property
- def subject_url_path(self):
- return f"/v2/hunts/{self.subject.hunt_id}"
-
- @property
- def subject_url_path_legacy(self):
- return f"#/hunts/{self.subject.hunt_id}"
-
def ObjectReference(self):
at = rdf_objects.ApprovalRequest.ApprovalType.APPROVAL_TYPE_HUNT
return rdf_objects.ObjectReference(
@@ -565,41 +501,6 @@ class ApiCronJobApproval(rdf_structs.RDFProtoStruct):
api_cron.ApiCronJob,
]
- def _FillInSubject(self, job_id, approval_subject_obj=None):
- if not approval_subject_obj:
- approval_subject_obj = cronjobs.CronManager().ReadJob(job_id)
- self.subject = api_cron.ApiCronJob.InitFromObject(approval_subject_obj)
-
- def InitFromDatabaseObject(self, db_obj, approval_subject_obj=None):
- _InitApiApprovalFromDatabaseObject(self, db_obj)
- self._FillInSubject(
- db_obj.subject_id, approval_subject_obj=approval_subject_obj
- )
- return self
-
- @property
- def subject_title(self):
- return "a cron job %s" % (self.subject.cron_job_id)
-
- @property
- def review_url_path(self):
- return self.review_url_path_legacy
-
- @property
- def review_url_path_legacy(self):
- return (
- f"/#/users/{self.requestor}/approvals/cron-job/"
- f"{self.subject.cron_job_id}/{self.id}"
- )
-
- @property
- def subject_url_path(self):
- return self.subject_url_path_legacy
-
- @property
- def subject_url_path_legacy(self):
- return f"#/crons/{self.subject.cron_job_id}"
-
def ObjectReference(self):
at = rdf_objects.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CRON_JOB
return rdf_objects.ObjectReference(
@@ -613,6 +514,58 @@ def ObjectReference(self):
)
+class ApiCreateHuntApprovalArgs(rdf_structs.RDFProtoStruct):
+ protobuf = api_user_pb2.ApiCreateHuntApprovalArgs
+ rdf_deps = [
+ ApiHuntApproval,
+ api_hunt.ApiHuntId,
+ ]
+
+
+class ApiGetHuntApprovalArgs(rdf_structs.RDFProtoStruct):
+ protobuf = api_user_pb2.ApiGetHuntApprovalArgs
+ rdf_deps = [
+ api_hunt.ApiHuntId,
+ ]
+
+
+class ApiCreateCronJobApprovalArgs(rdf_structs.RDFProtoStruct):
+ protobuf = api_user_pb2.ApiCreateCronJobApprovalArgs
+ rdf_deps = [
+ api_cron.ApiCronJobId,
+ ApiCronJobApproval,
+ ]
+
+
+class ApiGetCronJobApprovalArgs(rdf_structs.RDFProtoStruct):
+ protobuf = api_user_pb2.ApiGetCronJobApprovalArgs
+ rdf_deps = [
+ api_cron.ApiCronJobId,
+ ]
+
+
+class ApiCreateClientApprovalArgs(rdf_structs.RDFProtoStruct):
+ protobuf = api_user_pb2.ApiCreateClientApprovalArgs
+ rdf_deps = [
+ ApiClientApproval,
+ api_client.ApiClientId,
+ ]
+
+
+class ApiGetClientApprovalArgs(rdf_structs.RDFProtoStruct):
+ protobuf = api_user_pb2.ApiGetClientApprovalArgs
+ rdf_deps = [
+ api_client.ApiClientId,
+ ]
+
+
+class ApiListClientApprovalsArgs(rdf_structs.RDFProtoStruct):
+ protobuf = api_user_pb2.ApiListClientApprovalsArgs
+ rdf_deps = [
+ api_client.ApiClientId,
+ ]
+
+
_EMAIL_HEADER = """
@@ -669,176 +622,6 @@ def ObjectReference(self):
""" + _EMAIL_FOOTER
-
-class ApiCreateApprovalHandlerBase(api_call_handler_base.ApiCallHandler):
- """Base class for all Create*Approval handlers."""
-
- # objects.ApprovalRequest.ApprovalType value describing the approval type.
- approval_type = None
-
- def SendApprovalEmail(self, approval):
- if not config.CONFIG.Get("Email.send_approval_emails"):
- return
-
- subject_template = jinja2.Template(
- "Approval for {{ user }} to access {{ subject }}.", autoescape=True
- )
- subject = subject_template.render(
- user=approval.requestor, subject=approval.subject_title
- )
-
- template = jinja2.Template(_APPROVAL_REQUESTED_TEMPLATE, autoescape=True)
- base_url = config.CONFIG["AdminUI.url"].rstrip("/") + "/"
- legacy_approval_url = base_url + approval.review_url_path_legacy.lstrip("/")
- approval_url = base_url + approval.review_url_path.lstrip("/")
-
- if approval_url == legacy_approval_url:
- # In case the new UI does not yet support approval reviews for the given
- # subject type (client, hunt, cronjob), hide the fallback link to the
- # old UI in the email template. Instead, clicking the main button will
- # link the user to the old UI.
- legacy_approval_url = None
-
- body = template.render(
- requestor=approval.requestor,
- reason=approval.reason,
- legacy_approval_url=legacy_approval_url,
- approval_url=approval_url,
- subject_title=approval.subject_title,
- # If you feel like it, add a cute dog picture here :)
- html_signature=config.CONFIG["Email.approval_signature"],
- text_signature=config.CONFIG["Email.signature"],
- )
-
- requestor_email = users.GetEmail(
- data_store.REL_DB.ReadGRRUser(approval.requestor)
- )
- notified_emails = []
- for user in approval.notified_users:
- user = data_store.REL_DB.ReadGRRUser(user)
- notified_emails.append(users.GetEmail(user))
-
- email_alerts.EMAIL_ALERTER.SendEmail(
- to_addresses=",".join(notified_emails),
- from_address=requestor_email,
- subject=subject,
- message=body,
- is_html=True,
- cc_addresses=",".join(approval.email_cc_addresses),
- message_id=approval.email_message_id,
- )
-
- def CreateApprovalNotification(self, approval):
- for user in approval.notified_users:
- try:
- notification_lib.Notify(
- user.strip(),
- self.__class__.approval_notification_type,
- "Please grant access to %s" % approval.subject_title,
- approval.ObjectReference(),
- )
- except db.UnknownGRRUserError:
- # The relational db does not allow sending notifications to users that
- # don't exist. This should happen rarely but we need to catch this case.
- logging.error("Notification sent for unknown user %s!", user.strip())
-
- def Handle(self, args, context=None):
- if not args.approval.reason:
- raise ValueError("Approval reason can't be empty.")
-
- request = rdf_objects.ApprovalRequest(
- requestor_username=context.username,
- approval_type=self.__class__.approval_type,
- reason=args.approval.reason,
- notified_users=args.approval.notified_users,
- email_cc_addresses=args.approval.email_cc_addresses,
- subject_id=args.BuildSubjectId(),
- expiration_time=self._CalculateExpiration(args),
- email_message_id=email.utils.make_msgid(),
- )
- proto_request = mig_objects.ToProtoApprovalRequest(request)
- request.approval_id = data_store.REL_DB.WriteApprovalRequest(proto_request)
-
- data_store.REL_DB.GrantApproval(
- approval_id=request.approval_id,
- requestor_username=context.username,
- grantor_username=context.username,
- )
-
- stored_request = data_store.REL_DB.ReadApprovalRequest(
- context.username, request.approval_id
- )
- rdf_stored_request = mig_objects.ToRDFApprovalRequest(stored_request)
- result = self.__class__.result_type().InitFromDatabaseObject(
- rdf_stored_request
- )
-
- self.SendApprovalEmail(result)
- self.CreateApprovalNotification(result)
- return result
-
- def _CalculateExpiration(self, args):
- del args # unused
- return rdfvalue.RDFDatetime.Now() + config.CONFIG["ACL.token_expiry"]
-
-
-class ApiListApprovalsHandlerBase(api_call_handler_base.ApiCallHandler):
- """Renders list of all user approvals."""
-
- def _FilterRelationalApprovalRequests(
- self, approval_requests, approval_create_fn, state
- ):
- for ar in approval_requests:
- client_approval = approval_create_fn(ar)
-
- if state == ApiListClientApprovalsArgs.State.ANY:
- yield client_approval
- elif state == ApiListClientApprovalsArgs.State.VALID:
- if client_approval.is_valid:
- yield client_approval
- elif state == ApiListClientApprovalsArgs.State.INVALID:
- if not client_approval.is_valid:
- yield client_approval
-
-
-class ApiGetApprovalHandlerBase(api_call_handler_base.ApiCallHandler):
- """Base class for all Get*Approval handlers."""
-
- # objects.ApprovalRequest.ApprovalType value describing the approval type.
- approval_type = None
-
- def Handle(self, args, context=None):
- try:
- proto_approval = data_store.REL_DB.ReadApprovalRequest(
- args.username, args.approval_id
- )
- except db.UnknownApprovalRequestError:
- raise ApprovalNotFoundError(
- "No approval with id=%s, type=%s, subject=%s could be found."
- % (
- args.approval_id,
- self.__class__.approval_type,
- args.BuildSubjectId(),
- )
- )
-
- approval_obj = mig_objects.ToRDFApprovalRequest(proto_approval)
-
- if approval_obj.approval_type != self.__class__.approval_type:
- raise ValueError(
- "Unexpected approval type: %s, expected: %s"
- % (approval_obj.approval_type, self.__class__.approval_type)
- )
-
- if approval_obj.subject_id != args.BuildSubjectId():
- raise ValueError(
- "Unexpected subject id: %s, expected: %s"
- % (approval_obj.subject_id, args.BuildSubjectId())
- )
-
- return self.__class__.result_type().InitFromDatabaseObject(approval_obj)
-
-
_APPROVAL_GRANTED_TEMPLATE = _EMAIL_HEADER + """
Access has been granted:
@@ -872,208 +655,459 @@ def Handle(self, args, context=None):
""" + _EMAIL_FOOTER
-class ApiGrantApprovalHandlerBase(api_call_handler_base.ApiCallHandler):
- """Base class reused by all client approval handlers."""
+def CreateApprovalRequest(
+ args: Union[
+ api_user_pb2.ApiCreateClientApprovalArgs,
+ api_user_pb2.ApiCreateCronJobApprovalArgs,
+ api_user_pb2.ApiCreateHuntApprovalArgs,
+ ],
+ approval_type: objects_pb2.ApprovalRequest.ApprovalType,
+ subject_id: str,
+ expiration_time: rdfvalue.RDFDatetime,
+ context: api_call_context.ApiCallContext,
+) -> objects_pb2.ApprovalRequest:
+ """Creates an approval request.
+
+ Args:
+ args: The API call arguments.
+ approval_type: The type of the approval request.
+ subject_id: The subject ID of the approval request.
+ expiration_time: The expiration time of the approval request.
+ context: The API call context.
+
+ Returns:
+ The created approval request.
+
+ Raises:
+ ValueError: If the approval reason is empty.
+ """
+
+ if not args.approval.reason:
+ raise ValueError("Empty approval reason")
+
+ request = objects_pb2.ApprovalRequest(
+ requestor_username=context.username,
+ approval_type=approval_type,
+ reason=args.approval.reason,
+ notified_users=args.approval.notified_users,
+ email_cc_addresses=args.approval.email_cc_addresses,
+ subject_id=subject_id,
+ expiration_time=int(expiration_time),
+ email_message_id=email.utils.make_msgid(),
+ )
+ request.approval_id = data_store.REL_DB.WriteApprovalRequest(request)
- # objects.ApprovalRequest.ApprovalType value describing the approval type.
- approval_type = None
+ data_store.REL_DB.GrantApproval(
+ approval_id=request.approval_id,
+ requestor_username=context.username,
+ grantor_username=context.username,
+ )
- # Class to be used to grant the approval. Should be set by a subclass.
- approval_grantor = None
+ return GetApprovalRequest(request.approval_id, context.username)
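`CreateApprovalRequest` writes the request, immediately self-grants it on behalf of the requestor, and re-reads the stored object, so callers receive the persisted proto including the assigned `approval_id`. A sketch of how the handlers below invoke it (argument values are illustrative):

```python
approval_request = CreateApprovalRequest(
    args,
    objects_pb2.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CLIENT,
    args.client_id,
    # Default expiry, mirroring _GetTokenExpirationTime() below.
    rdfvalue.RDFDatetime.Now() + config.CONFIG["ACL.token_expiry"],
    context,
)
# approval_request now carries a fresh approval_id plus one grant
# issued by the requestor themselves.
```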
- def SendGrantEmail(self, approval, context=None):
- if not config.CONFIG.Get("Email.send_approval_emails"):
- return
- subject_template = jinja2.Template(
- "Approval for {{ user }} to access {{ subject }}.", autoescape=True
- )
- subject = subject_template.render(
- user=approval.requestor, subject=approval.subject_title
- )
+def GetApprovalRequest(
+ approval_id: str,
+ username: str,
+) -> objects_pb2.ApprovalRequest:
+ """Gets an approval request, raises if not found.
- template = jinja2.Template(_APPROVAL_GRANTED_TEMPLATE, autoescape=True)
- base_url = config.CONFIG["AdminUI.url"].rstrip("/") + "/"
- subject_url = base_url + approval.subject_url_path.lstrip("/")
- legacy_subject_url = base_url + approval.subject_url_path_legacy.lstrip("/")
-
- if subject_url == legacy_subject_url:
- # In case the new UI does not yet support showing the given subject type
- # (client, hunt, cronjob), hide the fallback link to the old UI in the
- # email template. Instead, clicking the main button will link the user to
- # the old UI.
- legacy_subject_url = None
-
- body = template.render(
- grantor=context.username,
- requestor=approval.requestor,
- reason=approval.reason,
- legacy_subject_url=legacy_subject_url,
- subject_url=subject_url,
- subject_title=approval.subject_title,
- html_signature=config.CONFIG["Email.approval_signature"],
- text_signature=config.CONFIG["Email.signature"],
- )
+ Args:
+ approval_id: The approval ID to look for.
+ username: The username of the user that is requesting the approval.
- # Email subject should match approval request, and we add message id
- # references so they are grouped together in a thread by gmail.
- headers = {
- "In-Reply-To": approval.email_message_id,
- "References": approval.email_message_id,
- }
-
- requestor = data_store.REL_DB.ReadGRRUser(approval.requestor)
- requestor_email = users.GetEmail(requestor)
- username = data_store.REL_DB.ReadGRRUser(context.username)
- username_email = users.GetEmail(username)
-
- email_alerts.EMAIL_ALERTER.SendEmail(
- to_addresses=requestor_email,
- from_address=username_email,
- subject=subject,
- message=body,
- is_html=True,
- cc_addresses=",".join(approval.email_cc_addresses),
- headers=headers,
- )
+ Returns:
+ The approval request.
- def CreateGrantNotification(self, approval, context=None):
- notification_lib.Notify(
- approval.requestor,
- self.__class__.approval_notification_type,
- "%s has granted you access to %s."
- % (context.username, approval.subject_title),
- approval.subject.ObjectReference(),
+ Raises:
+ ApprovalNotFoundError: If the approval could not be found.
+ """
+
+ try:
+ approval_request = data_store.REL_DB.ReadApprovalRequest(
+ username, approval_id
)
+ except db.UnknownApprovalRequestError as ex:
+ raise ApprovalNotFoundError(
+ "No approval with (id=%s, username=%s) could be found."
+ % (approval_id, username)
+ ) from ex
+ return approval_request
+
+
+def GrantApprovalRequest(
+ approval_id: str,
+ requestor_username: str,
+ grantor_username: str,
+) -> objects_pb2.ApprovalRequest:
+ """Grants an approval request.
+
+ Args:
+ approval_id: The approval ID to grant.
+ requestor_username: The username of the user that is requesting the
+ approval.
+ grantor_username: The username of the user that is granting the approval.
+
+ Returns:
+ The approval request.
+
+ Raises:
+ ValueError: If the username is empty.
+ ApprovalNotFoundError: If the approval could not be found.
+ """
+ if not requestor_username:
+ raise ValueError("Empty requestor username")
+
+ data_store.REL_DB.GrantApproval(
+ requestor_username, approval_id, grantor_username
+ )
- def Handle(self, args, context=None):
- if not args.username:
- raise ValueError("username can't be empty.")
+ return GetApprovalRequest(approval_id, requestor_username)
- try:
- data_store.REL_DB.GrantApproval(
- args.username, args.approval_id, context.username
- )
- proto_approval = data_store.REL_DB.ReadApprovalRequest(
- args.username, args.approval_id
- )
- approval_obj = mig_objects.ToRDFApprovalRequest(proto_approval)
- except db.UnknownApprovalRequestError:
- raise ApprovalNotFoundError(
- "No approval with id=%s, type=%s, subject=%s could be found."
- % (
- args.approval_id,
- self.__class__.approval_type,
- args.BuildSubjectId(),
- )
- )
+def SendApprovalRequestEmail(
+ approval: Union[
+ api_user_pb2.ApiClientApproval,
+ api_user_pb2.ApiHuntApproval,
+ api_user_pb2.ApiCronJobApproval,
+ ],
+ subject_title: str,
+ review_url_path: str,
+ review_url_path_legacy: str,
+) -> None:
+ """Sends a emails about a given approval request."""
+
+ if not config.CONFIG.Get("Email.send_approval_emails"):
+ return
- result = self.__class__.result_type().InitFromDatabaseObject(approval_obj)
+ subject_template = jinja2.Template(
+ "Approval for {{ user }} to access {{ subject }}.", autoescape=True
+ )
+ subject = subject_template.render(
+ user=approval.requestor, subject=subject_title
+ )
- self.SendGrantEmail(result, context=context)
- self.CreateGrantNotification(result, context=context)
- return result
+ template = jinja2.Template(_APPROVAL_REQUESTED_TEMPLATE, autoescape=True)
+ base_url = config.CONFIG["AdminUI.url"].rstrip("/") + "/"
+ legacy_approval_url = base_url + review_url_path_legacy.lstrip("/")
+ approval_url = base_url + review_url_path.lstrip("/")
+
+ if approval_url == legacy_approval_url:
+ # In case the new UI does not yet support approval reviews for the given
+ # subject type (client, hunt, cronjob), hide the fallback link to the
+ # old UI in the email template. Instead, clicking the main button will
+ # link the user to the old UI.
+ legacy_approval_url = None
+
+ body = template.render(
+ requestor=approval.requestor,
+ reason=approval.reason,
+ legacy_approval_url=legacy_approval_url,
+ approval_url=approval_url,
+ subject_title=subject_title,
+ # If you feel like it, add a cute dog picture here :)
+ html_signature=config.CONFIG["Email.approval_signature"],
+ text_signature=config.CONFIG["Email.signature"],
+ )
+ requestor_email = users.GetEmail(
+ data_store.REL_DB.ReadGRRUser(approval.requestor)
+ )
+ notified_emails = []
+ for user in approval.notified_users:
+ user = data_store.REL_DB.ReadGRRUser(user)
+ notified_emails.append(users.GetEmail(user))
+
+ email_alerts.EMAIL_ALERTER.SendEmail(
+ to_addresses=",".join(notified_emails),
+ from_address=requestor_email,
+ subject=subject,
+ message=body,
+ is_html=True,
+ cc_addresses=",".join(approval.email_cc_addresses),
+ message_id=approval.email_message_id,
+ )
-class ApiClientApprovalArgsBase(rdf_structs.RDFProtoStruct):
- """Base class for client approvals."""
- __abstract = True # pylint: disable=g-bad-name
+def SendGrantEmail(
+ approval: Union[
+ api_user_pb2.ApiClientApproval,
+ api_user_pb2.ApiHuntApproval,
+ api_user_pb2.ApiCronJobApproval,
+ ],
+ username: str,
+ subject_title: str,
+ subject_url_path: str,
+ subject_url_path_legacy: str,
+) -> None:
+ """Sends an email about a granted approval request."""
+
+ if not config.CONFIG.Get("Email.send_approval_emails"):
+ return
+
+ subject_template = jinja2.Template(
+ "Approval for {{ user }} to access {{ subject }}.", autoescape=True
+ )
+ subject = subject_template.render(
+ user=approval.requestor, subject=subject_title
+ )
- def BuildSubjectId(self):
- return str(self.client_id)
+ template = jinja2.Template(_APPROVAL_GRANTED_TEMPLATE, autoescape=True)
+ base_url = config.CONFIG["AdminUI.url"].rstrip("/") + "/"
+ subject_url = base_url + subject_url_path.lstrip("/")
+ legacy_subject_url = base_url + subject_url_path_legacy.lstrip("/")
+
+ if subject_url == legacy_subject_url:
+ # In case the new UI does not yet support showing the given subject type
+ # (client, hunt, cronjob), hide the fallback link to the old UI in the
+ # email template. Instead, clicking the main button will link the user to
+ # the old UI.
+ legacy_subject_url = None
+
+ body = template.render(
+ grantor=username,
+ requestor=approval.requestor,
+ reason=approval.reason,
+ legacy_subject_url=legacy_subject_url,
+ subject_url=subject_url,
+ subject_title=subject_title,
+ html_signature=config.CONFIG["Email.approval_signature"],
+ text_signature=config.CONFIG["Email.signature"],
+ )
+ # Email subject should match approval request, and we add message id
+ # references so they are grouped together in a thread by gmail.
+ headers = {
+ "In-Reply-To": approval.email_message_id,
+ "References": approval.email_message_id,
+ }
+
+ requestor = data_store.REL_DB.ReadGRRUser(approval.requestor)
+ requestor_email = users.GetEmail(requestor)
+ user = data_store.REL_DB.ReadGRRUser(username)
+ user_email = users.GetEmail(user)
+
+ email_alerts.EMAIL_ALERTER.SendEmail(
+ to_addresses=requestor_email,
+ from_address=user_email,
+ subject=subject,
+ message=body,
+ is_html=True,
+ cc_addresses=",".join(approval.email_cc_addresses),
+ headers=headers,
+ )
+
+
+def CreateApprovalNotification(
+ approval: Union[
+ ApiClientApproval,
+ ApiHuntApproval,
+ ApiCronJobApproval,
+ ],
+ notification_type: rdf_objects.UserNotification.Type,
+ subject_title: str,
+) -> None:
+ """Creates a user notification for the given approval."""
+
+ for user in approval.notified_users:
+ try:
+ notification_lib.Notify(
+ user.strip(),
+ notification_type,
+ "Please grant access to %s" % subject_title,
+ approval.ObjectReference(),
+ )
+ except db.UnknownGRRUserError:
+ # The relational db does not allow sending notifications to users that
+ # don't exist. This should happen rarely but we need to catch this case.
+ logging.error("Notification sent for unknown user %s!", user.strip())
-class ApiCreateClientApprovalArgs(ApiClientApprovalArgsBase):
- protobuf = api_user_pb2.ApiCreateClientApprovalArgs
- rdf_deps = [
- ApiClientApproval,
- api_client.ApiClientId,
- ]
+def _GetTokenExpirationTime() -> rdfvalue.RDFDatetime:
+ return rdfvalue.RDFDatetime.Now() + config.CONFIG["ACL.token_expiry"]
-class ApiCreateClientApprovalHandler(ApiCreateApprovalHandlerBase):
+
+def _FilterApiClientApprovals(
+ api_client_approval: Iterable[ApiClientApproval],
+ state: api_user_pb2.ApiListClientApprovalsArgs.State,
+) -> Iterable[ApiClientApproval]:
+ """Filters client approvals based on the given state."""
+
+ for approval in api_client_approval:
+ if state == api_user_pb2.ApiListClientApprovalsArgs.State.ANY:
+ yield approval
+ elif state == api_user_pb2.ApiListClientApprovalsArgs.State.VALID:
+ if approval.is_valid:
+ yield approval
+ elif state == api_user_pb2.ApiListClientApprovalsArgs.State.INVALID:
+ if not approval.is_valid:
+ yield approval
+
+
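`_FilterApiClientApprovals` is a plain generator over already-converted approvals; for illustration (`all_approvals` is a hypothetical iterable of `ApiClientApproval` objects):

```python
valid_only = list(
    _FilterApiClientApprovals(
        all_approvals,
        api_user_pb2.ApiListClientApprovalsArgs.State.VALID,
    )
)
```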
+class ApiCreateClientApprovalHandler(api_call_handler_base.ApiCallHandler):
"""Creates new user client approval and notifies requested approvers."""
args_type = ApiCreateClientApprovalArgs
result_type = ApiClientApproval
+ proto_args_type = api_user_pb2.ApiCreateClientApprovalArgs
+ proto_result_type = api_user_pb2.ApiClientApproval
- approval_type = rdf_objects.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CLIENT
- approval_notification_type = (
- rdf_objects.UserNotification.Type.TYPE_CLIENT_APPROVAL_REQUESTED
- )
-
- def _CalculateExpiration(self, args):
+ def _CalculateExpiration(
+ self,
+ args: api_user_pb2.ApiCreateClientApprovalArgs,
+ ) -> rdfvalue.RDFDatetime:
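+    # Fall back to the configured default when no expiration was requested.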
if not args.approval.expiration_time_us:
- return super()._CalculateExpiration(args)
+ return _GetTokenExpirationTime()
- if rdfvalue.RDFDatetime.Now() > args.approval.expiration_time_us:
+ approval_expiration_time = rdfvalue.RDFDatetime.FromMicrosecondsSinceEpoch(
+ args.approval.expiration_time_us
+ )
+ if rdfvalue.RDFDatetime.Now() > approval_expiration_time:
raise ValueError(
- f"Requested expiration time {args.approval.expiration_time_us} "
+ f"Requested expiration time {approval_expiration_time} "
"is in the past."
)
- if args.approval.expiration_time_us > (
+ if approval_expiration_time > (
rdfvalue.RDFDatetime.Now() + config.CONFIG["ACL.token_max_expiry"]
):
raise ValueError(
- f"Requested expiration time {args.approval.expiration_time_us} "
+ f"Requested expiration time {approval_expiration_time} "
"is too far in the future."
)
- return args.approval.expiration_time_us
+ return approval_expiration_time
+
+ def Handle(
+ self,
+ args: api_user_pb2.ApiCreateClientApprovalArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> api_user_pb2.ApiClientApproval:
+ assert context is not None
+
+ approval_request = CreateApprovalRequest(
+ args,
+ objects_pb2.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CLIENT,
+ args.client_id,
+ self._CalculateExpiration(args),
+ context,
+ )
+ api_client_approval = InitApiClientApprovalFromApprovalRequest(
+ approval_request
+ )
+ subject_title = GetSubjectTitleForClientApproval(api_client_approval)
-class ApiGetClientApprovalArgs(ApiClientApprovalArgsBase):
- protobuf = api_user_pb2.ApiGetClientApprovalArgs
- rdf_deps = [
- api_client.ApiClientId,
- ]
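+    # Build review links for both the new UI and the legacy UI; the email
+    # helper hides the legacy link when the two URLs are identical.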
+ review_url_path = (
+ f"/v2/clients/{api_client_approval.subject.client_id}/users/"
+ f"{api_client_approval.requestor}/approvals/{api_client_approval.id}"
+ )
+
+ review_url_path_legacy = (
+ f"/#/users/{api_client_approval.requestor}/approvals/client/"
+ f"{api_client_approval.subject.client_id}/{api_client_approval.id}"
+ )
+
+ SendApprovalRequestEmail(
+ api_client_approval,
+ subject_title,
+ review_url_path,
+ review_url_path_legacy,
+ )
+ rdf_api_client_approval = ToRDFApiClientApproval(api_client_approval)
+ CreateApprovalNotification(
+ rdf_api_client_approval,
+ rdf_objects.UserNotification.Type.TYPE_CLIENT_APPROVAL_REQUESTED,
+ subject_title,
+ )
+ return api_client_approval
-class ApiGetClientApprovalHandler(ApiGetApprovalHandlerBase):
+class ApiGetClientApprovalHandler(api_call_handler_base.ApiCallHandler):
"""Returns details about an approval for a given client and reason."""
args_type = ApiGetClientApprovalArgs
result_type = ApiClientApproval
+ proto_args_type = api_user_pb2.ApiGetClientApprovalArgs
+ proto_result_type = api_user_pb2.ApiClientApproval
+
+ def Handle(
+ self,
+ args: api_user_pb2.ApiGetClientApprovalArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> api_user_pb2.ApiClientApproval:
+ approval_request = GetApprovalRequest(args.approval_id, args.username)
+
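+    # Reject approvals whose stored type or subject does not match the
+    # request arguments.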
+ expected_approval_type = (
+ objects_pb2.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CLIENT
+ )
+ if approval_request.approval_type != expected_approval_type:
+ raise ValueError(
+ "Unexpected approval type: %s, expected: %s"
+ % (approval_request.approval_type, expected_approval_type)
+ )
+
+ if approval_request.subject_id != args.client_id:
+ raise ValueError(
+ "Unexpected subject id: %s, expected: %s"
+ % (approval_request.subject_id, args.client_id)
+ )
- approval_type = rdf_objects.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CLIENT
+ return InitApiClientApprovalFromApprovalRequest(approval_request)
-class ApiGrantClientApprovalArgs(ApiClientApprovalArgsBase):
+class ApiGrantClientApprovalArgs(rdf_structs.RDFProtoStruct):
protobuf = api_user_pb2.ApiGrantClientApprovalArgs
rdf_deps = [
api_client.ApiClientId,
]
-class ApiGrantClientApprovalHandler(ApiGrantApprovalHandlerBase):
+class ApiGrantClientApprovalHandler(api_call_handler_base.ApiCallHandler):
"""Handle for GrantClientApproval requests."""
args_type = ApiGrantClientApprovalArgs
result_type = ApiClientApproval
+ proto_args_type = api_user_pb2.ApiGrantClientApprovalArgs
+ proto_result_type = api_user_pb2.ApiClientApproval
+
+ def Handle(
+ self,
+ args: api_user_pb2.ApiGrantClientApprovalArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> api_user_pb2.ApiClientApproval:
+ assert context is not None
+
+ approval_request = GrantApprovalRequest(
+ args.approval_id, args.username, context.username
+ )
+ api_client_approval = InitApiClientApprovalFromApprovalRequest(
+ approval_request
+ )
+ subject_title = GetSubjectTitleForClientApproval(api_client_approval)
- approval_type = rdf_objects.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CLIENT
- approval_notification_type = (
- rdf_objects.UserNotification.Type.TYPE_CLIENT_APPROVAL_GRANTED
- )
+ SendGrantEmail(
+ api_client_approval,
+ context.username,
+ subject_title,
+ f"/v2/clients/{api_client_approval.subject.client_id}",
+ f"#/clients/{api_client_approval.subject.client_id}",
+ )
- def Handle(self, args, context=None):
- approval = super().Handle(args, context=context)
+ rdf_api_client_approval = ToRDFApiClientApproval(api_client_approval)
+ notification_lib.Notify(
+ api_client_approval.requestor,
+ rdf_objects.UserNotification.Type.TYPE_CLIENT_APPROVAL_GRANTED,
+ "%s has granted you access to %s." % (context.username, subject_title),
+ rdf_api_client_approval.subject.ObjectReference(),
+ )
- if approval.is_valid:
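+    # Granting does not by itself make the approval valid (it may still need
+    # more approvers), so scheduled flows are only started for valid ones.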
+ if api_client_approval.is_valid:
flow.StartScheduledFlows(
- client_id=str(approval.subject.client_id), creator=approval.requestor
+ client_id=api_client_approval.subject.client_id,
+ creator=api_client_approval.requestor,
)
- return approval
-
-
-class ApiListClientApprovalsArgs(ApiClientApprovalArgsBase):
- protobuf = api_user_pb2.ApiListClientApprovalsArgs
- rdf_deps = [
- api_client.ApiClientId,
- ]
+ return api_client_approval
class ApiListClientApprovalsResult(rdf_structs.RDFProtoStruct):
@@ -1083,125 +1117,185 @@ class ApiListClientApprovalsResult(rdf_structs.RDFProtoStruct):
]
-class ApiListClientApprovalsHandler(ApiListApprovalsHandlerBase):
+class ApiListClientApprovalsHandler(api_call_handler_base.ApiCallHandler):
"""Returns list of user's clients approvals."""
args_type = ApiListClientApprovalsArgs
result_type = ApiListClientApprovalsResult
+ proto_args_type = api_user_pb2.ApiListClientApprovalsArgs
+ proto_result_type = api_user_pb2.ApiListClientApprovalsResult
- def _CheckClientId(self, client_id, approval):
- subject = approval.Get(approval.Schema.SUBJECT)
- return subject.Basename() == client_id
-
- def _CheckState(self, state, approval):
- try:
- approval.CheckAccess(approval.context)
- is_valid = True
- except access_control.UnauthorizedAccess:
- is_valid = False
-
- if state == ApiListClientApprovalsArgs.State.VALID:
- return is_valid
-
- if state == ApiListClientApprovalsArgs.State.INVALID:
- return not is_valid
+ def Handle(
+ self,
+ args: api_user_pb2.ApiListClientApprovalsArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> api_user_pb2.ApiListClientApprovalsResult:
+ assert context is not None
- def Handle(self, args, context=None):
subject_id = None
if args.client_id:
- subject_id = str(args.client_id)
+ subject_id = args.client_id
- proto_approvals = sorted(
+ approvals = sorted(
data_store.REL_DB.ReadApprovalRequests(
context.username,
- rdf_objects.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CLIENT,
+ objects_pb2.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CLIENT,
subject_id=subject_id,
include_expired=True,
),
key=lambda ar: ar.timestamp,
reverse=True,
)
- approvals = [mig_objects.ToRDFApprovalRequest(r) for r in proto_approvals]
- approvals = self._FilterRelationalApprovalRequests(
- approvals,
- lambda ar: ApiClientApproval().InitFromDatabaseObject(ar),
+ api_client_approvals = [
+ InitApiClientApprovalFromApprovalRequest(ar) for ar in approvals
+ ]
+
+ api_client_approvals = _FilterApiClientApprovals(
+ api_client_approvals,
args.state,
)
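+    # An unset count means "return everything from the offset onwards".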
- if not args.count:
+ if not args.HasField("count"):
end = None
else:
end = args.offset + args.count
- items = list(itertools.islice(approvals, args.offset, end))
+ items = list(itertools.islice(api_client_approvals, args.offset, end))
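+    # Refresh the returned client subjects with current Fleetspeak data.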
api_client.UpdateClientsFromFleetspeak([a.subject for a in items])
- return ApiListClientApprovalsResult(items=items)
-
-
-class ApiHuntApprovalArgsBase(rdf_structs.RDFProtoStruct):
-
- __abstract = True # pylint: disable=g-bad-name
-
- def BuildSubjectId(self):
- return str(self.hunt_id)
+ return api_user_pb2.ApiListClientApprovalsResult(items=items)
-class ApiCreateHuntApprovalArgs(ApiHuntApprovalArgsBase):
- protobuf = api_user_pb2.ApiCreateHuntApprovalArgs
- rdf_deps = [
- ApiHuntApproval,
- api_hunt.ApiHuntId,
- ]
-
-
-class ApiCreateHuntApprovalHandler(ApiCreateApprovalHandlerBase):
+class ApiCreateHuntApprovalHandler(api_call_handler_base.ApiCallHandler):
"""Creates new user hunt approval and notifies requested approvers."""
args_type = ApiCreateHuntApprovalArgs
result_type = ApiHuntApproval
+ proto_args_type = api_user_pb2.ApiCreateHuntApprovalArgs
+ proto_result_type = api_user_pb2.ApiHuntApproval
+
+ def Handle(
+ self,
+ args: api_user_pb2.ApiCreateHuntApprovalArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> api_user_pb2.ApiHuntApproval:
+ assert context is not None
+
+ approval_request = CreateApprovalRequest(
+ args,
+ objects_pb2.ApprovalRequest.ApprovalType.APPROVAL_TYPE_HUNT,
+ args.hunt_id,
+ _GetTokenExpirationTime(),
+ context,
+ )
+ api_hunt_approval = InitApiHuntApprovalFromApprovalRequest(approval_request)
- approval_type = rdf_objects.ApprovalRequest.ApprovalType.APPROVAL_TYPE_HUNT
- approval_notification_type = (
- rdf_objects.UserNotification.Type.TYPE_HUNT_APPROVAL_REQUESTED
- )
+ subject_title = GetSubjectTitleForHuntApproval(api_hunt_approval)
+ review_url_path = (
+ f"/v2/hunts/{api_hunt_approval.subject.hunt_id}/users/"
+ f"{api_hunt_approval.requestor}/approvals/{api_hunt_approval.id}"
+ )
+ review_url_path_legacy = (
+ f"/#/users/{api_hunt_approval.requestor}/approvals/hunt/"
+ f"{api_hunt_approval.subject.hunt_id}/{api_hunt_approval.id}"
+ )
+ SendApprovalRequestEmail(
+ api_hunt_approval,
+ subject_title,
+ review_url_path,
+ review_url_path_legacy,
+ )
-class ApiGetHuntApprovalArgs(ApiHuntApprovalArgsBase):
- protobuf = api_user_pb2.ApiGetHuntApprovalArgs
- rdf_deps = [
- api_hunt.ApiHuntId,
- ]
+ rdf_api_hunt_approval = ToRDFApiHuntApproval(api_hunt_approval)
+ CreateApprovalNotification(
+ rdf_api_hunt_approval,
+ rdf_objects.UserNotification.Type.TYPE_HUNT_APPROVAL_REQUESTED,
+ subject_title,
+ )
+ return api_hunt_approval
-class ApiGetHuntApprovalHandler(ApiGetApprovalHandlerBase):
+class ApiGetHuntApprovalHandler(api_call_handler_base.ApiCallHandler):
"""Returns details about approval for a given hunt, user and approval id."""
args_type = ApiGetHuntApprovalArgs
result_type = ApiHuntApproval
+ proto_args_type = api_user_pb2.ApiGetHuntApprovalArgs
+ proto_result_type = api_user_pb2.ApiHuntApproval
+
+ def Handle(
+ self,
+ args: api_user_pb2.ApiGetHuntApprovalArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> api_user_pb2.ApiHuntApproval:
+ approval_obj = GetApprovalRequest(args.approval_id, args.username)
+
+ expected_approval_type = (
+ objects_pb2.ApprovalRequest.ApprovalType.APPROVAL_TYPE_HUNT
+ )
+ if approval_obj.approval_type != expected_approval_type:
+ raise ValueError(
+ "Unexpected approval type: %s, expected: %s"
+ % (approval_obj.approval_type, expected_approval_type)
+ )
+
+ if approval_obj.subject_id != args.hunt_id:
+ raise ValueError(
+ "Unexpected subject id: %s, expected: %s"
+ % (approval_obj.subject_id, args.hunt_id)
+ )
- approval_type = rdf_objects.ApprovalRequest.ApprovalType.APPROVAL_TYPE_HUNT
+ return InitApiHuntApprovalFromApprovalRequest(approval_obj)
-class ApiGrantHuntApprovalArgs(ApiHuntApprovalArgsBase):
+class ApiGrantHuntApprovalArgs(rdf_structs.RDFProtoStruct):
protobuf = api_user_pb2.ApiGrantHuntApprovalArgs
rdf_deps = [
api_hunt.ApiHuntId,
]
-class ApiGrantHuntApprovalHandler(ApiGrantApprovalHandlerBase):
+class ApiGrantHuntApprovalHandler(api_call_handler_base.ApiCallHandler):
"""Handle for GrantHuntApproval requests."""
args_type = ApiGrantHuntApprovalArgs
result_type = ApiHuntApproval
+ proto_args_type = api_user_pb2.ApiGrantHuntApprovalArgs
+ proto_result_type = api_user_pb2.ApiHuntApproval
+
+ def Handle(
+ self,
+ args: api_user_pb2.ApiGrantHuntApprovalArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> api_user_pb2.ApiHuntApproval:
+ assert context is not None
+
+ approval_request = GrantApprovalRequest(
+ args.approval_id, args.username, context.username
+ )
+ api_hunt_approval = InitApiHuntApprovalFromApprovalRequest(approval_request)
- approval_type = rdf_objects.ApprovalRequest.ApprovalType.APPROVAL_TYPE_HUNT
- approval_notification_type = (
- rdf_objects.UserNotification.Type.TYPE_HUNT_APPROVAL_GRANTED
- )
+ subject_title = GetSubjectTitleForHuntApproval(api_hunt_approval)
+
+ SendGrantEmail(
+ api_hunt_approval,
+ context.username,
+ subject_title,
+ f"/v2/hunts/{api_hunt_approval.subject.hunt_id}",
+ f"#/hunts/{api_hunt_approval.subject.hunt_id}",
+ )
+ rdf_api_hunt_approval = ToRDFApiHuntApproval(api_hunt_approval)
+ notification_lib.Notify(
+ api_hunt_approval.requestor,
+ rdf_objects.UserNotification.Type.TYPE_HUNT_APPROVAL_GRANTED,
+ "%s has granted you access to %s." % (context.username, subject_title),
+ rdf_api_hunt_approval.subject.ObjectReference(),
+ )
+
+ return api_hunt_approval
-class ApiListHuntApprovalsArgs(ApiHuntApprovalArgsBase):
+class ApiListHuntApprovalsArgs(rdf_structs.RDFProtoStruct):
protobuf = api_user_pb2.ApiListHuntApprovalsArgs
rdf_deps = [
api_hunt.ApiHuntId,
@@ -1215,18 +1309,26 @@ class ApiListHuntApprovalsResult(rdf_structs.RDFProtoStruct):
]
-class ApiListHuntApprovalsHandler(ApiListApprovalsHandlerBase):
+class ApiListHuntApprovalsHandler(api_call_handler_base.ApiCallHandler):
"""Returns list of user's hunts approvals."""
args_type = ApiListHuntApprovalsArgs
result_type = ApiListHuntApprovalsResult
+ proto_args_type = api_user_pb2.ApiListHuntApprovalsArgs
+ proto_result_type = api_user_pb2.ApiListHuntApprovalsResult
+
+ def Handle(
+ self,
+ args: api_user_pb2.ApiListHuntApprovalsArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> api_user_pb2.ApiListHuntApprovalsResult:
+ assert context is not None
- def Handle(self, args, context=None):
subject_id = None
if args.hunt_id:
- subject_id = str(args.hunt_id)
+ subject_id = args.hunt_id
- proto_approvals = sorted(
+ approvals = sorted(
data_store.REL_DB.ReadApprovalRequests(
context.username,
objects_pb2.ApprovalRequest.ApprovalType.APPROVAL_TYPE_HUNT,
@@ -1236,92 +1338,150 @@ def Handle(self, args, context=None):
key=lambda ar: ar.timestamp,
reverse=True,
)
- approvals = [mig_objects.ToRDFApprovalRequest(r) for r in proto_approvals]
- if not args.count:
+ if not args.HasField("count"):
end = None
else:
end = args.offset + args.count
- items = [
- ApiHuntApproval().InitFromDatabaseObject(ar)
- for ar in approvals[args.offset : end]
- ]
-
- return ApiListHuntApprovalsResult(items=items)
-
-
-class ApiCronJobApprovalArgsBase(rdf_structs.RDFProtoStruct):
- """Base class for Cron Job approvals."""
-
- __abstract = True # pylint: disable=g-bad-name
-
- def BuildSubjectId(self):
- return str(self.cron_job_id)
-
-
-class ApiCreateCronJobApprovalArgs(ApiCronJobApprovalArgsBase):
- protobuf = api_user_pb2.ApiCreateCronJobApprovalArgs
- rdf_deps = [
- api_cron.ApiCronJobId,
- ApiCronJobApproval,
- ]
+ return api_user_pb2.ApiListHuntApprovalsResult(
+ items=[
+ InitApiHuntApprovalFromApprovalRequest(ar)
+ for ar in approvals[args.offset : end]
+ ]
+ )
-class ApiCreateCronJobApprovalHandler(ApiCreateApprovalHandlerBase):
+class ApiCreateCronJobApprovalHandler(api_call_handler_base.ApiCallHandler):
"""Creates new user cron approval and notifies requested approvers."""
args_type = ApiCreateCronJobApprovalArgs
result_type = ApiCronJobApproval
+ proto_args_type = api_user_pb2.ApiCreateCronJobApprovalArgs
+ proto_result_type = api_user_pb2.ApiCronJobApproval
+
+ def Handle(
+ self,
+ args: api_user_pb2.ApiCreateCronJobApprovalArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> api_user_pb2.ApiCronJobApproval:
+ assert context is not None
+
+ approval_request = CreateApprovalRequest(
+ args,
+ objects_pb2.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CRON_JOB,
+ args.cron_job_id,
+ _GetTokenExpirationTime(),
+ context,
+ )
+ api_cron_job_approval = InitApiCronJobApprovalFromApprovalRequest(
+ approval_request
+ )
- approval_type = (
- rdf_objects.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CRON_JOB
- )
- approval_notification_type = (
- rdf_objects.UserNotification.Type.TYPE_CRON_JOB_APPROVAL_REQUESTED
- )
+ subject_title = GetSubjectTitleForCronJobApproval(api_cron_job_approval)
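+    # The new UI cannot show cron jobs yet, so both links target the legacy
+    # UI; identical URLs make the email hide the separate fallback link.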
+ review_url_path = review_url_path_legacy = (
+ f"/#/users/{api_cron_job_approval.requestor}/approvals/cron-job/"
+ f"{api_cron_job_approval.subject.cron_job_id}/{api_cron_job_approval.id}"
+ )
+ SendApprovalRequestEmail(
+ api_cron_job_approval,
+ subject_title,
+ review_url_path,
+ review_url_path_legacy,
+ )
+ rdf_api_cron_job_approval = ToRDFApiCronJobApproval(api_cron_job_approval)
+ CreateApprovalNotification(
+ rdf_api_cron_job_approval,
+ rdf_objects.UserNotification.Type.TYPE_CRON_JOB_APPROVAL_REQUESTED,
+ subject_title,
+ )
-class ApiGetCronJobApprovalArgs(ApiCronJobApprovalArgsBase):
- protobuf = api_user_pb2.ApiGetCronJobApprovalArgs
- rdf_deps = [
- api_cron.ApiCronJobId,
- ]
+ return api_cron_job_approval
-class ApiGetCronJobApprovalHandler(ApiGetApprovalHandlerBase):
+class ApiGetCronJobApprovalHandler(api_call_handler_base.ApiCallHandler):
"""Returns details about approval for a given cron, user and approval id."""
args_type = ApiGetCronJobApprovalArgs
result_type = ApiCronJobApproval
+ proto_args_type = api_user_pb2.ApiGetCronJobApprovalArgs
+ proto_result_type = api_user_pb2.ApiCronJobApproval
+
+ def Handle(
+ self,
+ args: api_user_pb2.ApiGetCronJobApprovalArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> api_user_pb2.ApiCronJobApproval:
+ approval_obj = GetApprovalRequest(args.approval_id, args.username)
+
+ expected_approval_type = (
+ objects_pb2.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CRON_JOB
+ )
+ if approval_obj.approval_type != expected_approval_type:
+ raise ValueError(
+ "Unexpected approval type: %s, expected: %s"
+ % (approval_obj.approval_type, expected_approval_type)
+ )
- approval_type = (
- rdf_objects.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CRON_JOB
- )
+ if approval_obj.subject_id != args.cron_job_id:
+ raise ValueError(
+ "Unexpected subject id: %s, expected: %s"
+ % (approval_obj.subject_id, args.cron_job_id)
+ )
+
+ return InitApiCronJobApprovalFromApprovalRequest(approval_obj)
-class ApiGrantCronJobApprovalArgs(ApiCronJobApprovalArgsBase):
+class ApiGrantCronJobApprovalArgs(rdf_structs.RDFProtoStruct):
protobuf = api_user_pb2.ApiGrantCronJobApprovalArgs
rdf_deps = [
api_cron.ApiCronJobId,
]
-class ApiGrantCronJobApprovalHandler(ApiGrantApprovalHandlerBase):
+class ApiGrantCronJobApprovalHandler(api_call_handler_base.ApiCallHandler):
"""Handle for GrantCronJobApproval requests."""
args_type = ApiGrantCronJobApprovalArgs
result_type = ApiCronJobApproval
+ proto_args_type = api_user_pb2.ApiGrantCronJobApprovalArgs
+ proto_result_type = api_user_pb2.ApiCronJobApproval
+
+ def Handle(
+ self,
+ args: api_user_pb2.ApiGrantCronJobApprovalArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> api_user_pb2.ApiCronJobApproval:
+ assert context is not None
+
+ approval_request = GrantApprovalRequest(
+ args.approval_id, args.username, context.username
+ )
+ api_cron_job_approval = InitApiCronJobApprovalFromApprovalRequest(
+ approval_request
+ )
+ subject_title = GetSubjectTitleForCronJobApproval(api_cron_job_approval)
- approval_type = (
- rdf_objects.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CRON_JOB
- )
- approval_notification_type = (
- rdf_objects.UserNotification.Type.TYPE_CRON_JOB_APPROVAL_GRANTED
- )
+ SendGrantEmail(
+ api_cron_job_approval,
+ context.username,
+ subject_title,
+ f"#/crons/{api_cron_job_approval.subject.cron_job_id}",
+ f"#/crons/{api_cron_job_approval.subject.cron_job_id}",
+ )
+ rdf_api_cron_job_approval = ToRDFApiCronJobApproval(api_cron_job_approval)
+ notification_lib.Notify(
+ api_cron_job_approval.requestor,
+ rdf_objects.UserNotification.Type.TYPE_CRON_JOB_APPROVAL_GRANTED,
+ "%s has granted you access to %s." % (context.username, subject_title),
+ rdf_api_cron_job_approval.subject.ObjectReference(),
+ )
+
+ return api_cron_job_approval
-class ApiListCronJobApprovalsArgs(ApiCronJobApprovalArgsBase):
+class ApiListCronJobApprovalsArgs(rdf_structs.RDFProtoStruct):
protobuf = api_user_pb2.ApiListCronJobApprovalsArgs
@@ -1332,14 +1492,22 @@ class ApiListCronJobApprovalsResult(rdf_structs.RDFProtoStruct):
]
-class ApiListCronJobApprovalsHandler(ApiListApprovalsHandlerBase):
+class ApiListCronJobApprovalsHandler(api_call_handler_base.ApiCallHandler):
"""Returns list of user's cron jobs approvals."""
args_type = ApiListCronJobApprovalsArgs
result_type = ApiListCronJobApprovalsResult
+ proto_args_type = api_user_pb2.ApiListCronJobApprovalsArgs
+ proto_result_type = api_user_pb2.ApiListCronJobApprovalsResult
- def Handle(self, args, context=None):
- proto_approvals = sorted(
+ def Handle(
+ self,
+ args: api_user_pb2.ApiListCronJobApprovalsArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> api_user_pb2.ApiListCronJobApprovalsResult:
+ assert context is not None
+
+ approvals = sorted(
data_store.REL_DB.ReadApprovalRequests(
context.username,
objects_pb2.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CRON_JOB,
@@ -1349,62 +1517,73 @@ def Handle(self, args, context=None):
key=lambda ar: ar.timestamp,
reverse=True,
)
- approvals = [mig_objects.ToRDFApprovalRequest(r) for r in proto_approvals]
- if not args.count:
+ if not args.HasField("count"):
end = None
else:
end = args.offset + args.count
- items = [
- ApiCronJobApproval().InitFromDatabaseObject(ar)
+ api_approvals = [
+ InitApiCronJobApprovalFromApprovalRequest(ar)
for ar in approvals[args.offset : end]
]
- return ApiListCronJobApprovalsResult(items=items)
+ return api_user_pb2.ApiListCronJobApprovalsResult(items=api_approvals)
class ApiGetOwnGrrUserHandler(api_call_handler_base.ApiCallHandler):
"""Renders current user settings."""
result_type = ApiGrrUser
+ proto_result_type = api_user_pb2.ApiGrrUser
- def __init__(self, interface_traits=None):
+ def __init__(
+ self,
+ interface_traits: Optional[api_user_pb2.ApiGrrUserInterfaceTraits] = None,
+ ) -> None:
super().__init__()
self.interface_traits = interface_traits
- def Handle(self, unused_args, context=None):
+ def Handle(
+ self,
+ unused_args: Optional[None] = None,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> api_user_pb2.ApiGrrUser:
"""Fetches and renders current user's settings."""
-
- result = ApiGrrUser(username=context.username)
+ assert context is not None
# TODO: Use function to get API from proto user.
- proto_user_record = data_store.REL_DB.ReadGRRUser(context.username)
- rdf_user_record = mig_objects.ToRDFGRRUser(proto_user_record)
- result.InitFromDatabaseObject(rdf_user_record)
+ user_record = data_store.REL_DB.ReadGRRUser(context.username)
+ api_user = InitApiGrrUserFromGrrUser(user_record)
- result.interface_traits = (
- self.interface_traits or ApiGrrUserInterfaceTraits()
- )
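+    # Leave interface_traits unset unless the handler was configured with
+    # explicit traits to expose.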
+ if self.interface_traits:
+ api_user.interface_traits.CopyFrom(self.interface_traits)
- return result
+ return api_user
class ApiUpdateGrrUserHandler(api_call_handler_base.ApiCallHandler):
"""Sets current user settings."""
args_type = ApiGrrUser
+ proto_args_type = api_user_pb2.ApiGrrUser
+
+ def Handle(
+ self,
+ args: api_user_pb2.ApiGrrUser,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> api_user_pb2.ApiGrrUser:
+ assert context is not None
- def Handle(self, args, context=None):
if args.username or args.HasField("interface_traits"):
- raise ValueError("Only user settings can be updated.")
+ raise ValueError(
+          "Neither username nor interface traits can be updated"
+ )
- # Cannot use `mig_user.ToProtoApiUpdateGrrUserHandler(args)` (circular dep).
- proto_args = args.AsPrimitiveProto()
data_store.REL_DB.WriteGRRUser(
context.username,
- ui_mode=proto_args.settings.mode,
- canary_mode=proto_args.settings.canary_mode,
+ ui_mode=args.settings.mode,
+ canary_mode=args.settings.canary_mode,
)
@@ -1418,16 +1597,25 @@ class ApiGetPendingUserNotificationsCountHandler(
"""Returns the number of pending notifications for the current user."""
result_type = ApiGetPendingUserNotificationsCountResult
+ proto_result_type = api_user_pb2.ApiGetPendingUserNotificationsCountResult
- def Handle(self, args, context=None):
+ def Handle(
+ self,
+ args: Optional[None] = None,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> api_user_pb2.ApiGetPendingUserNotificationsCountResult:
"""Fetches the pending notification count."""
- ns = list(
+ assert context is not None
+
+ user_notifications = list(
data_store.REL_DB.ReadUserNotifications(
context.username,
- state=rdf_objects.UserNotification.State.STATE_PENDING,
+ state=objects_pb2.UserNotification.State.STATE_PENDING,
)
)
- return ApiGetPendingUserNotificationsCountResult(count=len(ns))
+ return api_user_pb2.ApiGetPendingUserNotificationsCountResult(
+ count=len(user_notifications)
+ )
class ApiListPendingUserNotificationsArgs(rdf_structs.RDFProtoStruct):
@@ -1451,29 +1639,42 @@ class ApiListPendingUserNotificationsHandler(
args_type = ApiListPendingUserNotificationsArgs
result_type = ApiListPendingUserNotificationsResult
-
- def Handle(self, args, context=None):
+ proto_args_type = api_user_pb2.ApiListPendingUserNotificationsArgs
+ proto_result_type = api_user_pb2.ApiListPendingUserNotificationsResult
+
+ def Handle(
+ self,
+ args: api_user_pb2.ApiListPendingUserNotificationsArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> api_user_pb2.ApiListPendingUserNotificationsResult:
"""Fetches the pending notifications."""
- ns = data_store.REL_DB.ReadUserNotifications(
+ assert context is not None
+
+ user_notifications = data_store.REL_DB.ReadUserNotifications(
context.username,
- state=rdf_objects.UserNotification.State.STATE_PENDING,
- timerange=(args.timestamp, None),
+ state=objects_pb2.UserNotification.State.STATE_PENDING,
+ timerange=(
+ rdfvalue.RDFDatetime.FromMicrosecondsSinceEpoch(args.timestamp),
+ None,
+ ),
)
- ns = [mig_objects.ToRDFUserNotification(n) for n in ns]
# TODO(user): Remove this, so that the order is reversed. This will
# be an API-breaking change.
- ns = sorted(ns, key=lambda x: x.timestamp)
+ user_notifications = sorted(user_notifications, key=lambda x: x.timestamp)
# Make sure that only notifications with timestamp > args.timestamp
# are returned.
# Semantics of the API call (strict >) differs slightly from the
# semantics of the db.ReadUserNotifications call (inclusive >=).
- if ns and ns[0].timestamp == args.timestamp:
- ns.pop(0)
-
- return ApiListPendingUserNotificationsResult(
- items=[ApiNotification().InitFromUserNotification(n) for n in ns]
+ if user_notifications and user_notifications[0].timestamp == args.timestamp:
+ user_notifications.pop(0)
+
+ return api_user_pb2.ApiListPendingUserNotificationsResult(
+ items=[
+ InitApiNotificationFromUserNotification(n)
+ for n in user_notifications
+ ]
)
@@ -1490,12 +1691,19 @@ class ApiDeletePendingUserNotificationHandler(
"""Removes the pending notification with the given timestamp."""
args_type = ApiDeletePendingUserNotificationArgs
+ proto_args_type = api_user_pb2.ApiDeletePendingUserNotificationArgs
- def Handle(self, args, context=None):
+ def Handle(
+ self,
+ args: api_user_pb2.ApiDeletePendingUserNotificationArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> None:
"""Deletes the notification from the pending notifications."""
+ assert context is not None
+
data_store.REL_DB.UpdateUserNotifications(
context.username,
- [args.timestamp],
+ [rdfvalue.RDFDatetime.FromMicrosecondsSinceEpoch(args.timestamp)],
state=objects_pb2.UserNotification.State.STATE_NOT_PENDING,
)
@@ -1518,22 +1726,29 @@ class ApiListAndResetUserNotificationsHandler(
args_type = ApiListAndResetUserNotificationsArgs
result_type = ApiListAndResetUserNotificationsResult
-
- def Handle(self, args, context=None):
+ proto_args_type = api_user_pb2.ApiListAndResetUserNotificationsArgs
+ proto_result_type = api_user_pb2.ApiListAndResetUserNotificationsResult
+
+ def Handle(
+ self,
+ args: api_user_pb2.ApiListAndResetUserNotificationsArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> api_user_pb2.ApiListAndResetUserNotificationsResult:
"""Fetches the user notifications."""
+ assert context is not None
+
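+    # Look back at most two years (2 * 52 weeks), but never earlier than the
+    # datastore's minimum timestamp.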
back_timestamp = max(
rdfvalue.RDFDatetime.Now()
- rdfvalue.Duration.From(2 * 52, rdfvalue.WEEKS),
data_store.REL_DB.MinTimestamp(),
)
- ns = data_store.REL_DB.ReadUserNotifications(
+ user_notifications = data_store.REL_DB.ReadUserNotifications(
context.username, timerange=(back_timestamp, None)
)
- ns = [mig_objects.ToRDFUserNotification(n) for n in ns]
pending_timestamps = [
- n.timestamp
- for n in ns
+ rdfvalue.RDFDatetime.FromMicrosecondsSinceEpoch(n.timestamp)
+ for n in user_notifications
if n.state == objects_pb2.UserNotification.State.STATE_PENDING
]
data_store.REL_DB.UpdateUserNotifications(
@@ -1542,25 +1757,32 @@ def Handle(self, args, context=None):
state=objects_pb2.UserNotification.State.STATE_NOT_PENDING,
)
- total_count = len(ns)
+ total_count = len(user_notifications)
if args.filter:
- ns = [n for n in ns if args.filter.lower() in n.message.lower()]
+ user_notifications = [
+ n
+ for n in user_notifications
+ if args.filter.lower() in n.message.lower()
+ ]
- if not args.count:
+ if not args.HasField("count"):
args.count = 50
start = args.offset
end = args.offset + args.count
api_notifications = []
-
- for n in ns[start:end]:
+ for user_notification in user_notifications[start:end]:
try:
- api_notifications.append(ApiNotification().InitFromUserNotification(n))
+ api_notifications.append(
+ InitApiNotificationFromUserNotification(user_notification)
+ )
except ValueError as e:
- logging.error("Unable to convert notification %s: %s", n, e)
+ logging.exception(
+ "Unable to convert notification %s: %s", user_notification, e
+ )
- return ApiListAndResetUserNotificationsResult(
+ return api_user_pb2.ApiListAndResetUserNotificationsResult(
items=api_notifications, total_count=total_count
)
@@ -1580,17 +1802,18 @@ class ApiListApproverSuggestionsResult(rdf_structs.RDFProtoStruct):
rdf_deps = [ApproverSuggestion]
-def _GetAllUsernames():
+def _GetAllUsernames() -> Sequence[str]:
return sorted(user.username for user in data_store.REL_DB.ReadGRRUsers())
-def _GetMostRequestedUsernames(context):
- proto_requests = data_store.REL_DB.ReadApprovalRequests(
+def _GetMostRequestedUsernames(
+ context: api_call_context.ApiCallContext,
+) -> Sequence[str]:
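+  """Returns usernames ranked by how often this user asked them for approval."""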
+ requests = data_store.REL_DB.ReadApprovalRequests(
context.username,
objects_pb2.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CLIENT,
include_expired=True,
)
- requests = [mig_objects.ToRDFApprovalRequest(r) for r in proto_requests]
not_users = collection.Flatten(req.notified_users for req in requests)
user_counts = collections.Counter(not_users)
return [username for (username, _) in user_counts.most_common()]
@@ -1601,8 +1824,16 @@ class ApiListApproverSuggestionsHandler(api_call_handler_base.ApiCallHandler):
args_type = ApiListApproverSuggestionsArgs
result_type = ApiListApproverSuggestionsResult
+ proto_args_type = api_user_pb2.ApiListApproverSuggestionsArgs
+ proto_result_type = api_user_pb2.ApiListApproverSuggestionsResult
+
+ def Handle(
+ self,
+ args: api_user_pb2.ApiListApproverSuggestionsArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> api_user_pb2.ApiListApproverSuggestionsResult:
+ assert context is not None
- def Handle(self, args, context=None):
all_usernames = _GetAllUsernames()
all_usernames = sorted(set(all_usernames) - access_control.SYSTEM_USERS)
usernames = []
@@ -1631,5 +1862,30 @@ def Handle(self, args, context=None):
except ValueError:
pass
- suggestions = [ApproverSuggestion(username=u) for u in usernames]
- return ApiListApproverSuggestionsResult(suggestions=suggestions)
+ return api_user_pb2.ApiListApproverSuggestionsResult(
+ suggestions=[
+ api_user_pb2.ApiListApproverSuggestionsResult.ApproverSuggestion(
+ username=u
+ )
+ for u in usernames
+ ]
+ )
+
+
+# Copy of migration functions to avoid circular dependency.
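+# They round-trip through the serialized proto to construct the RDF values.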
+def ToRDFApiClientApproval(
+ proto: api_user_pb2.ApiClientApproval,
+) -> ApiClientApproval:
+ return ApiClientApproval.FromSerializedBytes(proto.SerializeToString())
+
+
+def ToRDFApiHuntApproval(
+ proto: api_user_pb2.ApiHuntApproval,
+) -> ApiHuntApproval:
+ return ApiHuntApproval.FromSerializedBytes(proto.SerializeToString())
+
+
+def ToRDFApiCronJobApproval(
+ proto: api_user_pb2.ApiCronJobApproval,
+) -> ApiCronJobApproval:
+ return ApiCronJobApproval.FromSerializedBytes(proto.SerializeToString())
diff --git a/grr/server/grr_response_server/gui/api_plugins/user_test.py b/grr/server/grr_response_server/gui/api_plugins/user_test.py
index 1081929945..4964672aaf 100644
--- a/grr/server/grr_response_server/gui/api_plugins/user_test.py
+++ b/grr/server/grr_response_server/gui/api_plugins/user_test.py
@@ -1,6 +1,7 @@
#!/usr/bin/env python
"""This module contains tests for user API handlers."""
+from typing import Optional
from unittest import mock
from absl import app
@@ -8,10 +9,10 @@
from google.protobuf import any_pb2
from grr_response_core import config
from grr_response_core.lib import rdfvalue
-from grr_response_core.lib.rdfvalues import client as rdf_client
-from grr_response_core.lib.rdfvalues import flows as rdf_flows
from grr_response_proto import flows_pb2
+from grr_response_proto import objects_pb2
from grr_response_proto import user_pb2
+from grr_response_proto.api import user_pb2 as api_user_pb2
from grr_response_server import access_control
from grr_response_server import cronjobs
from grr_response_server import data_store
@@ -45,135 +46,162 @@ def setUp(self):
super().setUp()
self.client_id = self.SetupClient(0)
- def InitFromObj_(self, notification_type, reference, message=None):
+ def InitFromObj_(
+ self,
+ notification_type: objects_pb2.UserNotification.Type,
+ reference: Optional[objects_pb2.ObjectReference] = None,
+ message: Optional[str] = None,
+ ) -> api_user_pb2.ApiNotification:
self.CreateUser(self.context.username)
+ rdf_reference = None
+ if reference is not None:
+ rdf_reference = mig_objects.ToRDFObjectReference(reference)
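+    # notification.Notify still expects an RDF reference, so convert first.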
notification.Notify(
- self.context.username, notification_type, message or "", reference
+ self.context.username, notification_type, message or "", rdf_reference
)
- ns = self.GetUserNotifications(self.context.username)
+ notifications = self.GetUserNotifications(self.context.username)
+ notifications = [
+ mig_objects.ToProtoUserNotification(n) for n in notifications
+ ]
# Treat the notification as an object coming from REL_DB.
- return user_plugin.ApiNotification().InitFromUserNotification(ns[0])
+ return user_plugin.InitApiNotificationFromUserNotification(notifications[0])
def testDiscoveryNotificationIsParsedCorrectly(self):
n = self.InitFromObj_(
- rdf_objects.UserNotification.Type.TYPE_CLIENT_INTERROGATED,
- rdf_objects.ObjectReference(
- reference_type=rdf_objects.ObjectReference.Type.CLIENT,
- client=rdf_objects.ClientReference(client_id=self.client_id),
+ objects_pb2.UserNotification.Type.TYPE_CLIENT_INTERROGATED,
+ objects_pb2.ObjectReference(
+ reference_type=objects_pb2.ObjectReference.Type.CLIENT,
+ client=objects_pb2.ClientReference(client_id=self.client_id),
),
)
- self.assertEqual(n.reference.type, "CLIENT")
- self.assertEqual(n.reference.client.client_id.ToString(), self.client_id)
+ self.assertEqual(
+ n.reference.type, api_user_pb2.ApiNotificationReference.Type.CLIENT
+ )
+ self.assertEqual(n.reference.client.client_id, self.client_id)
def testClientApprovalGrantedNotificationIsParsedCorrectly(self):
n = self.InitFromObj_(
- rdf_objects.UserNotification.Type.TYPE_CLIENT_APPROVAL_GRANTED,
- rdf_objects.ObjectReference(
- reference_type=rdf_objects.ObjectReference.Type.CLIENT,
- client=rdf_objects.ClientReference(client_id=self.client_id),
+ objects_pb2.UserNotification.Type.TYPE_CLIENT_APPROVAL_GRANTED,
+ objects_pb2.ObjectReference(
+ reference_type=objects_pb2.ObjectReference.Type.CLIENT,
+ client=objects_pb2.ClientReference(client_id=self.client_id),
),
)
- self.assertEqual(n.reference.type, "CLIENT")
- self.assertEqual(n.reference.client.client_id.ToString(), self.client_id)
+ self.assertEqual(
+ n.reference.type, api_user_pb2.ApiNotificationReference.Type.CLIENT
+ )
+ self.assertEqual(n.reference.client.client_id, self.client_id)
def testHuntNotificationIsParsedCorrectly(self):
n = self.InitFromObj_(
- rdf_objects.UserNotification.Type.TYPE_HUNT_STOPPED,
- rdf_objects.ObjectReference(
- reference_type=rdf_objects.ObjectReference.Type.HUNT,
- hunt=rdf_objects.HuntReference(hunt_id="H:123456"),
+ objects_pb2.UserNotification.Type.TYPE_HUNT_STOPPED,
+ objects_pb2.ObjectReference(
+ reference_type=objects_pb2.ObjectReference.Type.HUNT,
+ hunt=objects_pb2.HuntReference(hunt_id="H:123456"),
),
)
- self.assertEqual(n.reference.type, "HUNT")
+ self.assertEqual(
+ n.reference.type, api_user_pb2.ApiNotificationReference.Type.HUNT
+ )
self.assertEqual(n.reference.hunt.hunt_id, "H:123456")
def testCronNotificationIsParsedCorrectly(self):
n = self.InitFromObj_(
- rdf_objects.UserNotification.Type.TYPE_CRON_JOB_APPROVAL_GRANTED,
- rdf_objects.ObjectReference(
- reference_type=rdf_objects.ObjectReference.Type.CRON_JOB,
- cron_job=rdf_objects.CronJobReference(cron_job_id="FooBar"),
+ objects_pb2.UserNotification.Type.TYPE_CRON_JOB_APPROVAL_GRANTED,
+ objects_pb2.ObjectReference(
+ reference_type=objects_pb2.ObjectReference.Type.CRON_JOB,
+ cron_job=objects_pb2.CronJobReference(cron_job_id="FooBar"),
),
)
- self.assertEqual(n.reference.type, "CRON")
+ self.assertEqual(
+ n.reference.type, api_user_pb2.ApiNotificationReference.Type.CRON
+ )
self.assertEqual(n.reference.cron.cron_job_id, "FooBar")
def testFlowSuccessNotificationIsParsedCorrectly(self):
n = self.InitFromObj_(
- rdf_objects.UserNotification.Type.TYPE_FLOW_RUN_COMPLETED,
- rdf_objects.ObjectReference(
- reference_type=rdf_objects.ObjectReference.Type.FLOW,
- flow=rdf_objects.FlowReference(
+ objects_pb2.UserNotification.Type.TYPE_FLOW_RUN_COMPLETED,
+ objects_pb2.ObjectReference(
+ reference_type=objects_pb2.ObjectReference.Type.FLOW,
+ flow=objects_pb2.FlowReference(
client_id=self.client_id, flow_id="F:123456"
),
),
)
- self.assertEqual(n.reference.type, "FLOW")
- self.assertEqual(n.reference.flow.client_id.ToString(), self.client_id)
+ self.assertEqual(
+ n.reference.type, api_user_pb2.ApiNotificationReference.Type.FLOW
+ )
+ self.assertEqual(n.reference.flow.client_id, self.client_id)
self.assertEqual(n.reference.flow.flow_id, "F:123456")
def testFlowFailureNotificationIsParsedCorrectly(self):
n = self.InitFromObj_(
- rdf_objects.UserNotification.Type.TYPE_FLOW_RUN_FAILED,
- rdf_objects.ObjectReference(
- reference_type=rdf_objects.ObjectReference.Type.FLOW,
- flow=rdf_objects.FlowReference(
+ objects_pb2.UserNotification.Type.TYPE_FLOW_RUN_FAILED,
+ objects_pb2.ObjectReference(
+ reference_type=objects_pb2.ObjectReference.Type.FLOW,
+ flow=objects_pb2.FlowReference(
client_id=self.client_id, flow_id="F:123456"
),
),
)
- self.assertEqual(n.reference.type, "FLOW")
- self.assertEqual(n.reference.flow.client_id.ToString(), self.client_id)
+ self.assertEqual(
+ n.reference.type, api_user_pb2.ApiNotificationReference.Type.FLOW
+ )
+ self.assertEqual(n.reference.flow.client_id, self.client_id)
self.assertEqual(n.reference.flow.flow_id, "F:123456")
def testVfsNotificationIsParsedCorrectly(self):
n = self.InitFromObj_(
- rdf_objects.UserNotification.Type.TYPE_VFS_FILE_COLLECTED,
- rdf_objects.ObjectReference(
- reference_type=rdf_objects.ObjectReference.Type.VFS_FILE,
- vfs_file=rdf_objects.VfsFileReference(
+ objects_pb2.UserNotification.Type.TYPE_VFS_FILE_COLLECTED,
+ objects_pb2.ObjectReference(
+ reference_type=objects_pb2.ObjectReference.Type.VFS_FILE,
+ vfs_file=objects_pb2.VfsFileReference(
client_id=self.client_id,
- path_type=rdf_objects.PathInfo.PathType.OS,
+ path_type=objects_pb2.PathInfo.PathType.OS,
path_components=["foo", "bar"],
),
),
)
- self.assertEqual(n.reference.type, "VFS")
- self.assertEqual(n.reference.vfs.client_id.ToString(), self.client_id)
+ self.assertEqual(
+ n.reference.type, api_user_pb2.ApiNotificationReference.Type.VFS
+ )
+ self.assertEqual(n.reference.vfs.client_id, self.client_id)
self.assertEqual(n.reference.vfs.vfs_path, "fs/os/foo/bar")
def testVfsNotificationWithInvalidReferenceIsParsedDefensively(self):
n = self.InitFromObj_(
- rdf_objects.UserNotification.Type.TYPE_VFS_FILE_COLLECTED,
- rdf_objects.ObjectReference(
- reference_type=rdf_objects.ObjectReference.Type.VFS_FILE,
- vfs_file=rdf_objects.VfsFileReference(
+ objects_pb2.UserNotification.Type.TYPE_VFS_FILE_COLLECTED,
+ objects_pb2.ObjectReference(
+ reference_type=objects_pb2.ObjectReference.Type.VFS_FILE,
+ vfs_file=objects_pb2.VfsFileReference(
client_id=self.client_id,
# UNSET path type is an invalid value here:
                 # it makes it impossible to find the file.
- path_type=rdf_objects.PathInfo.PathType.UNSET,
+ path_type=objects_pb2.PathInfo.PathType.UNSET,
path_components=["foo", "bar"],
),
),
)
- self.assertEqual(n.reference.type, "UNSET")
+ self.assertEqual(
+ n.reference.type, api_user_pb2.ApiNotificationReference.Type.UNSET
+ )
def testClientApprovalNotificationIsParsedCorrectly(self):
n = self.InitFromObj_(
- rdf_objects.UserNotification.Type.TYPE_CLIENT_APPROVAL_REQUESTED,
- rdf_objects.ObjectReference(
- reference_type=rdf_objects.ObjectReference.Type.APPROVAL_REQUEST,
- approval_request=rdf_objects.ApprovalRequestReference(
- approval_type=rdf_objects.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CLIENT,
+ objects_pb2.UserNotification.Type.TYPE_CLIENT_APPROVAL_REQUESTED,
+ objects_pb2.ObjectReference(
+ reference_type=objects_pb2.ObjectReference.Type.APPROVAL_REQUEST,
+ approval_request=objects_pb2.ApprovalRequestReference(
+ approval_type=objects_pb2.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CLIENT,
approval_id="foo-bar",
subject_id=self.client_id,
requestor_username=self.context.username,
@@ -181,20 +209,23 @@ def testClientApprovalNotificationIsParsedCorrectly(self):
),
)
- self.assertEqual(n.reference.type, "CLIENT_APPROVAL")
+ self.assertEqual(
+ n.reference.type,
+ api_user_pb2.ApiNotificationReference.Type.CLIENT_APPROVAL,
+ )
client_approval = n.reference.client_approval
- self.assertEqual(client_approval.client_id.ToString(), self.client_id)
+ self.assertEqual(client_approval.client_id, self.client_id)
self.assertEqual(client_approval.username, self.context.username)
self.assertEqual(client_approval.approval_id, "foo-bar")
def testHuntApprovalNotificationIsParsedCorrectly(self):
n = self.InitFromObj_(
- rdf_objects.UserNotification.Type.TYPE_HUNT_APPROVAL_REQUESTED,
- rdf_objects.ObjectReference(
- reference_type=rdf_objects.ObjectReference.Type.APPROVAL_REQUEST,
- approval_request=rdf_objects.ApprovalRequestReference(
- approval_type=rdf_objects.ApprovalRequest.ApprovalType.APPROVAL_TYPE_HUNT,
+ objects_pb2.UserNotification.Type.TYPE_HUNT_APPROVAL_REQUESTED,
+ objects_pb2.ObjectReference(
+ reference_type=objects_pb2.ObjectReference.Type.APPROVAL_REQUEST,
+ approval_request=objects_pb2.ApprovalRequestReference(
+ approval_type=objects_pb2.ApprovalRequest.ApprovalType.APPROVAL_TYPE_HUNT,
approval_id="foo-bar",
subject_id="H:123456",
requestor_username=self.context.username,
@@ -202,18 +233,21 @@ def testHuntApprovalNotificationIsParsedCorrectly(self):
),
)
- self.assertEqual(n.reference.type, "HUNT_APPROVAL")
+ self.assertEqual(
+ n.reference.type,
+ api_user_pb2.ApiNotificationReference.Type.HUNT_APPROVAL,
+ )
self.assertEqual(n.reference.hunt_approval.hunt_id, "H:123456")
self.assertEqual(n.reference.hunt_approval.username, self.context.username)
self.assertEqual(n.reference.hunt_approval.approval_id, "foo-bar")
def testCronJobApprovalNotificationIsParsedCorrectly(self):
n = self.InitFromObj_(
- rdf_objects.UserNotification.Type.TYPE_CRON_JOB_APPROVAL_REQUESTED,
- rdf_objects.ObjectReference(
- reference_type=rdf_objects.ObjectReference.Type.APPROVAL_REQUEST,
- approval_request=rdf_objects.ApprovalRequestReference(
- approval_type=rdf_objects.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CRON_JOB,
+ objects_pb2.UserNotification.Type.TYPE_CRON_JOB_APPROVAL_REQUESTED,
+ objects_pb2.ObjectReference(
+ reference_type=objects_pb2.ObjectReference.Type.APPROVAL_REQUEST,
+ approval_request=objects_pb2.ApprovalRequestReference(
+ approval_type=objects_pb2.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CRON_JOB,
approval_id="foo-bar",
subject_id="FooBar",
requestor_username=self.context.username,
@@ -221,7 +255,10 @@ def testCronJobApprovalNotificationIsParsedCorrectly(self):
),
)
- self.assertEqual(n.reference.type, "CRON_JOB_APPROVAL")
+ self.assertEqual(
+ n.reference.type,
+ api_user_pb2.ApiNotificationReference.Type.CRON_JOB_APPROVAL,
+ )
self.assertEqual(n.reference.cron_job_approval.cron_job_id, "FooBar")
self.assertEqual(
n.reference.cron_job_approval.username, self.context.username
@@ -230,49 +267,37 @@ def testCronJobApprovalNotificationIsParsedCorrectly(self):
def testFileArchiveGenerationFailedNotificationIsParsedAsUnknownOrUnset(self):
n = self.InitFromObj_(
- rdf_objects.UserNotification.Type.TYPE_FILE_ARCHIVE_GENERATION_FAILED,
+ objects_pb2.UserNotification.Type.TYPE_FILE_ARCHIVE_GENERATION_FAILED,
None,
message="blah",
)
- self.assertIn(n.reference.type, ["UNSET", "UNKNOWN"])
+ self.assertIn(
+ n.reference.type,
+ [
+ api_user_pb2.ApiNotificationReference.Type.UNSET,
+ api_user_pb2.ApiNotificationReference.Type.UNKNOWN,
+ ],
+ )
self.assertEqual(n.message, "blah")
def testVfsListDirectoryCompletedIsParsedCorrectly(self):
n = self.InitFromObj_(
- rdf_objects.UserNotification.Type.TYPE_VFS_LIST_DIRECTORY_COMPLETED,
- rdf_objects.ObjectReference(
- reference_type=rdf_objects.ObjectReference.Type.VFS_FILE,
- vfs_file=rdf_objects.VfsFileReference(
+ objects_pb2.UserNotification.Type.TYPE_VFS_LIST_DIRECTORY_COMPLETED,
+ objects_pb2.ObjectReference(
+ reference_type=objects_pb2.ObjectReference.Type.VFS_FILE,
+ vfs_file=objects_pb2.VfsFileReference(
client_id=self.client_id,
- path_type=rdf_objects.PathInfo.PathType.OS,
+ path_type=objects_pb2.PathInfo.PathType.OS,
path_components=["foo", "bar"],
),
),
)
- self.assertEqual(n.reference.type, "VFS")
- self.assertEqual(n.reference.vfs.client_id.ToString(), self.client_id)
- self.assertEqual(n.reference.vfs.vfs_path, "fs/os/foo/bar")
-
- def testUnknownNotificationIsParsedCorrectly(self):
- urn = rdf_client.ClientURN(self.client_id).Add("foo/bar")
- n = user_plugin.ApiNotification().InitFromNotification(
- rdf_flows.Notification(type="ViewObject", subject=urn)
- )
- self.assertEqual(n.reference.type, "UNKNOWN")
- self.assertEqual(n.reference.unknown.subject_urn, urn)
-
- n = user_plugin.ApiNotification().InitFromNotification(
- rdf_flows.Notification(type="FlowStatus", subject="foo/bar")
- )
- self.assertEqual(n.reference.type, "UNKNOWN")
- self.assertEqual(n.reference.unknown.subject_urn, "foo/bar")
-
- def testNotificationWithoutSubject(self):
- n = user_plugin.ApiNotification().InitFromNotification(
- rdf_flows.Notification(type="ViewObject")
+ self.assertEqual(
+ n.reference.type, api_user_pb2.ApiNotificationReference.Type.VFS
)
- self.assertEqual(n.reference.type, "UNKNOWN")
+ self.assertEqual(n.reference.vfs.client_id, self.client_id)
+ self.assertEqual(n.reference.vfs.vfs_path, "fs/os/foo/bar")
class ApiCreateApprovalHandlerTestMixin(
@@ -302,7 +327,7 @@ def testApproversFromArgsAreIgnored(self):
# It shouldn't be possible to specify list of approvers when creating
# an approval. List of approvers contains names of GRR users who
# approved the approval.
- self.args.approval.approvers = [self.context.username, "approver"]
+ self.args.approval.approvers.extend([self.context.username, "approver"])
approval_id = self.handler.Handle(self.args, context=self.context).id
approval_obj = self.ReadApproval(approval_id)
@@ -364,7 +389,7 @@ def testDoesNotStartScheduledFlowsIfGrantedApprovalIsNotValid(self):
requestor=self.context.username,
)
- args = user_plugin.ApiGetClientApprovalArgs(
+ args = api_user_pb2.ApiGetClientApprovalArgs(
client_id=self.client_id,
approval_id=approval_id,
username=self.context.username,
@@ -384,7 +409,7 @@ def testStartsScheduledFlowsIfGrantedApprovalIsValid(self):
requestor=self.context.username,
)
- args = user_plugin.ApiGetClientApprovalArgs(
+ args = api_user_pb2.ApiGetClientApprovalArgs(
client_id=self.client_id,
approval_id=approval_id,
username=self.context.username,
@@ -419,7 +444,7 @@ def testErrorDuringStartFlowDoesNotBubbleUpToApprovalApiCall(self):
requestor=self.context.username,
)
- args = user_plugin.ApiGetClientApprovalArgs(
+ args = api_user_pb2.ApiGetClientApprovalArgs(
client_id=self.client_id,
approval_id=approval_id,
username=self.context.username,
@@ -450,14 +475,14 @@ def testRendersRequestedClientApproval(self):
email_cc_address="test@example.com",
)
- args = user_plugin.ApiGetClientApprovalArgs(
+ args = api_user_pb2.ApiGetClientApprovalArgs(
client_id=self.client_id,
approval_id=approval_id,
username=self.context.username,
)
result = self.handler.Handle(args, context=self.context)
- self.assertEqual(result.subject.client_id.ToString(), self.client_id)
+ self.assertEqual(result.subject.client_id, self.client_id)
self.assertEqual(result.reason, "blah")
self.assertEqual(result.is_valid, False)
self.assertEqual(
@@ -479,7 +504,7 @@ def testIncludesApproversInResultWhenApprovalIsGranted(self):
requestor=self.context.username,
)
- args = user_plugin.ApiGetClientApprovalArgs(
+ args = api_user_pb2.ApiGetClientApprovalArgs(
client_id=self.client_id,
approval_id=approval_id,
username=self.context.username,
@@ -490,7 +515,7 @@ def testIncludesApproversInResultWhenApprovalIsGranted(self):
self.assertCountEqual(result.approvers, [self.context.username, "approver"])
def testRaisesWhenApprovalIsNotFound(self):
- args = user_plugin.ApiGetClientApprovalArgs(
+ args = api_user_pb2.ApiGetClientApprovalArgs(
client_id=self.client_id,
approval_id="approval:112233",
username=self.context.username,
@@ -520,12 +545,12 @@ def setUp(self):
self.handler = user_plugin.ApiCreateClientApprovalHandler()
- self.args = user_plugin.ApiCreateClientApprovalArgs(
+ self.args = api_user_pb2.ApiCreateClientApprovalArgs(
client_id=self.client_id
)
self.args.approval.reason = "Running tests"
- self.args.approval.notified_users = ["approver"]
- self.args.approval.email_cc_addresses = ["test@example.com"]
+ self.args.approval.notified_users.append("approver")
+ self.args.approval.email_cc_addresses.append("test@example.com")
def testSendsEmailWithApprovalInformation(self):
with mock.patch.object(email_alerts.EMAIL_ALERTER, "SendEmail") as send_fn:
@@ -569,7 +594,7 @@ def testDefaultExpiration(self):
config.CONFIG["ACL.token_expiry"] * 1000000
) + oneday_s.AsMicrosecondsSinceEpoch()
- self.assertEqual(approval.expiration_time_us, twentyninedays_us)
+ self.assertEqual(approval.expiration_time_us, int(twentyninedays_us))
def testCorrectNonDefaultExpiration(self):
"""Tests that a custom expiration is correctly applied."""
@@ -580,12 +605,11 @@ def testCorrectNonDefaultExpiration(self):
mock_now.return_value = ( # 'Now' is 1 day past epoch
rdfvalue.RDFDatetime.FromSecondsSinceEpoch(24 * 60 * 60)
)
-
onetwentydays_us = 120 * 24 * 60 * 60 * 1000000
- self.args.approval.expiration_time_us = (
+
+ self.args.approval.expiration_time_us = int(
rdfvalue.RDFDatetime.FromMicrosecondsSinceEpoch(onetwentydays_us)
)
-
approval = self.handler.Handle(self.args, self.context)
self.assertEqual(approval.expiration_time_us, onetwentydays_us)
@@ -601,9 +625,7 @@ def testNonDefaultExpirationInPast(self):
)
onehour_us = 60 * 60 * 1000000
- self.args.approval.expiration_time_us = (
- rdfvalue.RDFDatetime.FromMicrosecondsSinceEpoch(onehour_us)
- )
+ self.args.approval.expiration_time_us = onehour_us
with self.assertRaisesRegex(
ValueError,
@@ -622,9 +644,7 @@ def testNonDefaultExpirationTooLong(self):
)
fourhundreddays_us = 400 * 24 * 60 * 60 * 1000000
- self.args.approval.expiration_time_us = (
- rdfvalue.RDFDatetime.FromMicrosecondsSinceEpoch(fourhundreddays_us)
- )
+ self.args.approval.expiration_time_us = fourhundreddays_us
with self.assertRaisesRegex(
ValueError,
@@ -655,7 +675,7 @@ def _RequestClientApprovals(self):
def testRendersRequestedClientApprovals(self):
self._RequestClientApprovals()
- args = user_plugin.ApiListClientApprovalsArgs()
+ args = api_user_pb2.ApiListClientApprovalsArgs()
result = self.handler.Handle(args, context=self.context)
# All approvals should be returned.
@@ -667,18 +687,18 @@ def testFiltersApprovalsByClientId(self):
self._RequestClientApprovals()
# Get approvals for a specific client. There should be exactly one.
- args = user_plugin.ApiListClientApprovalsArgs(client_id=client_id)
+ args = api_user_pb2.ApiListClientApprovalsArgs(client_id=client_id)
result = self.handler.Handle(args, context=self.context)
self.assertLen(result.items, 1)
- self.assertEqual(result.items[0].subject.client_id.ToString(), client_id)
+ self.assertEqual(result.items[0].subject.client_id, client_id)
def testFiltersApprovalsByInvalidState(self):
approval_ids = self._RequestClientApprovals()
# We only requested approvals so far, so all of them should be invalid.
- args = user_plugin.ApiListClientApprovalsArgs(
- state=user_plugin.ApiListClientApprovalsArgs.State.INVALID
+ args = api_user_pb2.ApiListClientApprovalsArgs(
+ state=api_user_pb2.ApiListClientApprovalsArgs.State.INVALID
)
result = self.handler.Handle(args, context=self.context)
@@ -697,8 +717,8 @@ def testFiltersApprovalsByValidState(self):
approval_ids = self._RequestClientApprovals()
# We only requested approvals so far, so none of them is valid.
- args = user_plugin.ApiListClientApprovalsArgs(
- state=user_plugin.ApiListClientApprovalsArgs.State.VALID
+ args = api_user_pb2.ApiListClientApprovalsArgs(
+ state=api_user_pb2.ApiListClientApprovalsArgs.State.VALID
)
result = self.handler.Handle(args, context=self.context)
@@ -713,9 +733,7 @@ def testFiltersApprovalsByValidState(self):
)
result = self.handler.Handle(args, context=self.context)
self.assertLen(result.items, 1)
- self.assertEqual(
- result.items[0].subject.client_id.ToString(), self.client_ids[0]
- )
+ self.assertEqual(result.items[0].subject.client_id, self.client_ids[0])
def testFiltersApprovalsByClientIdAndState(self):
client_id = self.client_ids[0]
@@ -727,16 +745,16 @@ def testFiltersApprovalsByClientIdAndState(self):
client_id, requestor=self.context.username, approval_id=approval_ids[0]
)
- args = user_plugin.ApiListClientApprovalsArgs(
+ args = api_user_pb2.ApiListClientApprovalsArgs(
client_id=client_id,
- state=user_plugin.ApiListClientApprovalsArgs.State.VALID,
+ state=api_user_pb2.ApiListClientApprovalsArgs.State.VALID,
)
result = self.handler.Handle(args, context=self.context)
# We have a valid approval for the requested client.
self.assertLen(result.items, 1)
- args.state = user_plugin.ApiListClientApprovalsArgs.State.INVALID
+ args.state = api_user_pb2.ApiListClientApprovalsArgs.State.INVALID
result = self.handler.Handle(args, context=self.context)
# However, we do not have any invalid approvals for the client.
@@ -750,7 +768,7 @@ def testFilterConsidersOffsetAndCount(self):
with test_lib.FakeTime(42 + i):
self.RequestClientApproval(client_id, reason="Request reason %d" % i)
- args = user_plugin.ApiListClientApprovalsArgs(
+ args = api_user_pb2.ApiListClientApprovalsArgs(
client_id=client_id, offset=0, count=5
)
result = self.handler.Handle(args, context=self.context)
@@ -762,7 +780,9 @@ def testFilterConsidersOffsetAndCount(self):
self.assertEqual(item.reason, "Request reason %d" % i)
# When no count is specified, take all items from offset to the end.
- args = user_plugin.ApiListClientApprovalsArgs(client_id=client_id, offset=7)
+ args = api_user_pb2.ApiListClientApprovalsArgs(
+ client_id=client_id, offset=7
+ )
result = self.handler.Handle(args, context=self.context)
self.assertLen(result.items, 3)
@@ -792,10 +812,10 @@ def setUp(self):
self.handler = user_plugin.ApiCreateHuntApprovalHandler()
- self.args = user_plugin.ApiCreateHuntApprovalArgs(hunt_id=hunt_id)
+ self.args = api_user_pb2.ApiCreateHuntApprovalArgs(hunt_id=hunt_id)
self.args.approval.reason = "Running tests"
- self.args.approval.notified_users = ["approver"]
- self.args.approval.email_cc_addresses = ["test@example.com"]
+ self.args.approval.notified_users.append("approver")
+ self.args.approval.email_cc_addresses.append("test@example.com")
class ApiListHuntApprovalsHandlerTest(
@@ -817,7 +837,7 @@ def testRendersRequestedHuntAppoval(self):
requestor=self.context.username,
)
- args = user_plugin.ApiListHuntApprovalsArgs()
+ args = api_user_pb2.ApiListHuntApprovalsArgs()
result = self.handler.Handle(args, context=self.context)
self.assertLen(result.items, 1)
@@ -850,10 +870,10 @@ def setUp(self):
self.handler = user_plugin.ApiCreateCronJobApprovalHandler()
- self.args = user_plugin.ApiCreateCronJobApprovalArgs(cron_job_id=cron_id)
+ self.args = api_user_pb2.ApiCreateCronJobApprovalArgs(cron_job_id=cron_id)
self.args.approval.reason = "Running tests"
- self.args.approval.notified_users = ["approver"]
- self.args.approval.email_cc_addresses = ["test@example.com"]
+ self.args.approval.notified_users.append("approver")
+ self.args.approval.email_cc_addresses.append("test@example.com")
class ApiListCronJobApprovalsHandlerTest(
@@ -881,7 +901,7 @@ def testRendersRequestedCronJobApproval(self):
requestor=self.context.username,
)
- args = user_plugin.ApiListCronJobApprovalsArgs()
+ args = api_user_pb2.ApiListCronJobApprovalsArgs()
result = self.handler.Handle(args, context=self.context)
self.assertLen(result.items, 1)
@@ -903,7 +923,7 @@ def testRendersSettingsForUserCorrespondingToContext(self):
result = self.handler.Handle(
None, context=api_call_context.ApiCallContext(username="foo")
)
- self.assertEqual(result.settings.mode, "ADVANCED")
+ self.assertEqual(result.settings.mode, user_pb2.GUISettings.UIMode.ADVANCED)
self.assertEqual(result.settings.canary_mode, True)
def testRendersTraitsPassedInConstructor(self):
@@ -913,7 +933,7 @@ def testRendersTraitsPassedInConstructor(self):
self.assertFalse(result.interface_traits.create_hunt_action_enabled)
handler = user_plugin.ApiGetOwnGrrUserHandler(
- interface_traits=user_plugin.ApiGrrUserInterfaceTraits(
+ interface_traits=api_user_pb2.ApiGrrUserInterfaceTraits(
create_hunt_action_enabled=True
)
)
@@ -931,21 +951,21 @@ def setUp(self):
self.handler = user_plugin.ApiUpdateGrrUserHandler()
def testRaisesIfUsernameSetInRequest(self):
- user = user_plugin.ApiGrrUser(username="foo")
+ user = api_user_pb2.ApiGrrUser(username="foo")
with self.assertRaises(ValueError):
self.handler.Handle(
user, context=api_call_context.ApiCallContext(username="foo")
)
- user = user_plugin.ApiGrrUser(username="bar")
+ user = api_user_pb2.ApiGrrUser(username="bar")
with self.assertRaises(ValueError):
self.handler.Handle(
user, context=api_call_context.ApiCallContext(username="foo")
)
def testRaisesIfTraitsSetInRequest(self):
- user = user_plugin.ApiGrrUser(
- interface_traits=user_plugin.ApiGrrUserInterfaceTraits()
+ user = api_user_pb2.ApiGrrUser(
+ interface_traits=api_user_pb2.ApiGrrUserInterfaceTraits()
)
with self.assertRaises(ValueError):
self.handler.Handle(
@@ -953,17 +973,18 @@ def testRaisesIfTraitsSetInRequest(self):
)
def testSetsSettingsForUserCorrespondingToToken(self):
- settings = user_plugin.GUISettings(mode="ADVANCED", canary_mode=True)
- user = user_plugin.ApiGrrUser(settings=settings)
+ settings = user_pb2.GUISettings(
+ mode=user_pb2.GUISettings.UIMode.ADVANCED, canary_mode=True
+ )
+ user = api_user_pb2.ApiGrrUser(settings=settings)
self.handler.Handle(
user, context=api_call_context.ApiCallContext(username="foo")
)
- proto_user = data_store.REL_DB.ReadGRRUser("foo")
- rdf_user = mig_objects.ToRDFGRRUser(proto_user)
- self.assertEqual(settings.mode, rdf_user.ui_mode)
- self.assertEqual(settings.canary_mode, rdf_user.canary_mode)
+ user = data_store.REL_DB.ReadGRRUser("foo")
+ self.assertEqual(settings.mode, user.ui_mode)
+ self.assertEqual(settings.canary_mode, user.canary_mode)
class ApiDeletePendingUserNotificationHandlerTest(
@@ -1012,17 +1033,19 @@ def setUp(self):
),
)
- def _GetNotifications(self):
+ def _GetNotifications(
+ self,
+ ) -> tuple[
+ list[objects_pb2.UserNotification], list[objects_pb2.UserNotification]
+ ]:
pending = data_store.REL_DB.ReadUserNotifications(
self.context.username,
- state=rdf_objects.UserNotification.State.STATE_PENDING,
+ state=objects_pb2.UserNotification.State.STATE_PENDING,
)
- pending = [mig_objects.ToRDFUserNotification(n) for n in pending]
shown = data_store.REL_DB.ReadUserNotifications(
self.context.username,
- state=rdf_objects.UserNotification.State.STATE_NOT_PENDING,
+ state=objects_pb2.UserNotification.State.STATE_NOT_PENDING,
)
- shown = [mig_objects.ToRDFUserNotification(n) for n in shown]
return pending, shown
def testDeletesFromPendingAndAddsToShown(self):
@@ -1032,8 +1055,8 @@ def testDeletesFromPendingAndAddsToShown(self):
self.assertEmpty(shown)
# Delete a pending notification.
- args = user_plugin.ApiDeletePendingUserNotificationArgs(
- timestamp=self.TIME_1
+ args = api_user_pb2.ApiDeletePendingUserNotificationArgs(
+ timestamp=int(self.TIME_1)
)
self.handler.Handle(args, context=self.context)
@@ -1042,7 +1065,7 @@ def testDeletesFromPendingAndAddsToShown(self):
self.assertLen(pending, 2)
self.assertLen(shown, 1)
self.assertIn("", shown[0].message)
- self.assertEqual(shown[0].timestamp, self.TIME_1)
+ self.assertEqual(shown[0].timestamp, int(self.TIME_1))
def testUnknownTimestampIsIgnored(self):
# Check that there are three pending notifications and no shown ones yet.
@@ -1052,8 +1075,8 @@ def testUnknownTimestampIsIgnored(self):
# A timestamp not matching any pending notifications does not change any of
# the collections.
- args = user_plugin.ApiDeletePendingUserNotificationArgs(
- timestamp=self.TIME_2
+ args = api_user_pb2.ApiDeletePendingUserNotificationArgs(
+ timestamp=int(self.TIME_2)
)
self.handler.Handle(args, context=self.context)
@@ -1077,7 +1100,7 @@ def setUp(self):
self.CreateUser("api_user_2")
def _query(self, username):
- args = user_plugin.ApiListApproverSuggestionsArgs(username_query=username)
+ args = api_user_pb2.ApiListApproverSuggestionsArgs(username_query=username)
return self.handler.Handle(args, context=self.context)
def testListsSingleSuggestions(self):
@@ -1154,7 +1177,7 @@ def testSendsEmailWithApprovalGrantInformation(self):
with mock.patch.object(email_alerts.EMAIL_ALERTER, "SendEmail") as send_fn:
self.handler.Handle(
- user_plugin.ApiGrantClientApprovalArgs(
+ api_user_pb2.ApiGrantClientApprovalArgs(
client_id=self.client_id,
approval_id=approval_id,
username="requestuser",
@@ -1198,7 +1221,7 @@ def testSendsEmailWithApprovalGrantInformation(self):
with mock.patch.object(email_alerts.EMAIL_ALERTER, "SendEmail") as send_fn:
self.handler.Handle(
- user_plugin.ApiGrantHuntApprovalArgs(
+ api_user_pb2.ApiGrantHuntApprovalArgs(
hunt_id=self.hunt_id,
approval_id=approval_id,
username="requestuser",
diff --git a/grr/server/grr_response_server/gui/api_plugins/vfs.py b/grr/server/grr_response_server/gui/api_plugins/vfs.py
index 24860f7b20..d14a197a39 100644
--- a/grr/server/grr_response_server/gui/api_plugins/vfs.py
+++ b/grr/server/grr_response_server/gui/api_plugins/vfs.py
@@ -7,7 +7,7 @@
import os
import re
import stat
-from typing import Collection, Dict, Iterable, Iterator, List, Optional, Set, Tuple
+from typing import Collection, Dict, Iterable, Iterator, List, Optional, Sequence, Set, Tuple
import zipfile
from grr_response_core import config
@@ -201,7 +201,9 @@ class ApiGetFileDetailsResult(rdf_structs.RDFProtoStruct):
]
-def _GenerateApiFileDetails(path_infos):
+def _GenerateApiFileDetails(
+ path_infos: Sequence[rdf_objects.PathInfo],
+) -> ApiAff4ObjectRepresentation:
"""Generate file details based on path infos history."""
type_attrs = []
@@ -287,8 +289,14 @@ class ApiGetFileDetailsHandler(api_call_handler_base.ApiCallHandler):
args_type = ApiGetFileDetailsArgs
result_type = ApiGetFileDetailsResult
+ proto_args_type = vfs_pb2.ApiGetFileDetailsArgs
+ proto_result_type = vfs_pb2.ApiGetFileDetailsResult
- def Handle(self, args, context=None):
+ def Handle(
+ self,
+ args: vfs_pb2.ApiGetFileDetailsArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> vfs_pb2.ApiGetFileDetailsResult:
ValidateVfsPath(args.file_path)
# Directories are not really "files" so they cannot be stored in the
@@ -307,58 +315,63 @@ def Handle(self, args, context=None):
"fs/tsk",
"fs/ntfs",
]:
- api_file = ApiFile(
+ api_file = vfs_pb2.ApiFile(
name=args.file_path,
path=args.file_path,
is_directory=True,
- details=_GenerateApiFileDetails([]),
+ details=ToProtoApiAff4ObjectRepresentation(
+ _GenerateApiFileDetails([])
+ ),
)
- return ApiGetFileDetailsResult(file=api_file)
+ return vfs_pb2.ApiGetFileDetailsResult(file=api_file)
path_type, components = rdf_objects.ParseCategorizedPath(args.file_path)
-
- client_id = str(args.client_id)
-
+ args_timestamp = None
+ if args.HasField("timestamp"):
+ args_timestamp = rdfvalue.RDFDatetime.FromMicrosecondsSinceEpoch(
+ args.timestamp
+ )
try:
- proto_path_info = data_store.REL_DB.ReadPathInfo(
- client_id=client_id,
+ path_info = data_store.REL_DB.ReadPathInfo(
+ client_id=args.client_id,
path_type=path_type,
components=components,
- timestamp=args.timestamp,
+ timestamp=args_timestamp,
)
- except db.UnknownPathError:
+ except db.UnknownPathError as ex:
raise FileNotFoundError(
- client_id=client_id, path_type=path_type, components=components
- )
+ client_id=args.client_id, path_type=path_type, components=components
+ ) from ex
- path_info = None
- if proto_path_info is not None:
- path_info = mig_objects.ToRDFPathInfo(proto_path_info)
- last_collection_pi = file_store.GetLastCollectionPathInfo(
- db.ClientPath.FromPathInfo(client_id, path_info),
- max_timestamp=args.timestamp,
+ client_path = db.ClientPath.FromPathInfo(args.client_id, path_info)
+ last_collection_pi = (
+ data_store.REL_DB.ReadLatestPathInfosWithHashBlobReferences(
+ [client_path],
+ max_timestamp=args_timestamp,
+ )[client_path]
)
- proto_history = data_store.REL_DB.ReadPathInfoHistory(
- client_id=client_id,
+ history = data_store.REL_DB.ReadPathInfoHistory(
+ client_id=args.client_id,
path_type=path_type,
components=components,
- cutoff=args.timestamp,
+ cutoff=args_timestamp,
)
- history = [mig_objects.ToRDFPathInfo(pi) for pi in proto_history]
history.reverse()
# It might be the case that we do not have any history about the file, but
# we have some information because it is an implicit path.
if not history:
history = [path_info]
-
- file_obj = ApiFile(
+ history = [mig_objects.ToRDFPathInfo(pi) for pi in history]
+ file_obj = vfs_pb2.ApiFile(
name=components[-1],
path=rdf_objects.ToCategorizedPath(path_type, components),
stat=path_info.stat_entry,
hash=path_info.hash_entry,
- details=_GenerateApiFileDetails(history),
+ details=ToProtoApiAff4ObjectRepresentation(
+ _GenerateApiFileDetails(history)
+ ),
is_directory=path_info.directory,
age=path_info.timestamp,
)
@@ -367,7 +380,7 @@ def Handle(self, args, context=None):
file_obj.last_collected = last_collection_pi.timestamp
file_obj.last_collected_size = last_collection_pi.hash_entry.num_bytes
- return ApiGetFileDetailsResult(file=file_obj)
+ return vfs_pb2.ApiGetFileDetailsResult(file=file_obj)
class ApiListFilesArgs(rdf_structs.RDFProtoStruct):
@@ -385,23 +398,23 @@ class ApiListFilesResult(rdf_structs.RDFProtoStruct):
]
-def _PathInfoToApiFile(path_info: rdf_objects.PathInfo) -> ApiFile:
+def _PathInfoToApiFile(path_info: objects_pb2.PathInfo) -> vfs_pb2.ApiFile:
"""Converts a PathInfo to an ApiFile."""
- if path_info.path_type == rdf_objects.PathInfo.PathType.OS:
+ if path_info.path_type == objects_pb2.PathInfo.PathType.OS:
prefix = "fs/os/"
- elif path_info.path_type == rdf_objects.PathInfo.PathType.TSK:
+ elif path_info.path_type == objects_pb2.PathInfo.PathType.TSK:
prefix = "fs/tsk/"
- elif path_info.path_type == rdf_objects.PathInfo.PathType.NTFS:
+ elif path_info.path_type == objects_pb2.PathInfo.PathType.NTFS:
prefix = "fs/ntfs/"
- elif path_info.path_type == rdf_objects.PathInfo.PathType.REGISTRY:
+ elif path_info.path_type == objects_pb2.PathInfo.PathType.REGISTRY:
prefix = "registry/"
- elif path_info.path_type == rdf_objects.PathInfo.PathType.TEMP:
+ elif path_info.path_type == objects_pb2.PathInfo.PathType.TEMP:
prefix = "temp/"
else:
raise ValueError(f"Unknown PathType {path_info.path_type}")
- api_file = ApiFile(
- name=path_info.basename,
+ api_file = vfs_pb2.ApiFile(
+ name=path_info.components[-1] if path_info.components else "",
path=prefix + "/".join(path_info.components),
# TODO(hanuszczak): `PathInfo#directory` tells us whether given path has
# ever been observed as a directory. Is this what we want here or should
@@ -410,10 +423,10 @@ def _PathInfoToApiFile(path_info: rdf_objects.PathInfo) -> ApiFile:
age=path_info.timestamp,
)
- if path_info.stat_entry:
- api_file.stat = path_info.stat_entry
+ if path_info.HasField("stat_entry"):
+ api_file.stat.CopyFrom(path_info.stat_entry)
- if path_info.last_hash_entry_timestamp:
+ if path_info.HasField("last_hash_entry_timestamp"):
api_file.last_collected = path_info.last_hash_entry_timestamp
api_file.last_collected_size = path_info.hash_entry.num_bytes
@@ -425,26 +438,30 @@ class ApiListFilesHandler(api_call_handler_base.ApiCallHandler):
args_type = ApiListFilesArgs
result_type = ApiListFilesResult
+ proto_args_type = vfs_pb2.ApiListFilesArgs
+ proto_result_type = vfs_pb2.ApiListFilesResult
- def _GetRootChildren(self, args, context=None):
- client_id = str(args.client_id)
+ def _GetRootChildren(
+ self,
+ args: vfs_pb2.ApiListFilesArgs,
+ ) -> vfs_pb2.ApiListFilesResult:
items = []
- fs_item = ApiFile()
+ fs_item = vfs_pb2.ApiFile()
fs_item.name = "fs"
fs_item.path = "fs"
fs_item.is_directory = True
items.append(fs_item)
- temp_item = ApiFile()
+ temp_item = vfs_pb2.ApiFile()
temp_item.name = "temp"
temp_item.path = "temp"
temp_item.is_directory = True
items.append(temp_item)
- if data_store_utils.GetClientOs(client_id) == "Windows":
- registry_item = ApiFile()
+ if data_store_utils.GetClientOs(args.client_id) == "Windows":
+ registry_item = vfs_pb2.ApiFile()
registry_item.name = "registry"
registry_item.path = "registry"
registry_item.is_directory = True
@@ -455,24 +472,26 @@ def _GetRootChildren(self, args, context=None):
else:
items = items[args.offset :]
- return ApiListFilesResult(items=items)
+ return vfs_pb2.ApiListFilesResult(items=items)
- def _GetFilesystemChildren(self, args):
+ def _GetFilesystemChildren(
+ self, args: vfs_pb2.ApiListFilesArgs
+ ) -> vfs_pb2.ApiListFilesResult:
items = []
- ntfs_item = ApiFile()
+ ntfs_item = vfs_pb2.ApiFile()
ntfs_item.name = "ntfs"
ntfs_item.path = "fs/ntfs"
ntfs_item.is_directory = True
items.append(ntfs_item)
- os_item = ApiFile()
+ os_item = vfs_pb2.ApiFile()
os_item.name = "os"
os_item.path = "fs/os"
os_item.is_directory = True
items.append(os_item)
- tsk_item = ApiFile()
+ tsk_item = vfs_pb2.ApiFile()
tsk_item.name = "tsk"
tsk_item.path = "fs/tsk"
tsk_item.is_directory = True
@@ -483,34 +502,40 @@ def _GetFilesystemChildren(self, args):
else:
items = items[args.offset :]
- return ApiListFilesResult(items=items)
+ return vfs_pb2.ApiListFilesResult(items=items)
- def Handle(self, args, context=None):
+ def Handle(
+ self,
+ args: vfs_pb2.ApiListFilesArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> vfs_pb2.ApiListFilesResult:
if not args.file_path or args.file_path == "/":
- return self._GetRootChildren(args, context=context)
+ return self._GetRootChildren(args)
if args.file_path == "fs":
return self._GetFilesystemChildren(args)
path_type, components = rdf_objects.ParseCategorizedPath(args.file_path)
-
+ args_timestamp = None
+ if args.HasField("timestamp"):
+ args_timestamp = rdfvalue.RDFDatetime.FromMicrosecondsSinceEpoch(
+ args.timestamp
+ )
# TODO: This API handler should return a 404 response if the
# path is not found. Currently, 500 is returned.
- proto_child_path_infos = data_store.REL_DB.ListChildPathInfos(
- client_id=args.client_id.ToString(),
+ child_path_infos = data_store.REL_DB.ListChildPathInfos(
+ client_id=args.client_id,
path_type=path_type,
components=components,
- timestamp=args.timestamp,
+ timestamp=args_timestamp,
)
items = []
- for child_path_info in proto_child_path_infos:
+ for child_path_info in child_path_infos:
if args.directories_only and not child_path_info.directory:
continue
- items.append(
- _PathInfoToApiFile(mig_objects.ToRDFPathInfo(child_path_info))
- )
+ items.append(_PathInfoToApiFile(child_path_info))
# TODO(hanuszczak): Instead of getting the whole list from the database and
# then filtering the results we should do the filtering directly in the
@@ -527,7 +552,7 @@ def Handle(self, args, context=None):
else:
items = items[args.offset :]
- return ApiListFilesResult(items=items)
+ return vfs_pb2.ApiListFilesResult(items=items)
class ApiBrowseFilesystemArgs(rdf_structs.RDFProtoStruct):
@@ -557,22 +582,23 @@ class ApiBrowseFilesystemHandler(api_call_handler_base.ApiCallHandler):
args_type = ApiBrowseFilesystemArgs
result_type = ApiBrowseFilesystemResult
+ proto_args_type = vfs_pb2.ApiBrowseFilesystemArgs
+ proto_result_type = vfs_pb2.ApiBrowseFilesystemResult
def Handle(
self,
- args: ApiBrowseFilesystemArgs,
+ args: vfs_pb2.ApiBrowseFilesystemArgs,
context: Optional[api_call_context.ApiCallContext] = None,
- ) -> ApiBrowseFilesystemResult:
+ ) -> vfs_pb2.ApiBrowseFilesystemResult:
del context # Unused.
last_components = rdf_objects.ParsePath(args.path)
- client_id = args.client_id.ToString()
results = []
path_types_to_query = {
- rdf_objects.PathInfo.PathType.OS,
- rdf_objects.PathInfo.PathType.TSK,
- rdf_objects.PathInfo.PathType.NTFS,
+ objects_pb2.PathInfo.PathType.OS,
+ objects_pb2.PathInfo.PathType.TSK,
+ objects_pb2.PathInfo.PathType.NTFS,
}
if args.include_directory_tree:
@@ -582,7 +608,12 @@ def Handle(
for cur_components in all_components:
path_types_to_query, children = self._ListDirectory(
- client_id, path_types_to_query, cur_components, args.timestamp
+ args.client_id,
+ path_types_to_query,
+ cur_components,
+ rdfvalue.RDFDatetime.FromMicrosecondsSinceEpoch(args.timestamp)
+ if args.HasField("timestamp")
+ else None,
)
if children is None:
@@ -591,12 +622,12 @@ def Handle(
break
results.append(
- ApiBrowseFilesystemEntry(
+ vfs_pb2.ApiBrowseFilesystemEntry(
path="/" + "/".join(cur_components), children=children
)
)
- return ApiBrowseFilesystemResult(items=results)
+ return vfs_pb2.ApiBrowseFilesystemResult(items=results)
def _GetDirectoryTree(
self, components: Collection[str]
@@ -607,34 +638,34 @@ def _GetDirectoryTree(
def _MergePathInfos(
self,
- path_infos: Dict[str, rdf_objects.PathInfo],
- cur_path_infos: Collection[rdf_objects.PathInfo],
+ path_infos: Dict[str, objects_pb2.PathInfo],
+ cur_path_infos: Collection[objects_pb2.PathInfo],
) -> None:
"""Merges PathInfos from different PathTypes (OS, TSK, NTFS)."""
- for proto_pi in cur_path_infos:
- pi = mig_objects.ToRDFPathInfo(proto_pi)
- existing = path_infos.get(pi.basename)
+ for pi in cur_path_infos:
+ existing = path_infos.get(pi.components[-1] if pi.components else "")
# If the VFS has the same file in two PathTypes, use the latest collected
# version.
if (
existing is None
- or existing.timestamp is None
- or (pi.timestamp is not None and existing.timestamp < pi.timestamp)
+ or not existing.HasField("timestamp")
+ or (pi.HasField("timestamp") and existing.timestamp < pi.timestamp)
):
- path_infos[pi.basename] = pi
+ path_infos[pi.components[-1] if pi.components else ""] = pi
def _ListDirectory(
self,
client_id: str,
- path_types: Collection["rdf_objects.PathInfo.PathType"],
+ path_types: Collection["objects_pb2.PathInfo.PathType"],
components: Collection[str],
- timestamp: rdfvalue.RDFDatetime,
+ timestamp: Optional[rdfvalue.RDFDatetime] = None,
) -> Tuple[
- Set["rdf_objects.PathInfo.PathType"], Optional[Collection[ApiFile]]
+ Set["objects_pb2.PathInfo.PathType"],
+ Optional[Collection[vfs_pb2.ApiFile]],
]:
- path_infos = {}
+ path_infos: Dict[str, objects_pb2.PathInfo] = {}
existing_path_types = set(path_types)
for path_type in path_types:
@@ -1621,3 +1652,17 @@ def Handle(
return api_call_handler_base.ApiBinaryStream(
prefix + ".zip", content_generator=content_generator
)
+
+
+# TODO: Temporary copy of migration function due to cyclic
+# dependency.
+def ToProtoApiAff4ObjectRepresentation(
+ rdf: ApiAff4ObjectRepresentation,
+) -> vfs_pb2.ApiAff4ObjectRepresentation:
+ return rdf.AsPrimitiveProto()
+
+
+# TODO: Temporary copy of migration function due to cyclic
+# dependency.
+def ToRDFApiFile(proto: vfs_pb2.ApiFile) -> ApiFile:
+ return ApiFile.FromSerializedBytes(proto.SerializeToString())
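The two temporary migration helpers above work because an RDF protostruct and its generated proto counterpart share the same wire format, so conversion is just a serialize/parse round trip. A generic sketch of the idea (the `convert` helper is illustrative, not a GRR API):

```python
def convert(src, dst_cls):
    """Converts between two message types that share a wire format.

    Mirrors `ApiFile.FromSerializedBytes(proto.SerializeToString())` above:
    serialize with the source type, parse the bytes back with the target.
    """
    dst = dst_cls()
    dst.ParseFromString(src.SerializeToString())
    return dst
```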
diff --git a/grr/server/grr_response_server/gui/api_plugins/vfs_test.py b/grr/server/grr_response_server/gui/api_plugins/vfs_test.py
index b82baf1ac0..1c79e23d08 100644
--- a/grr/server/grr_response_server/gui/api_plugins/vfs_test.py
+++ b/grr/server/grr_response_server/gui/api_plugins/vfs_test.py
@@ -87,28 +87,26 @@ def setUp(self):
self.CreateFileVersions(self.client_id, self.file_path)
def testRaisesOnEmptyPath(self):
- args = vfs_plugin.ApiGetFileDetailsArgs(
- client_id=self.client_id, file_path=""
- )
+ args = vfs_pb2.ApiGetFileDetailsArgs(client_id=self.client_id, file_path="")
with self.assertRaises(ValueError):
self.handler.Handle(args, context=self.context)
def testRaisesOnRootPath(self):
- args = vfs_plugin.ApiGetFileDetailsArgs(
+ args = vfs_pb2.ApiGetFileDetailsArgs(
client_id=self.client_id, file_path="/"
)
with self.assertRaises(ValueError):
self.handler.Handle(args, context=self.context)
def testRaisesIfFirstComponentNotInAllowlist(self):
- args = vfs_plugin.ApiGetFileDetailsArgs(
+ args = vfs_pb2.ApiGetFileDetailsArgs(
client_id=self.client_id, file_path="/analysis"
)
with self.assertRaises(ValueError):
self.handler.Handle(args, context=self.context)
def testRaisesOnNonexistentPath(self):
- args = vfs_plugin.ApiGetFileDetailsArgs(
+ args = vfs_pb2.ApiGetFileDetailsArgs(
client_id=self.client_id, file_path="/fs/os/foo/bar"
)
with self.assertRaises(vfs_plugin.FileNotFoundError):
@@ -116,7 +114,7 @@ def testRaisesOnNonexistentPath(self):
def testHandlerReturnsNewestVersionByDefault(self):
# Get file version without specifying a timestamp.
- args = vfs_plugin.ApiGetFileDetailsArgs(
+ args = vfs_pb2.ApiGetFileDetailsArgs(
client_id=self.client_id, file_path=self.file_path
)
result = self.handler.Handle(args, context=self.context)
@@ -124,24 +122,24 @@ def testHandlerReturnsNewestVersionByDefault(self):
# Should return the newest version.
self.assertEqual(result.file.path, self.file_path)
self.assertAlmostEqual(
- result.file.age,
+ rdfvalue.RDFDatetime.FromMicrosecondsSinceEpoch(result.file.age),
self.time_2,
delta=rdfvalue.Duration.From(1, rdfvalue.SECONDS),
)
def testHandlerReturnsClosestSpecificVersion(self):
# Get specific version.
- args = vfs_plugin.ApiGetFileDetailsArgs(
+ args = vfs_pb2.ApiGetFileDetailsArgs(
client_id=self.client_id,
file_path=self.file_path,
- timestamp=self.time_1,
+ timestamp=int(self.time_1),
)
result = self.handler.Handle(args, context=self.context)
# The age of the returned version might have a slight deviation.
self.assertEqual(result.file.path, self.file_path)
self.assertAlmostEqual(
- result.file.age,
+ rdfvalue.RDFDatetime.FromMicrosecondsSinceEpoch(result.file.age),
self.time_1,
delta=rdfvalue.Duration.From(1, rdfvalue.SECONDS),
)
@@ -153,7 +151,7 @@ def testResultIncludesDetails(self):
attributes here and make sure they are returned.
"""
- args = vfs_plugin.ApiGetFileDetailsArgs(
+ args = vfs_pb2.ApiGetFileDetailsArgs(
client_id=self.client_id, file_path=self.file_path
)
result = self.handler.Handle(args, context=self.context)
@@ -176,13 +174,13 @@ def testIsDirectoryFlag(self):
client_path = db.ClientPath(self.client_id, path_type, components)
vfs_test_lib.CreateDirectory(client_path)
- args = vfs_plugin.ApiGetFileDetailsArgs(
+ args = vfs_pb2.ApiGetFileDetailsArgs(
client_id=self.client_id, file_path=self.file_path
)
result = self.handler.Handle(args, context=self.context)
self.assertFalse(result.file.is_directory)
- args = vfs_plugin.ApiGetFileDetailsArgs(
+ args = vfs_pb2.ApiGetFileDetailsArgs(
client_id=self.client_id, file_path=dir_path
)
result = self.handler.Handle(args, context=self.context)
@@ -199,15 +197,15 @@ def setUp(self):
self.file_path = "fs/os/etc"
def testDoesNotRaiseIfFirstComponentIsEmpty(self):
- args = vfs_plugin.ApiListFilesArgs(client_id=self.client_id, file_path="")
+ args = vfs_pb2.ApiListFilesArgs(client_id=self.client_id, file_path="")
self.handler.Handle(args, context=self.context)
def testDoesNotRaiseIfPathIsRoot(self):
- args = vfs_plugin.ApiListFilesArgs(client_id=self.client_id, file_path="/")
+ args = vfs_pb2.ApiListFilesArgs(client_id=self.client_id, file_path="/")
self.handler.Handle(args, context=self.context)
def testRaisesIfFirstComponentIsNotAllowlisted(self):
- args = vfs_plugin.ApiListFilesArgs(
+ args = vfs_pb2.ApiListFilesArgs(
client_id=self.client_id, file_path="/analysis"
)
with self.assertRaises(ValueError):
@@ -217,7 +215,7 @@ def testHandlerListsFilesAndDirectories(self):
fixture_test_lib.ClientFixture(self.client_id)
# Fetch all children of a directory.
- args = vfs_plugin.ApiListFilesArgs(
+ args = vfs_pb2.ApiListFilesArgs(
client_id=self.client_id, file_path=self.file_path
)
result = self.handler.Handle(args, context=self.context)
@@ -231,7 +229,7 @@ def testHandlerFiltersDirectoriesIfFlagIsSet(self):
fixture_test_lib.ClientFixture(self.client_id)
# Only fetch sub-directories.
- args = vfs_plugin.ApiListFilesArgs(
+ args = vfs_pb2.ApiListFilesArgs(
client_id=self.client_id,
file_path=self.file_path,
directories_only=True,
@@ -246,35 +244,35 @@ def testHandlerRespectsTimestamp(self):
# file_path is "fs/os/etc", a directory.
self.CreateFileVersions(self.client_id, self.file_path + "/file")
- args = vfs_plugin.ApiListFilesArgs(
+ args = vfs_pb2.ApiListFilesArgs(
client_id=self.client_id,
file_path=self.file_path,
- timestamp=self.time_2,
+ timestamp=int(self.time_2),
)
result = self.handler.Handle(args, context=self.context)
self.assertLen(result.items, 1)
self.assertIsInstance(result.items[0].last_collected_size, int)
self.assertEqual(result.items[0].last_collected_size, 13)
- args = vfs_plugin.ApiListFilesArgs(
+ args = vfs_pb2.ApiListFilesArgs(
client_id=self.client_id,
file_path=self.file_path,
- timestamp=self.time_1,
+ timestamp=int(self.time_1),
)
result = self.handler.Handle(args, context=self.context)
self.assertLen(result.items, 1)
self.assertEqual(result.items[0].last_collected_size, 11)
- args = vfs_plugin.ApiListFilesArgs(
+ args = vfs_pb2.ApiListFilesArgs(
client_id=self.client_id,
file_path=self.file_path,
- timestamp=self.time_0,
+ timestamp=int(self.time_0),
)
result = self.handler.Handle(args, context=self.context)
self.assertEmpty(result.items)
def testRoot(self):
- args = vfs_plugin.ApiListFilesArgs(client_id=self.client_id, file_path="/")
+ args = vfs_pb2.ApiListFilesArgs(client_id=self.client_id, file_path="/")
result = self.handler.Handle(args, context=self.context)
self.assertSameElements(
[(item.name, item.path) for item in result.items],
@@ -282,7 +280,7 @@ def testRoot(self):
)
def testFs(self):
- args = vfs_plugin.ApiListFilesArgs(client_id=self.client_id, file_path="fs")
+ args = vfs_pb2.ApiListFilesArgs(client_id=self.client_id, file_path="fs")
result = self.handler.Handle(args, context=self.context)
self.assertSameElements(
[(item.name, item.path) for item in result.items],
@@ -325,7 +323,7 @@ def setUp(self):
)
def testQueriesRootPathForEmptyPath(self):
- args = vfs_plugin.ApiBrowseFilesystemArgs(client_id=self.client_id, path="")
+ args = vfs_pb2.ApiBrowseFilesystemArgs(client_id=self.client_id, path="")
results = self.handler.Handle(args, context=self.context)
self.assertLen(results.items, 1)
@@ -333,9 +331,7 @@ def testQueriesRootPathForEmptyPath(self):
self.assertLen(results.items[0].children, 1)
def testQueriesRootPathForSingleSlashPath(self):
- args = vfs_plugin.ApiBrowseFilesystemArgs(
- client_id=self.client_id, path="/"
- )
+ args = vfs_pb2.ApiBrowseFilesystemArgs(client_id=self.client_id, path="/")
results = self.handler.Handle(args, context=self.context)
self.assertLen(results.items, 1)
@@ -343,7 +339,7 @@ def testQueriesRootPathForSingleSlashPath(self):
self.assertLen(results.items[0].children, 1)
def testHandlerListsFilesAndDirectories(self):
- args = vfs_plugin.ApiBrowseFilesystemArgs(
+ args = vfs_pb2.ApiBrowseFilesystemArgs(
client_id=self.client_id, path="/mixeddir"
)
results = self.handler.Handle(args, context=self.context)
@@ -355,7 +351,7 @@ def testHandlerListsFilesAndDirectories(self):
self.assertIn("/mixeddir", item.path)
def testHandlerCanListDirectoryTree(self):
- args = vfs_plugin.ApiBrowseFilesystemArgs(
+ args = vfs_pb2.ApiBrowseFilesystemArgs(
client_id=self.client_id, path="/mixeddir", include_directory_tree=True
)
results = self.handler.Handle(args, context=self.context)
@@ -374,7 +370,7 @@ def testHandlerCanListDirectoryTree(self):
)
def testHandlerCanListDirectoryTreeWhenPointingToFile(self):
- args = vfs_plugin.ApiBrowseFilesystemArgs(
+ args = vfs_pb2.ApiBrowseFilesystemArgs(
client_id=self.client_id,
path="/mixeddir/os-only",
include_directory_tree=True,
@@ -392,7 +388,7 @@ def testHandlerCanListDirectoryTreeWhenPointingToFile(self):
self.assertEqual(results.items[1].children[1].name, "os-only")
def testHandlerMergesFilesOfDifferentPathSpecs(self):
- args = vfs_plugin.ApiBrowseFilesystemArgs(
+ args = vfs_pb2.ApiBrowseFilesystemArgs(
client_id=self.client_id, path="/mixeddir"
)
results = self.handler.Handle(args, context=self.context)
@@ -410,26 +406,26 @@ def testHandlerMergesFilesOfDifferentPathSpecs(self):
self.assertEqual(
children,
[
- ("ntfs-then-os", rdf_paths.PathSpec.PathType.OS, len("OS")),
- ("os-only", rdf_paths.PathSpec.PathType.OS, len("OS")),
- ("os-then-ntfs", rdf_paths.PathSpec.PathType.NTFS, len("NTFS")),
- ("tsk-only", rdf_paths.PathSpec.PathType.TSK, len("TSK")),
+ ("ntfs-then-os", jobs_pb2.PathSpec.PathType.OS, len("OS")),
+ ("os-only", jobs_pb2.PathSpec.PathType.OS, len("OS")),
+ ("os-then-ntfs", jobs_pb2.PathSpec.PathType.NTFS, len("NTFS")),
+ ("tsk-only", jobs_pb2.PathSpec.PathType.TSK, len("TSK")),
],
)
def testHandlerRespectsTimestamp(self):
- args = vfs_plugin.ApiBrowseFilesystemArgs(
+ args = vfs_pb2.ApiBrowseFilesystemArgs(
client_id=self.client_id,
path="/mixeddir",
- timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(0),
+ timestamp=int(rdfvalue.RDFDatetime.FromSecondsSinceEpoch(0)),
)
results = self.handler.Handle(args, context=self.context)
self.assertEmpty(results.items[0].children)
- args = vfs_plugin.ApiBrowseFilesystemArgs(
+ args = vfs_pb2.ApiBrowseFilesystemArgs(
client_id=self.client_id,
path="/mixeddir",
- timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(1),
+ timestamp=int(rdfvalue.RDFDatetime.FromSecondsSinceEpoch(1)),
)
results = self.handler.Handle(args, context=self.context)
self.assertLen(results.items, 1)
@@ -439,13 +435,13 @@ def testHandlerRespectsTimestamp(self):
)
self.assertEqual(
results.items[0].children[0].stat.pathspec.pathtype,
- rdf_paths.PathSpec.PathType.NTFS,
+ jobs_pb2.PathSpec.PathType.NTFS,
)
- args = vfs_plugin.ApiBrowseFilesystemArgs(
+ args = vfs_pb2.ApiBrowseFilesystemArgs(
client_id=self.client_id,
path="/mixeddir",
- timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(2),
+ timestamp=int(rdfvalue.RDFDatetime.FromSecondsSinceEpoch(2)),
)
results = self.handler.Handle(args, context=self.context)
self.assertLen(results.items, 1)
@@ -455,13 +451,13 @@ def testHandlerRespectsTimestamp(self):
)
self.assertEqual(
results.items[0].children[0].stat.pathspec.pathtype,
- rdf_paths.PathSpec.PathType.OS,
+ jobs_pb2.PathSpec.PathType.OS,
)
- args = vfs_plugin.ApiBrowseFilesystemArgs(
+ args = vfs_pb2.ApiBrowseFilesystemArgs(
client_id=self.client_id,
path="/mixeddir",
- timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(10),
+ timestamp=int(rdfvalue.RDFDatetime.FromSecondsSinceEpoch(10)),
)
results = self.handler.Handle(args, context=self.context)
self.assertLen(results.items, 1)
@@ -471,7 +467,7 @@ def testHandlerRespectsTimestamp(self):
)
self.assertEqual(
results.items[0].children[0].stat.pathspec.pathtype,
- rdf_paths.PathSpec.PathType.OS,
+ jobs_pb2.PathSpec.PathType.OS,
)
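A pattern recurs throughout these test updates: proto fields store timestamps as integer microseconds since the epoch, where the old RDF args wrapped them in `RDFDatetime`. The conversions go through `int(...)` and `FromMicrosecondsSinceEpoch(...)`, as in this sketch:

```python
from grr_response_core.lib import rdfvalue

now = rdfvalue.RDFDatetime.Now()

# RDFDatetime -> proto field: plain microseconds since the epoch,
# as in `timestamp=int(self.time_1)` above.
micros = int(now)

# Proto field -> RDFDatetime, as when comparing `result.file.age` above.
assert rdfvalue.RDFDatetime.FromMicrosecondsSinceEpoch(micros) == now
```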
diff --git a/grr/server/grr_response_server/gui/api_plugins/yara.py b/grr/server/grr_response_server/gui/api_plugins/yara.py
index e3b8d5793d..4d7ab9b94d 100644
--- a/grr/server/grr_response_server/gui/api_plugins/yara.py
+++ b/grr/server/grr_response_server/gui/api_plugins/yara.py
@@ -1,6 +1,8 @@
#!/usr/bin/env python
"""A module with API handlers related to the YARA memory scanning."""
+from typing import Optional
+
from grr_response_core.lib.rdfvalues import structs as rdf_structs
from grr_response_proto.api import yara_pb2
from grr_response_server import data_store
@@ -27,17 +29,21 @@ class ApiUploadYaraSignatureHandler(api_call_handler_base.ApiCallHandler):
args_type = ApiUploadYaraSignatureArgs
result_type = ApiUploadYaraSignatureResult
+ proto_args_type = yara_pb2.ApiUploadYaraSignatureArgs
+ proto_result_type = yara_pb2.ApiUploadYaraSignatureResult
def Handle( # pytype: disable=signature-mismatch # overriding-parameter-count-checks
self,
- args: ApiUploadYaraSignatureArgs,
- context: api_call_context.ApiCallContext,
- ) -> ApiUploadYaraSignatureResult:
+ args: yara_pb2.ApiUploadYaraSignatureArgs,
+ context: Optional[api_call_context.ApiCallContext],
+ ) -> yara_pb2.ApiUploadYaraSignatureResult:
+ assert context is not None
+
blob = args.signature.encode("utf-8")
blob_id = data_store.BLOBS.WriteBlobWithUnknownHash(blob)
data_store.REL_DB.WriteYaraSignatureReference(blob_id, context.username)
- result = ApiUploadYaraSignatureResult()
+ result = yara_pb2.ApiUploadYaraSignatureResult()
result.blob_id = bytes(blob_id)
return result
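The `assert context is not None` added above narrows the `Optional` parameter for the type checker before `context.username` is read: the shared handler signature allows `None`, but this handler cannot work without a context. A minimal sketch of the narrowing pattern:

```python
from typing import Optional


def greet(username: Optional[str]) -> str:
    # After this assert, type checkers treat `username` as str, and callers
    # that wrongly pass None fail fast here instead of deeper in the call.
    assert username is not None
    return f"Hello, {username}"
```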
diff --git a/grr/server/grr_response_server/gui/api_plugins/yara_test.py b/grr/server/grr_response_server/gui/api_plugins/yara_test.py
index 6af2a3d6ef..c684760a61 100644
--- a/grr/server/grr_response_server/gui/api_plugins/yara_test.py
+++ b/grr/server/grr_response_server/gui/api_plugins/yara_test.py
@@ -1,6 +1,7 @@
#!/usr/bin/env python
from absl.testing import absltest
+from grr_response_proto.api import yara_pb2
from grr_response_server import data_store
from grr_response_server.gui import api_test_lib
from grr_response_server.gui.api_plugins import yara as api_yara
@@ -22,7 +23,7 @@ def setUp(self):
def testSignatureIsUploadedToBlobStore(self):
signature = "rule foo { condition: true };"
- args = api_yara.ApiUploadYaraSignatureArgs()
+ args = yara_pb2.ApiUploadYaraSignatureArgs()
args.signature = signature
blob_id = self.handler.Handle(args, context=self.context).blob_id
@@ -31,7 +32,7 @@ def testSignatureIsUploadedToBlobStore(self):
self.assertEqual(blob.decode("utf-8"), signature)
def testBlobIsMarkedAsYaraSignature(self):
- args = api_yara.ApiUploadYaraSignatureArgs()
+ args = yara_pb2.ApiUploadYaraSignatureArgs()
args.signature = "rule foo { condition: false };"
blob_id_bytes = self.handler.Handle(args, context=self.context).blob_id
diff --git a/grr/server/grr_response_server/gui/approval_checks.py b/grr/server/grr_response_server/gui/approval_checks.py
index 68e9788f9e..8642334d9b 100644
--- a/grr/server/grr_response_server/gui/approval_checks.py
+++ b/grr/server/grr_response_server/gui/approval_checks.py
@@ -3,29 +3,39 @@
from grr_response_core import config
from grr_response_core.lib import rdfvalue
+from grr_response_proto import objects_pb2
from grr_response_server import access_control
from grr_response_server import data_store
from grr_response_server.authorization import client_approval_auth
-from grr_response_server.rdfvalues import mig_objects
-from grr_response_server.rdfvalues import objects as rdf_objects
-def BuildLegacySubject(subject_id, approval_type):
+def BuildLegacySubject(
+ subject_id: str,
+ approval_type: objects_pb2.ApprovalRequest.ApprovalType,
+) -> str:
"""Builds a legacy AFF4 urn string for a given subject and approval type."""
- at = rdf_objects.ApprovalRequest.ApprovalType
- if approval_type == at.APPROVAL_TYPE_CLIENT:
+ if (
+ approval_type
+ == objects_pb2.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CLIENT
+ ):
return "aff4:/%s" % subject_id
- elif approval_type == at.APPROVAL_TYPE_HUNT:
+ elif (
+ approval_type
+ == objects_pb2.ApprovalRequest.ApprovalType.APPROVAL_TYPE_HUNT
+ ):
return "aff4:/hunts/%s" % subject_id
- elif approval_type == at.APPROVAL_TYPE_CRON_JOB:
+ elif (
+ approval_type
+ == objects_pb2.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CRON_JOB
+ ):
return "aff4:/cron/%s" % subject_id
raise ValueError("Invalid approval type.")
-def _CheckExpired(approval_request):
- if approval_request.expiration_time < rdfvalue.RDFDatetime.Now():
+def _CheckExpired(approval_request: objects_pb2.ApprovalRequest) -> None:
+ if approval_request.expiration_time < int(rdfvalue.RDFDatetime.Now()):
raise access_control.UnauthorizedAccess(
"Approval request is expired.",
subject=BuildLegacySubject(
@@ -34,7 +44,7 @@ def _CheckExpired(approval_request):
)
-def _CheckHasEnoughGrants(approval_request):
+def _CheckHasEnoughGrants(approval_request: objects_pb2.ApprovalRequest) -> None:
approvers_required = config.CONFIG["ACL.approvers_required"]
approvers = set(g.grantor_username for g in approval_request.grants)
@@ -52,13 +62,14 @@ def _CheckHasEnoughGrants(approval_request):
)
-def _CheckHasAdminApprovers(approval_request):
+def _CheckHasAdminApprovers(
+ approval_request: objects_pb2.ApprovalRequest,
+) -> None:
grantors = set(g.grantor_username for g in approval_request.grants)
for g in grantors:
- proto_user = data_store.REL_DB.ReadGRRUser(g)
- user_obj = mig_objects.ToRDFGRRUser(proto_user)
- if user_obj.user_type == user_obj.UserType.USER_TYPE_ADMIN:
- return True
+ user = data_store.REL_DB.ReadGRRUser(g)
+ if user.user_type == objects_pb2.GRRUser.UserType.USER_TYPE_ADMIN:
+ return
raise access_control.UnauthorizedAccess(
"Need at least 1 admin approver for access.",
@@ -68,14 +79,16 @@ def _CheckHasAdminApprovers(approval_request):
)
-def CheckClientApprovalRequest(approval_request):
+def CheckClientApprovalRequest(
+ approval_request: objects_pb2.ApprovalRequest,
+) -> None:
"""Checks if a client approval request is granted."""
_CheckExpired(approval_request)
_CheckHasEnoughGrants(approval_request)
if not client_approval_auth.CLIENT_APPROVAL_AUTH_MGR.IsActive():
- return True
+ return
approvers = set(g.grantor_username for g in approval_request.grants)
@@ -91,10 +104,8 @@ def CheckClientApprovalRequest(approval_request):
label.name,
)
- return True
-
-def CheckHuntApprovalRequest(approval_request):
+def CheckHuntApprovalRequest(approval_request: objects_pb2.ApprovalRequest) -> None:
"""Checks if a hunt approval request is granted."""
_CheckExpired(approval_request)
@@ -102,7 +113,7 @@ def CheckHuntApprovalRequest(approval_request):
_CheckHasAdminApprovers(approval_request)
-def CheckCronJobApprovalRequest(approval_request):
+def CheckCronJobApprovalRequest(approval_request: objects_pb2.ApprovalRequest) -> None:
"""Checks if a cron job approval request is granted."""
_CheckExpired(approval_request)
@@ -110,16 +121,23 @@ def CheckCronJobApprovalRequest(approval_request):
_CheckHasAdminApprovers(approval_request)
-def CheckApprovalRequest(approval_request):
+def CheckApprovalRequest(approval_request: objects_pb2.ApprovalRequest):
"""Checks if an approval request is granted."""
- at = rdf_objects.ApprovalRequest.ApprovalType
-
- if approval_request.approval_type == at.APPROVAL_TYPE_CLIENT:
+ if (
+ approval_request.approval_type
+ == objects_pb2.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CLIENT
+ ):
return CheckClientApprovalRequest(approval_request)
- elif approval_request.approval_type == at.APPROVAL_TYPE_HUNT:
+ elif (
+ approval_request.approval_type
+ == objects_pb2.ApprovalRequest.ApprovalType.APPROVAL_TYPE_HUNT
+ ):
return CheckHuntApprovalRequest(approval_request)
- elif approval_request.approval_type == at.APPROVAL_TYPE_CRON_JOB:
+ elif (
+ approval_request.approval_type
+ == objects_pb2.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CRON_JOB
+ ):
return CheckCronJobApprovalRequest(approval_request)
else:
raise ValueError(
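The grant checks above boil down to two set operations: count distinct grantor usernames against `ACL.approvers_required`, and, for hunts and cron jobs, require that at least one grantor is an admin. A self-contained sketch with the config and datastore lookups stubbed out (all names illustrative):

```python
APPROVERS_REQUIRED = 2         # stands in for config.CONFIG["ACL.approvers_required"]
ADMIN_USERS = {"admin_alice"}  # stands in for the REL_DB user-type lookup


def check_grants(grantor_usernames: list[str], need_admin: bool) -> None:
    approvers = set(grantor_usernames)
    if len(approvers) < APPROVERS_REQUIRED:
        raise PermissionError(
            f"Need at least {APPROVERS_REQUIRED} distinct approvers, "
            f"got {len(approvers)}."
        )
    if need_admin and not approvers & ADMIN_USERS:
        raise PermissionError("Need at least 1 admin approver for access.")


check_grants(["grantor1", "admin_alice"], need_admin=True)  # passes
```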
diff --git a/grr/server/grr_response_server/gui/approval_checks_test.py b/grr/server/grr_response_server/gui/approval_checks_test.py
index 7e9e896dca..036f8ba51d 100644
--- a/grr/server/grr_response_server/gui/approval_checks_test.py
+++ b/grr/server/grr_response_server/gui/approval_checks_test.py
@@ -1,34 +1,38 @@
#!/usr/bin/env python
"""Tests for approval_checks module."""
+from typing import Iterable, Optional
from unittest import mock
from absl import app
from grr_response_core.lib import rdfvalue
+from grr_response_proto import objects_pb2
from grr_response_server import access_control
from grr_response_server import data_store
from grr_response_server.authorization import client_approval_auth
from grr_response_server.gui import approval_checks
-from grr_response_server.rdfvalues import objects as rdf_objects
from grr.test_lib import acl_test_lib
from grr.test_lib import test_lib
def _CreateApprovalRequest(
- approval_type, subject_id, expiration_time=None, grants=None
+ approval_type: objects_pb2.ApprovalRequest.ApprovalType,
+ subject_id: str,
+ expiration_time: Optional[rdfvalue.RDFDatetime] = None,
+ grants: Optional[Iterable[objects_pb2.ApprovalGrant]] = None,
):
expiration_time = expiration_time or (
rdfvalue.RDFDatetime.Now() + rdfvalue.Duration.From(1, rdfvalue.HOURS)
)
- return rdf_objects.ApprovalRequest(
+ return objects_pb2.ApprovalRequest(
approval_type=approval_type,
approval_id="1234",
subject_id=subject_id,
requestor_username="requestor",
reason="reason",
- timestamp=rdfvalue.RDFDatetime.Now(),
- expiration_time=expiration_time,
+ timestamp=int(rdfvalue.RDFDatetime.Now()),
+ expiration_time=int(expiration_time),
grants=grants,
)
@@ -42,7 +46,7 @@ def _CreateRequest(self, expiration_time=None, grants=None):
rdfvalue.RDFDatetime.Now() + rdfvalue.Duration.From(1, rdfvalue.HOURS)
)
return _CreateApprovalRequest(
- rdf_objects.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CLIENT,
+ objects_pb2.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CLIENT,
self.client_id,
expiration_time=expiration_time,
grants=grants,
@@ -63,7 +67,7 @@ def testRaisesWhenNoGrants(self):
def testRaisesWhenJustOneGrant(self):
approval_request = self._CreateRequest(
- grants=[rdf_objects.ApprovalGrant(grantor_username="grantor")]
+ grants=[objects_pb2.ApprovalGrant(grantor_username="grantor")]
)
with self.assertRaisesRegex(
@@ -77,8 +81,8 @@ def testRaisesIfApprovalExpired(self):
expiration_time=rdfvalue.RDFDatetime.Now()
- rdfvalue.Duration.From(1, rdfvalue.MINUTES),
grants=[
- rdf_objects.ApprovalGrant(grantor_username="grantor1"),
- rdf_objects.ApprovalGrant(grantor_username="grantor2"),
+ objects_pb2.ApprovalGrant(grantor_username="grantor1"),
+ objects_pb2.ApprovalGrant(grantor_username="grantor2"),
],
)
@@ -90,8 +94,8 @@ def testRaisesIfApprovalExpired(self):
def testReturnsIfApprovalIsNotExpiredAndHasTwoGrants(self):
approval_request = self._CreateRequest(
grants=[
- rdf_objects.ApprovalGrant(grantor_username="grantor1"),
- rdf_objects.ApprovalGrant(grantor_username="grantor2"),
+ objects_pb2.ApprovalGrant(grantor_username="grantor1"),
+ objects_pb2.ApprovalGrant(grantor_username="grantor2"),
]
)
@@ -101,8 +105,8 @@ def testReturnsIfApprovalIsNotExpiredAndHasTwoGrants(self):
def testWhenAuthMgrActiveReturnsIfClientHasNoLabels(self, mock_mgr):
approval_request = self._CreateRequest(
grants=[
- rdf_objects.ApprovalGrant(grantor_username="grantor1"),
- rdf_objects.ApprovalGrant(grantor_username="grantor2"),
+ objects_pb2.ApprovalGrant(grantor_username="grantor1"),
+ objects_pb2.ApprovalGrant(grantor_username="grantor2"),
]
)
@@ -118,8 +122,8 @@ def testWhenAuthMgrActiveChecksApproversForEachClientLabel(self, mock_mgr):
approval_request = self._CreateRequest(
grants=[
- rdf_objects.ApprovalGrant(grantor_username="grantor1"),
- rdf_objects.ApprovalGrant(grantor_username="grantor2"),
+ objects_pb2.ApprovalGrant(grantor_username="grantor1"),
+ objects_pb2.ApprovalGrant(grantor_username="grantor2"),
]
)
@@ -158,8 +162,8 @@ def testWhenAuthMgrActiveRaisesIfAuthMgrRaises(self, mock_mgr):
approval_request = self._CreateRequest(
grants=[
- rdf_objects.ApprovalGrant(grantor_username="grantor1"),
- rdf_objects.ApprovalGrant(grantor_username="grantor2"),
+ objects_pb2.ApprovalGrant(grantor_username="grantor1"),
+ objects_pb2.ApprovalGrant(grantor_username="grantor2"),
]
)
@@ -207,7 +211,7 @@ def testRaisesWhenNoGrants(self):
def testRaisesWhenJustOneGrant(self):
approval_request = self._CreateRequest(
- grants=[rdf_objects.ApprovalGrant(grantor_username="grantor1")]
+ grants=[objects_pb2.ApprovalGrant(grantor_username="grantor1")]
)
with self.assertRaisesRegex(
@@ -219,8 +223,8 @@ def testRaisesWhenJustOneGrant(self):
def testRaisesWhenNoGrantsFromAdmins(self):
approval_request = self._CreateRequest(
grants=[
- rdf_objects.ApprovalGrant(grantor_username="grantor1"),
- rdf_objects.ApprovalGrant(grantor_username="grantor2"),
+ objects_pb2.ApprovalGrant(grantor_username="grantor1"),
+ objects_pb2.ApprovalGrant(grantor_username="grantor2"),
]
)
@@ -238,8 +242,8 @@ def testRaisesIfApprovalExpired(self):
expiration_time=rdfvalue.RDFDatetime.Now()
- rdfvalue.Duration.From(1, rdfvalue.MINUTES),
grants=[
- rdf_objects.ApprovalGrant(grantor_username="grantor1"),
- rdf_objects.ApprovalGrant(grantor_username="grantor2"),
+ objects_pb2.ApprovalGrant(grantor_username="grantor1"),
+ objects_pb2.ApprovalGrant(grantor_username="grantor2"),
],
)
@@ -253,8 +257,8 @@ def testReturnsIfApprovalIsNotExpiredAndHasTwoGrantsIncludingAdmin(self):
approval_request = self._CreateRequest(
grants=[
- rdf_objects.ApprovalGrant(grantor_username="grantor1"),
- rdf_objects.ApprovalGrant(grantor_username="grantor2"),
+ objects_pb2.ApprovalGrant(grantor_username="grantor1"),
+ objects_pb2.ApprovalGrant(grantor_username="grantor2"),
]
)
@@ -264,14 +268,14 @@ def testReturnsIfApprovalIsNotExpiredAndHasTwoGrantsIncludingAdmin(self):
class CheckHuntApprovalRequestTest(
CheckHuntAndCronJobApprovalRequestTestMixin, test_lib.GRRBaseTest
):
- APPROVAL_TYPE = rdf_objects.ApprovalRequest.ApprovalType.APPROVAL_TYPE_HUNT
+ APPROVAL_TYPE = objects_pb2.ApprovalRequest.ApprovalType.APPROVAL_TYPE_HUNT
class CheckCronJobApprovalRequestTest(
CheckHuntAndCronJobApprovalRequestTestMixin, test_lib.GRRBaseTest
):
APPROVAL_TYPE = (
- rdf_objects.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CRON_JOB
+ objects_pb2.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CRON_JOB
)
diff --git a/grr/server/grr_response_server/gui/archive_generator.py b/grr/server/grr_response_server/gui/archive_generator.py
index 163ccd5772..c103d3152a 100644
--- a/grr/server/grr_response_server/gui/archive_generator.py
+++ b/grr/server/grr_response_server/gui/archive_generator.py
@@ -18,7 +18,8 @@
from grr_response_server.databases import db
from grr_response_server.flows.general import export as flow_export
from grr_response_server.gui.api_plugins import client as api_client
-from grr_response_server.rdfvalues import mig_objects
+from grr_response_server.gui.api_plugins import mig_client
+from grr_response_server.models import clients
from grr_response_server.rdfvalues import objects as rdf_objects
@@ -198,10 +199,8 @@ def Generate(
if client_ids:
client_infos = data_store.REL_DB.MultiReadClientFullInfo(client_ids)
for client_id, client_info in client_infos.items():
- client_info = mig_objects.ToRDFClientFullInfo(client_info)
- client = api_client.ApiClient()
- client.InitFromClientInfo(client_id, client_info)
-
+ client = clients.ApiClientFromClientFullInfo(client_id, client_info)
+ client = mig_client.ToRDFApiClient(client)
for chunk in self._GenerateClientInfo(client_id, client):
yield chunk
diff --git a/grr/server/grr_response_server/gui/gui_test_lib.py b/grr/server/grr_response_server/gui/gui_test_lib.py
index f470a28c8e..ca8d213a80 100644
--- a/grr/server/grr_response_server/gui/gui_test_lib.py
+++ b/grr/server/grr_response_server/gui/gui_test_lib.py
@@ -656,7 +656,24 @@ def MatSelect(self, target, label):
Raises:
ValueError: An invalid selector was provided - must be CSS.
"""
- self.Click(target)
+ target = self.WaitUntil(self.GetElement, target)
+
+ # Recent versions of Angular Material implement mat-select differently:
+ # the clickable area is enlarged by a dedicated touch-target element. If
+ # such a sibling element exists, click it instead of the mat-select
+ # itself.
+ touch_target = self.driver.execute_script(
+ """return $(arguments[0]).siblings('.mat-mdc-paginator-touch-target')[0]""",
+ target,
+ )
+ if touch_target:
+ target = touch_target
+
+ try:
+ target.click()
+ except exceptions.ElementNotInteractableException:
+ self.driver.execute_script("arguments[0].scrollIntoView();", target)
+ target.click()
def GetOption():
options = self.driver.execute_script(
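The try/except added to `MatSelect` above is a standard Selenium workaround: when an element is not interactable because it sits outside the viewport, scroll it into view and retry. A stripped-down sketch of the same fallback, without the jQuery touch-target lookup (driver and element setup assumed):

```python
from selenium.common import exceptions


def click_with_scroll_fallback(driver, element) -> None:
    try:
        element.click()
    except exceptions.ElementNotInteractableException:
        # Bring the element into the viewport, then retry the click once.
        driver.execute_script("arguments[0].scrollIntoView();", element)
        element.click()
```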
diff --git a/grr/server/grr_response_server/gui/root/api_plugins/binary_management.py b/grr/server/grr_response_server/gui/root/api_plugins/binary_management.py
index 690160d7e7..8c52091aeb 100644
--- a/grr/server/grr_response_server/gui/root/api_plugins/binary_management.py
+++ b/grr/server/grr_response_server/gui/root/api_plugins/binary_management.py
@@ -1,14 +1,18 @@
#!/usr/bin/env python
"""Root-access-level API handlers for binary management."""
+from typing import Optional
+
from grr_response_core import config
+from grr_response_core.lib import rdfvalue
from grr_response_core.lib.rdfvalues import crypto as rdf_crypto
from grr_response_core.lib.rdfvalues import structs as rdf_structs
+from grr_response_proto.api import config_pb2
from grr_response_proto.api.root import binary_management_pb2
from grr_response_server import access_control
from grr_response_server import signed_binary_utils
+from grr_response_server.gui import api_call_context
from grr_response_server.gui import api_call_handler_base
-from grr_response_server.gui.api_plugins import config as api_config
class GrrBinaryNotFoundError(api_call_handler_base.ResourceNotFoundError):
@@ -35,10 +39,12 @@ class ApiDeleteGrrBinaryArgs(rdf_structs.RDFProtoStruct):
rdf_deps = []
-def _GetBinaryRootUrn(binary_type):
- if binary_type == api_config.ApiGrrBinary.Type.PYTHON_HACK:
+def _GetBinaryRootUrn(
+ binary_type: config_pb2.ApiGrrBinary.Type,
+) -> rdfvalue.RDFURN:
+ if binary_type == config_pb2.ApiGrrBinary.Type.PYTHON_HACK:
return signed_binary_utils.GetAFF4PythonHackRoot()
- elif binary_type == api_config.ApiGrrBinary.Type.EXECUTABLE:
+ elif binary_type == config_pb2.ApiGrrBinary.Type.EXECUTABLE:
return signed_binary_utils.GetAFF4ExecutablesRoot()
else:
raise ValueError("Invalid binary type: %s" % binary_type)
@@ -48,8 +54,13 @@ class ApiUploadGrrBinaryHandler(api_call_handler_base.ApiCallHandler):
"""Uploads GRR binary to a given path."""
args_type = ApiUploadGrrBinaryArgs
+ proto_args_type = binary_management_pb2.ApiUploadGrrBinaryArgs
- def Handle(self, args, context=None):
+ def Handle(
+ self,
+ args: binary_management_pb2.ApiUploadGrrBinaryArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> None:
if not args.path:
raise ValueError("Invalid binary path: %s" % args.path)
@@ -74,8 +85,13 @@ class ApiDeleteGrrBinaryHandler(api_call_handler_base.ApiCallHandler):
"""Deletes GRR binary with a given type and path."""
args_type = ApiDeleteGrrBinaryArgs
+ proto_args_type = binary_management_pb2.ApiDeleteGrrBinaryArgs
- def Handle(self, args, context=None):
+ def Handle(
+ self,
+ args: binary_management_pb2.ApiDeleteGrrBinaryArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> None:
if not args.path:
raise ValueError("Invalid binary path: %s" % args.path)
diff --git a/grr/server/grr_response_server/gui/root/api_plugins/user_management.py b/grr/server/grr_response_server/gui/root/api_plugins/user_management.py
index 97b3404b25..0c8449c760 100644
--- a/grr/server/grr_response_server/gui/root/api_plugins/user_management.py
+++ b/grr/server/grr_response_server/gui/root/api_plugins/user_management.py
@@ -1,16 +1,19 @@
#!/usr/bin/env python
"""Root-access-level API handlers for user management."""
+from typing import Optional
+
from grr_response_core import config
from grr_response_core.lib.rdfvalues import crypto as rdf_crypto
-from grr_response_core.lib.rdfvalues import mig_crypto
from grr_response_core.lib.rdfvalues import structs as rdf_structs
+from grr_response_proto import jobs_pb2
+from grr_response_proto.api import user_pb2 as api_user_pb2
from grr_response_proto.api.root import user_management_pb2
from grr_response_server import data_store
from grr_response_server.databases import db
+from grr_response_server.gui import api_call_context
from grr_response_server.gui import api_call_handler_base
from grr_response_server.gui.api_plugins import user as api_user
-from grr_response_server.rdfvalues import mig_objects
class ApiCreateGrrUserArgs(rdf_structs.RDFProtoStruct):
@@ -22,13 +25,19 @@ class ApiCreateGrrUserHandler(api_call_handler_base.ApiCallHandler):
args_type = ApiCreateGrrUserArgs
result_type = api_user.ApiGrrUser
-
- def Handle(self, args, context=None):
+ proto_args_type = user_management_pb2.ApiCreateGrrUserArgs
+ proto_result_type = api_user_pb2.ApiGrrUser
+
+ def Handle(
+ self,
+ args: user_management_pb2.ApiCreateGrrUserArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> api_user.ApiGrrUser:
if not args.username:
raise ValueError("username can't be empty.")
- if args.user_type != args.UserType.USER_TYPE_ADMIN:
- args.user_type = args.UserType.USER_TYPE_STANDARD
+ if args.user_type != api_user_pb2.ApiGrrUser.UserType.USER_TYPE_ADMIN:
+ args.user_type = api_user_pb2.ApiGrrUser.UserType.USER_TYPE_STANDARD
if args.email:
if config.CONFIG["Email.enable_custom_email_address"]:
@@ -41,19 +50,18 @@ def Handle(self, args, context=None):
password = None
if args.HasField("password"):
- rdf_password = rdf_crypto.Password()
- rdf_password.SetPassword(args.password)
- password = mig_crypto.ToProtoPassword(rdf_password)
+ password = jobs_pb2.Password()
+ rdf_crypto.SetPassword(password, args.password)
+
data_store.REL_DB.WriteGRRUser(
username=args.username,
password=password,
- user_type=int(args.user_type),
+ user_type=args.user_type,
email=email,
)
# TODO: Use function to get API from proto user.
- proto_user = data_store.REL_DB.ReadGRRUser(args.username)
- rdf_user = mig_objects.ToRDFGRRUser(proto_user)
- return api_user.ApiGrrUser().InitFromDatabaseObject(rdf_user)
+ user = data_store.REL_DB.ReadGRRUser(args.username)
+ return api_user.InitApiGrrUserFromGrrUser(user)
class ApiDeleteGrrUserArgs(rdf_structs.RDFProtoStruct):
@@ -64,10 +72,15 @@ class ApiDeleteGrrUserHandler(api_call_handler_base.ApiCallHandler):
"""Deletes a GRR user."""
args_type = ApiDeleteGrrUserArgs
+ proto_args_type = user_management_pb2.ApiDeleteGrrUserArgs
- def Handle(self, args, context=None):
+ def Handle(
+ self,
+ args: user_management_pb2.ApiDeleteGrrUserArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> None:
if not args.username:
- raise ValueError("username can't be empty.")
+ raise ValueError("Username is not set")
try:
data_store.REL_DB.DeleteGRRUser(args.username)
@@ -84,28 +97,34 @@ class ApiModifyGrrUserHandler(api_call_handler_base.ApiCallHandler):
args_type = ApiModifyGrrUserArgs
result_type = api_user.ApiGrrUser
-
- def Handle(self, args, context=None):
+ proto_args_type = user_management_pb2.ApiModifyGrrUserArgs
+ proto_result_type = api_user_pb2.ApiGrrUser
+
+ def Handle(
+ self,
+ args: user_management_pb2.ApiModifyGrrUserArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> api_user.ApiGrrUser:
if not args.username:
- raise ValueError("username can't be empty.")
+ raise ValueError("Username is empty")
- if args.HasField(
- "user_type") and args.user_type != args.UserType.USER_TYPE_ADMIN:
- args.user_type = args.UserType.USER_TYPE_STANDARD
+ if (
+ args.HasField("user_type")
+ and args.user_type != api_user_pb2.ApiGrrUser.UserType.USER_TYPE_ADMIN
+ ):
+ args.user_type = api_user_pb2.ApiGrrUser.UserType.USER_TYPE_STANDARD
# Query the user first so that modifying a nonexistent user raises.
data_store.REL_DB.ReadGRRUser(args.username)
password = None
if args.HasField("password"):
- rdf_password = rdf_crypto.Password()
- rdf_password.SetPassword(args.password)
- password = mig_crypto.ToProtoPassword(rdf_password)
+ password = jobs_pb2.Password()
+ rdf_crypto.SetPassword(password, args.password)
+ user_type = None
if args.HasField("user_type"):
- user_type = int(args.user_type)
- else:
- user_type = None
+ user_type = args.user_type
if args.HasField("email"):
if config.CONFIG["Email.enable_custom_email_address"]:
@@ -120,12 +139,12 @@ def Handle(self, args, context=None):
username=args.username,
password=password,
user_type=user_type,
- email=email)
+ email=email,
+ )
# TODO: Use function to get API from proto user.
- proto_user = data_store.REL_DB.ReadGRRUser(args.username)
- rdf_user = mig_objects.ToRDFGRRUser(proto_user)
- return api_user.ApiGrrUser().InitFromDatabaseObject(rdf_user)
+ user = data_store.REL_DB.ReadGRRUser(args.username)
+ return api_user.InitApiGrrUserFromGrrUser(user)
class ApiListGrrUsersArgs(rdf_structs.RDFProtoStruct):
@@ -144,15 +163,21 @@ class ApiListGrrUsersHandler(api_call_handler_base.ApiCallHandler):
args_type = ApiListGrrUsersArgs
result_type = ApiListGrrUsersResult
-
- def Handle(self, args, context=None):
+ proto_args_type = user_management_pb2.ApiListGrrUsersArgs
+ proto_result_type = user_management_pb2.ApiListGrrUsersResult
+
+ def Handle(
+ self,
+ args: user_management_pb2.ApiListGrrUsersArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> user_management_pb2.ApiListGrrUsersResult:
total_count = data_store.REL_DB.CountGRRUsers()
- db_users = data_store.REL_DB.ReadGRRUsers(
- offset=args.offset, count=args.count)
- rdf_users = [mig_objects.ToRDFGRRUser(user) for user in db_users]
+ users = data_store.REL_DB.ReadGRRUsers(offset=args.offset, count=args.count)
# TODO: Use function to get API from proto user.
- items = [api_user.ApiGrrUser().InitFromDatabaseObject(u) for u in rdf_users]
- return ApiListGrrUsersResult(total_count=total_count, items=items)
+ items = [api_user.InitApiGrrUserFromGrrUser(u) for u in users]
+ return user_management_pb2.ApiListGrrUsersResult(
+ total_count=total_count, items=items
+ )
class ApiGetGrrUserArgs(rdf_structs.RDFProtoStruct):
@@ -164,15 +189,21 @@ class ApiGetGrrUserHandler(api_call_handler_base.ApiCallHandler):
args_type = ApiGetGrrUserArgs
result_type = api_user.ApiGrrUser
-
- def Handle(self, args, context=None):
+ proto_args_type = user_management_pb2.ApiGetGrrUserArgs
+ proto_result_type = api_user_pb2.ApiGrrUser
+
+ def Handle(
+ self,
+ args: user_management_pb2.ApiGetGrrUserArgs,
+ context: Optional[api_call_context.ApiCallContext] = None,
+ ) -> api_user.ApiGrrUser:
if not args.username:
- raise ValueError("username can't be empty.")
+ raise ValueError("Username is empty.")
try:
# TODO: Use function to get API from proto user.
- proto_user = data_store.REL_DB.ReadGRRUser(args.username)
- rdf_user = mig_objects.ToRDFGRRUser(proto_user)
- return api_user.ApiGrrUser().InitFromDatabaseObject(rdf_user)
+ user = data_store.REL_DB.ReadGRRUser(args.username)
except db.UnknownGRRUserError as e:
raise api_call_handler_base.ResourceNotFoundError(e)
+
+ return api_user.InitApiGrrUserFromGrrUser(user)
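
The user-management handlers above all migrate to the same shape: the handler declares proto_args_type/proto_result_type and Handle() consumes and produces protobuf messages directly, with no RDF round-trip through mig_objects. A minimal sketch of driving one migrated handler, assuming the api_plugins.user_management module path and a configured data store (both inferred from this diff, not shown in it):

from grr_response_proto.api import user_pb2 as api_user_pb2
from grr_response_proto.api import user_management_pb2
from grr_response_server.gui import api_call_context
from grr_response_server.gui.api_plugins import user_management

# Proto args are constructed directly; no RDF wrapper is involved.
args = user_management_pb2.ApiCreateGrrUserArgs(
    username="alice",
    user_type=api_user_pb2.ApiGrrUser.UserType.USER_TYPE_ADMIN,
)
handler = user_management.ApiCreateGrrUserHandler()
# Handle() accepts the proto args and returns the API user object.
user = handler.Handle(args, context=api_call_context.ApiCallContext("alice"))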
diff --git a/grr/server/grr_response_server/gui/selenium_tests/artifact_view_test.py b/grr/server/grr_response_server/gui/selenium_tests/artifact_view_test.py
index 88edd767a4..14be75c655 100644
--- a/grr/server/grr_response_server/gui/selenium_tests/artifact_view_test.py
+++ b/grr/server/grr_response_server/gui/selenium_tests/artifact_view_test.py
@@ -7,21 +7,13 @@
from absl import app
from grr_response_core import config
-from grr_response_core.lib import parser
from grr_response_server import artifact
from grr_response_server import artifact_registry
from grr_response_server.flows.general import collectors
from grr_response_server.gui import gui_test_lib
-from grr.test_lib import parser_test_lib
from grr.test_lib import test_lib
-class TestCmdProcessor(parser.CommandParser):
-
- output_types = []
- supported_artifacts = ["TestCmdArtifact"]
-
-
class TestArtifactRender(gui_test_lib.GRRSeleniumTest):
"""Test the Cron view GUI."""
@@ -45,7 +37,6 @@ def setUp(self):
self.client_id = self.SetupClient(0, system="linux")
self.RequestAndGrantClientApproval(self.client_id)
- @parser_test_lib.WithParser("TestCmd", TestCmdProcessor)
def testArtifactRendering(self):
self._LoadSystemArtifacts()
@@ -93,7 +84,6 @@ def testArtifactRendering(self):
# Check the artifact description loaded.
self.WaitUntil(self.IsTextPresent, "Test command artifact for dpkg.")
- self.WaitUntil(self.IsTextPresent, "TestCmdProcessor")
def testSystemArtifactsAreNotMarkedInStartFlowForm(self):
self._LoadSystemArtifacts()
diff --git a/grr/server/grr_response_server/gui/selenium_tests/forms_test.py b/grr/server/grr_response_server/gui/selenium_tests/forms_test.py
index ea0df7d94e..78d7919f90 100644
--- a/grr/server/grr_response_server/gui/selenium_tests/forms_test.py
+++ b/grr/server/grr_response_server/gui/selenium_tests/forms_test.py
@@ -6,6 +6,7 @@
from grr_response_core.lib.rdfvalues import structs as rdf_structs
from grr_response_proto import tests_pb2
+from grr_response_proto.api import user_pb2 as api_user_pb2
from grr_response_server import flow_base
from grr_response_server.flows.general import file_finder as flows_file_finder
from grr_response_server.gui import api_call_context
@@ -164,7 +165,7 @@ def testApproverInputShowsAutocompletion(self):
self.WaitUntilNot(self.IsVisible, "css=.modal-open")
handler = user_plugin.ApiListClientApprovalsHandler()
- args = user_plugin.ApiListClientApprovalsArgs(client_id=client_id)
+ args = api_user_pb2.ApiListClientApprovalsArgs(client_id=client_id)
res = handler.Handle(
args=args, context=api_call_context.ApiCallContext(self.test_username)
)
diff --git a/grr/server/grr_response_server/gui/selenium_tests/settings_view_test.py b/grr/server/grr_response_server/gui/selenium_tests/settings_view_test.py
index dddba2cedf..4b10bbb637 100644
--- a/grr/server/grr_response_server/gui/selenium_tests/settings_view_test.py
+++ b/grr/server/grr_response_server/gui/selenium_tests/settings_view_test.py
@@ -3,6 +3,7 @@
from absl import app
+from grr_response_core.lib.rdfvalues import config as rdf_config
from grr_response_server.gui import gui_test_lib
from grr_response_server.gui.api_plugins import config_test as api_config_test
from grr.test_lib import test_lib
@@ -15,17 +16,101 @@ def testSettingsView(self):
with test_lib.ConfigOverrider({
"ACL.group_access_manager_class": "Foo bar.",
"AdminUI.bind": "127.0.0.1",
+ "AdminUI.hunt_config": rdf_config.AdminUIHuntConfig(
+ default_exclude_labels=["oh-oh"],
+ ),
+ "Source.version_major": 42,
+ "Hunt.default_client_rate": 42.0,
+ "Email.enable_custom_email_address": True,
+ "Cron.disabled_cron_jobs": ["Job1", "Job2"],
+ "Server.fleetspeak_last_ping_threshold": "1h",
+ "Server.raw_filesystem_access_pathtype": "TSK",
+ "ClientBuilder.build_type": "Debug",
+ "ClientBuilder.target_platforms": [
+ "darwin_amd64_dmg",
+ "linux_amd64_deb",
+ ],
+ "ClientRepacker.output_filename": (
+ "%(ClientRepacker.output_basename)%(ClientBuilder.output_extension)"
+ ),
+ "Mysql.password": "top-secret",
}):
self.Open("/legacy#/config")
self.WaitUntil(self.IsTextPresent, "Configuration")
# Check that configuration values are displayed.
- self.WaitUntil(self.IsTextPresent, "ACL.group_access_manager_class")
- self.WaitUntil(self.IsTextPresent, "Foo bar.")
-
- self.WaitUntil(self.IsTextPresent, "AdminUI.bind")
- self.WaitUntil(self.IsTextPresent, "127.0.0.1")
+ self.WaitUntil(
+ self.IsElementPresent,
+ "css=tr:contains('ACL.group_access_manager_class'):contains('Foo"
+ " bar.')",
+ )
+ self.WaitUntil(
+ self.IsElementPresent,
+ "css=tr:contains('AdminUI.bind'):contains('127.0.0.1')",
+ )
+ self.WaitUntil(
+ self.IsElementPresent,
+ "css=tr:contains('PrivateKeys.executable_signing_private_key'):contains('(redacted)')",
+ )
+ # AdminUI.hunt_config is an RDFProtoStruct.
+ self.WaitUntil(
+ self.IsElementPresent,
+ "css=tr:contains('AdminUI.hunt_config') tr:contains('Default exclude"
+ " labels'):contains('oh-oh')",
+ )
+ # Source.version_major is an int field.
+ self.WaitUntil(
+ self.IsElementPresent,
+ "css=tr:contains('Source.version_major'):contains('42')",
+ )
+ # Hunt.default_client_rate is a float field, displayed as an int.
+ self.WaitUntil(
+ self.IsElementPresent,
+ "css=tr:contains('Hunt.default_client_rate'):contains('42')",
+ )
+ # Email.enable_custom_email_address is a boolean field, displayed as an int.
+ self.WaitUntil(
+ self.IsElementPresent,
+ "css=tr:contains('Email.enable_custom_email_address'):contains('1')",
+ )
+ # Cron.disabled_cron_jobs is a list (unsupported).
+ self.WaitUntil(
+ self.IsElementPresent,
+ "css=tr:contains('Cron.disabled_cron_jobs'):not(:contains('Job1,"
+ " Job2'))",
+ )
+ # Server.fleetspeak_last_ping_threshold is an RDF Duration.
+ self.WaitUntil(
+ self.IsElementPresent,
+ "css=tr:contains('Server.fleetspeak_last_ping_threshold'):contains('3600000000')",
+ )
+ # Server.raw_filesystem_access_pathtype is an enum.
+ self.WaitUntil(
+ self.IsElementPresent,
+ "css=tr:contains('Server.raw_filesystem_access_pathtype'):contains('TSK')",
+ )
+ # ClientBuilder.build_type is a "choice".
+ self.WaitUntil(
+ self.IsElementPresent,
+ "css=tr:contains('ClientBuilder.build_type'):contains('Debug')",
+ )
+ # ClientBuilder.target_platforms is a "multi-choice" (unsupported).
+ self.WaitUntil(
+ self.IsElementPresent,
+ "css=tr:contains('ClientBuilder.target_platforms'):not(:contains('darwin_amd64_dmg,"
+ " linux_amd64_deb'))",
+ )
+ # ClientRepacker.output_filename is an "option".
+ self.WaitUntil(
+ self.IsElementPresent,
+ "css=tr:contains('ClientRepacker.output_filename'):contains('GRR_0.0.0.0_')",
+ )
+ # Mysql.password should be redacted.
+ self.WaitUntil(
+ self.IsElementPresent,
+ "css=tr:contains('Mysql.password'):not(:contains('top-secret')):contains(redacted)",
+ )
class TestManageBinariesView(
diff --git a/grr/server/grr_response_server/gui/selenium_tests/v2/browser_history_test.py b/grr/server/grr_response_server/gui/selenium_tests/v2/browser_history_test.py
index cda2630d63..e84b277cc0 100644
--- a/grr/server/grr_response_server/gui/selenium_tests/v2/browser_history_test.py
+++ b/grr/server/grr_response_server/gui/selenium_tests/v2/browser_history_test.py
@@ -4,6 +4,8 @@
from grr_response_core.lib import rdfvalue
from grr_response_core.lib.rdfvalues import client_fs as rdf_client_fs
from grr_response_core.lib.rdfvalues import paths as rdf_paths
+from grr_response_proto import flows_pb2
+from grr_response_proto.api import flow_pb2
from grr_response_server.flows.general import webhistory
from grr_response_server.gui import api_call_context
from grr_response_server.gui import gui_test_lib
@@ -570,7 +572,7 @@ def testBrowserHistoryFlowForm(self):
def FlowHasBeenStarted():
handler = api_flow.ApiListFlowsHandler()
flows = handler.Handle(
- api_flow.ApiListFlowsArgs(
+ flow_pb2.ApiListFlowsArgs(
client_id=self.client_id, top_flows_only=True
),
context=api_call_context.ApiCallContext(username=self.test_username),
@@ -581,8 +583,10 @@ def FlowHasBeenStarted():
flow = self.WaitUntil(FlowHasBeenStarted)
self.assertEqual(flow.name, webhistory.CollectBrowserHistory.__name__)
+ flow_args = flows_pb2.CollectBrowserHistoryArgs()
+ flow.args.Unpack(flow_args)
self.assertCountEqual(
- flow.args.browsers,
+ flow_args.browsers,
[
webhistory.CollectBrowserHistoryArgs.Browser.CHROME,
# Only Firefox has been unchecked, so it should not appear.
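
In these v2 tests, flow.args is no longer a typed RDF value but a google.protobuf.Any, so each assertion first unpacks it into the concrete flow-args proto. The same three-line pattern recurs in the file and flow tests below; a generic sketch (the flow object stands in for the value returned by the test helper):

from grr_response_proto import flows_pb2

flow_args = flows_pb2.CollectBrowserHistoryArgs()
# flow.args is an Any; Unpack() copies the packed payload into the typed
# message and returns False if the type URL does not match.
assert flow.args.Unpack(flow_args)
browsers = flow_args.browsers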
diff --git a/grr/server/grr_response_server/gui/selenium_tests/v2/file_test.py b/grr/server/grr_response_server/gui/selenium_tests/v2/file_test.py
index b5bc6e95b6..e87b3e1bf8 100644
--- a/grr/server/grr_response_server/gui/selenium_tests/v2/file_test.py
+++ b/grr/server/grr_response_server/gui/selenium_tests/v2/file_test.py
@@ -8,6 +8,8 @@
from grr_response_core.lib.rdfvalues import crypto as rdf_crypto
from grr_response_core.lib.rdfvalues import file_finder as rdf_file_finder
from grr_response_core.lib.rdfvalues import paths as rdf_paths
+from grr_response_proto import flows_pb2
+from grr_response_proto.api import flow_pb2
from grr_response_server.flows import file
from grr_response_server.flows.general import transfer
from grr_response_server.gui import api_call_context
@@ -315,7 +317,7 @@ def testFlowArgumentForm(self):
def FlowHasBeenStarted():
handler = api_flow.ApiListFlowsHandler()
flows = handler.Handle(
- api_flow.ApiListFlowsArgs(
+ flow_pb2.ApiListFlowsArgs(
client_id=self.client_id, top_flows_only=True
),
context=api_call_context.ApiCallContext(username=self.test_username),
@@ -325,8 +327,10 @@ def FlowHasBeenStarted():
flow = self.WaitUntil(FlowHasBeenStarted)
self.assertEqual(flow.name, file.CollectFilesByKnownPath.__name__)
+ flow_args = flows_pb2.CollectFilesByKnownPathArgs()
+ flow.args.Unpack(flow_args)
self.assertCountEqual(
- flow.args.paths, ["/foo/firstpath", "/bar/secondpath"]
+ flow_args.paths, ["/foo/firstpath", "/bar/secondpath"]
)
@@ -676,7 +680,7 @@ def testFlowArgumentForm(self):
def FlowHasBeenStarted():
handler = api_flow.ApiListFlowsHandler()
flows = handler.Handle(
- api_flow.ApiListFlowsArgs(
+ flow_pb2.ApiListFlowsArgs(
client_id=self.client_id, top_flows_only=True
),
context=api_call_context.ApiCallContext(username=self.test_username),
@@ -686,8 +690,10 @@ def FlowHasBeenStarted():
flow = self.WaitUntil(FlowHasBeenStarted)
self.assertEqual(flow.name, file.StatMultipleFiles.__name__)
+ flow_args = flows_pb2.StatMultipleFilesArgs()
+ flow.args.Unpack(flow_args)
self.assertCountEqual(
- flow.args.path_expressions, ["/foo/firstpath", "/bar/secondpath"]
+ flow_args.path_expressions, ["/foo/firstpath", "/bar/secondpath"]
)
@@ -894,7 +900,7 @@ def testFlowArgumentForm(self):
def FlowHasBeenStarted():
handler = api_flow.ApiListFlowsHandler()
flows = handler.Handle(
- api_flow.ApiListFlowsArgs(
+ flow_pb2.ApiListFlowsArgs(
client_id=self.client_id, top_flows_only=True
),
context=api_call_context.ApiCallContext(username=self.test_username),
@@ -904,8 +910,10 @@ def FlowHasBeenStarted():
flow = self.WaitUntil(FlowHasBeenStarted)
self.assertEqual(flow.name, file.HashMultipleFiles.__name__)
+ flow_args = flows_pb2.HashMultipleFilesArgs()
+ flow.args.Unpack(flow_args)
self.assertCountEqual(
- flow.args.path_expressions, ["/foo/firstpath", "/bar/secondpath"]
+ flow_args.path_expressions, ["/foo/firstpath", "/bar/secondpath"]
)
diff --git a/grr/server/grr_response_server/gui/selenium_tests/v2/flow_test.py b/grr/server/grr_response_server/gui/selenium_tests/v2/flow_test.py
index 00ffe2403c..5f0b9ed143 100644
--- a/grr/server/grr_response_server/gui/selenium_tests/v2/flow_test.py
+++ b/grr/server/grr_response_server/gui/selenium_tests/v2/flow_test.py
@@ -31,7 +31,7 @@
def _ListFlows(client_id: str, creator: str):
handler = api_flow.ApiListFlowsHandler()
return handler.Handle(
- api_flow.ApiListFlowsArgs(client_id=client_id, top_flows_only=True),
+ flow_pb2.ApiListFlowsArgs(client_id=client_id, top_flows_only=True),
context=api_call_context.ApiCallContext(username=creator),
).items
@@ -661,7 +661,9 @@ def testCanCreateFlowWithApprovalsDisabled(self):
self.assertEqual(flows[0].client_id, self.client_id)
self.assertEqual(flows[0].creator, self.test_username)
self.assertEqual(flows[0].name, 'CollectMultipleFiles')
- self.assertEqual(flows[0].args.path_expressions, ['/foo/test'])
+ flow_args = flows_pb2.CollectMultipleFilesArgs()
+ flows[0].args.Unpack(flow_args)
+ self.assertEqual(flow_args.path_expressions, ['/foo/test'])
if __name__ == '__main__':
diff --git a/grr/server/grr_response_server/gui/selenium_tests/v2/hunt_create_test.py b/grr/server/grr_response_server/gui/selenium_tests/v2/hunt_create_test.py
index eb2de62c21..93cbbf7b9b 100644
--- a/grr/server/grr_response_server/gui/selenium_tests/v2/hunt_create_test.py
+++ b/grr/server/grr_response_server/gui/selenium_tests/v2/hunt_create_test.py
@@ -4,6 +4,7 @@
from absl import app
+from grr_response_core.lib.rdfvalues import config as rdf_config
from grr_response_core.lib.rdfvalues import paths as rdf_paths
from grr_response_core.lib.util import retry
from grr_response_proto import flows_pb2
@@ -281,6 +282,50 @@ def testNoHuntIsCreatedWhenInputInvalid(self):
self._ListHuntsAndAssertCount(self.test_username, 0)
self.WaitUntil(self.IsElementPresent, "css=.mdc-snackbar")
+ def testHuntPresubmit(self):
+ client_id = self.SetupClient(0)
+ self.RequestAndGrantClientApproval(client_id)
+ flow_id = flow_test_lib.StartFlow(
+ network.Netstat,
+ creator=self.test_username,
+ client_id=client_id,
+ flow_args=network.NetstatArgs(),
+ )
+
+ hunt_cfg = rdf_config.AdminUIHuntConfig(
+ default_exclude_labels=["no-no"],
+ make_default_exclude_labels_a_presubmit_check=True,
+ presubmit_warning_message="not cool",
+ )
+ with test_lib.ConfigOverrider({"AdminUI.hunt_config": hunt_cfg}):
+ self.Open(f"/v2/new-hunt?clientId={client_id}&flowId={flow_id}")
+
+ # Make sure default exclude labels are displayed.
+ self.WaitUntil(
+ self.IsElementPresent, "css=input[id=condition_1_label_name_0]"
+ )
+ self.Type("css=input[id=condition_1_label_name_0]", "no-no")
+
+ self.WaitUntilNot(
+ self.IsElementPresent, "css=mat-card.warning:contains('not cool')"
+ )
+
+ # Removing the label should trigger the warning.
+ self.Click("css=[name=condition_1] button#close")
+
+ # Make sure the warning is now displayed.
+ self.WaitUntil(
+ self.IsElementPresent, "css=mat-card.warning:contains('not cool')"
+ )
+
+ # Fill out necessary approval information and create hunt.
+ self.Type("css=approval-card input[name=reason]", "because")
+ self.Click("css=button[id=runHunt]")
+
+ self._ListHuntsAndAssertCount(self.test_username, 0)
+ # Hunt creation also fails at the API level, so an error snackbar is shown.
+ self.WaitUntil(self.IsElementPresent, "css=.mdc-snackbar")
+
if __name__ == "__main__":
app.run(test_lib.main)
diff --git a/grr/server/grr_response_server/gui/static/angular-components/docs/api-docs-examples.json b/grr/server/grr_response_server/gui/static/angular-components/docs/api-docs-examples.json
index 9bbbc974a3..b6a8a42d3e 100644
--- a/grr/server/grr_response_server/gui/static/angular-components/docs/api-docs-examples.json
+++ b/grr/server/grr_response_server/gui/static/angular-components/docs/api-docs-examples.json
@@ -68,7 +68,7 @@
},
"type": {
"type": "unicode",
- "value": "RDFString"
+ "value": "DummyFlowResult"
}
}
}
@@ -79,7 +79,7 @@
"items": [
{
"count": 1,
- "type": "RDFString"
+ "type": "DummyFlowResult"
}
]
},
@@ -304,7 +304,6 @@
]
}
},
- "labels": [],
"last_seen_at": {
"type": "RDFDatetime",
"value": 42000000
@@ -432,7 +431,6 @@
}
]
},
- "labels": [],
"last_seen_at": 42000000,
"os_info": {
"fqdn": "Host-0.example.com",
@@ -1718,7 +1716,6 @@
"value": "api_test_user"
}
],
- "email_cc_addresses": [],
"email_message_id": {
"type": "unicode",
"value": ""
@@ -1902,7 +1899,6 @@
]
}
},
- "labels": [],
"last_seen_at": {
"type": "RDFDatetime",
"value": 42000000
@@ -1965,7 +1961,6 @@
"approvers": [
"api_test_user"
],
- "email_cc_addresses": [],
"email_message_id": "",
"expiration_time_us": 2419244000000,
"id": "approval:111111",
@@ -2027,7 +2022,6 @@
}
]
},
- "labels": [],
"last_seen_at": 42000000,
"os_info": {
"fqdn": "Host-0.example.com",
@@ -2065,7 +2059,6 @@
"value": "approver"
}
],
- "email_cc_addresses": [],
"email_message_id": {
"type": "unicode",
"value": ""
@@ -2245,7 +2238,6 @@
]
}
},
- "labels": [],
"last_seen_at": {
"type": "RDFDatetime",
"value": 42000000
@@ -2309,7 +2301,6 @@
"api_test_user",
"approver"
],
- "email_cc_addresses": [],
"email_message_id": "",
"expiration_time_us": 2419245000000,
"id": "approval:222222",
@@ -2370,7 +2361,6 @@
}
]
},
- "labels": [],
"last_seen_at": 42000000,
"os_info": {
"fqdn": "Host-1.example.com",
@@ -3815,6 +3805,241 @@
"url": "/api/clients/C.1000000000000000/versions?end=46000000&mode=full&start=44000000"
}
],
+ "ApiGetConfigOptionHandler": [
+ {
+ "api_method": "GetConfigOption",
+ "method": "GET",
+ "response": {
+ "is_invalid": false,
+ "name": "Client.company_name",
+ "type": "RDFString",
+ "value": {
+ "type": "RDFString",
+ "value": "Monstros S.A."
+ }
+ },
+ "test_class": "ApiGetConfigOptionHandlerRegressionTest_http_v1",
+ "type_stripped_response": "Monstros S.A.",
+ "url": "/api/config/Client.company_name"
+ },
+ {
+ "api_method": "GetConfigOption",
+ "method": "GET",
+ "response": {
+ "is_invalid": false,
+ "name": "AdminUI.hunt_config",
+ "type": "AdminUIHuntConfig",
+ "value": {
+ "type": "AdminUIHuntConfig",
+ "value": {
+ "default_exclude_labels": [
+ {
+ "type": "unicode",
+ "value": "oh-oh"
+ }
+ ]
+ }
+ }
+ },
+ "test_class": "ApiGetConfigOptionHandlerRegressionTest_http_v1",
+ "type_stripped_response": {
+ "default_exclude_labels": [
+ "oh-oh"
+ ]
+ },
+ "url": "/api/config/AdminUI.hunt_config"
+ },
+ {
+ "api_method": "GetConfigOption",
+ "method": "GET",
+ "response": {
+ "is_invalid": false,
+ "name": "Source.version_major",
+ "type": "RDFInteger",
+ "value": {
+ "type": "RDFInteger",
+ "value": 42
+ }
+ },
+ "test_class": "ApiGetConfigOptionHandlerRegressionTest_http_v1",
+ "type_stripped_response": 42,
+ "url": "/api/config/Source.version_major"
+ },
+ {
+ "api_method": "GetConfigOption",
+ "method": "GET",
+ "response": {
+ "is_invalid": false,
+ "name": "Source.version_major",
+ "type": "RDFInteger",
+ "value": {
+ "type": "RDFInteger",
+ "value": 42
+ }
+ },
+ "test_class": "ApiGetConfigOptionHandlerRegressionTest_http_v1",
+ "type_stripped_response": 42,
+ "url": "/api/config/Source.version_major"
+ },
+ {
+ "api_method": "GetConfigOption",
+ "method": "GET",
+ "response": {
+ "is_invalid": false,
+ "name": "Source.version_major",
+ "type": "RDFInteger",
+ "value": {
+ "type": "RDFInteger",
+ "value": 42
+ }
+ },
+ "test_class": "ApiGetConfigOptionHandlerRegressionTest_http_v1",
+ "type_stripped_response": 42,
+ "url": "/api/config/Source.version_major"
+ },
+ {
+ "api_method": "GetConfigOption",
+ "method": "GET",
+ "response": {
+ "is_invalid": false,
+ "name": "Source.version_major",
+ "type": "RDFInteger",
+ "value": {
+ "type": "RDFInteger",
+ "value": 42
+ }
+ },
+ "test_class": "ApiGetConfigOptionHandlerRegressionTest_http_v1",
+ "type_stripped_response": 42,
+ "url": "/api/config/Source.version_major"
+ },
+ {
+ "api_method": "GetConfigOption",
+ "method": "GET",
+ "response": {
+ "is_invalid": false,
+ "name": "Hunt.default_client_rate",
+ "type": "RDFInteger",
+ "value": {
+ "type": "RDFInteger",
+ "value": 42
+ }
+ },
+ "test_class": "ApiGetConfigOptionHandlerRegressionTest_http_v1",
+ "type_stripped_response": 42,
+ "url": "/api/config/Hunt.default_client_rate"
+ },
+ {
+ "api_method": "GetConfigOption",
+ "method": "GET",
+ "response": {
+ "is_invalid": false,
+ "name": "Email.enable_custom_email_address",
+ "type": "RDFInteger",
+ "value": {
+ "type": "RDFInteger",
+ "value": 1
+ }
+ },
+ "test_class": "ApiGetConfigOptionHandlerRegressionTest_http_v1",
+ "type_stripped_response": 1,
+ "url": "/api/config/Email.enable_custom_email_address"
+ },
+ {
+ "api_method": "GetConfigOption",
+ "method": "GET",
+ "response": {
+ "is_invalid": true,
+ "name": "Cron.disabled_cron_jobs"
+ },
+ "test_class": "ApiGetConfigOptionHandlerRegressionTest_http_v1",
+ "url": "/api/config/Cron.disabled_cron_jobs"
+ },
+ {
+ "api_method": "GetConfigOption",
+ "method": "GET",
+ "response": {
+ "is_invalid": false,
+ "name": "Server.fleetspeak_last_ping_threshold",
+ "type": "Duration",
+ "value": {
+ "type": "Duration",
+ "value": 3600000000
+ }
+ },
+ "test_class": "ApiGetConfigOptionHandlerRegressionTest_http_v1",
+ "type_stripped_response": 3600000000,
+ "url": "/api/config/Server.fleetspeak_last_ping_threshold"
+ },
+ {
+ "api_method": "GetConfigOption",
+ "method": "GET",
+ "response": {
+ "is_invalid": false,
+ "name": "Server.raw_filesystem_access_pathtype",
+ "type": "EnumNamedValue",
+ "value": {
+ "type": "EnumNamedValue",
+ "value": "TSK"
+ }
+ },
+ "test_class": "ApiGetConfigOptionHandlerRegressionTest_http_v1",
+ "type_stripped_response": "TSK",
+ "url": "/api/config/Server.raw_filesystem_access_pathtype"
+ },
+ {
+ "api_method": "GetConfigOption",
+ "method": "GET",
+ "response": {
+ "is_invalid": false,
+ "name": "ClientBuilder.build_type",
+ "type": "RDFString",
+ "value": {
+ "type": "RDFString",
+ "value": "Debug"
+ }
+ },
+ "test_class": "ApiGetConfigOptionHandlerRegressionTest_http_v1",
+ "type_stripped_response": "Debug",
+ "url": "/api/config/ClientBuilder.build_type"
+ },
+ {
+ "api_method": "GetConfigOption",
+ "method": "GET",
+ "response": {
+ "is_invalid": true,
+ "name": "ClientBuilder.target_platforms"
+ },
+ "test_class": "ApiGetConfigOptionHandlerRegressionTest_http_v1",
+ "url": "/api/config/ClientBuilder.target_platforms"
+ },
+ {
+ "api_method": "GetConfigOption",
+ "method": "GET",
+ "response": {
+ "is_invalid": false,
+ "name": "ClientRepacker.output_filename",
+ "type": "RDFString",
+ "value": {
+ "type": "RDFString",
+ "value": "GRR_0.0.0.0_"
+ }
+ },
+ "test_class": "ApiGetConfigOptionHandlerRegressionTest_http_v1",
+ "type_stripped_response": "GRR_0.0.0.0_",
+ "url": "/api/config/ClientRepacker.output_filename"
+ },
+ {
+ "api_method": "GetConfigOption",
+ "method": "GET",
+ "response": {
+ "is_redacted": true,
+ "name": "Mysql.password"
+ },
+ "test_class": "ApiGetConfigOptionHandlerRegressionTest_http_v1",
+ "url": "/api/config/Mysql.password"
+ }
+ ],
"ApiGetCronJobApprovalHandler": [
{
"api_method": "GetCronJobApproval",
@@ -3828,7 +4053,6 @@
"value": "api_test_user"
}
],
- "email_cc_addresses": [],
"email_message_id": {
"type": "unicode",
"value": ""
@@ -3942,7 +4166,6 @@
"approvers": [
"api_test_user"
],
- "email_cc_addresses": [],
"email_message_id": "",
"id": "approval:111111",
"is_valid": false,
@@ -3993,7 +4216,6 @@
"value": "approver"
}
],
- "email_cc_addresses": [],
"email_message_id": {
"type": "unicode",
"value": ""
@@ -4104,7 +4326,6 @@
"api_test_user",
"approver"
],
- "email_cc_addresses": [],
"email_message_id": "",
"id": "approval:222222",
"is_valid": true,
@@ -4893,7 +5114,6 @@
"value": "api_test_user"
}
],
- "email_cc_addresses": [],
"email_message_id": {
"type": "unicode",
"value": ""
@@ -5176,7 +5396,6 @@
"approvers": [
"api_test_user"
],
- "email_cc_addresses": [],
"email_message_id": "",
"expiration_time_us": 2419244000000,
"id": "approval:111111",
@@ -5279,7 +5498,6 @@
"value": "approver"
}
],
- "email_cc_addresses": [],
"email_message_id": {
"type": "unicode",
"value": ""
@@ -5559,7 +5777,6 @@
"api_test_user",
"approver"
],
- "email_cc_addresses": [],
"email_message_id": "",
"expiration_time_us": 2419245000000,
"id": "approval:222222",
@@ -5898,7 +6115,6 @@
}
}
},
- "email_cc_addresses": [],
"email_message_id": {
"type": "unicode",
"value": ""
@@ -6286,7 +6502,6 @@
"total_net_usage": 0,
"urn": "aff4:/hunts/H:556677"
},
- "email_cc_addresses": [],
"email_message_id": "",
"expiration_time_us": 2419244000000,
"id": "approval:333333",
@@ -6454,7 +6669,6 @@
}
}
},
- "email_cc_addresses": [],
"email_message_id": {
"type": "unicode",
"value": ""
@@ -6793,7 +7007,6 @@
"state": "RUNNING",
"urn": "aff4:/C.1000000000000000/flows/F:112233"
},
- "email_cc_addresses": [],
"email_message_id": "",
"expiration_time_us": 2419244000000,
"id": "approval:444444",
@@ -10242,7 +10455,6 @@
"value": "requestor"
}
],
- "email_cc_addresses": [],
"email_message_id": {
"type": "unicode",
"value": ""
@@ -10422,7 +10634,6 @@
]
}
},
- "labels": [],
"last_seen_at": {
"type": "RDFDatetime",
"value": 42000000
@@ -10486,7 +10697,6 @@
"api_test_user",
"requestor"
],
- "email_cc_addresses": [],
"email_message_id": "",
"expiration_time_us": 2419244000000,
"id": "approval:111111",
@@ -10547,7 +10757,6 @@
}
]
},
- "labels": [],
"last_seen_at": 42000000,
"os_info": {
"fqdn": "Host-0.example.com",
@@ -10587,7 +10796,6 @@
"value": "requestor"
}
],
- "email_cc_addresses": [],
"email_message_id": {
"type": "unicode",
"value": ""
@@ -10698,7 +10906,6 @@
"api_test_user",
"requestor"
],
- "email_cc_addresses": [],
"email_message_id": "",
"id": "approval:111111",
"is_valid": true,
@@ -10750,7 +10957,6 @@
"value": "requestor"
}
],
- "email_cc_addresses": [],
"email_message_id": {
"type": "unicode",
"value": ""
@@ -11030,7 +11236,6 @@
"api_test_user",
"requestor"
],
- "email_cc_addresses": [],
"email_message_id": "",
"expiration_time_us": 2419244000000,
"id": "approval:111111",
@@ -11392,9 +11597,7 @@
{
"api_method": "ListApproverSuggestions",
"method": "GET",
- "response": {
- "suggestions": []
- },
+ "response": {},
"test_class": "ApiListApproverSuggestionsHandlerRegressionTest_http_v1",
"url": "/api/users/approver-suggestions?username_query=foo"
},
@@ -11566,7 +11769,6 @@
"value": "approver"
}
],
- "email_cc_addresses": [],
"email_message_id": {
"type": "unicode",
"value": ""
@@ -11746,7 +11948,6 @@
]
}
},
- "labels": [],
"last_seen_at": {
"type": "RDFDatetime",
"value": 42000000
@@ -11813,7 +12014,6 @@
"value": "api_test_user"
}
],
- "email_cc_addresses": [],
"email_message_id": {
"type": "unicode",
"value": ""
@@ -11997,7 +12197,6 @@
]
}
},
- "labels": [],
"last_seen_at": {
"type": "RDFDatetime",
"value": 42000000
@@ -12065,7 +12264,6 @@
"api_test_user",
"approver"
],
- "email_cc_addresses": [],
"email_message_id": "",
"expiration_time_us": 2419245000000,
"id": "approval:222222",
@@ -12126,7 +12324,6 @@
}
]
},
- "labels": [],
"last_seen_at": 42000000,
"os_info": {
"fqdn": "Host-1.example.com",
@@ -12150,7 +12347,6 @@
"approvers": [
"api_test_user"
],
- "email_cc_addresses": [],
"email_message_id": "",
"expiration_time_us": 2419244000000,
"id": "approval:111111",
@@ -12212,7 +12408,6 @@
}
]
},
- "labels": [],
"last_seen_at": 42000000,
"os_info": {
"fqdn": "Host-0.example.com",
@@ -12250,7 +12445,6 @@
"value": "api_test_user"
}
],
- "email_cc_addresses": [],
"email_message_id": {
"type": "unicode",
"value": ""
@@ -12434,7 +12628,6 @@
]
}
},
- "labels": [],
"last_seen_at": {
"type": "RDFDatetime",
"value": 42000000
@@ -12501,7 +12694,6 @@
"approvers": [
"api_test_user"
],
- "email_cc_addresses": [],
"email_message_id": "",
"expiration_time_us": 2419244000000,
"id": "approval:111111",
@@ -12563,7 +12755,6 @@
}
]
},
- "labels": [],
"last_seen_at": 42000000,
"os_info": {
"fqdn": "Host-0.example.com",
@@ -14011,10 +14202,6 @@
}
}
},
- "payload_type": {
- "type": "unicode",
- "value": "FileFinderResult"
- },
"timestamp": {
"type": "RDFDatetime",
"value": 42000000
@@ -14045,7 +14232,6 @@
"st_uid": 139592
}
},
- "payload_type": "FileFinderResult",
"timestamp": 42000000
}
]
@@ -14055,9 +14241,7 @@
{
"api_method": "ListFlowResults",
"method": "GET",
- "response": {
- "items": []
- },
+ "response": {},
"test_class": "ApiListFlowResultsHandlerRegressionTest_http_v1",
"url": "/api/clients/C.1000000000000000/flows/W:ABCDEF/results?filter=benign"
}
@@ -14800,7 +14984,6 @@
"value": "api_test_user"
}
],
- "email_cc_addresses": [],
"email_message_id": {
"type": "unicode",
"value": ""
@@ -15087,7 +15270,6 @@
"approvers": [
"api_test_user"
],
- "email_cc_addresses": [],
"email_message_id": "",
"expiration_time_us": 2419243000000,
"id": "approval:112233",
@@ -16143,12 +16325,13 @@
"value": "C.1000000000000000"
},
"payload": {
- "type": "RDFString",
- "value": "blah1"
- },
- "payload_type": {
- "type": "unicode",
- "value": "RDFString"
+ "type": "DummyFlowResult",
+ "value": {
+ "flow_output": {
+ "type": "unicode",
+ "value": "blah1"
+ }
+ }
},
"timestamp": {
"type": "RDFDatetime",
@@ -16164,12 +16347,13 @@
"value": "C.1000000000000000"
},
"payload": {
- "type": "RDFString",
- "value": "blah2-foo"
- },
- "payload_type": {
- "type": "unicode",
- "value": "RDFString"
+ "type": "DummyFlowResult",
+ "value": {
+ "flow_output": {
+ "type": "unicode",
+ "value": "blah2-foo"
+ }
+ }
},
"timestamp": {
"type": "RDFDatetime",
@@ -16185,14 +16369,16 @@
"items": [
{
"client_id": "C.1000000000000000",
- "payload": "blah1",
- "payload_type": "RDFString",
+ "payload": {
+ "flow_output": "blah1"
+ },
"timestamp": 2000000
},
{
"client_id": "C.1000000000000000",
- "payload": "blah2-foo",
- "payload_type": "RDFString",
+ "payload": {
+ "flow_output": "blah2-foo"
+ },
"timestamp": 43000000
}
],
@@ -16213,12 +16399,13 @@
"value": "C.1000000000000000"
},
"payload": {
- "type": "RDFString",
- "value": "blah1"
- },
- "payload_type": {
- "type": "unicode",
- "value": "RDFString"
+ "type": "DummyFlowResult",
+ "value": {
+ "flow_output": {
+ "type": "unicode",
+ "value": "blah1"
+ }
+ }
},
"timestamp": {
"type": "RDFDatetime",
@@ -16234,8 +16421,9 @@
"items": [
{
"client_id": "C.1000000000000000",
- "payload": "blah1",
- "payload_type": "RDFString",
+ "payload": {
+ "flow_output": "blah1"
+ },
"timestamp": 2000000
}
],
@@ -16256,12 +16444,13 @@
"value": "C.1000000000000000"
},
"payload": {
- "type": "RDFString",
- "value": "blah2-foo"
- },
- "payload_type": {
- "type": "unicode",
- "value": "RDFString"
+ "type": "DummyFlowResult",
+ "value": {
+ "flow_output": {
+ "type": "unicode",
+ "value": "blah2-foo"
+ }
+ }
},
"timestamp": {
"type": "RDFDatetime",
@@ -16277,8 +16466,9 @@
"items": [
{
"client_id": "C.1000000000000000",
- "payload": "blah2-foo",
- "payload_type": "RDFString",
+ "payload": {
+ "flow_output": "blah2-foo"
+ },
"timestamp": 43000000
}
],
@@ -16299,12 +16489,13 @@
"value": "C.1000000000000000"
},
"payload": {
- "type": "RDFString",
- "value": "blah2-foo"
- },
- "payload_type": {
- "type": "unicode",
- "value": "RDFString"
+ "type": "DummyFlowResult",
+ "value": {
+ "flow_output": {
+ "type": "unicode",
+ "value": "blah2-foo"
+ }
+ }
},
"timestamp": {
"type": "RDFDatetime",
@@ -16320,8 +16511,9 @@
"items": [
{
"client_id": "C.1000000000000000",
- "payload": "blah2-foo",
- "payload_type": "RDFString",
+ "payload": {
+ "flow_output": "blah2-foo"
+ },
"timestamp": 43000000
}
],
@@ -17753,9 +17945,7 @@
{
"api_method": "ListPendingUserNotifications",
"method": "GET",
- "response": {
- "items": []
- },
+ "response": {},
"test_class": "ApiListPendingUserNotificationsHandlerRegressionTest_http_v1",
"url": "/api/users/me/notifications/pending?timestamp=44000000"
}
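
Most of the deletions in this file (the empty labels, email_cc_addresses, suggestions, and items lists) follow from rendering responses through proto3 JSON serialization, which omits empty repeated fields and unset scalars entirely; this is also why user-dashboard-directive.js below gains a guard for a missing items key. A one-line illustration, using ApiListApproverSuggestionsResult only as a plausible example message:

from google.protobuf import json_format
from grr_response_proto.api import user_pb2

# An empty repeated field is dropped from the JSON output altogether.
result = user_pb2.ApiListApproverSuggestionsResult()
print(json_format.MessageToJson(result))  # prints {}, not {"suggestions": []}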
diff --git a/grr/server/grr_response_server/gui/static/angular-components/docs/api-v2-docs-examples.json b/grr/server/grr_response_server/gui/static/angular-components/docs/api-v2-docs-examples.json
index ef5dc1186b..33d88eb15f 100644
--- a/grr/server/grr_response_server/gui/static/angular-components/docs/api-v2-docs-examples.json
+++ b/grr/server/grr_response_server/gui/static/angular-components/docs/api-v2-docs-examples.json
@@ -34,7 +34,7 @@
"items": [
{
"count": "1",
- "type": "RDFString"
+ "type": "DummyFlowResult"
}
]
},
@@ -1071,6 +1071,220 @@
"url": "/api/v2/clients/C.1000000000000000/versions?end=46000000&mode=full&start=44000000"
}
],
+ "ApiGetConfigOptionHandler": [
+ {
+ "api_method": "GetConfigOption",
+ "method": "GET",
+ "response": {
+ "isInvalid": false,
+ "name": "Client.company_name",
+ "type": "RDFString",
+ "value": {
+ "@type": "type.googleapis.com/google.protobuf.StringValue",
+ "value": "Monstros S.A."
+ }
+ },
+ "test_class": "ApiGetConfigOptionHandlerRegressionTest_http_v2",
+ "url": "/api/v2/config/Client.company_name"
+ },
+ {
+ "api_method": "GetConfigOption",
+ "method": "GET",
+ "response": {
+ "isInvalid": false,
+ "name": "AdminUI.hunt_config",
+ "type": "AdminUIHuntConfig",
+ "value": {
+ "@type": "type.googleapis.com/grr.AdminUIHuntConfig",
+ "defaultExcludeLabels": [
+ "oh-oh"
+ ]
+ }
+ },
+ "test_class": "ApiGetConfigOptionHandlerRegressionTest_http_v2",
+ "url": "/api/v2/config/AdminUI.hunt_config"
+ },
+ {
+ "api_method": "GetConfigOption",
+ "method": "GET",
+ "response": {
+ "isInvalid": false,
+ "name": "Source.version_major",
+ "type": "RDFInteger",
+ "value": {
+ "@type": "type.googleapis.com/google.protobuf.Int64Value",
+ "value": "42"
+ }
+ },
+ "test_class": "ApiGetConfigOptionHandlerRegressionTest_http_v2",
+ "url": "/api/v2/config/Source.version_major"
+ },
+ {
+ "api_method": "GetConfigOption",
+ "method": "GET",
+ "response": {
+ "isInvalid": false,
+ "name": "Source.version_major",
+ "type": "RDFInteger",
+ "value": {
+ "@type": "type.googleapis.com/google.protobuf.Int64Value",
+ "value": "42"
+ }
+ },
+ "test_class": "ApiGetConfigOptionHandlerRegressionTest_http_v2",
+ "url": "/api/v2/config/Source.version_major"
+ },
+ {
+ "api_method": "GetConfigOption",
+ "method": "GET",
+ "response": {
+ "isInvalid": false,
+ "name": "Source.version_major",
+ "type": "RDFInteger",
+ "value": {
+ "@type": "type.googleapis.com/google.protobuf.Int64Value",
+ "value": "42"
+ }
+ },
+ "test_class": "ApiGetConfigOptionHandlerRegressionTest_http_v2",
+ "url": "/api/v2/config/Source.version_major"
+ },
+ {
+ "api_method": "GetConfigOption",
+ "method": "GET",
+ "response": {
+ "isInvalid": false,
+ "name": "Source.version_major",
+ "type": "RDFInteger",
+ "value": {
+ "@type": "type.googleapis.com/google.protobuf.Int64Value",
+ "value": "42"
+ }
+ },
+ "test_class": "ApiGetConfigOptionHandlerRegressionTest_http_v2",
+ "url": "/api/v2/config/Source.version_major"
+ },
+ {
+ "api_method": "GetConfigOption",
+ "method": "GET",
+ "response": {
+ "isInvalid": false,
+ "name": "Hunt.default_client_rate",
+ "type": "RDFInteger",
+ "value": {
+ "@type": "type.googleapis.com/google.protobuf.Int64Value",
+ "value": "42"
+ }
+ },
+ "test_class": "ApiGetConfigOptionHandlerRegressionTest_http_v2",
+ "url": "/api/v2/config/Hunt.default_client_rate"
+ },
+ {
+ "api_method": "GetConfigOption",
+ "method": "GET",
+ "response": {
+ "isInvalid": false,
+ "name": "Email.enable_custom_email_address",
+ "type": "RDFInteger",
+ "value": {
+ "@type": "type.googleapis.com/google.protobuf.Int64Value",
+ "value": "1"
+ }
+ },
+ "test_class": "ApiGetConfigOptionHandlerRegressionTest_http_v2",
+ "url": "/api/v2/config/Email.enable_custom_email_address"
+ },
+ {
+ "api_method": "GetConfigOption",
+ "method": "GET",
+ "response": {
+ "isInvalid": true,
+ "name": "Cron.disabled_cron_jobs"
+ },
+ "test_class": "ApiGetConfigOptionHandlerRegressionTest_http_v2",
+ "url": "/api/v2/config/Cron.disabled_cron_jobs"
+ },
+ {
+ "api_method": "GetConfigOption",
+ "method": "GET",
+ "response": {
+ "isInvalid": false,
+ "name": "Server.fleetspeak_last_ping_threshold",
+ "type": "Duration",
+ "value": {
+ "@type": "type.googleapis.com/google.protobuf.UInt64Value",
+ "value": "3600000000"
+ }
+ },
+ "test_class": "ApiGetConfigOptionHandlerRegressionTest_http_v2",
+ "url": "/api/v2/config/Server.fleetspeak_last_ping_threshold"
+ },
+ {
+ "api_method": "GetConfigOption",
+ "method": "GET",
+ "response": {
+ "isInvalid": false,
+ "name": "Server.raw_filesystem_access_pathtype",
+ "type": "EnumNamedValue",
+ "value": {
+ "@type": "type.googleapis.com/google.protobuf.Int64Value",
+ "value": "1"
+ }
+ },
+ "test_class": "ApiGetConfigOptionHandlerRegressionTest_http_v2",
+ "url": "/api/v2/config/Server.raw_filesystem_access_pathtype"
+ },
+ {
+ "api_method": "GetConfigOption",
+ "method": "GET",
+ "response": {
+ "isInvalid": false,
+ "name": "ClientBuilder.build_type",
+ "type": "RDFString",
+ "value": {
+ "@type": "type.googleapis.com/google.protobuf.StringValue",
+ "value": "Debug"
+ }
+ },
+ "test_class": "ApiGetConfigOptionHandlerRegressionTest_http_v2",
+ "url": "/api/v2/config/ClientBuilder.build_type"
+ },
+ {
+ "api_method": "GetConfigOption",
+ "method": "GET",
+ "response": {
+ "isInvalid": true,
+ "name": "ClientBuilder.target_platforms"
+ },
+ "test_class": "ApiGetConfigOptionHandlerRegressionTest_http_v2",
+ "url": "/api/v2/config/ClientBuilder.target_platforms"
+ },
+ {
+ "api_method": "GetConfigOption",
+ "method": "GET",
+ "response": {
+ "isInvalid": false,
+ "name": "ClientRepacker.output_filename",
+ "type": "RDFString",
+ "value": {
+ "@type": "type.googleapis.com/google.protobuf.StringValue",
+ "value": "GRR_0.0.0.0_"
+ }
+ },
+ "test_class": "ApiGetConfigOptionHandlerRegressionTest_http_v2",
+ "url": "/api/v2/config/ClientRepacker.output_filename"
+ },
+ {
+ "api_method": "GetConfigOption",
+ "method": "GET",
+ "response": {
+ "isRedacted": true,
+ "name": "Mysql.password"
+ },
+ "test_class": "ApiGetConfigOptionHandlerRegressionTest_http_v2",
+ "url": "/api/v2/config/Mysql.password"
+ }
+ ],
"ApiGetCronJobApprovalHandler": [
{
"api_method": "GetCronJobApproval",
@@ -4302,7 +4516,6 @@
"stUid": 139592
}
},
- "payloadType": "FileFinderResult",
"timestamp": "42000000"
}
]
@@ -4907,19 +5120,17 @@
{
"clientId": "C.1000000000000000",
"payload": {
- "@type": "type.googleapis.com/google.protobuf.StringValue",
- "value": "blah1"
+ "@type": "type.googleapis.com/grr.DummyFlowResult",
+ "flowOutput": "blah1"
},
- "payloadType": "RDFString",
"timestamp": "2000000"
},
{
"clientId": "C.1000000000000000",
"payload": {
- "@type": "type.googleapis.com/google.protobuf.StringValue",
- "value": "blah2-foo"
+ "@type": "type.googleapis.com/grr.DummyFlowResult",
+ "flowOutput": "blah2-foo"
},
- "payloadType": "RDFString",
"timestamp": "43000000"
}
],
@@ -4936,10 +5147,9 @@
{
"clientId": "C.1000000000000000",
"payload": {
- "@type": "type.googleapis.com/google.protobuf.StringValue",
- "value": "blah1"
+ "@type": "type.googleapis.com/grr.DummyFlowResult",
+ "flowOutput": "blah1"
},
- "payloadType": "RDFString",
"timestamp": "2000000"
}
],
@@ -4956,10 +5166,9 @@
{
"clientId": "C.1000000000000000",
"payload": {
- "@type": "type.googleapis.com/google.protobuf.StringValue",
- "value": "blah2-foo"
+ "@type": "type.googleapis.com/grr.DummyFlowResult",
+ "flowOutput": "blah2-foo"
},
- "payloadType": "RDFString",
"timestamp": "43000000"
}
],
@@ -4976,10 +5185,9 @@
{
"clientId": "C.1000000000000000",
"payload": {
- "@type": "type.googleapis.com/google.protobuf.StringValue",
- "value": "blah2-foo"
+ "@type": "type.googleapis.com/grr.DummyFlowResult",
+ "flowOutput": "blah2-foo"
},
- "payloadType": "RDFString",
"timestamp": "43000000"
}
],
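
In the v2 examples the payload is now an Any-packed DummyFlowResult, so the JSON carries an inline @type URL next to the message fields and the separate payloadType field becomes redundant. A sketch of how such a payload serializes, assuming tests_pb2 is the home of DummyFlowResult (as the imports earlier in this diff suggest):

from google.protobuf import any_pb2
from google.protobuf import json_format
from grr_response_proto import tests_pb2

payload = any_pb2.Any()
payload.Pack(tests_pb2.DummyFlowResult(flow_output="blah1"))
print(json_format.MessageToJson(payload))
# {"@type": "type.googleapis.com/grr.DummyFlowResult", "flowOutput": "blah1"}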
diff --git a/grr/server/grr_response_server/gui/static/angular-components/user/user-dashboard-directive.js b/grr/server/grr_response_server/gui/static/angular-components/user/user-dashboard-directive.js
index 2a9245c5f3..65cbe5a8a2 100644
--- a/grr/server/grr_response_server/gui/static/angular-components/user/user-dashboard-directive.js
+++ b/grr/server/grr_response_server/gui/static/angular-components/user/user-dashboard-directive.js
@@ -92,9 +92,13 @@ const UserDashboardController = class {
* @private
*/
onClientApprovals_(response) {
- this.clientApprovals =
- filterOutDuplicateApprovals(response['data']['items'])
- .slice(0, MAX_SHOWN_CLIENTS);
+ if ('items' in response['data']) {
+ this.clientApprovals =
+ filterOutDuplicateApprovals(response['data']['items'])
+ .slice(0, MAX_SHOWN_CLIENTS);
+ } else {
+ this.clientApprovals = [];
+ }
}
/**
diff --git a/grr/server/grr_response_server/gui/ui/components/client_overview/client_overview.ts b/grr/server/grr_response_server/gui/ui/components/client_overview/client_overview.ts
index b6f6fc3cd5..829dfb64e0 100644
--- a/grr/server/grr_response_server/gui/ui/components/client_overview/client_overview.ts
+++ b/grr/server/grr_response_server/gui/ui/components/client_overview/client_overview.ts
@@ -1,4 +1,3 @@
-// g3-format-v_3_1
import {
ChangeDetectionStrategy,
Component,
@@ -73,14 +72,13 @@ export class ClientOverview implements OnInit, OnDestroy {
readonly activeOnlineNotificationArgs$ =
this.clientPageGlobalStore.flowListEntries$.pipe(
withLatestFrom(this.userGlobalStore.currentUser$),
- map(
- ([data, user]) =>
- data.flows?.find(
- (f) =>
- f.name === 'OnlineNotification' &&
- f.creator === user.name &&
- f.state === FlowState.RUNNING,
- ),
+ map(([data, user]) =>
+ data.flows?.find(
+ (f) =>
+ f.name === 'OnlineNotification' &&
+ f.creator === user.name &&
+ f.state === FlowState.RUNNING,
+ ),
),
map((flow) => flow?.args as OnlineNotificationArgs | undefined),
);
diff --git a/grr/server/grr_response_server/gui/ui/components/flow_args_form/artifact_collector_flow_form.ts b/grr/server/grr_response_server/gui/ui/components/flow_args_form/artifact_collector_flow_form.ts
index a922448149..82d0d40656 100644
--- a/grr/server/grr_response_server/gui/ui/components/flow_args_form/artifact_collector_flow_form.ts
+++ b/grr/server/grr_response_server/gui/ui/components/flow_args_form/artifact_collector_flow_form.ts
@@ -27,7 +27,6 @@ const READABLE_SOURCE_NAME: {[key in SourceType]?: string} = {
[SourceType.ARTIFACT]: 'Collects artifact',
[SourceType.COMMAND]: 'Executes command',
[SourceType.FILE]: 'Collects file',
- [SourceType.GREP]: 'Greps',
[SourceType.GRR_CLIENT_ACTION]: 'Executes client action',
[SourceType.LIST_FILES]: 'Lists files in',
[SourceType.PATH]: 'Collects path',
@@ -78,7 +77,6 @@ function getReadableSources(source: ArtifactSource): readonly string[] {
return [source.cmdline];
case SourceType.FILE:
- case SourceType.GREP:
case SourceType.PATH:
return source.paths;
@@ -233,7 +231,6 @@ export class ArtifactCollectorFlowForm extends FlowArgumentForm<
override convertFormStateToFlowArgs(formState: ControlValues) {
return {
artifactList: formState.artifactName ? [formState.artifactName] : [],
- applyParsers: false,
};
}
diff --git a/grr/server/grr_response_server/gui/ui/components/flow_details/flow_details.ng.html b/grr/server/grr_response_server/gui/ui/components/flow_details/flow_details.ng.html
index b80878da47..1725b4ac14 100644
--- a/grr/server/grr_response_server/gui/ui/components/flow_details/flow_details.ng.html
+++ b/grr/server/grr_response_server/gui/ui/components/flow_details/flow_details.ng.html
@@ -2,12 +2,17 @@