Skip to content

Commit c23ccef

Browse files
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent 342ed53 commit c23ccef

File tree

3 files changed

+17
-12
lines changed

3 files changed

+17
-12
lines changed

delphi/scorers/classifier/detection.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
from ...clients.client import Client
22
from ...latents import LatentRecord
3-
from .classifier import Classifier
43
from ...scorers.scorer import Scorer
4+
from .classifier import Classifier
55
from .prompts.detection_prompt import prompt as detection_prompt
66
from .sample import Sample, examples_to_samples
77

delphi/scorers/simulator/oai_autointerp/activations/activations.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@ class ActivationRecord(Serializable):
1313

1414
tokens: list[str]
1515
"""Tokens in the text sequence, represented as strings."""
16-
16+
1717
activations: list[int | float]
1818
"""Raw activation values for the neuron on each token in the text sequence."""
1919

delphi/scorers/simulator/oai_simulator.py

Lines changed: 15 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,8 @@
1-
import torch
21
from typing import Union
32

4-
from delphi.latents.latents import NonActivatingExample, ActivatingExample
3+
import torch
4+
5+
from delphi.latents.latents import ActivatingExample, NonActivatingExample
56

67
from ..scorer import Scorer, ScorerResult
78
from .oai_autointerp import (
@@ -56,24 +57,28 @@ async def __call__(self, record): # type: ignore
5657
score=result,
5758
)
5859

59-
def to_activation_records(self, examples: list[Union[ActivatingExample, NonActivatingExample]]) -> list[ActivationRecord]:
60+
def to_activation_records(
61+
self, examples: list[Union[ActivatingExample, NonActivatingExample]]
62+
) -> list[ActivationRecord]:
6063
# Filter Nones
6164
result = []
6265
for example in examples:
63-
if example is None:
66+
if example is None:
6467
continue
65-
68+
6669
if example.normalized_activations is None:
6770
# Use zeros for non-activating examples
68-
example.normalized_activations = torch.zeros_like(
69-
example.activations
70-
)
71-
71+
example.normalized_activations = torch.zeros_like(example.activations)
72+
7273
result.append(
7374
ActivationRecord(
7475
self.tokenizer.batch_decode(example.tokens),
7576
example.normalized_activations.half(),
76-
quantile=example.quantile if isinstance(example, ActivatingExample) else None,
77+
quantile=(
78+
example.quantile
79+
if isinstance(example, ActivatingExample)
80+
else None
81+
),
7782
)
7883
)
7984

0 commit comments

Comments (0)