
Commit b17cf0b
Fix hstu
1 parent a6a8aed

File tree: 4 files changed, +5 -4 lines

.github/workflows/pr.yaml (+3)

@@ -19,6 +19,9 @@ jobs:
     steps:
       - name: Checkout Tritonbench
         uses: actions/checkout@v3
+        with:
+          # no need to checkout submodules recursively
+          submodules: true
       - name: Tune Nvidia GPU
         run: |
           sudo nvidia-smi -pm 1
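
With actions/checkout, submodules: true checks out only the first level of submodules, while submodules: recursive would also fetch nested ones; the added comment records that a single level is enough for Tritonbench's CI here.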

tritonbench/operators/ragged_attention/hstu.py (+1, -1)

@@ -4,7 +4,7 @@
 
 try:
     # Internal Import
-    from hammer.generative_recommenders.ops.triton.triton_ragged_hstu_attention import (
+    from hammer.oss.generative_recommenders.ops.triton.triton_ragged_hstu_attention import (
         _ragged_hstu_attn_fwd,
         _ragged_hstu_attn_fwd_persistent,
     )
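
For context, this import sits inside a try block so the operator can prefer the internal build of the HSTU kernels and degrade gracefully elsewhere. A minimal sketch of the guarded-import pattern, assuming the except branch falls back to an OSS generative_recommenders checkout (the fallback path is hypothetical, not shown in this diff):

try:
    # Internal import: after this fix, the kernels are looked up under
    # the hammer.oss namespace rather than under hammer directly.
    from hammer.oss.generative_recommenders.ops.triton.triton_ragged_hstu_attention import (
        _ragged_hstu_attn_fwd,
        _ragged_hstu_attn_fwd_persistent,
    )
except ImportError:
    # Hypothetical OSS fallback: import the same kernels from the
    # open-source generative_recommenders package instead.
    from generative_recommenders.ops.triton.triton_ragged_hstu_attention import (
        _ragged_hstu_attn_fwd,
        _ragged_hstu_attn_fwd_persistent,
    )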

tritonbench/operators/sum/operator.py (-2)

@@ -4,8 +4,6 @@
 import os
 from typing import Callable, Generator, List, Optional, Tuple
 
-import matplotlib.pyplot as plt
-
 import torch
 import triton
 import triton.language as tl
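
The matplotlib import appears to have been unused by the sum operator, so dropping it removes an import-time dependency: the operator now loads in environments where matplotlib is not installed.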

tritonbench/utils/path_utils.py (+1, -1)

@@ -3,7 +3,7 @@
 
 from pathlib import Path
 
-REPO_PATH = Path(os.path.abspath(__file__)).parent.parent
+REPO_PATH = Path(os.path.abspath(__file__)).parent.parent.parent
 SUBMODULE_PATH = REPO_PATH.joinpath("submodules")
 
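
The fix follows from where the file lives: path_utils.py sits at tritonbench/utils/path_utils.py, so two .parent hops only reach the tritonbench package directory, not the repository root that contains submodules/. A worked sketch of the parent chain, using a hypothetical checkout location:

from pathlib import Path

# Hypothetical absolute path of this file inside a checkout at
# /home/user/tritonbench; only the last three components are fixed.
f = Path("/home/user/tritonbench/tritonbench/utils/path_utils.py")

print(f.parent)                # /home/user/tritonbench/tritonbench/utils
print(f.parent.parent)         # /home/user/tritonbench/tritonbench  (old REPO_PATH: the package dir, wrong)
print(f.parent.parent.parent)  # /home/user/tritonbench             (new REPO_PATH: the repo root)

# SUBMODULE_PATH now resolves to /home/user/tritonbench/submodules,
# matching the submodules checked out by the CI workflow above.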
