+++
title = "Adaptive Cache Bypass and Insertion for Many-Core Accelerators"
date = 2014-01-01
draft = false

# Authors. Comma separated list, e.g. ["Bob Smith", "David Jones"].
authors = ["Xuhao Chen", "Shengzhao Wu", "Li-Wen Chang", "Wei-Sheng Huang", "Carl Pearson", "Wen-mei Hwu"]

# Publication type.
# Legend:
# 0 = Uncategorized
# 1 = Conference paper
# 2 = Journal article
# 3 = Manuscript
# 4 = Report
# 5 = Book
# 6 = Book section
publication_types = ["1"]

# Publication name and optional abbreviated version.
publication = "Proceedings of International Workshop on Manycore Embedded Systems."
publication_short = ""

# Abstract and optional shortened version.
abstract = "Many-core accelerators, e.g. GPUs, are widely used for accelerating general-purpose compute kernels. With the SIMT execution model, GPUs can hide memory latency through massive multithreading for many regular applications. To support more applications with irregular memory access patterns, cache hierarchy is introduced to GPU architecture to capture input data sharing and mitigate the effect of irregular accesses. However, GPU caches suffer from poor efficiency due to severe contention, which makes it difficult to adopt heuristic management policies, and also limits system performance and energy-efficiency. We propose an adaptive cache management policy specifically for many-core accelerators. The tag array of L2 cache is enhanced with extra bits to track memory access history, and thus the locality information is captured and provided to L1 cache as heuristics to guide its run-time bypass and insertion decisions. By preventing un-reused data from polluting the cache and alleviating contention, cache efficiency is significantly improved. As a result, the system performance is improved by 31% on average for cache sensitive benchmarks, compared to the baseline GPU architecture."
abstract_short = ""

# Is this a selected publication? (true/false)
selected = false

# Projects (optional).
# Associate this publication with one or more of your projects.
# Simply enter your project's folder or file name without extension.
# E.g. projects = ["deep-learning"] references content/project/deep-learning/index.md.
# Otherwise, set projects = [].
projects = []

# Slides (optional).
# Associate this publication with Markdown slides.
# Simply enter your slide deck's filename without extension.
# E.g. slides = "example-slides" references content/slides/example-slides.md.
# Otherwise, set slides = "".
slides = ""

# Tags (optional).
# Set tags = [] for no tags, or use the form tags = ["A Tag", "Another Tag"] for one or more tags.
tags = []

# Does this page require source code highlighting? (true/false)
highlight = true

# Featured image thumbnail (optional)
image_preview = ""

# Links (optional)
url_pdf = "pdf/2014chen.pdf"
url_preprint = ""
url_code = ""
url_dataset = ""
url_project = ""
url_slides = ""
url_video = ""
url_poster = ""
url_source = ""

# Custom links (optional).
# Uncomment line below to enable. For multiple links, use the form [{...}, {...}, {...}].
# url_custom = [{name = "Custom Link", url = "http://example.org"}]
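# As a concrete illustration (hypothetical names and URLs), two custom links
# would be listed together like this, left commented out until needed:
# url_custom = [{name = "ACM DL", url = "http://example.org/paper"}, {name = "Slides", url = "http://example.org/slides.pdf"}]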

# Digital Object Identifier (DOI)
doi = ""

# Does this page contain LaTeX math? (true/false)
math = false

# Featured image
# To use, add an image named featured.jpg/png to your page's folder.
[image]
  # Caption (optional)
  caption = ""

  # Focal point (optional)
  # Options: Smart, Center, TopLeft, Top, TopRight, Left, Right, BottomLeft, Bottom, BottomRight
  focal_point = ""

+++
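
The abstract above describes the mechanism only at a high level. As a rough illustration (not the authors' implementation), the sketch below shows one way per-line reuse history kept alongside the L2 tags could be fed back to the L1 as a bypass/insertion hint; the class names, the two-bit counter, and the thresholds are assumptions made for this sketch.

```cpp
#include <cstdint>
#include <unordered_map>

// Hypothetical sketch: each L2 tag entry carries a small saturating reuse
// counter. When an L1 miss reaches L2, the current counter value is turned
// into a hint that tells the L1 to bypass or to pick an insertion priority.
enum class L1Action { Bypass, InsertLowPriority, InsertHighPriority };

struct L2TagInfo {
    uint8_t reuse = 0;  // 2-bit saturating counter of observed re-references
};

class ReuseHistory {
    std::unordered_map<uint64_t, L2TagInfo> tags_;  // keyed by cache-line address
public:
    // Called on an L2 lookup for a line the L1 missed on.
    L1Action hint(uint64_t line_addr) {
        L2TagInfo &t = tags_[line_addr];
        L1Action a;
        if (t.reuse == 0)      a = L1Action::Bypass;              // no reuse seen: keep it out of L1
        else if (t.reuse == 1) a = L1Action::InsertLowPriority;   // weak reuse: insert near eviction
        else                   a = L1Action::InsertHighPriority;  // strong reuse: protect in L1
        if (t.reuse < 3) ++t.reuse;  // record this access for future decisions
        return a;
    }
};
```

In the hardware described by the abstract the history lives in extra bits of the L2 tag array itself (and is lost when a tag is evicted); the unbounded map above is only to keep the sketch short.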