diff options
| author | Dave Airlie <airlied@redhat.com> | 2025-08-19 06:58:56 +1000 |
|---|---|---|
| committer | Dave Airlie <airlied@redhat.com> | 2025-08-19 07:02:30 +1000 |
| commit | dd489c01c3971778c417630f328460021fc8fc61 (patch) | |
| tree | fd59dc9db7200d8aefc1a6eb9ca0093171681581 /drivers/accel | |
| parent | c17b750b3ad9f45f2b6f7e6f7f4679844244f0b9 (diff) | |
| parent | 0070851095d2954567510777976e40275f81ca1b (diff) | |
| download | tip-dd489c01c3971778c417630f328460021fc8fc61.tar.gz | |
Merge tag 'drm-misc-next-2025-08-14' of https://gitlab.freedesktop.org/drm/misc/kernel into drm-next
drm-misc-next for v6.18:
UAPI Changes:
- Add DRM_IOCTL_GEM_CHANGE_HANDLE for reassigning GEM handles
- Document DRM_MODE_PAGE_FLIP_EVENT
Cross-subsystem Changes:
fbcon:
- Add missing declarations in fbcon.h
Core Changes:
bridge:
- Fix ref counting
panel:
- Replace and remove mipi_dsi_generic_write_{seq/_chatty}()
sched:
- Fixes
Rust:
- Drop Opaque<> from ioctl arguments
Driver Changes:
amdxdna:
- Support buffers allocated by user space
- Streamline PM interfaces
- Fixes
bridge:
- cdns-dsi: Various improvements to mode setting
- Support Solomon SSD2825 plus DT bindings
- Support Waveshare DSI2DPI plus DT bindings
gud:
- Fixes
ivpu:
- Fixes
nouveau:
- Use GSP firmware by default
- Fixes
panel:
- panel-edp: Support mt8189 Chromebooks; Support BOE NV140WUM-N64;
Support SHP LQ134Z1; Fixes
- panel-simple: Support Olimex LCD-OLinuXino-5CTS plus DT bindings
- Support Samsung AMS561RA01
- Support Hydis HV101HD1 plus DT bindings
panthor:
- Print task/pid on errors
- Fixes
renesas:
- Convert to RUNTIME_PM_OPS
repaper:
- Use shadow-plane helpers
rocket:
- Add driver for Rockchip NPU plus DT bindings
sharp-memory:
- Use shadow-plane helpers
simpledrm:
- Use of_reserved_mem_region_to_resource() helper
tidss:
- Use crtc_ fields for programming display mode
- Remove other drivers from aperture
v3d:
- Support querying number of GPU resets for KHR_robustness
vmwgfx:
- Fixes
Signed-off-by: Dave Airlie <airlied@redhat.com>
From: Thomas Zimmermann <tzimmermann@suse.de>
Link: https://lore.kernel.org/r/20250814072454.GA18104@linux.fritz.box
Diffstat (limited to 'drivers/accel')
27 files changed, 6370 insertions, 166 deletions
diff --git a/drivers/accel/Kconfig b/drivers/accel/Kconfig index 5b9490367a39fd..bb01cebc42bf16 100644 --- a/drivers/accel/Kconfig +++ b/drivers/accel/Kconfig @@ -28,5 +28,6 @@ source "drivers/accel/amdxdna/Kconfig" source "drivers/accel/habanalabs/Kconfig" source "drivers/accel/ivpu/Kconfig" source "drivers/accel/qaic/Kconfig" +source "drivers/accel/rocket/Kconfig" endif diff --git a/drivers/accel/Makefile b/drivers/accel/Makefile index a301fb6089d4c5..ffc3fa58866616 100644 --- a/drivers/accel/Makefile +++ b/drivers/accel/Makefile @@ -4,3 +4,4 @@ obj-$(CONFIG_DRM_ACCEL_AMDXDNA) += amdxdna/ obj-$(CONFIG_DRM_ACCEL_HABANALABS) += habanalabs/ obj-$(CONFIG_DRM_ACCEL_IVPU) += ivpu/ obj-$(CONFIG_DRM_ACCEL_QAIC) += qaic/ +obj-$(CONFIG_DRM_ACCEL_ROCKET) += rocket/
\ No newline at end of file diff --git a/drivers/accel/amdxdna/Makefile b/drivers/accel/amdxdna/Makefile index 0e9adf6890a015..6797dac65efaab 100644 --- a/drivers/accel/amdxdna/Makefile +++ b/drivers/accel/amdxdna/Makefile @@ -15,6 +15,7 @@ amdxdna-y := \ amdxdna_mailbox_helper.o \ amdxdna_pci_drv.o \ amdxdna_sysfs.o \ + amdxdna_ubuf.o \ npu1_regs.o \ npu2_regs.o \ npu4_regs.o \ diff --git a/drivers/accel/amdxdna/aie2_ctx.c b/drivers/accel/amdxdna/aie2_ctx.c index 2cff5419bd2fac..910ffb7051f448 100644 --- a/drivers/accel/amdxdna/aie2_ctx.c +++ b/drivers/accel/amdxdna/aie2_ctx.c @@ -46,6 +46,17 @@ static void aie2_job_put(struct amdxdna_sched_job *job) kref_put(&job->refcnt, aie2_job_release); } +static void aie2_hwctx_status_shift_stop(struct amdxdna_hwctx *hwctx) +{ + hwctx->old_status = hwctx->status; + hwctx->status = HWCTX_STAT_STOP; +} + +static void aie2_hwctx_status_restore(struct amdxdna_hwctx *hwctx) +{ + hwctx->status = hwctx->old_status; +} + /* The bad_job is used in aie2_sched_job_timedout, otherwise, set it to NULL */ static void aie2_hwctx_stop(struct amdxdna_dev *xdna, struct amdxdna_hwctx *hwctx, struct drm_sched_job *bad_job) @@ -89,25 +100,6 @@ out: return ret; } -void aie2_restart_ctx(struct amdxdna_client *client) -{ - struct amdxdna_dev *xdna = client->xdna; - struct amdxdna_hwctx *hwctx; - unsigned long hwctx_id; - - drm_WARN_ON(&xdna->ddev, !mutex_is_locked(&xdna->dev_lock)); - mutex_lock(&client->hwctx_lock); - amdxdna_for_each_hwctx(client, hwctx_id, hwctx) { - if (hwctx->status != HWCTX_STAT_STOP) - continue; - - hwctx->status = hwctx->old_status; - XDNA_DBG(xdna, "Resetting %s", hwctx->name); - aie2_hwctx_restart(xdna, hwctx); - } - mutex_unlock(&client->hwctx_lock); -} - static struct dma_fence *aie2_cmd_get_out_fence(struct amdxdna_hwctx *hwctx, u64 seq) { struct dma_fence *fence, *out_fence = NULL; @@ -141,9 +133,11 @@ static void aie2_hwctx_wait_for_idle(struct amdxdna_hwctx *hwctx) dma_fence_put(fence); } -void 
aie2_hwctx_suspend(struct amdxdna_hwctx *hwctx) +void aie2_hwctx_suspend(struct amdxdna_client *client) { - struct amdxdna_dev *xdna = hwctx->client->xdna; + struct amdxdna_dev *xdna = client->xdna; + struct amdxdna_hwctx *hwctx; + unsigned long hwctx_id; /* * Command timeout is unlikely. But if it happens, it doesn't @@ -151,15 +145,19 @@ void aie2_hwctx_suspend(struct amdxdna_hwctx *hwctx) * and abort all commands. */ drm_WARN_ON(&xdna->ddev, !mutex_is_locked(&xdna->dev_lock)); - aie2_hwctx_wait_for_idle(hwctx); - aie2_hwctx_stop(xdna, hwctx, NULL); - hwctx->old_status = hwctx->status; - hwctx->status = HWCTX_STAT_STOP; + guard(mutex)(&client->hwctx_lock); + amdxdna_for_each_hwctx(client, hwctx_id, hwctx) { + aie2_hwctx_wait_for_idle(hwctx); + aie2_hwctx_stop(xdna, hwctx, NULL); + aie2_hwctx_status_shift_stop(hwctx); + } } -void aie2_hwctx_resume(struct amdxdna_hwctx *hwctx) +void aie2_hwctx_resume(struct amdxdna_client *client) { - struct amdxdna_dev *xdna = hwctx->client->xdna; + struct amdxdna_dev *xdna = client->xdna; + struct amdxdna_hwctx *hwctx; + unsigned long hwctx_id; /* * The resume path cannot guarantee that mailbox channel can be @@ -167,8 +165,11 @@ void aie2_hwctx_resume(struct amdxdna_hwctx *hwctx) * mailbox channel, error will return. 
*/ drm_WARN_ON(&xdna->ddev, !mutex_is_locked(&xdna->dev_lock)); - hwctx->status = hwctx->old_status; - aie2_hwctx_restart(xdna, hwctx); + guard(mutex)(&client->hwctx_lock); + amdxdna_for_each_hwctx(client, hwctx_id, hwctx) { + aie2_hwctx_status_restore(hwctx); + aie2_hwctx_restart(xdna, hwctx); + } } static void diff --git a/drivers/accel/amdxdna/aie2_pci.c b/drivers/accel/amdxdna/aie2_pci.c index c6cf7068d23c03..6fc3191c3097b1 100644 --- a/drivers/accel/amdxdna/aie2_pci.c +++ b/drivers/accel/amdxdna/aie2_pci.c @@ -440,6 +440,37 @@ disable_dev: return ret; } +static int aie2_hw_suspend(struct amdxdna_dev *xdna) +{ + struct amdxdna_client *client; + + guard(mutex)(&xdna->dev_lock); + list_for_each_entry(client, &xdna->client_list, node) + aie2_hwctx_suspend(client); + + aie2_hw_stop(xdna); + + return 0; +} + +static int aie2_hw_resume(struct amdxdna_dev *xdna) +{ + struct amdxdna_client *client; + int ret; + + guard(mutex)(&xdna->dev_lock); + ret = aie2_hw_start(xdna); + if (ret) { + XDNA_ERR(xdna, "Start hardware failed, %d", ret); + return ret; + } + + list_for_each_entry(client, &xdna->client_list, node) + aie2_hwctx_resume(client); + + return ret; +} + static int aie2_init(struct amdxdna_dev *xdna) { struct pci_dev *pdev = to_pci_dev(xdna->ddev.dev); @@ -520,14 +551,14 @@ static int aie2_init(struct amdxdna_dev *xdna) if (!ndev->psp_hdl) { XDNA_ERR(xdna, "failed to create psp"); ret = -ENOMEM; - goto free_irq; + goto release_fw; } xdna->dev_handle = ndev; ret = aie2_hw_start(xdna); if (ret) { XDNA_ERR(xdna, "start npu failed, ret %d", ret); - goto free_irq; + goto release_fw; } ret = aie2_mgmt_fw_query(ndev); @@ -578,8 +609,6 @@ async_event_free: aie2_error_async_events_free(ndev); stop_hw: aie2_hw_stop(xdna); -free_irq: - pci_free_irq_vectors(pdev); release_fw: release_firmware(fw); @@ -588,12 +617,10 @@ release_fw: static void aie2_fini(struct amdxdna_dev *xdna) { - struct pci_dev *pdev = to_pci_dev(xdna->ddev.dev); struct amdxdna_dev_hdl *ndev = 
xdna->dev_handle; aie2_hw_stop(xdna); aie2_error_async_events_free(ndev); - pci_free_irq_vectors(pdev); } static int aie2_get_aie_status(struct amdxdna_client *client, @@ -905,8 +932,8 @@ static int aie2_set_state(struct amdxdna_client *client, const struct amdxdna_dev_ops aie2_ops = { .init = aie2_init, .fini = aie2_fini, - .resume = aie2_hw_start, - .suspend = aie2_hw_stop, + .resume = aie2_hw_resume, + .suspend = aie2_hw_suspend, .get_aie_info = aie2_get_info, .set_aie_state = aie2_set_state, .hwctx_init = aie2_hwctx_init, @@ -914,6 +941,4 @@ const struct amdxdna_dev_ops aie2_ops = { .hwctx_config = aie2_hwctx_config, .cmd_submit = aie2_cmd_submit, .hmm_invalidate = aie2_hmm_invalidate, - .hwctx_suspend = aie2_hwctx_suspend, - .hwctx_resume = aie2_hwctx_resume, }; diff --git a/drivers/accel/amdxdna/aie2_pci.h b/drivers/accel/amdxdna/aie2_pci.h index 385914840eaa61..488d8ee568eb1a 100644 --- a/drivers/accel/amdxdna/aie2_pci.h +++ b/drivers/accel/amdxdna/aie2_pci.h @@ -288,10 +288,9 @@ int aie2_sync_bo(struct amdxdna_hwctx *hwctx, struct amdxdna_sched_job *job, int aie2_hwctx_init(struct amdxdna_hwctx *hwctx); void aie2_hwctx_fini(struct amdxdna_hwctx *hwctx); int aie2_hwctx_config(struct amdxdna_hwctx *hwctx, u32 type, u64 value, void *buf, u32 size); -void aie2_hwctx_suspend(struct amdxdna_hwctx *hwctx); -void aie2_hwctx_resume(struct amdxdna_hwctx *hwctx); +void aie2_hwctx_suspend(struct amdxdna_client *client); +void aie2_hwctx_resume(struct amdxdna_client *client); int aie2_cmd_submit(struct amdxdna_hwctx *hwctx, struct amdxdna_sched_job *job, u64 *seq); void aie2_hmm_invalidate(struct amdxdna_gem_obj *abo, unsigned long cur_seq); -void aie2_restart_ctx(struct amdxdna_client *client); #endif /* _AIE2_PCI_H_ */ diff --git a/drivers/accel/amdxdna/amdxdna_ctx.c b/drivers/accel/amdxdna/amdxdna_ctx.c index be073224bd6933..b47a7f8e90170a 100644 --- a/drivers/accel/amdxdna/amdxdna_ctx.c +++ b/drivers/accel/amdxdna/amdxdna_ctx.c @@ -60,32 +60,6 @@ static struct 
dma_fence *amdxdna_fence_create(struct amdxdna_hwctx *hwctx) return &fence->base; } -void amdxdna_hwctx_suspend(struct amdxdna_client *client) -{ - struct amdxdna_dev *xdna = client->xdna; - struct amdxdna_hwctx *hwctx; - unsigned long hwctx_id; - - drm_WARN_ON(&xdna->ddev, !mutex_is_locked(&xdna->dev_lock)); - mutex_lock(&client->hwctx_lock); - amdxdna_for_each_hwctx(client, hwctx_id, hwctx) - xdna->dev_info->ops->hwctx_suspend(hwctx); - mutex_unlock(&client->hwctx_lock); -} - -void amdxdna_hwctx_resume(struct amdxdna_client *client) -{ - struct amdxdna_dev *xdna = client->xdna; - struct amdxdna_hwctx *hwctx; - unsigned long hwctx_id; - - drm_WARN_ON(&xdna->ddev, !mutex_is_locked(&xdna->dev_lock)); - mutex_lock(&client->hwctx_lock); - amdxdna_for_each_hwctx(client, hwctx_id, hwctx) - xdna->dev_info->ops->hwctx_resume(hwctx); - mutex_unlock(&client->hwctx_lock); -} - static void amdxdna_hwctx_destroy_rcu(struct amdxdna_hwctx *hwctx, struct srcu_struct *ss) { diff --git a/drivers/accel/amdxdna/amdxdna_ctx.h b/drivers/accel/amdxdna/amdxdna_ctx.h index f0a4a8586d858d..c652229547a3c3 100644 --- a/drivers/accel/amdxdna/amdxdna_ctx.h +++ b/drivers/accel/amdxdna/amdxdna_ctx.h @@ -147,8 +147,6 @@ static inline u32 amdxdna_hwctx_col_map(struct amdxdna_hwctx *hwctx) void amdxdna_sched_job_cleanup(struct amdxdna_sched_job *job); void amdxdna_hwctx_remove_all(struct amdxdna_client *client); -void amdxdna_hwctx_suspend(struct amdxdna_client *client); -void amdxdna_hwctx_resume(struct amdxdna_client *client); int amdxdna_cmd_submit(struct amdxdna_client *client, u32 cmd_bo_hdls, u32 *arg_bo_hdls, u32 arg_bo_cnt, diff --git a/drivers/accel/amdxdna/amdxdna_gem.c b/drivers/accel/amdxdna/amdxdna_gem.c index 0f85a010517884..d407a36eb41265 100644 --- a/drivers/accel/amdxdna/amdxdna_gem.c +++ b/drivers/accel/amdxdna/amdxdna_gem.c @@ -18,6 +18,7 @@ #include "amdxdna_ctx.h" #include "amdxdna_gem.h" #include "amdxdna_pci_drv.h" +#include "amdxdna_ubuf.h" #define XDNA_MAX_CMD_BO_SIZE 
SZ_32K @@ -296,7 +297,7 @@ static int amdxdna_insert_pages(struct amdxdna_gem_obj *abo, vma->vm_private_data = NULL; vma->vm_ops = NULL; - ret = dma_buf_mmap(to_gobj(abo)->dma_buf, vma, 0); + ret = dma_buf_mmap(abo->dma_buf, vma, 0); if (ret) { XDNA_ERR(xdna, "Failed to mmap dma buf %d", ret); return ret; @@ -391,10 +392,47 @@ static const struct dma_buf_ops amdxdna_dmabuf_ops = { .vunmap = drm_gem_dmabuf_vunmap, }; +static int amdxdna_gem_obj_vmap(struct drm_gem_object *obj, struct iosys_map *map) +{ + struct amdxdna_gem_obj *abo = to_xdna_obj(obj); + + iosys_map_clear(map); + + dma_resv_assert_held(obj->resv); + + if (is_import_bo(abo)) + dma_buf_vmap(abo->dma_buf, map); + else + drm_gem_shmem_object_vmap(obj, map); + + if (!map->vaddr) + return -ENOMEM; + + return 0; +} + +static void amdxdna_gem_obj_vunmap(struct drm_gem_object *obj, struct iosys_map *map) +{ + struct amdxdna_gem_obj *abo = to_xdna_obj(obj); + + dma_resv_assert_held(obj->resv); + + if (is_import_bo(abo)) + dma_buf_vunmap(abo->dma_buf, map); + else + drm_gem_shmem_object_vunmap(obj, map); +} + static struct dma_buf *amdxdna_gem_prime_export(struct drm_gem_object *gobj, int flags) { + struct amdxdna_gem_obj *abo = to_xdna_obj(gobj); DEFINE_DMA_BUF_EXPORT_INFO(exp_info); + if (abo->dma_buf) { + get_dma_buf(abo->dma_buf); + return abo->dma_buf; + } + exp_info.ops = &amdxdna_dmabuf_ops; exp_info.size = gobj->size; exp_info.flags = flags; @@ -451,8 +489,8 @@ static const struct drm_gem_object_funcs amdxdna_gem_shmem_funcs = { .pin = drm_gem_shmem_object_pin, .unpin = drm_gem_shmem_object_unpin, .get_sg_table = drm_gem_shmem_object_get_sg_table, - .vmap = drm_gem_shmem_object_vmap, - .vunmap = drm_gem_shmem_object_vunmap, + .vmap = amdxdna_gem_obj_vmap, + .vunmap = amdxdna_gem_obj_vunmap, .mmap = amdxdna_gem_obj_mmap, .vm_ops = &drm_gem_shmem_vm_ops, .export = amdxdna_gem_prime_export, @@ -494,6 +532,68 @@ amdxdna_gem_create_object_cb(struct drm_device *dev, size_t size) return to_gobj(abo); } +static 
struct amdxdna_gem_obj * +amdxdna_gem_create_shmem_object(struct drm_device *dev, size_t size) +{ + struct drm_gem_shmem_object *shmem = drm_gem_shmem_create(dev, size); + + if (IS_ERR(shmem)) + return ERR_CAST(shmem); + + shmem->map_wc = false; + return to_xdna_obj(&shmem->base); +} + +static struct amdxdna_gem_obj * +amdxdna_gem_create_ubuf_object(struct drm_device *dev, struct amdxdna_drm_create_bo *args) +{ + struct amdxdna_dev *xdna = to_xdna_dev(dev); + enum amdxdna_ubuf_flag flags = 0; + struct amdxdna_drm_va_tbl va_tbl; + struct drm_gem_object *gobj; + struct dma_buf *dma_buf; + + if (copy_from_user(&va_tbl, u64_to_user_ptr(args->vaddr), sizeof(va_tbl))) { + XDNA_DBG(xdna, "Access va table failed"); + return ERR_PTR(-EINVAL); + } + + if (va_tbl.num_entries) { + if (args->type == AMDXDNA_BO_CMD) + flags |= AMDXDNA_UBUF_FLAG_MAP_DMA; + + dma_buf = amdxdna_get_ubuf(dev, flags, va_tbl.num_entries, + u64_to_user_ptr(args->vaddr + sizeof(va_tbl))); + } else { + dma_buf = dma_buf_get(va_tbl.dmabuf_fd); + } + + if (IS_ERR(dma_buf)) + return ERR_CAST(dma_buf); + + gobj = amdxdna_gem_prime_import(dev, dma_buf); + if (IS_ERR(gobj)) { + dma_buf_put(dma_buf); + return ERR_CAST(gobj); + } + + dma_buf_put(dma_buf); + + return to_xdna_obj(gobj); +} + +static struct amdxdna_gem_obj * +amdxdna_gem_create_object(struct drm_device *dev, + struct amdxdna_drm_create_bo *args) +{ + size_t aligned_sz = PAGE_ALIGN(args->size); + + if (args->vaddr) + return amdxdna_gem_create_ubuf_object(dev, args); + + return amdxdna_gem_create_shmem_object(dev, aligned_sz); +} + struct drm_gem_object * amdxdna_gem_prime_import(struct drm_device *dev, struct dma_buf *dma_buf) { @@ -545,16 +645,12 @@ amdxdna_drm_alloc_shmem(struct drm_device *dev, struct drm_file *filp) { struct amdxdna_client *client = filp->driver_priv; - struct drm_gem_shmem_object *shmem; struct amdxdna_gem_obj *abo; - shmem = drm_gem_shmem_create(dev, args->size); - if (IS_ERR(shmem)) - return ERR_CAST(shmem); - - shmem->map_wc 
= false; + abo = amdxdna_gem_create_object(dev, args); + if (IS_ERR(abo)) + return ERR_CAST(abo); - abo = to_xdna_obj(&shmem->base); abo->client = client; abo->type = AMDXDNA_BO_SHMEM; @@ -569,7 +665,6 @@ amdxdna_drm_create_dev_heap(struct drm_device *dev, struct amdxdna_client *client = filp->driver_priv; struct iosys_map map = IOSYS_MAP_INIT_VADDR(NULL); struct amdxdna_dev *xdna = to_xdna_dev(dev); - struct drm_gem_shmem_object *shmem; struct amdxdna_gem_obj *abo; int ret; @@ -586,14 +681,12 @@ amdxdna_drm_create_dev_heap(struct drm_device *dev, goto mm_unlock; } - shmem = drm_gem_shmem_create(dev, args->size); - if (IS_ERR(shmem)) { - ret = PTR_ERR(shmem); + abo = amdxdna_gem_create_object(dev, args); + if (IS_ERR(abo)) { + ret = PTR_ERR(abo); goto mm_unlock; } - shmem->map_wc = false; - abo = to_xdna_obj(&shmem->base); abo->type = AMDXDNA_BO_DEV_HEAP; abo->client = client; abo->mem.dev_addr = client->xdna->dev_info->dev_mem_base; @@ -657,7 +750,6 @@ amdxdna_drm_create_cmd_bo(struct drm_device *dev, { struct iosys_map map = IOSYS_MAP_INIT_VADDR(NULL); struct amdxdna_dev *xdna = to_xdna_dev(dev); - struct drm_gem_shmem_object *shmem; struct amdxdna_gem_obj *abo; int ret; @@ -671,12 +763,9 @@ amdxdna_drm_create_cmd_bo(struct drm_device *dev, return ERR_PTR(-EINVAL); } - shmem = drm_gem_shmem_create(dev, args->size); - if (IS_ERR(shmem)) - return ERR_CAST(shmem); - - shmem->map_wc = false; - abo = to_xdna_obj(&shmem->base); + abo = amdxdna_gem_create_object(dev, args); + if (IS_ERR(abo)) + return ERR_CAST(abo); abo->type = AMDXDNA_BO_CMD; abo->client = filp->driver_priv; @@ -691,7 +780,7 @@ amdxdna_drm_create_cmd_bo(struct drm_device *dev, return abo; release_obj: - drm_gem_shmem_free(shmem); + drm_gem_object_put(to_gobj(abo)); return ERR_PTR(ret); } @@ -702,7 +791,7 @@ int amdxdna_drm_create_bo_ioctl(struct drm_device *dev, void *data, struct drm_f struct amdxdna_gem_obj *abo; int ret; - if (args->flags || args->vaddr || !args->size) + if (args->flags) return 
-EINVAL; XDNA_DBG(xdna, "BO arg type %d vaddr 0x%llx size 0x%llx flags 0x%llx", diff --git a/drivers/accel/amdxdna/amdxdna_pci_drv.c b/drivers/accel/amdxdna/amdxdna_pci_drv.c index f2bf1d374cc70a..fbca94183f963a 100644 --- a/drivers/accel/amdxdna/amdxdna_pci_drv.c +++ b/drivers/accel/amdxdna/amdxdna_pci_drv.c @@ -343,89 +343,29 @@ static void amdxdna_remove(struct pci_dev *pdev) mutex_unlock(&xdna->dev_lock); } -static int amdxdna_dev_suspend_nolock(struct amdxdna_dev *xdna) -{ - if (xdna->dev_info->ops->suspend) - xdna->dev_info->ops->suspend(xdna); - - return 0; -} - -static int amdxdna_dev_resume_nolock(struct amdxdna_dev *xdna) -{ - if (xdna->dev_info->ops->resume) - return xdna->dev_info->ops->resume(xdna); - - return 0; -} - static int amdxdna_pmops_suspend(struct device *dev) { struct amdxdna_dev *xdna = pci_get_drvdata(to_pci_dev(dev)); - struct amdxdna_client *client; - - mutex_lock(&xdna->dev_lock); - list_for_each_entry(client, &xdna->client_list, node) - amdxdna_hwctx_suspend(client); - amdxdna_dev_suspend_nolock(xdna); - mutex_unlock(&xdna->dev_lock); + if (!xdna->dev_info->ops->suspend) + return -EOPNOTSUPP; - return 0; + return xdna->dev_info->ops->suspend(xdna); } static int amdxdna_pmops_resume(struct device *dev) { struct amdxdna_dev *xdna = pci_get_drvdata(to_pci_dev(dev)); - struct amdxdna_client *client; - int ret; - - XDNA_INFO(xdna, "firmware resuming..."); - mutex_lock(&xdna->dev_lock); - ret = amdxdna_dev_resume_nolock(xdna); - if (ret) { - XDNA_ERR(xdna, "resume NPU firmware failed"); - mutex_unlock(&xdna->dev_lock); - return ret; - } - XDNA_INFO(xdna, "hardware context resuming..."); - list_for_each_entry(client, &xdna->client_list, node) - amdxdna_hwctx_resume(client); - mutex_unlock(&xdna->dev_lock); - - return 0; -} - -static int amdxdna_rpmops_suspend(struct device *dev) -{ - struct amdxdna_dev *xdna = pci_get_drvdata(to_pci_dev(dev)); - int ret; - - mutex_lock(&xdna->dev_lock); - ret = amdxdna_dev_suspend_nolock(xdna); - 
mutex_unlock(&xdna->dev_lock); - - XDNA_DBG(xdna, "Runtime suspend done ret: %d", ret); - return ret; -} - -static int amdxdna_rpmops_resume(struct device *dev) -{ - struct amdxdna_dev *xdna = pci_get_drvdata(to_pci_dev(dev)); - int ret; - - mutex_lock(&xdna->dev_lock); - ret = amdxdna_dev_resume_nolock(xdna); - mutex_unlock(&xdna->dev_lock); + if (!xdna->dev_info->ops->resume) + return -EOPNOTSUPP; - XDNA_DBG(xdna, "Runtime resume done ret: %d", ret); - return ret; + return xdna->dev_info->ops->resume(xdna); } static const struct dev_pm_ops amdxdna_pm_ops = { SYSTEM_SLEEP_PM_OPS(amdxdna_pmops_suspend, amdxdna_pmops_resume) - RUNTIME_PM_OPS(amdxdna_rpmops_suspend, amdxdna_rpmops_resume, NULL) + RUNTIME_PM_OPS(amdxdna_pmops_suspend, amdxdna_pmops_resume, NULL) }; static struct pci_driver amdxdna_pci_driver = { diff --git a/drivers/accel/amdxdna/amdxdna_pci_drv.h b/drivers/accel/amdxdna/amdxdna_pci_drv.h index ab79600911aaa3..40bbb3c063203a 100644 --- a/drivers/accel/amdxdna/amdxdna_pci_drv.h +++ b/drivers/accel/amdxdna/amdxdna_pci_drv.h @@ -50,13 +50,11 @@ struct amdxdna_dev_ops { int (*init)(struct amdxdna_dev *xdna); void (*fini)(struct amdxdna_dev *xdna); int (*resume)(struct amdxdna_dev *xdna); - void (*suspend)(struct amdxdna_dev *xdna); + int (*suspend)(struct amdxdna_dev *xdna); int (*hwctx_init)(struct amdxdna_hwctx *hwctx); void (*hwctx_fini)(struct amdxdna_hwctx *hwctx); int (*hwctx_config)(struct amdxdna_hwctx *hwctx, u32 type, u64 value, void *buf, u32 size); void (*hmm_invalidate)(struct amdxdna_gem_obj *abo, unsigned long cur_seq); - void (*hwctx_suspend)(struct amdxdna_hwctx *hwctx); - void (*hwctx_resume)(struct amdxdna_hwctx *hwctx); int (*cmd_submit)(struct amdxdna_hwctx *hwctx, struct amdxdna_sched_job *job, u64 *seq); int (*get_aie_info)(struct amdxdna_client *client, struct amdxdna_drm_get_info *args); int (*set_aie_state)(struct amdxdna_client *client, struct amdxdna_drm_set_state *args); diff --git a/drivers/accel/amdxdna/amdxdna_ubuf.c 
b/drivers/accel/amdxdna/amdxdna_ubuf.c new file mode 100644 index 00000000000000..077b2261cf2a04 --- /dev/null +++ b/drivers/accel/amdxdna/amdxdna_ubuf.c @@ -0,0 +1,232 @@ +// SPDX-License-Identifier: GPL-2.0 +/* + * Copyright (C) 2025, Advanced Micro Devices, Inc. + */ + +#include <drm/amdxdna_accel.h> +#include <drm/drm_device.h> +#include <drm/drm_print.h> +#include <linux/dma-buf.h> +#include <linux/pagemap.h> +#include <linux/vmalloc.h> + +#include "amdxdna_pci_drv.h" +#include "amdxdna_ubuf.h" + +struct amdxdna_ubuf_priv { + struct page **pages; + u64 nr_pages; + enum amdxdna_ubuf_flag flags; + struct mm_struct *mm; +}; + +static struct sg_table *amdxdna_ubuf_map(struct dma_buf_attachment *attach, + enum dma_data_direction direction) +{ + struct amdxdna_ubuf_priv *ubuf = attach->dmabuf->priv; + struct sg_table *sg; + int ret; + + sg = kzalloc(sizeof(*sg), GFP_KERNEL); + if (!sg) + return ERR_PTR(-ENOMEM); + + ret = sg_alloc_table_from_pages(sg, ubuf->pages, ubuf->nr_pages, 0, + ubuf->nr_pages << PAGE_SHIFT, GFP_KERNEL); + if (ret) + return ERR_PTR(ret); + + if (ubuf->flags & AMDXDNA_UBUF_FLAG_MAP_DMA) { + ret = dma_map_sgtable(attach->dev, sg, direction, 0); + if (ret) + return ERR_PTR(ret); + } + + return sg; +} + +static void amdxdna_ubuf_unmap(struct dma_buf_attachment *attach, + struct sg_table *sg, + enum dma_data_direction direction) +{ + struct amdxdna_ubuf_priv *ubuf = attach->dmabuf->priv; + + if (ubuf->flags & AMDXDNA_UBUF_FLAG_MAP_DMA) + dma_unmap_sgtable(attach->dev, sg, direction, 0); + + sg_free_table(sg); + kfree(sg); +} + +static void amdxdna_ubuf_release(struct dma_buf *dbuf) +{ + struct amdxdna_ubuf_priv *ubuf = dbuf->priv; + + unpin_user_pages(ubuf->pages, ubuf->nr_pages); + kvfree(ubuf->pages); + atomic64_sub(ubuf->nr_pages, &ubuf->mm->pinned_vm); + mmdrop(ubuf->mm); + kfree(ubuf); +} + +static vm_fault_t amdxdna_ubuf_vm_fault(struct vm_fault *vmf) +{ + struct vm_area_struct *vma = vmf->vma; + struct amdxdna_ubuf_priv *ubuf; + unsigned 
long pfn; + pgoff_t pgoff; + + ubuf = vma->vm_private_data; + pgoff = (vmf->address - vma->vm_start) >> PAGE_SHIFT; + + pfn = page_to_pfn(ubuf->pages[pgoff]); + return vmf_insert_pfn(vma, vmf->address, pfn); +} + +static const struct vm_operations_struct amdxdna_ubuf_vm_ops = { + .fault = amdxdna_ubuf_vm_fault, +}; + +static int amdxdna_ubuf_mmap(struct dma_buf *dbuf, struct vm_area_struct *vma) +{ + struct amdxdna_ubuf_priv *ubuf = dbuf->priv; + + vma->vm_ops = &amdxdna_ubuf_vm_ops; + vma->vm_private_data = ubuf; + vm_flags_set(vma, VM_PFNMAP | VM_DONTEXPAND | VM_DONTDUMP); + + return 0; +} + +static int amdxdna_ubuf_vmap(struct dma_buf *dbuf, struct iosys_map *map) +{ + struct amdxdna_ubuf_priv *ubuf = dbuf->priv; + void *kva; + + kva = vmap(ubuf->pages, ubuf->nr_pages, VM_MAP, PAGE_KERNEL); + if (!kva) + return -EINVAL; + + iosys_map_set_vaddr(map, kva); + return 0; +} + +static void amdxdna_ubuf_vunmap(struct dma_buf *dbuf, struct iosys_map *map) +{ + vunmap(map->vaddr); +} + +static const struct dma_buf_ops amdxdna_ubuf_dmabuf_ops = { + .map_dma_buf = amdxdna_ubuf_map, + .unmap_dma_buf = amdxdna_ubuf_unmap, + .release = amdxdna_ubuf_release, + .mmap = amdxdna_ubuf_mmap, + .vmap = amdxdna_ubuf_vmap, + .vunmap = amdxdna_ubuf_vunmap, +}; + +struct dma_buf *amdxdna_get_ubuf(struct drm_device *dev, + enum amdxdna_ubuf_flag flags, + u32 num_entries, void __user *va_entries) +{ + struct amdxdna_dev *xdna = to_xdna_dev(dev); + unsigned long lock_limit, new_pinned; + struct amdxdna_drm_va_entry *va_ent; + struct amdxdna_ubuf_priv *ubuf; + u32 npages, start = 0; + struct dma_buf *dbuf; + int i, ret; + DEFINE_DMA_BUF_EXPORT_INFO(exp_info); + + if (!can_do_mlock()) + return ERR_PTR(-EPERM); + + ubuf = kzalloc(sizeof(*ubuf), GFP_KERNEL); + if (!ubuf) + return ERR_PTR(-ENOMEM); + + ubuf->flags = flags; + ubuf->mm = current->mm; + mmgrab(ubuf->mm); + + va_ent = kvcalloc(num_entries, sizeof(*va_ent), GFP_KERNEL); + if (!va_ent) { + ret = -ENOMEM; + goto free_ubuf; + } + + if 
(copy_from_user(va_ent, va_entries, sizeof(*va_ent) * num_entries)) { + XDNA_DBG(xdna, "Access va entries failed"); + ret = -EINVAL; + goto free_ent; + } + + for (i = 0, exp_info.size = 0; i < num_entries; i++) { + if (!IS_ALIGNED(va_ent[i].vaddr, PAGE_SIZE) || + !IS_ALIGNED(va_ent[i].len, PAGE_SIZE)) { + XDNA_ERR(xdna, "Invalid address or len %llx, %llx", + va_ent[i].vaddr, va_ent[i].len); + ret = -EINVAL; + goto free_ent; + } + + exp_info.size += va_ent[i].len; + } + + ubuf->nr_pages = exp_info.size >> PAGE_SHIFT; + lock_limit = rlimit(RLIMIT_MEMLOCK) >> PAGE_SHIFT; + new_pinned = atomic64_add_return(ubuf->nr_pages, &ubuf->mm->pinned_vm); + if (new_pinned > lock_limit && !capable(CAP_IPC_LOCK)) { + XDNA_DBG(xdna, "New pin %ld, limit %ld, cap %d", + new_pinned, lock_limit, capable(CAP_IPC_LOCK)); + ret = -ENOMEM; + goto sub_pin_cnt; + } + + ubuf->pages = kvmalloc_array(ubuf->nr_pages, sizeof(*ubuf->pages), GFP_KERNEL); + if (!ubuf->pages) { + ret = -ENOMEM; + goto sub_pin_cnt; + } + + for (i = 0; i < num_entries; i++) { + npages = va_ent[i].len >> PAGE_SHIFT; + + ret = pin_user_pages_fast(va_ent[i].vaddr, npages, + FOLL_WRITE | FOLL_LONGTERM, + &ubuf->pages[start]); + if (ret < 0 || ret != npages) { + ret = -ENOMEM; + XDNA_ERR(xdna, "Failed to pin pages ret %d", ret); + goto destroy_pages; + } + + start += ret; + } + + exp_info.ops = &amdxdna_ubuf_dmabuf_ops; + exp_info.priv = ubuf; + exp_info.flags = O_RDWR | O_CLOEXEC; + + dbuf = dma_buf_export(&exp_info); + if (IS_ERR(dbuf)) { + ret = PTR_ERR(dbuf); + goto destroy_pages; + } + kvfree(va_ent); + + return dbuf; + +destroy_pages: + if (start) + unpin_user_pages(ubuf->pages, start); + kvfree(ubuf->pages); +sub_pin_cnt: + atomic64_sub(ubuf->nr_pages, &ubuf->mm->pinned_vm); +free_ent: + kvfree(va_ent); +free_ubuf: + mmdrop(ubuf->mm); + kfree(ubuf); + return ERR_PTR(ret); +} diff --git a/drivers/accel/amdxdna/amdxdna_ubuf.h b/drivers/accel/amdxdna/amdxdna_ubuf.h new file mode 100644 index 
00000000000000..e5cb3bdb3ec90d --- /dev/null +++ b/drivers/accel/amdxdna/amdxdna_ubuf.h @@ -0,0 +1,19 @@ +/* SPDX-License-Identifier: GPL-2.0 */ +/* + * Copyright (C) 2025, Advanced Micro Devices, Inc. + */ +#ifndef _AMDXDNA_UBUF_H_ +#define _AMDXDNA_UBUF_H_ + +#include <drm/drm_device.h> +#include <linux/dma-buf.h> + +enum amdxdna_ubuf_flag { + AMDXDNA_UBUF_FLAG_MAP_DMA = 1, +}; + +struct dma_buf *amdxdna_get_ubuf(struct drm_device *dev, + enum amdxdna_ubuf_flag flags, + u32 num_entries, void __user *va_entries); + +#endif /* _AMDXDNA_UBUF_H_ */ diff --git a/drivers/accel/ivpu/ivpu_ipc.c b/drivers/accel/ivpu/ivpu_ipc.c index 39f83225c1815a..5f00809d448afb 100644 --- a/drivers/accel/ivpu/ivpu_ipc.c +++ b/drivers/accel/ivpu/ivpu_ipc.c @@ -141,7 +141,6 @@ ivpu_ipc_rx_msg_add(struct ivpu_device *vdev, struct ivpu_ipc_consumer *cons, struct ivpu_ipc_rx_msg *rx_msg; lockdep_assert_held(&ipc->cons_lock); - lockdep_assert_irqs_disabled(); rx_msg = kzalloc(sizeof(*rx_msg), GFP_ATOMIC); if (!rx_msg) { diff --git a/drivers/accel/rocket/Kconfig b/drivers/accel/rocket/Kconfig new file mode 100644 index 00000000000000..43d6cd98ec8e4e --- /dev/null +++ b/drivers/accel/rocket/Kconfig @@ -0,0 +1,24 @@ +# SPDX-License-Identifier: GPL-2.0-only + +config DRM_ACCEL_ROCKET + tristate "Rocket (support for Rockchip NPUs)" + depends on DRM + depends on (ARCH_ROCKCHIP && ARM64) || COMPILE_TEST + depends on ROCKCHIP_IOMMU || COMPILE_TEST + depends on MMU + select DRM_SCHED + select DRM_GEM_SHMEM_HELPER + help + Choose this option if you have a Rockchip SoC that contains a + compatible Neural Processing Unit (NPU), such as the RK3588. Called by + Rockchip either RKNN or RKNPU, it accelerates inference of neural + networks. + + The interface exposed to userspace is described in + include/uapi/drm/rocket_accel.h and is used by the Rocket userspace + driver in Mesa3D. + + If unsure, say N. + + To compile this driver as a module, choose M here: the + module will be called rocket. 
diff --git a/drivers/accel/rocket/Makefile b/drivers/accel/rocket/Makefile new file mode 100644 index 00000000000000..3713dfe223d6ec --- /dev/null +++ b/drivers/accel/rocket/Makefile @@ -0,0 +1,10 @@ +# SPDX-License-Identifier: GPL-2.0-only + +obj-$(CONFIG_DRM_ACCEL_ROCKET) := rocket.o + +rocket-y := \ + rocket_core.o \ + rocket_device.o \ + rocket_drv.o \ + rocket_gem.o \ + rocket_job.o diff --git a/drivers/accel/rocket/rocket_core.c b/drivers/accel/rocket/rocket_core.c new file mode 100644 index 00000000000000..72fb5e5798fac8 --- /dev/null +++ b/drivers/accel/rocket/rocket_core.c @@ -0,0 +1,110 @@ +// SPDX-License-Identifier: GPL-2.0-only +/* Copyright 2024-2025 Tomeu Vizoso <tomeu@tomeuvizoso.net> */ + +#include <linux/clk.h> +#include <linux/delay.h> +#include <linux/dev_printk.h> +#include <linux/dma-mapping.h> +#include <linux/err.h> +#include <linux/iommu.h> +#include <linux/platform_device.h> +#include <linux/pm_runtime.h> +#include <linux/reset.h> + +#include "rocket_core.h" +#include "rocket_job.h" + +int rocket_core_init(struct rocket_core *core) +{ + struct device *dev = core->dev; + struct platform_device *pdev = to_platform_device(dev); + u32 version; + int err = 0; + + core->resets[0].id = "srst_a"; + core->resets[1].id = "srst_h"; + err = devm_reset_control_bulk_get_exclusive(&pdev->dev, ARRAY_SIZE(core->resets), + core->resets); + if (err) + return dev_err_probe(dev, err, "failed to get resets for core %d\n", core->index); + + err = devm_clk_bulk_get(dev, ARRAY_SIZE(core->clks), core->clks); + if (err) + return dev_err_probe(dev, err, "failed to get clocks for core %d\n", core->index); + + core->pc_iomem = devm_platform_ioremap_resource_byname(pdev, "pc"); + if (IS_ERR(core->pc_iomem)) { + dev_err(dev, "couldn't find PC registers %ld\n", PTR_ERR(core->pc_iomem)); + return PTR_ERR(core->pc_iomem); + } + + core->cna_iomem = devm_platform_ioremap_resource_byname(pdev, "cna"); + if (IS_ERR(core->cna_iomem)) { + dev_err(dev, "couldn't find CNA registers 
%ld\n", PTR_ERR(core->cna_iomem)); + return PTR_ERR(core->cna_iomem); + } + + core->core_iomem = devm_platform_ioremap_resource_byname(pdev, "core"); + if (IS_ERR(core->core_iomem)) { + dev_err(dev, "couldn't find CORE registers %ld\n", PTR_ERR(core->core_iomem)); + return PTR_ERR(core->core_iomem); + } + + dma_set_max_seg_size(dev, UINT_MAX); + + err = dma_set_mask_and_coherent(dev, DMA_BIT_MASK(40)); + if (err) + return err; + + core->iommu_group = iommu_group_get(dev); + + err = rocket_job_init(core); + if (err) + return err; + + pm_runtime_use_autosuspend(dev); + + /* + * As this NPU will be most often used as part of a media pipeline that + * ends presenting in a display, choose 50 ms (~3 frames at 60Hz) as an + * autosuspend delay as that will keep the device powered up while the + * pipeline is running. + */ + pm_runtime_set_autosuspend_delay(dev, 50); + + pm_runtime_enable(dev); + + err = pm_runtime_get_sync(dev); + if (err) { + rocket_job_fini(core); + return err; + } + + version = rocket_pc_readl(core, VERSION); + version += rocket_pc_readl(core, VERSION_NUM) & 0xffff; + + pm_runtime_mark_last_busy(dev); + pm_runtime_put_autosuspend(dev); + + dev_info(dev, "Rockchip NPU core %d version: %d\n", core->index, version); + + return 0; +} + +void rocket_core_fini(struct rocket_core *core) +{ + pm_runtime_dont_use_autosuspend(core->dev); + pm_runtime_disable(core->dev); + iommu_group_put(core->iommu_group); + core->iommu_group = NULL; + rocket_job_fini(core); +} + +void rocket_core_reset(struct rocket_core *core) +{ + reset_control_bulk_assert(ARRAY_SIZE(core->resets), core->resets); + + udelay(10); + + reset_control_bulk_deassert(ARRAY_SIZE(core->resets), core->resets); +} diff --git a/drivers/accel/rocket/rocket_core.h b/drivers/accel/rocket/rocket_core.h new file mode 100644 index 00000000000000..f6d7382854ca9e --- /dev/null +++ b/drivers/accel/rocket/rocket_core.h @@ -0,0 +1,64 @@ +/* SPDX-License-Identifier: GPL-2.0-only */ +/* Copyright 2024-2025 Tomeu 
Vizoso <tomeu@tomeuvizoso.net> */ + +#ifndef __ROCKET_CORE_H__ +#define __ROCKET_CORE_H__ + +#include <drm/gpu_scheduler.h> +#include <linux/clk.h> +#include <linux/io.h> +#include <linux/mutex_types.h> +#include <linux/reset.h> + +#include "rocket_registers.h" + +#define rocket_pc_readl(core, reg) \ + readl((core)->pc_iomem + (REG_PC_##reg)) +#define rocket_pc_writel(core, reg, value) \ + writel(value, (core)->pc_iomem + (REG_PC_##reg)) + +#define rocket_cna_readl(core, reg) \ + readl((core)->cna_iomem + (REG_CNA_##reg) - REG_CNA_S_STATUS) +#define rocket_cna_writel(core, reg, value) \ + writel(value, (core)->cna_iomem + (REG_CNA_##reg) - REG_CNA_S_STATUS) + +#define rocket_core_readl(core, reg) \ + readl((core)->core_iomem + (REG_CORE_##reg) - REG_CORE_S_STATUS) +#define rocket_core_writel(core, reg, value) \ + writel(value, (core)->core_iomem + (REG_CORE_##reg) - REG_CORE_S_STATUS) + +struct rocket_core { + struct device *dev; + struct rocket_device *rdev; + unsigned int index; + + int irq; + void __iomem *pc_iomem; + void __iomem *cna_iomem; + void __iomem *core_iomem; + struct clk_bulk_data clks[4]; + struct reset_control_bulk_data resets[2]; + + struct iommu_group *iommu_group; + + struct mutex job_lock; + struct rocket_job *in_flight_job; + + spinlock_t fence_lock; + + struct { + struct workqueue_struct *wq; + struct work_struct work; + atomic_t pending; + } reset; + + struct drm_gpu_scheduler sched; + u64 fence_context; + u64 emit_seqno; +}; + +int rocket_core_init(struct rocket_core *core); +void rocket_core_fini(struct rocket_core *core); +void rocket_core_reset(struct rocket_core *core); + +#endif diff --git a/drivers/accel/rocket/rocket_device.c b/drivers/accel/rocket/rocket_device.c new file mode 100644 index 00000000000000..46e6ee1e72c5f2 --- /dev/null +++ b/drivers/accel/rocket/rocket_device.c @@ -0,0 +1,60 @@ +// SPDX-License-Identifier: GPL-2.0-only +/* Copyright 2024-2025 Tomeu Vizoso <tomeu@tomeuvizoso.net> */ + +#include <drm/drm_drv.h> 
+#include <linux/array_size.h> +#include <linux/clk.h> +#include <linux/dma-mapping.h> +#include <linux/platform_device.h> +#include <linux/of.h> + +#include "rocket_device.h" + +struct rocket_device *rocket_device_init(struct platform_device *pdev, + const struct drm_driver *rocket_drm_driver) +{ + struct device *dev = &pdev->dev; + struct device_node *core_node; + struct rocket_device *rdev; + struct drm_device *ddev; + unsigned int num_cores = 0; + int err; + + rdev = devm_drm_dev_alloc(dev, rocket_drm_driver, struct rocket_device, ddev); + if (IS_ERR(rdev)) + return rdev; + + ddev = &rdev->ddev; + dev_set_drvdata(dev, rdev); + + for_each_compatible_node(core_node, NULL, "rockchip,rk3588-rknn-core") + if (of_device_is_available(core_node)) + num_cores++; + + rdev->cores = devm_kcalloc(dev, num_cores, sizeof(*rdev->cores), GFP_KERNEL); + if (!rdev->cores) + return ERR_PTR(-ENOMEM); + + dma_set_max_seg_size(dev, UINT_MAX); + + err = dma_set_mask_and_coherent(dev, DMA_BIT_MASK(40)); + if (err) + return ERR_PTR(err); + + err = devm_mutex_init(dev, &rdev->sched_lock); + if (err) + return ERR_PTR(-ENOMEM); + + err = drm_dev_register(ddev, 0); + if (err) + return ERR_PTR(err); + + return rdev; +} + +void rocket_device_fini(struct rocket_device *rdev) +{ + WARN_ON(rdev->num_cores > 0); + + drm_dev_unregister(&rdev->ddev); +} diff --git a/drivers/accel/rocket/rocket_device.h b/drivers/accel/rocket/rocket_device.h new file mode 100644 index 00000000000000..ce662abc01d3d1 --- /dev/null +++ b/drivers/accel/rocket/rocket_device.h @@ -0,0 +1,30 @@ +/* SPDX-License-Identifier: GPL-2.0-only */ +/* Copyright 2024-2025 Tomeu Vizoso <tomeu@tomeuvizoso.net> */ + +#ifndef __ROCKET_DEVICE_H__ +#define __ROCKET_DEVICE_H__ + +#include <drm/drm_device.h> +#include <linux/clk.h> +#include <linux/container_of.h> +#include <linux/iommu.h> +#include <linux/platform_device.h> + +#include "rocket_core.h" + +struct rocket_device { + struct drm_device ddev; + + struct mutex sched_lock; + + 
struct rocket_core *cores; + unsigned int num_cores; +}; + +struct rocket_device *rocket_device_init(struct platform_device *pdev, + const struct drm_driver *rocket_drm_driver); +void rocket_device_fini(struct rocket_device *rdev); +#define to_rocket_device(drm_dev) \ + ((struct rocket_device *)(container_of((drm_dev), struct rocket_device, ddev))) + +#endif /* __ROCKET_DEVICE_H__ */ diff --git a/drivers/accel/rocket/rocket_drv.c b/drivers/accel/rocket/rocket_drv.c new file mode 100644 index 00000000000000..5c0b63f0a8f00d --- /dev/null +++ b/drivers/accel/rocket/rocket_drv.c @@ -0,0 +1,290 @@ +// SPDX-License-Identifier: GPL-2.0-only +/* Copyright 2024-2025 Tomeu Vizoso <tomeu@tomeuvizoso.net> */ + +#include <drm/drm_accel.h> +#include <drm/drm_drv.h> +#include <drm/drm_gem.h> +#include <drm/drm_ioctl.h> +#include <drm/rocket_accel.h> +#include <linux/clk.h> +#include <linux/err.h> +#include <linux/iommu.h> +#include <linux/of.h> +#include <linux/platform_device.h> +#include <linux/pm_runtime.h> + +#include "rocket_drv.h" +#include "rocket_gem.h" +#include "rocket_job.h" + +/* + * Facade device, used to expose a single DRM device to userspace, that + * schedules jobs to any RKNN cores in the system. 
+ */ +static struct platform_device *drm_dev; +static struct rocket_device *rdev; + +static void +rocket_iommu_domain_destroy(struct kref *kref) +{ + struct rocket_iommu_domain *domain = container_of(kref, struct rocket_iommu_domain, kref); + + iommu_domain_free(domain->domain); + domain->domain = NULL; + kfree(domain); +} + +static struct rocket_iommu_domain* +rocket_iommu_domain_create(struct device *dev) +{ + struct rocket_iommu_domain *domain = kmalloc(sizeof(*domain), GFP_KERNEL); + void *err; + + if (!domain) + return ERR_PTR(-ENOMEM); + + domain->domain = iommu_paging_domain_alloc(dev); + if (IS_ERR(domain->domain)) { + err = ERR_CAST(domain->domain); + kfree(domain); + return err; + } + kref_init(&domain->kref); + + return domain; +} + +struct rocket_iommu_domain * +rocket_iommu_domain_get(struct rocket_file_priv *rocket_priv) +{ + kref_get(&rocket_priv->domain->kref); + return rocket_priv->domain; +} + +void +rocket_iommu_domain_put(struct rocket_iommu_domain *domain) +{ + kref_put(&domain->kref, rocket_iommu_domain_destroy); +} + +static int +rocket_open(struct drm_device *dev, struct drm_file *file) +{ + struct rocket_device *rdev = to_rocket_device(dev); + struct rocket_file_priv *rocket_priv; + u64 start, end; + int ret; + + if (!try_module_get(THIS_MODULE)) + return -EINVAL; + + rocket_priv = kzalloc(sizeof(*rocket_priv), GFP_KERNEL); + if (!rocket_priv) { + ret = -ENOMEM; + goto err_put_mod; + } + + rocket_priv->rdev = rdev; + rocket_priv->domain = rocket_iommu_domain_create(rdev->cores[0].dev); + if (IS_ERR(rocket_priv->domain)) { + ret = PTR_ERR(rocket_priv->domain); + goto err_free; + } + + file->driver_priv = rocket_priv; + + start = rocket_priv->domain->domain->geometry.aperture_start; + end = rocket_priv->domain->domain->geometry.aperture_end; + drm_mm_init(&rocket_priv->mm, start, end - start + 1); + mutex_init(&rocket_priv->mm_lock); + + ret = rocket_job_open(rocket_priv); + if (ret) + goto err_mm_takedown; + + return 0; + +err_mm_takedown: + 
mutex_destroy(&rocket_priv->mm_lock); + drm_mm_takedown(&rocket_priv->mm); + rocket_iommu_domain_put(rocket_priv->domain); +err_free: + kfree(rocket_priv); +err_put_mod: + module_put(THIS_MODULE); + return ret; +} + +static void +rocket_postclose(struct drm_device *dev, struct drm_file *file) +{ + struct rocket_file_priv *rocket_priv = file->driver_priv; + + rocket_job_close(rocket_priv); + mutex_destroy(&rocket_priv->mm_lock); + drm_mm_takedown(&rocket_priv->mm); + rocket_iommu_domain_put(rocket_priv->domain); + kfree(rocket_priv); + module_put(THIS_MODULE); +} + +static const struct drm_ioctl_desc rocket_drm_driver_ioctls[] = { +#define ROCKET_IOCTL(n, func) \ + DRM_IOCTL_DEF_DRV(ROCKET_##n, rocket_ioctl_##func, 0) + + ROCKET_IOCTL(CREATE_BO, create_bo), + ROCKET_IOCTL(SUBMIT, submit), + ROCKET_IOCTL(PREP_BO, prep_bo), + ROCKET_IOCTL(FINI_BO, fini_bo), +}; + +DEFINE_DRM_ACCEL_FOPS(rocket_accel_driver_fops); + +/* + * Rocket driver version: + * - 1.0 - initial interface + */ +static const struct drm_driver rocket_drm_driver = { + .driver_features = DRIVER_COMPUTE_ACCEL | DRIVER_GEM, + .open = rocket_open, + .postclose = rocket_postclose, + .gem_create_object = rocket_gem_create_object, + .ioctls = rocket_drm_driver_ioctls, + .num_ioctls = ARRAY_SIZE(rocket_drm_driver_ioctls), + .fops = &rocket_accel_driver_fops, + .name = "rocket", + .desc = "rocket DRM", +}; + +static int rocket_probe(struct platform_device *pdev) +{ + if (rdev == NULL) { + /* First core probing, initialize DRM device. 
*/ + rdev = rocket_device_init(drm_dev, &rocket_drm_driver); + if (IS_ERR(rdev)) { + dev_err(&pdev->dev, "failed to initialize rocket device\n"); + return PTR_ERR(rdev); + } + } + + unsigned int core = rdev->num_cores; + + dev_set_drvdata(&pdev->dev, rdev); + + rdev->cores[core].rdev = rdev; + rdev->cores[core].dev = &pdev->dev; + rdev->cores[core].index = core; + + rdev->num_cores++; + + return rocket_core_init(&rdev->cores[core]); +} + +static void rocket_remove(struct platform_device *pdev) +{ + struct device *dev = &pdev->dev; + + for (unsigned int core = 0; core < rdev->num_cores; core++) { + if (rdev->cores[core].dev == dev) { + rocket_core_fini(&rdev->cores[core]); + rdev->num_cores--; + break; + } + } + + if (rdev->num_cores == 0) { + /* Last core removed, deinitialize DRM device. */ + rocket_device_fini(rdev); + rdev = NULL; + } +} + +static const struct of_device_id dt_match[] = { + { .compatible = "rockchip,rk3588-rknn-core" }, + {} +}; +MODULE_DEVICE_TABLE(of, dt_match); + +static int find_core_for_dev(struct device *dev) +{ + struct rocket_device *rdev = dev_get_drvdata(dev); + + for (unsigned int core = 0; core < rdev->num_cores; core++) { + if (dev == rdev->cores[core].dev) + return core; + } + + return -1; +} + +static int rocket_device_runtime_resume(struct device *dev) +{ + struct rocket_device *rdev = dev_get_drvdata(dev); + int core = find_core_for_dev(dev); + int err = 0; + + if (core < 0) + return -ENODEV; + + err = clk_bulk_prepare_enable(ARRAY_SIZE(rdev->cores[core].clks), rdev->cores[core].clks); + if (err) { + dev_err(dev, "failed to enable (%d) clocks for core %d\n", err, core); + return err; + } + + return 0; +} + +static int rocket_device_runtime_suspend(struct device *dev) +{ + struct rocket_device *rdev = dev_get_drvdata(dev); + int core = find_core_for_dev(dev); + + if (core < 0) + return -ENODEV; + + if (!rocket_job_is_idle(&rdev->cores[core])) + return -EBUSY; + + clk_bulk_disable_unprepare(ARRAY_SIZE(rdev->cores[core].clks), 
rdev->cores[core].clks); + + return 0; +} + +EXPORT_GPL_DEV_PM_OPS(rocket_pm_ops) = { + RUNTIME_PM_OPS(rocket_device_runtime_suspend, rocket_device_runtime_resume, NULL) + SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend, pm_runtime_force_resume) +}; + +static struct platform_driver rocket_driver = { + .probe = rocket_probe, + .remove = rocket_remove, + .driver = { + .name = "rocket", + .pm = pm_ptr(&rocket_pm_ops), + .of_match_table = dt_match, + }, +}; + +static int __init rocket_register(void) +{ + drm_dev = platform_device_register_simple("rknn", -1, NULL, 0); + if (IS_ERR(drm_dev)) + return PTR_ERR(drm_dev); + + return platform_driver_register(&rocket_driver); +} + +static void __exit rocket_unregister(void) +{ + platform_driver_unregister(&rocket_driver); + + platform_device_unregister(drm_dev); +} + +module_init(rocket_register); +module_exit(rocket_unregister); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("DRM driver for the Rockchip NPU IP"); +MODULE_AUTHOR("Tomeu Vizoso"); diff --git a/drivers/accel/rocket/rocket_drv.h b/drivers/accel/rocket/rocket_drv.h new file mode 100644 index 00000000000000..2c673bb99ccc1d --- /dev/null +++ b/drivers/accel/rocket/rocket_drv.h @@ -0,0 +1,32 @@ +/* SPDX-License-Identifier: GPL-2.0-only */ +/* Copyright 2024-2025 Tomeu Vizoso <tomeu@tomeuvizoso.net> */ + +#ifndef __ROCKET_DRV_H__ +#define __ROCKET_DRV_H__ + +#include <drm/drm_mm.h> +#include <drm/gpu_scheduler.h> + +#include "rocket_device.h" + +extern const struct dev_pm_ops rocket_pm_ops; + +struct rocket_iommu_domain { + struct iommu_domain *domain; + struct kref kref; +}; + +struct rocket_file_priv { + struct rocket_device *rdev; + + struct rocket_iommu_domain *domain; + struct drm_mm mm; + struct mutex mm_lock; + + struct drm_sched_entity sched_entity; +}; + +struct rocket_iommu_domain *rocket_iommu_domain_get(struct rocket_file_priv *rocket_priv); +void rocket_iommu_domain_put(struct rocket_iommu_domain *domain); + +#endif diff --git 
a/drivers/accel/rocket/rocket_gem.c b/drivers/accel/rocket/rocket_gem.c new file mode 100644 index 00000000000000..0551e11cc18414 --- /dev/null +++ b/drivers/accel/rocket/rocket_gem.c @@ -0,0 +1,181 @@ +// SPDX-License-Identifier: GPL-2.0-only +/* Copyright 2024-2025 Tomeu Vizoso <tomeu@tomeuvizoso.net> */ + +#include <drm/drm_device.h> +#include <drm/drm_utils.h> +#include <drm/rocket_accel.h> +#include <linux/dma-mapping.h> +#include <linux/iommu.h> + +#include "rocket_drv.h" +#include "rocket_gem.h" + +static void rocket_gem_bo_free(struct drm_gem_object *obj) +{ + struct rocket_gem_object *bo = to_rocket_bo(obj); + struct rocket_file_priv *rocket_priv = bo->driver_priv; + size_t unmapped; + + drm_WARN_ON(obj->dev, refcount_read(&bo->base.pages_use_count) > 1); + + unmapped = iommu_unmap(bo->domain->domain, bo->mm.start, bo->size); + drm_WARN_ON(obj->dev, unmapped != bo->size); + + mutex_lock(&rocket_priv->mm_lock); + drm_mm_remove_node(&bo->mm); + mutex_unlock(&rocket_priv->mm_lock); + + rocket_iommu_domain_put(bo->domain); + bo->domain = NULL; + + drm_gem_shmem_free(&bo->base); +} + +static const struct drm_gem_object_funcs rocket_gem_funcs = { + .free = rocket_gem_bo_free, + .print_info = drm_gem_shmem_object_print_info, + .pin = drm_gem_shmem_object_pin, + .unpin = drm_gem_shmem_object_unpin, + .get_sg_table = drm_gem_shmem_object_get_sg_table, + .vmap = drm_gem_shmem_object_vmap, + .vunmap = drm_gem_shmem_object_vunmap, + .mmap = drm_gem_shmem_object_mmap, + .vm_ops = &drm_gem_shmem_vm_ops, +}; + +struct drm_gem_object *rocket_gem_create_object(struct drm_device *dev, size_t size) +{ + struct rocket_gem_object *obj; + + obj = kzalloc(sizeof(*obj), GFP_KERNEL); + if (!obj) + return ERR_PTR(-ENOMEM); + + obj->base.base.funcs = &rocket_gem_funcs; + + return &obj->base.base; +} + +int rocket_ioctl_create_bo(struct drm_device *dev, void *data, struct drm_file *file) +{ + struct rocket_file_priv *rocket_priv = file->driver_priv; + struct drm_rocket_create_bo 
*args = data; + struct drm_gem_shmem_object *shmem_obj; + struct rocket_gem_object *rkt_obj; + struct drm_gem_object *gem_obj; + struct sg_table *sgt; + int ret; + + shmem_obj = drm_gem_shmem_create(dev, args->size); + if (IS_ERR(shmem_obj)) + return PTR_ERR(shmem_obj); + + gem_obj = &shmem_obj->base; + rkt_obj = to_rocket_bo(gem_obj); + + rkt_obj->driver_priv = rocket_priv; + rkt_obj->domain = rocket_iommu_domain_get(rocket_priv); + rkt_obj->size = args->size; + rkt_obj->offset = 0; + + ret = drm_gem_handle_create(file, gem_obj, &args->handle); + drm_gem_object_put(gem_obj); + if (ret) + goto err; + + sgt = drm_gem_shmem_get_pages_sgt(shmem_obj); + if (IS_ERR(sgt)) { + ret = PTR_ERR(sgt); + goto err; + } + + mutex_lock(&rocket_priv->mm_lock); + ret = drm_mm_insert_node_generic(&rocket_priv->mm, &rkt_obj->mm, + rkt_obj->size, PAGE_SIZE, + 0, 0); + mutex_unlock(&rocket_priv->mm_lock); + + ret = iommu_map_sgtable(rocket_priv->domain->domain, + rkt_obj->mm.start, + shmem_obj->sgt, + IOMMU_READ | IOMMU_WRITE); + if (ret < 0 || ret < args->size) { + drm_err(dev, "failed to map buffer: size=%d request_size=%u\n", + ret, args->size); + ret = -ENOMEM; + goto err_remove_node; + } + + /* iommu_map_sgtable might have aligned the size */ + rkt_obj->size = ret; + args->offset = drm_vma_node_offset_addr(&gem_obj->vma_node); + args->dma_address = rkt_obj->mm.start; + + return 0; + +err_remove_node: + mutex_lock(&rocket_priv->mm_lock); + drm_mm_remove_node(&rkt_obj->mm); + mutex_unlock(&rocket_priv->mm_lock); + +err: + drm_gem_shmem_object_free(gem_obj); + + return ret; +} + +int rocket_ioctl_prep_bo(struct drm_device *dev, void *data, struct drm_file *file) +{ + struct drm_rocket_prep_bo *args = data; + unsigned long timeout = drm_timeout_abs_to_jiffies(args->timeout_ns); + struct drm_gem_object *gem_obj; + struct drm_gem_shmem_object *shmem_obj; + long ret = 0; + + if (args->reserved != 0) { + drm_dbg(dev, "Reserved field in drm_rocket_prep_bo struct should be 0.\n"); + return 
-EINVAL; + } + + gem_obj = drm_gem_object_lookup(file, args->handle); + if (!gem_obj) + return -ENOENT; + + ret = dma_resv_wait_timeout(gem_obj->resv, DMA_RESV_USAGE_WRITE, true, timeout); + if (!ret) + ret = timeout ? -ETIMEDOUT : -EBUSY; + + shmem_obj = &to_rocket_bo(gem_obj)->base; + + dma_sync_sgtable_for_cpu(dev->dev, shmem_obj->sgt, DMA_BIDIRECTIONAL); + + drm_gem_object_put(gem_obj); + + return ret; +} + +int rocket_ioctl_fini_bo(struct drm_device *dev, void *data, struct drm_file *file) +{ + struct drm_rocket_fini_bo *args = data; + struct drm_gem_shmem_object *shmem_obj; + struct rocket_gem_object *rkt_obj; + struct drm_gem_object *gem_obj; + + if (args->reserved != 0) { + drm_dbg(dev, "Reserved field in drm_rocket_fini_bo struct should be 0.\n"); + return -EINVAL; + } + + gem_obj = drm_gem_object_lookup(file, args->handle); + if (!gem_obj) + return -ENOENT; + + rkt_obj = to_rocket_bo(gem_obj); + shmem_obj = &rkt_obj->base; + + dma_sync_sgtable_for_device(dev->dev, shmem_obj->sgt, DMA_BIDIRECTIONAL); + + drm_gem_object_put(gem_obj); + + return 0; +} diff --git a/drivers/accel/rocket/rocket_gem.h b/drivers/accel/rocket/rocket_gem.h new file mode 100644 index 00000000000000..24043033450941 --- /dev/null +++ b/drivers/accel/rocket/rocket_gem.h @@ -0,0 +1,34 @@ +/* SPDX-License-Identifier: GPL-2.0-only */ +/* Copyright 2024-2025 Tomeu Vizoso <tomeu@tomeuvizoso.net> */ + +#ifndef __ROCKET_GEM_H__ +#define __ROCKET_GEM_H__ + +#include <drm/drm_gem_shmem_helper.h> + +struct rocket_gem_object { + struct drm_gem_shmem_object base; + + struct rocket_file_priv *driver_priv; + + struct rocket_iommu_domain *domain; + struct drm_mm_node mm; + size_t size; + u32 offset; +}; + +struct drm_gem_object *rocket_gem_create_object(struct drm_device *dev, size_t size); + +int rocket_ioctl_create_bo(struct drm_device *dev, void *data, struct drm_file *file); + +int rocket_ioctl_prep_bo(struct drm_device *dev, void *data, struct drm_file *file); + +int rocket_ioctl_fini_bo(struct 
drm_device *dev, void *data, struct drm_file *file); + +static inline +struct rocket_gem_object *to_rocket_bo(struct drm_gem_object *obj) +{ + return container_of(to_drm_gem_shmem_obj(obj), struct rocket_gem_object, base); +} + +#endif diff --git a/drivers/accel/rocket/rocket_job.c b/drivers/accel/rocket/rocket_job.c new file mode 100644 index 00000000000000..5d4afd69230623 --- /dev/null +++ b/drivers/accel/rocket/rocket_job.c @@ -0,0 +1,636 @@ +// SPDX-License-Identifier: GPL-2.0-only +/* Copyright 2019 Linaro, Ltd, Rob Herring <robh@kernel.org> */ +/* Copyright 2019 Collabora ltd. */ +/* Copyright 2024-2025 Tomeu Vizoso <tomeu@tomeuvizoso.net> */ + +#include <drm/drm_print.h> +#include <drm/drm_file.h> +#include <drm/drm_gem.h> +#include <drm/rocket_accel.h> +#include <linux/interrupt.h> +#include <linux/iommu.h> +#include <linux/platform_device.h> +#include <linux/pm_runtime.h> + +#include "rocket_core.h" +#include "rocket_device.h" +#include "rocket_drv.h" +#include "rocket_job.h" +#include "rocket_registers.h" + +#define JOB_TIMEOUT_MS 500 + +static struct rocket_job * +to_rocket_job(struct drm_sched_job *sched_job) +{ + return container_of(sched_job, struct rocket_job, base); +} + +static const char *rocket_fence_get_driver_name(struct dma_fence *fence) +{ + return "rocket"; +} + +static const char *rocket_fence_get_timeline_name(struct dma_fence *fence) +{ + return "rockchip-npu"; +} + +static const struct dma_fence_ops rocket_fence_ops = { + .get_driver_name = rocket_fence_get_driver_name, + .get_timeline_name = rocket_fence_get_timeline_name, +}; + +static struct dma_fence *rocket_fence_create(struct rocket_core *core) +{ + struct dma_fence *fence; + + fence = kzalloc(sizeof(*fence), GFP_KERNEL); + if (!fence) + return ERR_PTR(-ENOMEM); + + dma_fence_init(fence, &rocket_fence_ops, &core->fence_lock, + core->fence_context, ++core->emit_seqno); + + return fence; +} + +static int +rocket_copy_tasks(struct drm_device *dev, + struct drm_file *file_priv, + 
struct drm_rocket_job *job, + struct rocket_job *rjob) +{ + int ret = 0; + + if (job->task_struct_size < sizeof(struct drm_rocket_task)) + return -EINVAL; + + rjob->task_count = job->task_count; + + if (!rjob->task_count) + return 0; + + rjob->tasks = kvmalloc_array(job->task_count, sizeof(*rjob->tasks), GFP_KERNEL); + if (!rjob->tasks) { + drm_dbg(dev, "Failed to allocate task array\n"); + return -ENOMEM; + } + + for (int i = 0; i < rjob->task_count; i++) { + struct drm_rocket_task task = {0}; + + if (copy_from_user(&task, + u64_to_user_ptr(job->tasks) + i * job->task_struct_size, + sizeof(task))) { + drm_dbg(dev, "Failed to copy incoming tasks\n"); + ret = -EFAULT; + goto fail; + } + + if (task.regcmd_count == 0) { + drm_dbg(dev, "regcmd_count field in drm_rocket_task should be > 0.\n"); + ret = -EINVAL; + goto fail; + } + + rjob->tasks[i].regcmd = task.regcmd; + rjob->tasks[i].regcmd_count = task.regcmd_count; + } + + return 0; + +fail: + kvfree(rjob->tasks); + return ret; +} + +static void rocket_job_hw_submit(struct rocket_core *core, struct rocket_job *job) +{ + struct rocket_task *task; + unsigned int extra_bit; + + /* Don't queue the job if a reset is in progress */ + if (atomic_read(&core->reset.pending)) + return; + + /* GO ! 
*/ + + task = &job->tasks[job->next_task_idx]; + job->next_task_idx++; + + rocket_pc_writel(core, BASE_ADDRESS, 0x1); + + /* From rknpu, in the TRM this bit is marked as reserved */ + extra_bit = 0x10000000 * core->index; + rocket_cna_writel(core, S_POINTER, CNA_S_POINTER_POINTER_PP_EN(1) | + CNA_S_POINTER_EXECUTER_PP_EN(1) | + CNA_S_POINTER_POINTER_PP_MODE(1) | + extra_bit); + + rocket_core_writel(core, S_POINTER, CORE_S_POINTER_POINTER_PP_EN(1) | + CORE_S_POINTER_EXECUTER_PP_EN(1) | + CORE_S_POINTER_POINTER_PP_MODE(1) | + extra_bit); + + rocket_pc_writel(core, BASE_ADDRESS, task->regcmd); + rocket_pc_writel(core, REGISTER_AMOUNTS, + PC_REGISTER_AMOUNTS_PC_DATA_AMOUNT((task->regcmd_count + 1) / 2 - 1)); + + rocket_pc_writel(core, INTERRUPT_MASK, PC_INTERRUPT_MASK_DPU_0 | PC_INTERRUPT_MASK_DPU_1); + rocket_pc_writel(core, INTERRUPT_CLEAR, PC_INTERRUPT_CLEAR_DPU_0 | PC_INTERRUPT_CLEAR_DPU_1); + + rocket_pc_writel(core, TASK_CON, PC_TASK_CON_RESERVED_0(1) | + PC_TASK_CON_TASK_COUNT_CLEAR(1) | + PC_TASK_CON_TASK_NUMBER(1) | + PC_TASK_CON_TASK_PP_EN(1)); + + rocket_pc_writel(core, TASK_DMA_BASE_ADDR, PC_TASK_DMA_BASE_ADDR_DMA_BASE_ADDR(0x0)); + + rocket_pc_writel(core, OPERATION_ENABLE, PC_OPERATION_ENABLE_OP_EN(1)); + + dev_dbg(core->dev, "Submitted regcmd at 0x%llx to core %d", task->regcmd, core->index); +} + +static int rocket_acquire_object_fences(struct drm_gem_object **bos, + int bo_count, + struct drm_sched_job *job, + bool is_write) +{ + int i, ret; + + for (i = 0; i < bo_count; i++) { + ret = dma_resv_reserve_fences(bos[i]->resv, 1); + if (ret) + return ret; + + ret = drm_sched_job_add_implicit_dependencies(job, bos[i], + is_write); + if (ret) + return ret; + } + + return 0; +} + +static void rocket_attach_object_fences(struct drm_gem_object **bos, + int bo_count, + struct dma_fence *fence) +{ + int i; + + for (i = 0; i < bo_count; i++) + dma_resv_add_fence(bos[i]->resv, fence, DMA_RESV_USAGE_WRITE); +} + +static int rocket_job_push(struct rocket_job *job) +{ 
+ struct rocket_device *rdev = job->rdev; + struct drm_gem_object **bos; + struct ww_acquire_ctx acquire_ctx; + int ret = 0; + + bos = kvmalloc_array(job->in_bo_count + job->out_bo_count, sizeof(void *), + GFP_KERNEL); + memcpy(bos, job->in_bos, job->in_bo_count * sizeof(void *)); + memcpy(&bos[job->in_bo_count], job->out_bos, job->out_bo_count * sizeof(void *)); + + ret = drm_gem_lock_reservations(bos, job->in_bo_count + job->out_bo_count, &acquire_ctx); + if (ret) + goto err; + + scoped_guard(mutex, &rdev->sched_lock) { + drm_sched_job_arm(&job->base); + + job->inference_done_fence = dma_fence_get(&job->base.s_fence->finished); + + ret = rocket_acquire_object_fences(job->in_bos, job->in_bo_count, &job->base, false); + if (ret) + goto err_unlock; + + ret = rocket_acquire_object_fences(job->out_bos, job->out_bo_count, &job->base, true); + if (ret) + goto err_unlock; + + kref_get(&job->refcount); /* put by scheduler job completion */ + + drm_sched_entity_push_job(&job->base); + } + + rocket_attach_object_fences(job->out_bos, job->out_bo_count, job->inference_done_fence); + +err_unlock: + drm_gem_unlock_reservations(bos, job->in_bo_count + job->out_bo_count, &acquire_ctx); +err: + kfree(bos); + + return ret; +} + +static void rocket_job_cleanup(struct kref *ref) +{ + struct rocket_job *job = container_of(ref, struct rocket_job, + refcount); + unsigned int i; + + rocket_iommu_domain_put(job->domain); + + dma_fence_put(job->done_fence); + dma_fence_put(job->inference_done_fence); + + if (job->in_bos) { + for (i = 0; i < job->in_bo_count; i++) + drm_gem_object_put(job->in_bos[i]); + + kvfree(job->in_bos); + } + + if (job->out_bos) { + for (i = 0; i < job->out_bo_count; i++) + drm_gem_object_put(job->out_bos[i]); + + kvfree(job->out_bos); + } + + kvfree(job->tasks); + + kfree(job); +} + +static void rocket_job_put(struct rocket_job *job) +{ + kref_put(&job->refcount, rocket_job_cleanup); +} + +static void rocket_job_free(struct drm_sched_job *sched_job) +{ + struct 
rocket_job *job = to_rocket_job(sched_job); + + drm_sched_job_cleanup(sched_job); + + rocket_job_put(job); +} + +static struct rocket_core *sched_to_core(struct rocket_device *rdev, + struct drm_gpu_scheduler *sched) +{ + unsigned int core; + + for (core = 0; core < rdev->num_cores; core++) { + if (&rdev->cores[core].sched == sched) + return &rdev->cores[core]; + } + + return NULL; +} + +static struct dma_fence *rocket_job_run(struct drm_sched_job *sched_job) +{ + struct rocket_job *job = to_rocket_job(sched_job); + struct rocket_device *rdev = job->rdev; + struct rocket_core *core = sched_to_core(rdev, sched_job->sched); + struct dma_fence *fence = NULL; + int ret; + + if (unlikely(job->base.s_fence->finished.error)) + return NULL; + + /* + * Nothing to execute: can happen if the job has finished while + * we were resetting the NPU. + */ + if (job->next_task_idx == job->task_count) + return NULL; + + fence = rocket_fence_create(core); + if (IS_ERR(fence)) + return fence; + + if (job->done_fence) + dma_fence_put(job->done_fence); + job->done_fence = dma_fence_get(fence); + + ret = pm_runtime_get_sync(core->dev); + if (ret < 0) + return fence; + + ret = iommu_attach_group(job->domain->domain, core->iommu_group); + if (ret < 0) + return fence; + + scoped_guard(mutex, &core->job_lock) { + core->in_flight_job = job; + rocket_job_hw_submit(core, job); + } + + return fence; +} + +static void rocket_job_handle_irq(struct rocket_core *core) +{ + pm_runtime_mark_last_busy(core->dev); + + rocket_pc_writel(core, OPERATION_ENABLE, 0x0); + rocket_pc_writel(core, INTERRUPT_CLEAR, 0x1ffff); + + scoped_guard(mutex, &core->job_lock) + if (core->in_flight_job) { + if (core->in_flight_job->next_task_idx < core->in_flight_job->task_count) { + rocket_job_hw_submit(core, core->in_flight_job); + return; + } + + iommu_detach_group(NULL, iommu_group_get(core->dev)); + dma_fence_signal(core->in_flight_job->done_fence); + pm_runtime_put_autosuspend(core->dev); + core->in_flight_job = NULL; + 
} +} + +static void +rocket_reset(struct rocket_core *core, struct drm_sched_job *bad) +{ + if (!atomic_read(&core->reset.pending)) + return; + + drm_sched_stop(&core->sched, bad); + + /* + * Remaining interrupts have been handled, but we might still have + * stuck jobs. Let's make sure the PM counters stay balanced by + * manually calling pm_runtime_put_noidle(). + */ + scoped_guard(mutex, &core->job_lock) { + if (core->in_flight_job) + pm_runtime_put_noidle(core->dev); + + iommu_detach_group(NULL, core->iommu_group); + + core->in_flight_job = NULL; + } + + /* Proceed with reset now. */ + rocket_core_reset(core); + + /* NPU has been reset, we can clear the reset pending bit. */ + atomic_set(&core->reset.pending, 0); + + /* Restart the scheduler */ + drm_sched_start(&core->sched, 0); +} + +static enum drm_gpu_sched_stat rocket_job_timedout(struct drm_sched_job *sched_job) +{ + struct rocket_job *job = to_rocket_job(sched_job); + struct rocket_device *rdev = job->rdev; + struct rocket_core *core = sched_to_core(rdev, sched_job->sched); + + dev_err(core->dev, "NPU job timed out"); + + atomic_set(&core->reset.pending, 1); + rocket_reset(core, sched_job); + + return DRM_GPU_SCHED_STAT_RESET; +} + +static void rocket_reset_work(struct work_struct *work) +{ + struct rocket_core *core; + + core = container_of(work, struct rocket_core, reset.work); + rocket_reset(core, NULL); +} + +static const struct drm_sched_backend_ops rocket_sched_ops = { + .run_job = rocket_job_run, + .timedout_job = rocket_job_timedout, + .free_job = rocket_job_free +}; + +static irqreturn_t rocket_job_irq_handler_thread(int irq, void *data) +{ + struct rocket_core *core = data; + + rocket_job_handle_irq(core); + + return IRQ_HANDLED; +} + +static irqreturn_t rocket_job_irq_handler(int irq, void *data) +{ + struct rocket_core *core = data; + u32 raw_status = rocket_pc_readl(core, INTERRUPT_RAW_STATUS); + + WARN_ON(raw_status & PC_INTERRUPT_RAW_STATUS_DMA_READ_ERROR); + WARN_ON(raw_status & 
PC_INTERRUPT_RAW_STATUS_DMA_WRITE_ERROR); + + if (!(raw_status & PC_INTERRUPT_RAW_STATUS_DPU_0 || + raw_status & PC_INTERRUPT_RAW_STATUS_DPU_1)) + return IRQ_NONE; + + rocket_pc_writel(core, INTERRUPT_MASK, 0x0); + + return IRQ_WAKE_THREAD; +} + +int rocket_job_init(struct rocket_core *core) +{ + struct drm_sched_init_args args = { + .ops = &rocket_sched_ops, + .num_rqs = DRM_SCHED_PRIORITY_COUNT, + .credit_limit = 1, + .timeout = msecs_to_jiffies(JOB_TIMEOUT_MS), + .name = dev_name(core->dev), + .dev = core->dev, + }; + int ret; + + INIT_WORK(&core->reset.work, rocket_reset_work); + spin_lock_init(&core->fence_lock); + mutex_init(&core->job_lock); + + core->irq = platform_get_irq(to_platform_device(core->dev), 0); + if (core->irq < 0) + return core->irq; + + ret = devm_request_threaded_irq(core->dev, core->irq, + rocket_job_irq_handler, + rocket_job_irq_handler_thread, + IRQF_SHARED, dev_name(core->dev), + core); + if (ret) { + dev_err(core->dev, "failed to request job irq"); + return ret; + } + + core->reset.wq = alloc_ordered_workqueue("rocket-reset-%d", 0, core->index); + if (!core->reset.wq) + return -ENOMEM; + + core->fence_context = dma_fence_context_alloc(1); + + args.timeout_wq = core->reset.wq; + ret = drm_sched_init(&core->sched, &args); + if (ret) { + dev_err(core->dev, "Failed to create scheduler: %d.", ret); + goto err_sched; + } + + return 0; + +err_sched: + drm_sched_fini(&core->sched); + + destroy_workqueue(core->reset.wq); + return ret; +} + +void rocket_job_fini(struct rocket_core *core) +{ + drm_sched_fini(&core->sched); + + cancel_work_sync(&core->reset.work); + destroy_workqueue(core->reset.wq); +} + +int rocket_job_open(struct rocket_file_priv *rocket_priv) +{ + struct rocket_device *rdev = rocket_priv->rdev; + struct drm_gpu_scheduler **scheds = kmalloc_array(rdev->num_cores, sizeof(scheds), + GFP_KERNEL); + unsigned int core; + int ret; + + for (core = 0; core < rdev->num_cores; core++) + scheds[core] = &rdev->cores[core].sched; + + ret = 
drm_sched_entity_init(&rocket_priv->sched_entity, + DRM_SCHED_PRIORITY_NORMAL, + scheds, + rdev->num_cores, NULL); + if (WARN_ON(ret)) + return ret; + + return 0; +} + +void rocket_job_close(struct rocket_file_priv *rocket_priv) +{ + struct drm_sched_entity *entity = &rocket_priv->sched_entity; + + kfree(entity->sched_list); + drm_sched_entity_destroy(entity); +} + +int rocket_job_is_idle(struct rocket_core *core) +{ + /* If there are any jobs in this HW queue, we're not idle */ + if (atomic_read(&core->sched.credit_count)) + return false; + + return true; +} + +static int rocket_ioctl_submit_job(struct drm_device *dev, struct drm_file *file, + struct drm_rocket_job *job) +{ + struct rocket_device *rdev = to_rocket_device(dev); + struct rocket_file_priv *file_priv = file->driver_priv; + struct rocket_job *rjob = NULL; + int ret = 0; + + if (job->task_count == 0) + return -EINVAL; + + rjob = kzalloc(sizeof(*rjob), GFP_KERNEL); + if (!rjob) + return -ENOMEM; + + kref_init(&rjob->refcount); + + rjob->rdev = rdev; + + ret = drm_sched_job_init(&rjob->base, + &file_priv->sched_entity, + 1, NULL, file->client_id); + if (ret) + goto out_put_job; + + ret = rocket_copy_tasks(dev, file, job, rjob); + if (ret) + goto out_cleanup_job; + + ret = drm_gem_objects_lookup(file, u64_to_user_ptr(job->in_bo_handles), + job->in_bo_handle_count, &rjob->in_bos); + if (ret) + goto out_cleanup_job; + + rjob->in_bo_count = job->in_bo_handle_count; + + ret = drm_gem_objects_lookup(file, u64_to_user_ptr(job->out_bo_handles), + job->out_bo_handle_count, &rjob->out_bos); + if (ret) + goto out_cleanup_job; + + rjob->out_bo_count = job->out_bo_handle_count; + + rjob->domain = rocket_iommu_domain_get(file_priv); + + ret = rocket_job_push(rjob); + if (ret) + goto out_cleanup_job; + +out_cleanup_job: + if (ret) + drm_sched_job_cleanup(&rjob->base); +out_put_job: + rocket_job_put(rjob); + + return ret; +} + +int rocket_ioctl_submit(struct drm_device *dev, void *data, struct drm_file *file) +{ + struct 
drm_rocket_submit *args = data; + struct drm_rocket_job *jobs; + int ret = 0; + unsigned int i = 0; + + if (args->job_count == 0) + return 0; + + if (args->job_struct_size < sizeof(struct drm_rocket_job)) { + drm_dbg(dev, "job_struct_size field in drm_rocket_submit struct is too small.\n"); + return -EINVAL; + } + + if (args->reserved != 0) { + drm_dbg(dev, "Reserved field in drm_rocket_submit struct should be 0.\n"); + return -EINVAL; + } + + jobs = kvmalloc_array(args->job_count, sizeof(*jobs), GFP_KERNEL); + if (!jobs) { + drm_dbg(dev, "Failed to allocate incoming job array\n"); + return -ENOMEM; + } + + for (i = 0; i < args->job_count; i++) { + if (copy_from_user(&jobs[i], + u64_to_user_ptr(args->jobs) + i * args->job_struct_size, + sizeof(*jobs))) { + ret = -EFAULT; + drm_dbg(dev, "Failed to copy incoming job array\n"); + goto exit; + } + } + + + for (i = 0; i < args->job_count; i++) + rocket_ioctl_submit_job(dev, file, &jobs[i]); + +exit: + kfree(jobs); + + return ret; +} diff --git a/drivers/accel/rocket/rocket_job.h b/drivers/accel/rocket/rocket_job.h new file mode 100644 index 00000000000000..4ae00feec3b939 --- /dev/null +++ b/drivers/accel/rocket/rocket_job.h @@ -0,0 +1,52 @@ +/* SPDX-License-Identifier: GPL-2.0-only */ +/* Copyright 2024-2025 Tomeu Vizoso <tomeu@tomeuvizoso.net> */ + +#ifndef __ROCKET_JOB_H__ +#define __ROCKET_JOB_H__ + +#include <drm/drm_drv.h> +#include <drm/gpu_scheduler.h> + +#include "rocket_core.h" +#include "rocket_drv.h" + +struct rocket_task { + u64 regcmd; + u32 regcmd_count; +}; + +struct rocket_job { + struct drm_sched_job base; + + struct rocket_device *rdev; + + struct drm_gem_object **in_bos; + struct drm_gem_object **out_bos; + + u32 in_bo_count; + u32 out_bo_count; + + struct rocket_task *tasks; + u32 task_count; + u32 next_task_idx; + + /* Fence to be signaled by drm-sched once its done with the job */ + struct dma_fence *inference_done_fence; + + /* Fence to be signaled by IRQ handler when the job is complete. 
*/ + struct dma_fence *done_fence; + + struct rocket_iommu_domain *domain; + + struct kref refcount; +}; + +int rocket_ioctl_submit(struct drm_device *dev, void *data, struct drm_file *file); + +int rocket_job_init(struct rocket_core *core); +void rocket_job_fini(struct rocket_core *core); +int rocket_job_open(struct rocket_file_priv *rocket_priv); +void rocket_job_close(struct rocket_file_priv *rocket_priv); +int rocket_job_is_idle(struct rocket_core *core); + +#endif diff --git a/drivers/accel/rocket/rocket_registers.h b/drivers/accel/rocket/rocket_registers.h new file mode 100644 index 00000000000000..9aef614c34705e --- /dev/null +++ b/drivers/accel/rocket/rocket_registers.h @@ -0,0 +1,4404 @@ +/* SPDX-License-Identifier: GPL-2.0-only OR MIT */ + +#ifndef __ROCKET_REGISTERS_XML__ +#define __ROCKET_REGISTERS_XML__ + +/* Autogenerated file, DO NOT EDIT manually! + +This file was generated by the rules-ng-ng gen_header.py tool in this git repository: +http://gitlab.freedesktop.org/mesa/mesa/ +git clone https://gitlab.freedesktop.org/mesa/mesa.git + +The rules-ng-ng source files this header was generated from are: + +- /home/tomeu/src/mesa/src/gallium/drivers/rocket/registers.xml ( 60076 bytes, from Wed Jun 12 10:02:25 2024) + +Copyright (C) 2024-2025 by the following authors: +- Tomeu Vizoso <tomeu@tomeuvizoso.net> +*/ + +#define REG_PC_VERSION 0x00000000 +#define PC_VERSION_VERSION__MASK 0xffffffff +#define PC_VERSION_VERSION__SHIFT 0 +static inline uint32_t PC_VERSION_VERSION(uint32_t val) +{ + return ((val) << PC_VERSION_VERSION__SHIFT) & PC_VERSION_VERSION__MASK; +} + +#define REG_PC_VERSION_NUM 0x00000004 +#define PC_VERSION_NUM_VERSION_NUM__MASK 0xffffffff +#define PC_VERSION_NUM_VERSION_NUM__SHIFT 0 +static inline uint32_t PC_VERSION_NUM_VERSION_NUM(uint32_t val) +{ + return ((val) << PC_VERSION_NUM_VERSION_NUM__SHIFT) & PC_VERSION_NUM_VERSION_NUM__MASK; +} + +#define REG_PC_OPERATION_ENABLE 0x00000008 +#define PC_OPERATION_ENABLE_RESERVED_0__MASK 0xfffffffe 
+#define PC_OPERATION_ENABLE_RESERVED_0__SHIFT 1 +static inline uint32_t PC_OPERATION_ENABLE_RESERVED_0(uint32_t val) +{ + return ((val) << PC_OPERATION_ENABLE_RESERVED_0__SHIFT) & PC_OPERATION_ENABLE_RESERVED_0__MASK; +} +#define PC_OPERATION_ENABLE_OP_EN__MASK 0x00000001 +#define PC_OPERATION_ENABLE_OP_EN__SHIFT 0 +static inline uint32_t PC_OPERATION_ENABLE_OP_EN(uint32_t val) +{ + return ((val) << PC_OPERATION_ENABLE_OP_EN__SHIFT) & PC_OPERATION_ENABLE_OP_EN__MASK; +} + +#define REG_PC_BASE_ADDRESS 0x00000010 +#define PC_BASE_ADDRESS_PC_SOURCE_ADDR__MASK 0xfffffff0 +#define PC_BASE_ADDRESS_PC_SOURCE_ADDR__SHIFT 4 +static inline uint32_t PC_BASE_ADDRESS_PC_SOURCE_ADDR(uint32_t val) +{ + return ((val) << PC_BASE_ADDRESS_PC_SOURCE_ADDR__SHIFT) & PC_BASE_ADDRESS_PC_SOURCE_ADDR__MASK; +} +#define PC_BASE_ADDRESS_RESERVED_0__MASK 0x0000000e +#define PC_BASE_ADDRESS_RESERVED_0__SHIFT 1 +static inline uint32_t PC_BASE_ADDRESS_RESERVED_0(uint32_t val) +{ + return ((val) << PC_BASE_ADDRESS_RESERVED_0__SHIFT) & PC_BASE_ADDRESS_RESERVED_0__MASK; +} +#define PC_BASE_ADDRESS_PC_SEL__MASK 0x00000001 +#define PC_BASE_ADDRESS_PC_SEL__SHIFT 0 +static inline uint32_t PC_BASE_ADDRESS_PC_SEL(uint32_t val) +{ + return ((val) << PC_BASE_ADDRESS_PC_SEL__SHIFT) & PC_BASE_ADDRESS_PC_SEL__MASK; +} + +#define REG_PC_REGISTER_AMOUNTS 0x00000014 +#define PC_REGISTER_AMOUNTS_RESERVED_0__MASK 0xffff0000 +#define PC_REGISTER_AMOUNTS_RESERVED_0__SHIFT 16 +static inline uint32_t PC_REGISTER_AMOUNTS_RESERVED_0(uint32_t val) +{ + return ((val) << PC_REGISTER_AMOUNTS_RESERVED_0__SHIFT) & PC_REGISTER_AMOUNTS_RESERVED_0__MASK; +} +#define PC_REGISTER_AMOUNTS_PC_DATA_AMOUNT__MASK 0x0000ffff +#define PC_REGISTER_AMOUNTS_PC_DATA_AMOUNT__SHIFT 0 +static inline uint32_t PC_REGISTER_AMOUNTS_PC_DATA_AMOUNT(uint32_t val) +{ + return ((val) << PC_REGISTER_AMOUNTS_PC_DATA_AMOUNT__SHIFT) & PC_REGISTER_AMOUNTS_PC_DATA_AMOUNT__MASK; +} + +#define REG_PC_INTERRUPT_MASK 0x00000020 +#define 
PC_INTERRUPT_MASK_RESERVED_0__MASK 0xffffc000 +#define PC_INTERRUPT_MASK_RESERVED_0__SHIFT 14 +static inline uint32_t PC_INTERRUPT_MASK_RESERVED_0(uint32_t val) +{ + return ((val) << PC_INTERRUPT_MASK_RESERVED_0__SHIFT) & PC_INTERRUPT_MASK_RESERVED_0__MASK; +} +#define PC_INTERRUPT_MASK_DMA_WRITE_ERROR 0x00002000 +#define PC_INTERRUPT_MASK_DMA_READ_ERROR 0x00001000 +#define PC_INTERRUPT_MASK_PPU_1 0x00000800 +#define PC_INTERRUPT_MASK_PPU_0 0x00000400 +#define PC_INTERRUPT_MASK_DPU_1 0x00000200 +#define PC_INTERRUPT_MASK_DPU_0 0x00000100 +#define PC_INTERRUPT_MASK_CORE_1 0x00000080 +#define PC_INTERRUPT_MASK_CORE_0 0x00000040 +#define PC_INTERRUPT_MASK_CNA_CSC_1 0x00000020 +#define PC_INTERRUPT_MASK_CNA_CSC_0 0x00000010 +#define PC_INTERRUPT_MASK_CNA_WEIGHT_1 0x00000008 +#define PC_INTERRUPT_MASK_CNA_WEIGHT_0 0x00000004 +#define PC_INTERRUPT_MASK_CNA_FEATURE_1 0x00000002 +#define PC_INTERRUPT_MASK_CNA_FEATURE_0 0x00000001 + +#define REG_PC_INTERRUPT_CLEAR 0x00000024 +#define PC_INTERRUPT_CLEAR_RESERVED_0__MASK 0xffffc000 +#define PC_INTERRUPT_CLEAR_RESERVED_0__SHIFT 14 +static inline uint32_t PC_INTERRUPT_CLEAR_RESERVED_0(uint32_t val) +{ + return ((val) << PC_INTERRUPT_CLEAR_RESERVED_0__SHIFT) & PC_INTERRUPT_CLEAR_RESERVED_0__MASK; +} +#define PC_INTERRUPT_CLEAR_DMA_WRITE_ERROR 0x00002000 +#define PC_INTERRUPT_CLEAR_DMA_READ_ERROR 0x00001000 +#define PC_INTERRUPT_CLEAR_PPU_1 0x00000800 +#define PC_INTERRUPT_CLEAR_PPU_0 0x00000400 +#define PC_INTERRUPT_CLEAR_DPU_1 0x00000200 +#define PC_INTERRUPT_CLEAR_DPU_0 0x00000100 +#define PC_INTERRUPT_CLEAR_CORE_1 0x00000080 +#define PC_INTERRUPT_CLEAR_CORE_0 0x00000040 +#define PC_INTERRUPT_CLEAR_CNA_CSC_1 0x00000020 +#define PC_INTERRUPT_CLEAR_CNA_CSC_0 0x00000010 +#define PC_INTERRUPT_CLEAR_CNA_WEIGHT_1 0x00000008 +#define PC_INTERRUPT_CLEAR_CNA_WEIGHT_0 0x00000004 +#define PC_INTERRUPT_CLEAR_CNA_FEATURE_1 0x00000002 +#define PC_INTERRUPT_CLEAR_CNA_FEATURE_0 0x00000001 + +#define REG_PC_INTERRUPT_STATUS 0x00000028 +#define 
PC_INTERRUPT_STATUS_RESERVED_0__MASK 0xffffc000 +#define PC_INTERRUPT_STATUS_RESERVED_0__SHIFT 14 +static inline uint32_t PC_INTERRUPT_STATUS_RESERVED_0(uint32_t val) +{ + return ((val) << PC_INTERRUPT_STATUS_RESERVED_0__SHIFT) & PC_INTERRUPT_STATUS_RESERVED_0__MASK; +} +#define PC_INTERRUPT_STATUS_DMA_WRITE_ERROR 0x00002000 +#define PC_INTERRUPT_STATUS_DMA_READ_ERROR 0x00001000 +#define PC_INTERRUPT_STATUS_PPU_1 0x00000800 +#define PC_INTERRUPT_STATUS_PPU_0 0x00000400 +#define PC_INTERRUPT_STATUS_DPU_1 0x00000200 +#define PC_INTERRUPT_STATUS_DPU_0 0x00000100 +#define PC_INTERRUPT_STATUS_CORE_1 0x00000080 +#define PC_INTERRUPT_STATUS_CORE_0 0x00000040 +#define PC_INTERRUPT_STATUS_CNA_CSC_1 0x00000020 +#define PC_INTERRUPT_STATUS_CNA_CSC_0 0x00000010 +#define PC_INTERRUPT_STATUS_CNA_WEIGHT_1 0x00000008 +#define PC_INTERRUPT_STATUS_CNA_WEIGHT_0 0x00000004 +#define PC_INTERRUPT_STATUS_CNA_FEATURE_1 0x00000002 +#define PC_INTERRUPT_STATUS_CNA_FEATURE_0 0x00000001 + +#define REG_PC_INTERRUPT_RAW_STATUS 0x0000002c +#define PC_INTERRUPT_RAW_STATUS_RESERVED_0__MASK 0xffffc000 +#define PC_INTERRUPT_RAW_STATUS_RESERVED_0__SHIFT 14 +static inline uint32_t PC_INTERRUPT_RAW_STATUS_RESERVED_0(uint32_t val) +{ + return ((val) << PC_INTERRUPT_RAW_STATUS_RESERVED_0__SHIFT) & PC_INTERRUPT_RAW_STATUS_RESERVED_0__MASK; +} +#define PC_INTERRUPT_RAW_STATUS_DMA_WRITE_ERROR 0x00002000 +#define PC_INTERRUPT_RAW_STATUS_DMA_READ_ERROR 0x00001000 +#define PC_INTERRUPT_RAW_STATUS_PPU_1 0x00000800 +#define PC_INTERRUPT_RAW_STATUS_PPU_0 0x00000400 +#define PC_INTERRUPT_RAW_STATUS_DPU_1 0x00000200 +#define PC_INTERRUPT_RAW_STATUS_DPU_0 0x00000100 +#define PC_INTERRUPT_RAW_STATUS_CORE_1 0x00000080 +#define PC_INTERRUPT_RAW_STATUS_CORE_0 0x00000040 +#define PC_INTERRUPT_RAW_STATUS_CNA_CSC_1 0x00000020 +#define PC_INTERRUPT_RAW_STATUS_CNA_CSC_0 0x00000010 +#define PC_INTERRUPT_RAW_STATUS_CNA_WEIGHT_1 0x00000008 +#define PC_INTERRUPT_RAW_STATUS_CNA_WEIGHT_0 0x00000004 +#define 
PC_INTERRUPT_RAW_STATUS_CNA_FEATURE_1 0x00000002 +#define PC_INTERRUPT_RAW_STATUS_CNA_FEATURE_0 0x00000001 + +#define REG_PC_TASK_CON 0x00000030 +#define PC_TASK_CON_RESERVED_0__MASK 0xffffc000 +#define PC_TASK_CON_RESERVED_0__SHIFT 14 +static inline uint32_t PC_TASK_CON_RESERVED_0(uint32_t val) +{ + return ((val) << PC_TASK_CON_RESERVED_0__SHIFT) & PC_TASK_CON_RESERVED_0__MASK; +} +#define PC_TASK_CON_TASK_COUNT_CLEAR__MASK 0x00002000 +#define PC_TASK_CON_TASK_COUNT_CLEAR__SHIFT 13 +static inline uint32_t PC_TASK_CON_TASK_COUNT_CLEAR(uint32_t val) +{ + return ((val) << PC_TASK_CON_TASK_COUNT_CLEAR__SHIFT) & PC_TASK_CON_TASK_COUNT_CLEAR__MASK; +} +#define PC_TASK_CON_TASK_PP_EN__MASK 0x00001000 +#define PC_TASK_CON_TASK_PP_EN__SHIFT 12 +static inline uint32_t PC_TASK_CON_TASK_PP_EN(uint32_t val) +{ + return ((val) << PC_TASK_CON_TASK_PP_EN__SHIFT) & PC_TASK_CON_TASK_PP_EN__MASK; +} +#define PC_TASK_CON_TASK_NUMBER__MASK 0x00000fff +#define PC_TASK_CON_TASK_NUMBER__SHIFT 0 +static inline uint32_t PC_TASK_CON_TASK_NUMBER(uint32_t val) +{ + return ((val) << PC_TASK_CON_TASK_NUMBER__SHIFT) & PC_TASK_CON_TASK_NUMBER__MASK; +} + +#define REG_PC_TASK_DMA_BASE_ADDR 0x00000034 +#define PC_TASK_DMA_BASE_ADDR_DMA_BASE_ADDR__MASK 0xfffffff0 +#define PC_TASK_DMA_BASE_ADDR_DMA_BASE_ADDR__SHIFT 4 +static inline uint32_t PC_TASK_DMA_BASE_ADDR_DMA_BASE_ADDR(uint32_t val) +{ + return ((val) << PC_TASK_DMA_BASE_ADDR_DMA_BASE_ADDR__SHIFT) & PC_TASK_DMA_BASE_ADDR_DMA_BASE_ADDR__MASK; +} +#define PC_TASK_DMA_BASE_ADDR_RESERVED_0__MASK 0x0000000f +#define PC_TASK_DMA_BASE_ADDR_RESERVED_0__SHIFT 0 +static inline uint32_t PC_TASK_DMA_BASE_ADDR_RESERVED_0(uint32_t val) +{ + return ((val) << PC_TASK_DMA_BASE_ADDR_RESERVED_0__SHIFT) & PC_TASK_DMA_BASE_ADDR_RESERVED_0__MASK; +} + +#define REG_PC_TASK_STATUS 0x0000003c +#define PC_TASK_STATUS_RESERVED_0__MASK 0xf0000000 +#define PC_TASK_STATUS_RESERVED_0__SHIFT 28 +static inline uint32_t PC_TASK_STATUS_RESERVED_0(uint32_t val) +{ + return 
((val) << PC_TASK_STATUS_RESERVED_0__SHIFT) & PC_TASK_STATUS_RESERVED_0__MASK; +} +#define PC_TASK_STATUS_TASK_STATUS__MASK 0x0fffffff +#define PC_TASK_STATUS_TASK_STATUS__SHIFT 0 +static inline uint32_t PC_TASK_STATUS_TASK_STATUS(uint32_t val) +{ + return ((val) << PC_TASK_STATUS_TASK_STATUS__SHIFT) & PC_TASK_STATUS_TASK_STATUS__MASK; +} + +#define REG_CNA_S_STATUS 0x00001000 +#define CNA_S_STATUS_RESERVED_0__MASK 0xfffc0000 +#define CNA_S_STATUS_RESERVED_0__SHIFT 18 +static inline uint32_t CNA_S_STATUS_RESERVED_0(uint32_t val) +{ + return ((val) << CNA_S_STATUS_RESERVED_0__SHIFT) & CNA_S_STATUS_RESERVED_0__MASK; +} +#define CNA_S_STATUS_STATUS_1__MASK 0x00030000 +#define CNA_S_STATUS_STATUS_1__SHIFT 16 +static inline uint32_t CNA_S_STATUS_STATUS_1(uint32_t val) +{ + return ((val) << CNA_S_STATUS_STATUS_1__SHIFT) & CNA_S_STATUS_STATUS_1__MASK; +} +#define CNA_S_STATUS_RESERVED_1__MASK 0x0000fffc +#define CNA_S_STATUS_RESERVED_1__SHIFT 2 +static inline uint32_t CNA_S_STATUS_RESERVED_1(uint32_t val) +{ + return ((val) << CNA_S_STATUS_RESERVED_1__SHIFT) & CNA_S_STATUS_RESERVED_1__MASK; +} +#define CNA_S_STATUS_STATUS_0__MASK 0x00000003 +#define CNA_S_STATUS_STATUS_0__SHIFT 0 +static inline uint32_t CNA_S_STATUS_STATUS_0(uint32_t val) +{ + return ((val) << CNA_S_STATUS_STATUS_0__SHIFT) & CNA_S_STATUS_STATUS_0__MASK; +} + +#define REG_CNA_S_POINTER 0x00001004 +#define CNA_S_POINTER_RESERVED_0__MASK 0xfffe0000 +#define CNA_S_POINTER_RESERVED_0__SHIFT 17 +static inline uint32_t CNA_S_POINTER_RESERVED_0(uint32_t val) +{ + return ((val) << CNA_S_POINTER_RESERVED_0__SHIFT) & CNA_S_POINTER_RESERVED_0__MASK; +} +#define CNA_S_POINTER_EXECUTER__MASK 0x00010000 +#define CNA_S_POINTER_EXECUTER__SHIFT 16 +static inline uint32_t CNA_S_POINTER_EXECUTER(uint32_t val) +{ + return ((val) << CNA_S_POINTER_EXECUTER__SHIFT) & CNA_S_POINTER_EXECUTER__MASK; +} +#define CNA_S_POINTER_RESERVED_1__MASK 0x0000ffc0 +#define CNA_S_POINTER_RESERVED_1__SHIFT 6 +static inline uint32_t 
CNA_S_POINTER_RESERVED_1(uint32_t val) +{ + return ((val) << CNA_S_POINTER_RESERVED_1__SHIFT) & CNA_S_POINTER_RESERVED_1__MASK; +} +#define CNA_S_POINTER_EXECUTER_PP_CLEAR__MASK 0x00000020 +#define CNA_S_POINTER_EXECUTER_PP_CLEAR__SHIFT 5 +static inline uint32_t CNA_S_POINTER_EXECUTER_PP_CLEAR(uint32_t val) +{ + return ((val) << CNA_S_POINTER_EXECUTER_PP_CLEAR__SHIFT) & CNA_S_POINTER_EXECUTER_PP_CLEAR__MASK; +} +#define CNA_S_POINTER_POINTER_PP_CLEAR__MASK 0x00000010 +#define CNA_S_POINTER_POINTER_PP_CLEAR__SHIFT 4 +static inline uint32_t CNA_S_POINTER_POINTER_PP_CLEAR(uint32_t val) +{ + return ((val) << CNA_S_POINTER_POINTER_PP_CLEAR__SHIFT) & CNA_S_POINTER_POINTER_PP_CLEAR__MASK; +} +#define CNA_S_POINTER_POINTER_PP_MODE__MASK 0x00000008 +#define CNA_S_POINTER_POINTER_PP_MODE__SHIFT 3 +static inline uint32_t CNA_S_POINTER_POINTER_PP_MODE(uint32_t val) +{ + return ((val) << CNA_S_POINTER_POINTER_PP_MODE__SHIFT) & CNA_S_POINTER_POINTER_PP_MODE__MASK; +} +#define CNA_S_POINTER_EXECUTER_PP_EN__MASK 0x00000004 +#define CNA_S_POINTER_EXECUTER_PP_EN__SHIFT 2 +static inline uint32_t CNA_S_POINTER_EXECUTER_PP_EN(uint32_t val) +{ + return ((val) << CNA_S_POINTER_EXECUTER_PP_EN__SHIFT) & CNA_S_POINTER_EXECUTER_PP_EN__MASK; +} +#define CNA_S_POINTER_POINTER_PP_EN__MASK 0x00000002 +#define CNA_S_POINTER_POINTER_PP_EN__SHIFT 1 +static inline uint32_t CNA_S_POINTER_POINTER_PP_EN(uint32_t val) +{ + return ((val) << CNA_S_POINTER_POINTER_PP_EN__SHIFT) & CNA_S_POINTER_POINTER_PP_EN__MASK; +} +#define CNA_S_POINTER_POINTER__MASK 0x00000001 +#define CNA_S_POINTER_POINTER__SHIFT 0 +static inline uint32_t CNA_S_POINTER_POINTER(uint32_t val) +{ + return ((val) << CNA_S_POINTER_POINTER__SHIFT) & CNA_S_POINTER_POINTER__MASK; +} + +#define REG_CNA_OPERATION_ENABLE 0x00001008 +#define CNA_OPERATION_ENABLE_RESERVED_0__MASK 0xfffffffe +#define CNA_OPERATION_ENABLE_RESERVED_0__SHIFT 1 +static inline uint32_t CNA_OPERATION_ENABLE_RESERVED_0(uint32_t val) +{ + return ((val) << 
CNA_OPERATION_ENABLE_RESERVED_0__SHIFT) & CNA_OPERATION_ENABLE_RESERVED_0__MASK; +} +#define CNA_OPERATION_ENABLE_OP_EN__MASK 0x00000001 +#define CNA_OPERATION_ENABLE_OP_EN__SHIFT 0 +static inline uint32_t CNA_OPERATION_ENABLE_OP_EN(uint32_t val) +{ + return ((val) << CNA_OPERATION_ENABLE_OP_EN__SHIFT) & CNA_OPERATION_ENABLE_OP_EN__MASK; +} + +#define REG_CNA_CONV_CON1 0x0000100c +#define CNA_CONV_CON1_RESERVED_0__MASK 0x80000000 +#define CNA_CONV_CON1_RESERVED_0__SHIFT 31 +static inline uint32_t CNA_CONV_CON1_RESERVED_0(uint32_t val) +{ + return ((val) << CNA_CONV_CON1_RESERVED_0__SHIFT) & CNA_CONV_CON1_RESERVED_0__MASK; +} +#define CNA_CONV_CON1_NONALIGN_DMA__MASK 0x40000000 +#define CNA_CONV_CON1_NONALIGN_DMA__SHIFT 30 +static inline uint32_t CNA_CONV_CON1_NONALIGN_DMA(uint32_t val) +{ + return ((val) << CNA_CONV_CON1_NONALIGN_DMA__SHIFT) & CNA_CONV_CON1_NONALIGN_DMA__MASK; +} +#define CNA_CONV_CON1_GROUP_LINE_OFF__MASK 0x20000000 +#define CNA_CONV_CON1_GROUP_LINE_OFF__SHIFT 29 +static inline uint32_t CNA_CONV_CON1_GROUP_LINE_OFF(uint32_t val) +{ + return ((val) << CNA_CONV_CON1_GROUP_LINE_OFF__SHIFT) & CNA_CONV_CON1_GROUP_LINE_OFF__MASK; +} +#define CNA_CONV_CON1_RESERVED_1__MASK 0x1ffe0000 +#define CNA_CONV_CON1_RESERVED_1__SHIFT 17 +static inline uint32_t CNA_CONV_CON1_RESERVED_1(uint32_t val) +{ + return ((val) << CNA_CONV_CON1_RESERVED_1__SHIFT) & CNA_CONV_CON1_RESERVED_1__MASK; +} +#define CNA_CONV_CON1_DECONV__MASK 0x00010000 +#define CNA_CONV_CON1_DECONV__SHIFT 16 +static inline uint32_t CNA_CONV_CON1_DECONV(uint32_t val) +{ + return ((val) << CNA_CONV_CON1_DECONV__SHIFT) & CNA_CONV_CON1_DECONV__MASK; +} +#define CNA_CONV_CON1_ARGB_IN__MASK 0x0000f000 +#define CNA_CONV_CON1_ARGB_IN__SHIFT 12 +static inline uint32_t CNA_CONV_CON1_ARGB_IN(uint32_t val) +{ + return ((val) << CNA_CONV_CON1_ARGB_IN__SHIFT) & CNA_CONV_CON1_ARGB_IN__MASK; +} +#define CNA_CONV_CON1_RESERVED_2__MASK 0x00000c00 +#define CNA_CONV_CON1_RESERVED_2__SHIFT 10 +static inline uint32_t 
CNA_CONV_CON1_RESERVED_2(uint32_t val) +{ + return ((val) << CNA_CONV_CON1_RESERVED_2__SHIFT) & CNA_CONV_CON1_RESERVED_2__MASK; +} +#define CNA_CONV_CON1_PROC_PRECISION__MASK 0x00000380 +#define CNA_CONV_CON1_PROC_PRECISION__SHIFT 7 +static inline uint32_t CNA_CONV_CON1_PROC_PRECISION(uint32_t val) +{ + return ((val) << CNA_CONV_CON1_PROC_PRECISION__SHIFT) & CNA_CONV_CON1_PROC_PRECISION__MASK; +} +#define CNA_CONV_CON1_IN_PRECISION__MASK 0x00000070 +#define CNA_CONV_CON1_IN_PRECISION__SHIFT 4 +static inline uint32_t CNA_CONV_CON1_IN_PRECISION(uint32_t val) +{ + return ((val) << CNA_CONV_CON1_IN_PRECISION__SHIFT) & CNA_CONV_CON1_IN_PRECISION__MASK; +} +#define CNA_CONV_CON1_CONV_MODE__MASK 0x0000000f +#define CNA_CONV_CON1_CONV_MODE__SHIFT 0 +static inline uint32_t CNA_CONV_CON1_CONV_MODE(uint32_t val) +{ + return ((val) << CNA_CONV_CON1_CONV_MODE__SHIFT) & CNA_CONV_CON1_CONV_MODE__MASK; +} + +#define REG_CNA_CONV_CON2 0x00001010 +#define CNA_CONV_CON2_RESERVED_0__MASK 0xff000000 +#define CNA_CONV_CON2_RESERVED_0__SHIFT 24 +static inline uint32_t CNA_CONV_CON2_RESERVED_0(uint32_t val) +{ + return ((val) << CNA_CONV_CON2_RESERVED_0__SHIFT) & CNA_CONV_CON2_RESERVED_0__MASK; +} +#define CNA_CONV_CON2_KERNEL_GROUP__MASK 0x00ff0000 +#define CNA_CONV_CON2_KERNEL_GROUP__SHIFT 16 +static inline uint32_t CNA_CONV_CON2_KERNEL_GROUP(uint32_t val) +{ + return ((val) << CNA_CONV_CON2_KERNEL_GROUP__SHIFT) & CNA_CONV_CON2_KERNEL_GROUP__MASK; +} +#define CNA_CONV_CON2_RESERVED_1__MASK 0x0000c000 +#define CNA_CONV_CON2_RESERVED_1__SHIFT 14 +static inline uint32_t CNA_CONV_CON2_RESERVED_1(uint32_t val) +{ + return ((val) << CNA_CONV_CON2_RESERVED_1__SHIFT) & CNA_CONV_CON2_RESERVED_1__MASK; +} +#define CNA_CONV_CON2_FEATURE_GRAINS__MASK 0x00003ff0 +#define CNA_CONV_CON2_FEATURE_GRAINS__SHIFT 4 +static inline uint32_t CNA_CONV_CON2_FEATURE_GRAINS(uint32_t val) +{ + return ((val) << CNA_CONV_CON2_FEATURE_GRAINS__SHIFT) & CNA_CONV_CON2_FEATURE_GRAINS__MASK; +} +#define 
CNA_CONV_CON2_RESERVED_2__MASK 0x00000008 +#define CNA_CONV_CON2_RESERVED_2__SHIFT 3 +static inline uint32_t CNA_CONV_CON2_RESERVED_2(uint32_t val) +{ + return ((val) << CNA_CONV_CON2_RESERVED_2__SHIFT) & CNA_CONV_CON2_RESERVED_2__MASK; +} +#define CNA_CONV_CON2_CSC_WO_EN__MASK 0x00000004 +#define CNA_CONV_CON2_CSC_WO_EN__SHIFT 2 +static inline uint32_t CNA_CONV_CON2_CSC_WO_EN(uint32_t val) +{ + return ((val) << CNA_CONV_CON2_CSC_WO_EN__SHIFT) & CNA_CONV_CON2_CSC_WO_EN__MASK; +} +#define CNA_CONV_CON2_CSC_DO_EN__MASK 0x00000002 +#define CNA_CONV_CON2_CSC_DO_EN__SHIFT 1 +static inline uint32_t CNA_CONV_CON2_CSC_DO_EN(uint32_t val) +{ + return ((val) << CNA_CONV_CON2_CSC_DO_EN__SHIFT) & CNA_CONV_CON2_CSC_DO_EN__MASK; +} +#define CNA_CONV_CON2_CMD_FIFO_SRST__MASK 0x00000001 +#define CNA_CONV_CON2_CMD_FIFO_SRST__SHIFT 0 +static inline uint32_t CNA_CONV_CON2_CMD_FIFO_SRST(uint32_t val) +{ + return ((val) << CNA_CONV_CON2_CMD_FIFO_SRST__SHIFT) & CNA_CONV_CON2_CMD_FIFO_SRST__MASK; +} + +#define REG_CNA_CONV_CON3 0x00001014 +#define CNA_CONV_CON3_RESERVED_0__MASK 0x80000000 +#define CNA_CONV_CON3_RESERVED_0__SHIFT 31 +static inline uint32_t CNA_CONV_CON3_RESERVED_0(uint32_t val) +{ + return ((val) << CNA_CONV_CON3_RESERVED_0__SHIFT) & CNA_CONV_CON3_RESERVED_0__MASK; +} +#define CNA_CONV_CON3_NN_MODE__MASK 0x70000000 +#define CNA_CONV_CON3_NN_MODE__SHIFT 28 +static inline uint32_t CNA_CONV_CON3_NN_MODE(uint32_t val) +{ + return ((val) << CNA_CONV_CON3_NN_MODE__SHIFT) & CNA_CONV_CON3_NN_MODE__MASK; +} +#define CNA_CONV_CON3_RESERVED_1__MASK 0x0c000000 +#define CNA_CONV_CON3_RESERVED_1__SHIFT 26 +static inline uint32_t CNA_CONV_CON3_RESERVED_1(uint32_t val) +{ + return ((val) << CNA_CONV_CON3_RESERVED_1__SHIFT) & CNA_CONV_CON3_RESERVED_1__MASK; +} +#define CNA_CONV_CON3_ATROUS_Y_DILATION__MASK 0x03e00000 +#define CNA_CONV_CON3_ATROUS_Y_DILATION__SHIFT 21 +static inline uint32_t CNA_CONV_CON3_ATROUS_Y_DILATION(uint32_t val) +{ + return ((val) << 
CNA_CONV_CON3_ATROUS_Y_DILATION__SHIFT) & CNA_CONV_CON3_ATROUS_Y_DILATION__MASK; +} +#define CNA_CONV_CON3_ATROUS_X_DILATION__MASK 0x001f0000 +#define CNA_CONV_CON3_ATROUS_X_DILATION__SHIFT 16 +static inline uint32_t CNA_CONV_CON3_ATROUS_X_DILATION(uint32_t val) +{ + return ((val) << CNA_CONV_CON3_ATROUS_X_DILATION__SHIFT) & CNA_CONV_CON3_ATROUS_X_DILATION__MASK; +} +#define CNA_CONV_CON3_RESERVED_2__MASK 0x0000c000 +#define CNA_CONV_CON3_RESERVED_2__SHIFT 14 +static inline uint32_t CNA_CONV_CON3_RESERVED_2(uint32_t val) +{ + return ((val) << CNA_CONV_CON3_RESERVED_2__SHIFT) & CNA_CONV_CON3_RESERVED_2__MASK; +} +#define CNA_CONV_CON3_DECONV_Y_STRIDE__MASK 0x00003800 +#define CNA_CONV_CON3_DECONV_Y_STRIDE__SHIFT 11 +static inline uint32_t CNA_CONV_CON3_DECONV_Y_STRIDE(uint32_t val) +{ + return ((val) << CNA_CONV_CON3_DECONV_Y_STRIDE__SHIFT) & CNA_CONV_CON3_DECONV_Y_STRIDE__MASK; +} +#define CNA_CONV_CON3_DECONV_X_STRIDE__MASK 0x00000700 +#define CNA_CONV_CON3_DECONV_X_STRIDE__SHIFT 8 +static inline uint32_t CNA_CONV_CON3_DECONV_X_STRIDE(uint32_t val) +{ + return ((val) << CNA_CONV_CON3_DECONV_X_STRIDE__SHIFT) & CNA_CONV_CON3_DECONV_X_STRIDE__MASK; +} +#define CNA_CONV_CON3_RESERVED_3__MASK 0x000000c0 +#define CNA_CONV_CON3_RESERVED_3__SHIFT 6 +static inline uint32_t CNA_CONV_CON3_RESERVED_3(uint32_t val) +{ + return ((val) << CNA_CONV_CON3_RESERVED_3__SHIFT) & CNA_CONV_CON3_RESERVED_3__MASK; +} +#define CNA_CONV_CON3_CONV_Y_STRIDE__MASK 0x00000038 +#define CNA_CONV_CON3_CONV_Y_STRIDE__SHIFT 3 +static inline uint32_t CNA_CONV_CON3_CONV_Y_STRIDE(uint32_t val) +{ + return ((val) << CNA_CONV_CON3_CONV_Y_STRIDE__SHIFT) & CNA_CONV_CON3_CONV_Y_STRIDE__MASK; +} +#define CNA_CONV_CON3_CONV_X_STRIDE__MASK 0x00000007 +#define CNA_CONV_CON3_CONV_X_STRIDE__SHIFT 0 +static inline uint32_t CNA_CONV_CON3_CONV_X_STRIDE(uint32_t val) +{ + return ((val) << CNA_CONV_CON3_CONV_X_STRIDE__SHIFT) & CNA_CONV_CON3_CONV_X_STRIDE__MASK; +} + +#define REG_CNA_DATA_SIZE0 0x00001020 +#define 
CNA_DATA_SIZE0_RESERVED_0__MASK 0xf8000000 +#define CNA_DATA_SIZE0_RESERVED_0__SHIFT 27 +static inline uint32_t CNA_DATA_SIZE0_RESERVED_0(uint32_t val) +{ + return ((val) << CNA_DATA_SIZE0_RESERVED_0__SHIFT) & CNA_DATA_SIZE0_RESERVED_0__MASK; +} +#define CNA_DATA_SIZE0_DATAIN_WIDTH__MASK 0x07ff0000 +#define CNA_DATA_SIZE0_DATAIN_WIDTH__SHIFT 16 +static inline uint32_t CNA_DATA_SIZE0_DATAIN_WIDTH(uint32_t val) +{ + return ((val) << CNA_DATA_SIZE0_DATAIN_WIDTH__SHIFT) & CNA_DATA_SIZE0_DATAIN_WIDTH__MASK; +} +#define CNA_DATA_SIZE0_RESERVED_1__MASK 0x0000f800 +#define CNA_DATA_SIZE0_RESERVED_1__SHIFT 11 +static inline uint32_t CNA_DATA_SIZE0_RESERVED_1(uint32_t val) +{ + return ((val) << CNA_DATA_SIZE0_RESERVED_1__SHIFT) & CNA_DATA_SIZE0_RESERVED_1__MASK; +} +#define CNA_DATA_SIZE0_DATAIN_HEIGHT__MASK 0x000007ff +#define CNA_DATA_SIZE0_DATAIN_HEIGHT__SHIFT 0 +static inline uint32_t CNA_DATA_SIZE0_DATAIN_HEIGHT(uint32_t val) +{ + return ((val) << CNA_DATA_SIZE0_DATAIN_HEIGHT__SHIFT) & CNA_DATA_SIZE0_DATAIN_HEIGHT__MASK; +} + +#define REG_CNA_DATA_SIZE1 0x00001024 +#define CNA_DATA_SIZE1_RESERVED_0__MASK 0xc0000000 +#define CNA_DATA_SIZE1_RESERVED_0__SHIFT 30 +static inline uint32_t CNA_DATA_SIZE1_RESERVED_0(uint32_t val) +{ + return ((val) << CNA_DATA_SIZE1_RESERVED_0__SHIFT) & CNA_DATA_SIZE1_RESERVED_0__MASK; +} +#define CNA_DATA_SIZE1_DATAIN_CHANNEL_REAL__MASK 0x3fff0000 +#define CNA_DATA_SIZE1_DATAIN_CHANNEL_REAL__SHIFT 16 +static inline uint32_t CNA_DATA_SIZE1_DATAIN_CHANNEL_REAL(uint32_t val) +{ + return ((val) << CNA_DATA_SIZE1_DATAIN_CHANNEL_REAL__SHIFT) & CNA_DATA_SIZE1_DATAIN_CHANNEL_REAL__MASK; +} +#define CNA_DATA_SIZE1_DATAIN_CHANNEL__MASK 0x0000ffff +#define CNA_DATA_SIZE1_DATAIN_CHANNEL__SHIFT 0 +static inline uint32_t CNA_DATA_SIZE1_DATAIN_CHANNEL(uint32_t val) +{ + return ((val) << CNA_DATA_SIZE1_DATAIN_CHANNEL__SHIFT) & CNA_DATA_SIZE1_DATAIN_CHANNEL__MASK; +} + +#define REG_CNA_DATA_SIZE2 0x00001028 +#define CNA_DATA_SIZE2_RESERVED_0__MASK 0xfffff800 
+#define CNA_DATA_SIZE2_RESERVED_0__SHIFT 11 +static inline uint32_t CNA_DATA_SIZE2_RESERVED_0(uint32_t val) +{ + return ((val) << CNA_DATA_SIZE2_RESERVED_0__SHIFT) & CNA_DATA_SIZE2_RESERVED_0__MASK; +} +#define CNA_DATA_SIZE2_DATAOUT_WIDTH__MASK 0x000007ff +#define CNA_DATA_SIZE2_DATAOUT_WIDTH__SHIFT 0 +static inline uint32_t CNA_DATA_SIZE2_DATAOUT_WIDTH(uint32_t val) +{ + return ((val) << CNA_DATA_SIZE2_DATAOUT_WIDTH__SHIFT) & CNA_DATA_SIZE2_DATAOUT_WIDTH__MASK; +} + +#define REG_CNA_DATA_SIZE3 0x0000102c +#define CNA_DATA_SIZE3_RESERVED_0__MASK 0xff000000 +#define CNA_DATA_SIZE3_RESERVED_0__SHIFT 24 +static inline uint32_t CNA_DATA_SIZE3_RESERVED_0(uint32_t val) +{ + return ((val) << CNA_DATA_SIZE3_RESERVED_0__SHIFT) & CNA_DATA_SIZE3_RESERVED_0__MASK; +} +#define CNA_DATA_SIZE3_SURF_MODE__MASK 0x00c00000 +#define CNA_DATA_SIZE3_SURF_MODE__SHIFT 22 +static inline uint32_t CNA_DATA_SIZE3_SURF_MODE(uint32_t val) +{ + return ((val) << CNA_DATA_SIZE3_SURF_MODE__SHIFT) & CNA_DATA_SIZE3_SURF_MODE__MASK; +} +#define CNA_DATA_SIZE3_DATAOUT_ATOMICS__MASK 0x003fffff +#define CNA_DATA_SIZE3_DATAOUT_ATOMICS__SHIFT 0 +static inline uint32_t CNA_DATA_SIZE3_DATAOUT_ATOMICS(uint32_t val) +{ + return ((val) << CNA_DATA_SIZE3_DATAOUT_ATOMICS__SHIFT) & CNA_DATA_SIZE3_DATAOUT_ATOMICS__MASK; +} + +#define REG_CNA_WEIGHT_SIZE0 0x00001030 +#define CNA_WEIGHT_SIZE0_WEIGHT_BYTES__MASK 0xffffffff +#define CNA_WEIGHT_SIZE0_WEIGHT_BYTES__SHIFT 0 +static inline uint32_t CNA_WEIGHT_SIZE0_WEIGHT_BYTES(uint32_t val) +{ + return ((val) << CNA_WEIGHT_SIZE0_WEIGHT_BYTES__SHIFT) & CNA_WEIGHT_SIZE0_WEIGHT_BYTES__MASK; +} + +#define REG_CNA_WEIGHT_SIZE1 0x00001034 +#define CNA_WEIGHT_SIZE1_RESERVED_0__MASK 0xfff80000 +#define CNA_WEIGHT_SIZE1_RESERVED_0__SHIFT 19 +static inline uint32_t CNA_WEIGHT_SIZE1_RESERVED_0(uint32_t val) +{ + return ((val) << CNA_WEIGHT_SIZE1_RESERVED_0__SHIFT) & CNA_WEIGHT_SIZE1_RESERVED_0__MASK; +} +#define CNA_WEIGHT_SIZE1_WEIGHT_BYTES_PER_KERNEL__MASK 0x0007ffff +#define 
CNA_WEIGHT_SIZE1_WEIGHT_BYTES_PER_KERNEL__SHIFT 0 +static inline uint32_t CNA_WEIGHT_SIZE1_WEIGHT_BYTES_PER_KERNEL(uint32_t val) +{ + return ((val) << CNA_WEIGHT_SIZE1_WEIGHT_BYTES_PER_KERNEL__SHIFT) & CNA_WEIGHT_SIZE1_WEIGHT_BYTES_PER_KERNEL__MASK; +} + +#define REG_CNA_WEIGHT_SIZE2 0x00001038 +#define CNA_WEIGHT_SIZE2_RESERVED_0__MASK 0xe0000000 +#define CNA_WEIGHT_SIZE2_RESERVED_0__SHIFT 29 +static inline uint32_t CNA_WEIGHT_SIZE2_RESERVED_0(uint32_t val) +{ + return ((val) << CNA_WEIGHT_SIZE2_RESERVED_0__SHIFT) & CNA_WEIGHT_SIZE2_RESERVED_0__MASK; +} +#define CNA_WEIGHT_SIZE2_WEIGHT_WIDTH__MASK 0x1f000000 +#define CNA_WEIGHT_SIZE2_WEIGHT_WIDTH__SHIFT 24 +static inline uint32_t CNA_WEIGHT_SIZE2_WEIGHT_WIDTH(uint32_t val) +{ + return ((val) << CNA_WEIGHT_SIZE2_WEIGHT_WIDTH__SHIFT) & CNA_WEIGHT_SIZE2_WEIGHT_WIDTH__MASK; +} +#define CNA_WEIGHT_SIZE2_RESERVED_1__MASK 0x00e00000 +#define CNA_WEIGHT_SIZE2_RESERVED_1__SHIFT 21 +static inline uint32_t CNA_WEIGHT_SIZE2_RESERVED_1(uint32_t val) +{ + return ((val) << CNA_WEIGHT_SIZE2_RESERVED_1__SHIFT) & CNA_WEIGHT_SIZE2_RESERVED_1__MASK; +} +#define CNA_WEIGHT_SIZE2_WEIGHT_HEIGHT__MASK 0x001f0000 +#define CNA_WEIGHT_SIZE2_WEIGHT_HEIGHT__SHIFT 16 +static inline uint32_t CNA_WEIGHT_SIZE2_WEIGHT_HEIGHT(uint32_t val) +{ + return ((val) << CNA_WEIGHT_SIZE2_WEIGHT_HEIGHT__SHIFT) & CNA_WEIGHT_SIZE2_WEIGHT_HEIGHT__MASK; +} +#define CNA_WEIGHT_SIZE2_RESERVED_2__MASK 0x0000c000 +#define CNA_WEIGHT_SIZE2_RESERVED_2__SHIFT 14 +static inline uint32_t CNA_WEIGHT_SIZE2_RESERVED_2(uint32_t val) +{ + return ((val) << CNA_WEIGHT_SIZE2_RESERVED_2__SHIFT) & CNA_WEIGHT_SIZE2_RESERVED_2__MASK; +} +#define CNA_WEIGHT_SIZE2_WEIGHT_KERNELS__MASK 0x00003fff +#define CNA_WEIGHT_SIZE2_WEIGHT_KERNELS__SHIFT 0 +static inline uint32_t CNA_WEIGHT_SIZE2_WEIGHT_KERNELS(uint32_t val) +{ + return ((val) << CNA_WEIGHT_SIZE2_WEIGHT_KERNELS__SHIFT) & CNA_WEIGHT_SIZE2_WEIGHT_KERNELS__MASK; +} + +#define REG_CNA_CBUF_CON0 0x00001040 +#define 
CNA_CBUF_CON0_RESERVED_0__MASK 0xffffc000 +#define CNA_CBUF_CON0_RESERVED_0__SHIFT 14 +static inline uint32_t CNA_CBUF_CON0_RESERVED_0(uint32_t val) +{ + return ((val) << CNA_CBUF_CON0_RESERVED_0__SHIFT) & CNA_CBUF_CON0_RESERVED_0__MASK; +} +#define CNA_CBUF_CON0_WEIGHT_REUSE__MASK 0x00002000 +#define CNA_CBUF_CON0_WEIGHT_REUSE__SHIFT 13 +static inline uint32_t CNA_CBUF_CON0_WEIGHT_REUSE(uint32_t val) +{ + return ((val) << CNA_CBUF_CON0_WEIGHT_REUSE__SHIFT) & CNA_CBUF_CON0_WEIGHT_REUSE__MASK; +} +#define CNA_CBUF_CON0_DATA_REUSE__MASK 0x00001000 +#define CNA_CBUF_CON0_DATA_REUSE__SHIFT 12 +static inline uint32_t CNA_CBUF_CON0_DATA_REUSE(uint32_t val) +{ + return ((val) << CNA_CBUF_CON0_DATA_REUSE__SHIFT) & CNA_CBUF_CON0_DATA_REUSE__MASK; +} +#define CNA_CBUF_CON0_RESERVED_1__MASK 0x00000800 +#define CNA_CBUF_CON0_RESERVED_1__SHIFT 11 +static inline uint32_t CNA_CBUF_CON0_RESERVED_1(uint32_t val) +{ + return ((val) << CNA_CBUF_CON0_RESERVED_1__SHIFT) & CNA_CBUF_CON0_RESERVED_1__MASK; +} +#define CNA_CBUF_CON0_FC_DATA_BANK__MASK 0x00000700 +#define CNA_CBUF_CON0_FC_DATA_BANK__SHIFT 8 +static inline uint32_t CNA_CBUF_CON0_FC_DATA_BANK(uint32_t val) +{ + return ((val) << CNA_CBUF_CON0_FC_DATA_BANK__SHIFT) & CNA_CBUF_CON0_FC_DATA_BANK__MASK; +} +#define CNA_CBUF_CON0_WEIGHT_BANK__MASK 0x000000f0 +#define CNA_CBUF_CON0_WEIGHT_BANK__SHIFT 4 +static inline uint32_t CNA_CBUF_CON0_WEIGHT_BANK(uint32_t val) +{ + return ((val) << CNA_CBUF_CON0_WEIGHT_BANK__SHIFT) & CNA_CBUF_CON0_WEIGHT_BANK__MASK; +} +#define CNA_CBUF_CON0_DATA_BANK__MASK 0x0000000f +#define CNA_CBUF_CON0_DATA_BANK__SHIFT 0 +static inline uint32_t CNA_CBUF_CON0_DATA_BANK(uint32_t val) +{ + return ((val) << CNA_CBUF_CON0_DATA_BANK__SHIFT) & CNA_CBUF_CON0_DATA_BANK__MASK; +} + +#define REG_CNA_CBUF_CON1 0x00001044 +#define CNA_CBUF_CON1_RESERVED_0__MASK 0xffffc000 +#define CNA_CBUF_CON1_RESERVED_0__SHIFT 14 +static inline uint32_t CNA_CBUF_CON1_RESERVED_0(uint32_t val) +{ + return ((val) << 
CNA_CBUF_CON1_RESERVED_0__SHIFT) & CNA_CBUF_CON1_RESERVED_0__MASK; +} +#define CNA_CBUF_CON1_DATA_ENTRIES__MASK 0x00003fff +#define CNA_CBUF_CON1_DATA_ENTRIES__SHIFT 0 +static inline uint32_t CNA_CBUF_CON1_DATA_ENTRIES(uint32_t val) +{ + return ((val) << CNA_CBUF_CON1_DATA_ENTRIES__SHIFT) & CNA_CBUF_CON1_DATA_ENTRIES__MASK; +} + +#define REG_CNA_CVT_CON0 0x0000104c +#define CNA_CVT_CON0_RESERVED_0__MASK 0xf0000000 +#define CNA_CVT_CON0_RESERVED_0__SHIFT 28 +static inline uint32_t CNA_CVT_CON0_RESERVED_0(uint32_t val) +{ + return ((val) << CNA_CVT_CON0_RESERVED_0__SHIFT) & CNA_CVT_CON0_RESERVED_0__MASK; +} +#define CNA_CVT_CON0_CVT_TRUNCATE_3__MASK 0x0fc00000 +#define CNA_CVT_CON0_CVT_TRUNCATE_3__SHIFT 22 +static inline uint32_t CNA_CVT_CON0_CVT_TRUNCATE_3(uint32_t val) +{ + return ((val) << CNA_CVT_CON0_CVT_TRUNCATE_3__SHIFT) & CNA_CVT_CON0_CVT_TRUNCATE_3__MASK; +} +#define CNA_CVT_CON0_CVT_TRUNCATE_2__MASK 0x003f0000 +#define CNA_CVT_CON0_CVT_TRUNCATE_2__SHIFT 16 +static inline uint32_t CNA_CVT_CON0_CVT_TRUNCATE_2(uint32_t val) +{ + return ((val) << CNA_CVT_CON0_CVT_TRUNCATE_2__SHIFT) & CNA_CVT_CON0_CVT_TRUNCATE_2__MASK; +} +#define CNA_CVT_CON0_CVT_TRUNCATE_1__MASK 0x0000fc00 +#define CNA_CVT_CON0_CVT_TRUNCATE_1__SHIFT 10 +static inline uint32_t CNA_CVT_CON0_CVT_TRUNCATE_1(uint32_t val) +{ + return ((val) << CNA_CVT_CON0_CVT_TRUNCATE_1__SHIFT) & CNA_CVT_CON0_CVT_TRUNCATE_1__MASK; +} +#define CNA_CVT_CON0_CVT_TRUNCATE_0__MASK 0x000003f0 +#define CNA_CVT_CON0_CVT_TRUNCATE_0__SHIFT 4 +static inline uint32_t CNA_CVT_CON0_CVT_TRUNCATE_0(uint32_t val) +{ + return ((val) << CNA_CVT_CON0_CVT_TRUNCATE_0__SHIFT) & CNA_CVT_CON0_CVT_TRUNCATE_0__MASK; +} +#define CNA_CVT_CON0_DATA_SIGN__MASK 0x00000008 +#define CNA_CVT_CON0_DATA_SIGN__SHIFT 3 +static inline uint32_t CNA_CVT_CON0_DATA_SIGN(uint32_t val) +{ + return ((val) << CNA_CVT_CON0_DATA_SIGN__SHIFT) & CNA_CVT_CON0_DATA_SIGN__MASK; +} +#define CNA_CVT_CON0_ROUND_TYPE__MASK 0x00000004 +#define 
CNA_CVT_CON0_ROUND_TYPE__SHIFT 2 +static inline uint32_t CNA_CVT_CON0_ROUND_TYPE(uint32_t val) +{ + return ((val) << CNA_CVT_CON0_ROUND_TYPE__SHIFT) & CNA_CVT_CON0_ROUND_TYPE__MASK; +} +#define CNA_CVT_CON0_CVT_TYPE__MASK 0x00000002 +#define CNA_CVT_CON0_CVT_TYPE__SHIFT 1 +static inline uint32_t CNA_CVT_CON0_CVT_TYPE(uint32_t val) +{ + return ((val) << CNA_CVT_CON0_CVT_TYPE__SHIFT) & CNA_CVT_CON0_CVT_TYPE__MASK; +} +#define CNA_CVT_CON0_CVT_BYPASS__MASK 0x00000001 +#define CNA_CVT_CON0_CVT_BYPASS__SHIFT 0 +static inline uint32_t CNA_CVT_CON0_CVT_BYPASS(uint32_t val) +{ + return ((val) << CNA_CVT_CON0_CVT_BYPASS__SHIFT) & CNA_CVT_CON0_CVT_BYPASS__MASK; +} + +#define REG_CNA_CVT_CON1 0x00001050 +#define CNA_CVT_CON1_CVT_SCALE0__MASK 0xffff0000 +#define CNA_CVT_CON1_CVT_SCALE0__SHIFT 16 +static inline uint32_t CNA_CVT_CON1_CVT_SCALE0(uint32_t val) +{ + return ((val) << CNA_CVT_CON1_CVT_SCALE0__SHIFT) & CNA_CVT_CON1_CVT_SCALE0__MASK; +} +#define CNA_CVT_CON1_CVT_OFFSET0__MASK 0x0000ffff +#define CNA_CVT_CON1_CVT_OFFSET0__SHIFT 0 +static inline uint32_t CNA_CVT_CON1_CVT_OFFSET0(uint32_t val) +{ + return ((val) << CNA_CVT_CON1_CVT_OFFSET0__SHIFT) & CNA_CVT_CON1_CVT_OFFSET0__MASK; +} + +#define REG_CNA_CVT_CON2 0x00001054 +#define CNA_CVT_CON2_CVT_SCALE1__MASK 0xffff0000 +#define CNA_CVT_CON2_CVT_SCALE1__SHIFT 16 +static inline uint32_t CNA_CVT_CON2_CVT_SCALE1(uint32_t val) +{ + return ((val) << CNA_CVT_CON2_CVT_SCALE1__SHIFT) & CNA_CVT_CON2_CVT_SCALE1__MASK; +} +#define CNA_CVT_CON2_CVT_OFFSET1__MASK 0x0000ffff +#define CNA_CVT_CON2_CVT_OFFSET1__SHIFT 0 +static inline uint32_t CNA_CVT_CON2_CVT_OFFSET1(uint32_t val) +{ + return ((val) << CNA_CVT_CON2_CVT_OFFSET1__SHIFT) & CNA_CVT_CON2_CVT_OFFSET1__MASK; +} + +#define REG_CNA_CVT_CON3 0x00001058 +#define CNA_CVT_CON3_CVT_SCALE2__MASK 0xffff0000 +#define CNA_CVT_CON3_CVT_SCALE2__SHIFT 16 +static inline uint32_t CNA_CVT_CON3_CVT_SCALE2(uint32_t val) +{ + return ((val) << CNA_CVT_CON3_CVT_SCALE2__SHIFT) & 
CNA_CVT_CON3_CVT_SCALE2__MASK; +} +#define CNA_CVT_CON3_CVT_OFFSET2__MASK 0x0000ffff +#define CNA_CVT_CON3_CVT_OFFSET2__SHIFT 0 +static inline uint32_t CNA_CVT_CON3_CVT_OFFSET2(uint32_t val) +{ + return ((val) << CNA_CVT_CON3_CVT_OFFSET2__SHIFT) & CNA_CVT_CON3_CVT_OFFSET2__MASK; +} + +#define REG_CNA_CVT_CON4 0x0000105c +#define CNA_CVT_CON4_CVT_SCALE3__MASK 0xffff0000 +#define CNA_CVT_CON4_CVT_SCALE3__SHIFT 16 +static inline uint32_t CNA_CVT_CON4_CVT_SCALE3(uint32_t val) +{ + return ((val) << CNA_CVT_CON4_CVT_SCALE3__SHIFT) & CNA_CVT_CON4_CVT_SCALE3__MASK; +} +#define CNA_CVT_CON4_CVT_OFFSET3__MASK 0x0000ffff +#define CNA_CVT_CON4_CVT_OFFSET3__SHIFT 0 +static inline uint32_t CNA_CVT_CON4_CVT_OFFSET3(uint32_t val) +{ + return ((val) << CNA_CVT_CON4_CVT_OFFSET3__SHIFT) & CNA_CVT_CON4_CVT_OFFSET3__MASK; +} + +#define REG_CNA_FC_CON0 0x00001060 +#define CNA_FC_CON0_FC_SKIP_DATA__MASK 0xffff0000 +#define CNA_FC_CON0_FC_SKIP_DATA__SHIFT 16 +static inline uint32_t CNA_FC_CON0_FC_SKIP_DATA(uint32_t val) +{ + return ((val) << CNA_FC_CON0_FC_SKIP_DATA__SHIFT) & CNA_FC_CON0_FC_SKIP_DATA__MASK; +} +#define CNA_FC_CON0_RESERVED_0__MASK 0x0000fffe +#define CNA_FC_CON0_RESERVED_0__SHIFT 1 +static inline uint32_t CNA_FC_CON0_RESERVED_0(uint32_t val) +{ + return ((val) << CNA_FC_CON0_RESERVED_0__SHIFT) & CNA_FC_CON0_RESERVED_0__MASK; +} +#define CNA_FC_CON0_FC_SKIP_EN__MASK 0x00000001 +#define CNA_FC_CON0_FC_SKIP_EN__SHIFT 0 +static inline uint32_t CNA_FC_CON0_FC_SKIP_EN(uint32_t val) +{ + return ((val) << CNA_FC_CON0_FC_SKIP_EN__SHIFT) & CNA_FC_CON0_FC_SKIP_EN__MASK; +} + +#define REG_CNA_FC_CON1 0x00001064 +#define CNA_FC_CON1_RESERVED_0__MASK 0xfffe0000 +#define CNA_FC_CON1_RESERVED_0__SHIFT 17 +static inline uint32_t CNA_FC_CON1_RESERVED_0(uint32_t val) +{ + return ((val) << CNA_FC_CON1_RESERVED_0__SHIFT) & CNA_FC_CON1_RESERVED_0__MASK; +} +#define CNA_FC_CON1_DATA_OFFSET__MASK 0x0001ffff +#define CNA_FC_CON1_DATA_OFFSET__SHIFT 0 +static inline uint32_t 
CNA_FC_CON1_DATA_OFFSET(uint32_t val) +{ + return ((val) << CNA_FC_CON1_DATA_OFFSET__SHIFT) & CNA_FC_CON1_DATA_OFFSET__MASK; +} + +#define REG_CNA_PAD_CON0 0x00001068 +#define CNA_PAD_CON0_RESERVED_0__MASK 0xffffff00 +#define CNA_PAD_CON0_RESERVED_0__SHIFT 8 +static inline uint32_t CNA_PAD_CON0_RESERVED_0(uint32_t val) +{ + return ((val) << CNA_PAD_CON0_RESERVED_0__SHIFT) & CNA_PAD_CON0_RESERVED_0__MASK; +} +#define CNA_PAD_CON0_PAD_LEFT__MASK 0x000000f0 +#define CNA_PAD_CON0_PAD_LEFT__SHIFT 4 +static inline uint32_t CNA_PAD_CON0_PAD_LEFT(uint32_t val) +{ + return ((val) << CNA_PAD_CON0_PAD_LEFT__SHIFT) & CNA_PAD_CON0_PAD_LEFT__MASK; +} +#define CNA_PAD_CON0_PAD_TOP__MASK 0x0000000f +#define CNA_PAD_CON0_PAD_TOP__SHIFT 0 +static inline uint32_t CNA_PAD_CON0_PAD_TOP(uint32_t val) +{ + return ((val) << CNA_PAD_CON0_PAD_TOP__SHIFT) & CNA_PAD_CON0_PAD_TOP__MASK; +} + +#define REG_CNA_FEATURE_DATA_ADDR 0x00001070 +#define CNA_FEATURE_DATA_ADDR_FEATURE_BASE_ADDR__MASK 0xffffffff +#define CNA_FEATURE_DATA_ADDR_FEATURE_BASE_ADDR__SHIFT 0 +static inline uint32_t CNA_FEATURE_DATA_ADDR_FEATURE_BASE_ADDR(uint32_t val) +{ + return ((val) << CNA_FEATURE_DATA_ADDR_FEATURE_BASE_ADDR__SHIFT) & CNA_FEATURE_DATA_ADDR_FEATURE_BASE_ADDR__MASK; +} + +#define REG_CNA_FC_CON2 0x00001074 +#define CNA_FC_CON2_RESERVED_0__MASK 0xfffe0000 +#define CNA_FC_CON2_RESERVED_0__SHIFT 17 +static inline uint32_t CNA_FC_CON2_RESERVED_0(uint32_t val) +{ + return ((val) << CNA_FC_CON2_RESERVED_0__SHIFT) & CNA_FC_CON2_RESERVED_0__MASK; +} +#define CNA_FC_CON2_WEIGHT_OFFSET__MASK 0x0001ffff +#define CNA_FC_CON2_WEIGHT_OFFSET__SHIFT 0 +static inline uint32_t CNA_FC_CON2_WEIGHT_OFFSET(uint32_t val) +{ + return ((val) << CNA_FC_CON2_WEIGHT_OFFSET__SHIFT) & CNA_FC_CON2_WEIGHT_OFFSET__MASK; +} + +#define REG_CNA_DMA_CON0 0x00001078 +#define CNA_DMA_CON0_OV4K_BYPASS__MASK 0x80000000 +#define CNA_DMA_CON0_OV4K_BYPASS__SHIFT 31 +static inline uint32_t CNA_DMA_CON0_OV4K_BYPASS(uint32_t val) +{ + return ((val) << 
CNA_DMA_CON0_OV4K_BYPASS__SHIFT) & CNA_DMA_CON0_OV4K_BYPASS__MASK; +} +#define CNA_DMA_CON0_RESERVED_0__MASK 0x7ff00000 +#define CNA_DMA_CON0_RESERVED_0__SHIFT 20 +static inline uint32_t CNA_DMA_CON0_RESERVED_0(uint32_t val) +{ + return ((val) << CNA_DMA_CON0_RESERVED_0__SHIFT) & CNA_DMA_CON0_RESERVED_0__MASK; +} +#define CNA_DMA_CON0_WEIGHT_BURST_LEN__MASK 0x000f0000 +#define CNA_DMA_CON0_WEIGHT_BURST_LEN__SHIFT 16 +static inline uint32_t CNA_DMA_CON0_WEIGHT_BURST_LEN(uint32_t val) +{ + return ((val) << CNA_DMA_CON0_WEIGHT_BURST_LEN__SHIFT) & CNA_DMA_CON0_WEIGHT_BURST_LEN__MASK; +} +#define CNA_DMA_CON0_RESERVED_1__MASK 0x0000fff0 +#define CNA_DMA_CON0_RESERVED_1__SHIFT 4 +static inline uint32_t CNA_DMA_CON0_RESERVED_1(uint32_t val) +{ + return ((val) << CNA_DMA_CON0_RESERVED_1__SHIFT) & CNA_DMA_CON0_RESERVED_1__MASK; +} +#define CNA_DMA_CON0_DATA_BURST_LEN__MASK 0x0000000f +#define CNA_DMA_CON0_DATA_BURST_LEN__SHIFT 0 +static inline uint32_t CNA_DMA_CON0_DATA_BURST_LEN(uint32_t val) +{ + return ((val) << CNA_DMA_CON0_DATA_BURST_LEN__SHIFT) & CNA_DMA_CON0_DATA_BURST_LEN__MASK; +} + +#define REG_CNA_DMA_CON1 0x0000107c +#define CNA_DMA_CON1_RESERVED_0__MASK 0xf0000000 +#define CNA_DMA_CON1_RESERVED_0__SHIFT 28 +static inline uint32_t CNA_DMA_CON1_RESERVED_0(uint32_t val) +{ + return ((val) << CNA_DMA_CON1_RESERVED_0__SHIFT) & CNA_DMA_CON1_RESERVED_0__MASK; +} +#define CNA_DMA_CON1_LINE_STRIDE__MASK 0x0fffffff +#define CNA_DMA_CON1_LINE_STRIDE__SHIFT 0 +static inline uint32_t CNA_DMA_CON1_LINE_STRIDE(uint32_t val) +{ + return ((val) << CNA_DMA_CON1_LINE_STRIDE__SHIFT) & CNA_DMA_CON1_LINE_STRIDE__MASK; +} + +#define REG_CNA_DMA_CON2 0x00001080 +#define CNA_DMA_CON2_RESERVED_0__MASK 0xf0000000 +#define CNA_DMA_CON2_RESERVED_0__SHIFT 28 +static inline uint32_t CNA_DMA_CON2_RESERVED_0(uint32_t val) +{ + return ((val) << CNA_DMA_CON2_RESERVED_0__SHIFT) & CNA_DMA_CON2_RESERVED_0__MASK; +} +#define CNA_DMA_CON2_SURF_STRIDE__MASK 0x0fffffff +#define 
CNA_DMA_CON2_SURF_STRIDE__SHIFT 0 +static inline uint32_t CNA_DMA_CON2_SURF_STRIDE(uint32_t val) +{ + return ((val) << CNA_DMA_CON2_SURF_STRIDE__SHIFT) & CNA_DMA_CON2_SURF_STRIDE__MASK; +} + +#define REG_CNA_FC_DATA_SIZE0 0x00001084 +#define CNA_FC_DATA_SIZE0_RESERVED_0__MASK 0xc0000000 +#define CNA_FC_DATA_SIZE0_RESERVED_0__SHIFT 30 +static inline uint32_t CNA_FC_DATA_SIZE0_RESERVED_0(uint32_t val) +{ + return ((val) << CNA_FC_DATA_SIZE0_RESERVED_0__SHIFT) & CNA_FC_DATA_SIZE0_RESERVED_0__MASK; +} +#define CNA_FC_DATA_SIZE0_DMA_WIDTH__MASK 0x3fff0000 +#define CNA_FC_DATA_SIZE0_DMA_WIDTH__SHIFT 16 +static inline uint32_t CNA_FC_DATA_SIZE0_DMA_WIDTH(uint32_t val) +{ + return ((val) << CNA_FC_DATA_SIZE0_DMA_WIDTH__SHIFT) & CNA_FC_DATA_SIZE0_DMA_WIDTH__MASK; +} +#define CNA_FC_DATA_SIZE0_RESERVED_1__MASK 0x0000f800 +#define CNA_FC_DATA_SIZE0_RESERVED_1__SHIFT 11 +static inline uint32_t CNA_FC_DATA_SIZE0_RESERVED_1(uint32_t val) +{ + return ((val) << CNA_FC_DATA_SIZE0_RESERVED_1__SHIFT) & CNA_FC_DATA_SIZE0_RESERVED_1__MASK; +} +#define CNA_FC_DATA_SIZE0_DMA_HEIGHT__MASK 0x000007ff +#define CNA_FC_DATA_SIZE0_DMA_HEIGHT__SHIFT 0 +static inline uint32_t CNA_FC_DATA_SIZE0_DMA_HEIGHT(uint32_t val) +{ + return ((val) << CNA_FC_DATA_SIZE0_DMA_HEIGHT__SHIFT) & CNA_FC_DATA_SIZE0_DMA_HEIGHT__MASK; +} + +#define REG_CNA_FC_DATA_SIZE1 0x00001088 +#define CNA_FC_DATA_SIZE1_RESERVED_0__MASK 0xffff0000 +#define CNA_FC_DATA_SIZE1_RESERVED_0__SHIFT 16 +static inline uint32_t CNA_FC_DATA_SIZE1_RESERVED_0(uint32_t val) +{ + return ((val) << CNA_FC_DATA_SIZE1_RESERVED_0__SHIFT) & CNA_FC_DATA_SIZE1_RESERVED_0__MASK; +} +#define CNA_FC_DATA_SIZE1_DMA_CHANNEL__MASK 0x0000ffff +#define CNA_FC_DATA_SIZE1_DMA_CHANNEL__SHIFT 0 +static inline uint32_t CNA_FC_DATA_SIZE1_DMA_CHANNEL(uint32_t val) +{ + return ((val) << CNA_FC_DATA_SIZE1_DMA_CHANNEL__SHIFT) & CNA_FC_DATA_SIZE1_DMA_CHANNEL__MASK; +} + +#define REG_CNA_CLK_GATE 0x00001090 +#define CNA_CLK_GATE_RESERVED_0__MASK 0xffffffe0 +#define 
CNA_CLK_GATE_RESERVED_0__SHIFT 5 +static inline uint32_t CNA_CLK_GATE_RESERVED_0(uint32_t val) +{ + return ((val) << CNA_CLK_GATE_RESERVED_0__SHIFT) & CNA_CLK_GATE_RESERVED_0__MASK; +} +#define CNA_CLK_GATE_CBUF_CS_DISABLE_CLKGATE__MASK 0x00000010 +#define CNA_CLK_GATE_CBUF_CS_DISABLE_CLKGATE__SHIFT 4 +static inline uint32_t CNA_CLK_GATE_CBUF_CS_DISABLE_CLKGATE(uint32_t val) +{ + return ((val) << CNA_CLK_GATE_CBUF_CS_DISABLE_CLKGATE__SHIFT) & CNA_CLK_GATE_CBUF_CS_DISABLE_CLKGATE__MASK; +} +#define CNA_CLK_GATE_RESERVED_1__MASK 0x00000008 +#define CNA_CLK_GATE_RESERVED_1__SHIFT 3 +static inline uint32_t CNA_CLK_GATE_RESERVED_1(uint32_t val) +{ + return ((val) << CNA_CLK_GATE_RESERVED_1__SHIFT) & CNA_CLK_GATE_RESERVED_1__MASK; +} +#define CNA_CLK_GATE_CSC_DISABLE_CLKGATE__MASK 0x00000004 +#define CNA_CLK_GATE_CSC_DISABLE_CLKGATE__SHIFT 2 +static inline uint32_t CNA_CLK_GATE_CSC_DISABLE_CLKGATE(uint32_t val) +{ + return ((val) << CNA_CLK_GATE_CSC_DISABLE_CLKGATE__SHIFT) & CNA_CLK_GATE_CSC_DISABLE_CLKGATE__MASK; +} +#define CNA_CLK_GATE_CNA_WEIGHT_DISABLE_CLKGATE__MASK 0x00000002 +#define CNA_CLK_GATE_CNA_WEIGHT_DISABLE_CLKGATE__SHIFT 1 +static inline uint32_t CNA_CLK_GATE_CNA_WEIGHT_DISABLE_CLKGATE(uint32_t val) +{ + return ((val) << CNA_CLK_GATE_CNA_WEIGHT_DISABLE_CLKGATE__SHIFT) & CNA_CLK_GATE_CNA_WEIGHT_DISABLE_CLKGATE__MASK; +} +#define CNA_CLK_GATE_CNA_FEATURE_DISABLE_CLKGATE__MASK 0x00000001 +#define CNA_CLK_GATE_CNA_FEATURE_DISABLE_CLKGATE__SHIFT 0 +static inline uint32_t CNA_CLK_GATE_CNA_FEATURE_DISABLE_CLKGATE(uint32_t val) +{ + return ((val) << CNA_CLK_GATE_CNA_FEATURE_DISABLE_CLKGATE__SHIFT) & CNA_CLK_GATE_CNA_FEATURE_DISABLE_CLKGATE__MASK; +} + +#define REG_CNA_DCOMP_CTRL 0x00001100 +#define CNA_DCOMP_CTRL_RESERVED_0__MASK 0xfffffff0 +#define CNA_DCOMP_CTRL_RESERVED_0__SHIFT 4 +static inline uint32_t CNA_DCOMP_CTRL_RESERVED_0(uint32_t val) +{ + return ((val) << CNA_DCOMP_CTRL_RESERVED_0__SHIFT) & CNA_DCOMP_CTRL_RESERVED_0__MASK; +} +#define 
CNA_DCOMP_CTRL_WT_DEC_BYPASS__MASK 0x00000008 +#define CNA_DCOMP_CTRL_WT_DEC_BYPASS__SHIFT 3 +static inline uint32_t CNA_DCOMP_CTRL_WT_DEC_BYPASS(uint32_t val) +{ + return ((val) << CNA_DCOMP_CTRL_WT_DEC_BYPASS__SHIFT) & CNA_DCOMP_CTRL_WT_DEC_BYPASS__MASK; +} +#define CNA_DCOMP_CTRL_DECOMP_CONTROL__MASK 0x00000007 +#define CNA_DCOMP_CTRL_DECOMP_CONTROL__SHIFT 0 +static inline uint32_t CNA_DCOMP_CTRL_DECOMP_CONTROL(uint32_t val) +{ + return ((val) << CNA_DCOMP_CTRL_DECOMP_CONTROL__SHIFT) & CNA_DCOMP_CTRL_DECOMP_CONTROL__MASK; +} + +#define REG_CNA_DCOMP_REGNUM 0x00001104 +#define CNA_DCOMP_REGNUM_DCOMP_REGNUM__MASK 0xffffffff +#define CNA_DCOMP_REGNUM_DCOMP_REGNUM__SHIFT 0 +static inline uint32_t CNA_DCOMP_REGNUM_DCOMP_REGNUM(uint32_t val) +{ + return ((val) << CNA_DCOMP_REGNUM_DCOMP_REGNUM__SHIFT) & CNA_DCOMP_REGNUM_DCOMP_REGNUM__MASK; +} + +#define REG_CNA_DCOMP_ADDR0 0x00001110 +#define CNA_DCOMP_ADDR0_DECOMPRESS_ADDR0__MASK 0xffffffff +#define CNA_DCOMP_ADDR0_DECOMPRESS_ADDR0__SHIFT 0 +static inline uint32_t CNA_DCOMP_ADDR0_DECOMPRESS_ADDR0(uint32_t val) +{ + return ((val) << CNA_DCOMP_ADDR0_DECOMPRESS_ADDR0__SHIFT) & CNA_DCOMP_ADDR0_DECOMPRESS_ADDR0__MASK; +} + +#define REG_CNA_DCOMP_AMOUNT0 0x00001140 +#define CNA_DCOMP_AMOUNT0_DCOMP_AMOUNT0__MASK 0xffffffff +#define CNA_DCOMP_AMOUNT0_DCOMP_AMOUNT0__SHIFT 0 +static inline uint32_t CNA_DCOMP_AMOUNT0_DCOMP_AMOUNT0(uint32_t val) +{ + return ((val) << CNA_DCOMP_AMOUNT0_DCOMP_AMOUNT0__SHIFT) & CNA_DCOMP_AMOUNT0_DCOMP_AMOUNT0__MASK; +} + +#define REG_CNA_DCOMP_AMOUNT1 0x00001144 +#define CNA_DCOMP_AMOUNT1_DCOMP_AMOUNT1__MASK 0xffffffff +#define CNA_DCOMP_AMOUNT1_DCOMP_AMOUNT1__SHIFT 0 +static inline uint32_t CNA_DCOMP_AMOUNT1_DCOMP_AMOUNT1(uint32_t val) +{ + return ((val) << CNA_DCOMP_AMOUNT1_DCOMP_AMOUNT1__SHIFT) & CNA_DCOMP_AMOUNT1_DCOMP_AMOUNT1__MASK; +} + +#define REG_CNA_DCOMP_AMOUNT2 0x00001148 +#define CNA_DCOMP_AMOUNT2_DCOMP_AMOUNT2__MASK 0xffffffff +#define CNA_DCOMP_AMOUNT2_DCOMP_AMOUNT2__SHIFT 0 +static 
inline uint32_t CNA_DCOMP_AMOUNT2_DCOMP_AMOUNT2(uint32_t val) +{ + return ((val) << CNA_DCOMP_AMOUNT2_DCOMP_AMOUNT2__SHIFT) & CNA_DCOMP_AMOUNT2_DCOMP_AMOUNT2__MASK; +} + +#define REG_CNA_DCOMP_AMOUNT3 0x0000114c +#define CNA_DCOMP_AMOUNT3_DCOMP_AMOUNT3__MASK 0xffffffff +#define CNA_DCOMP_AMOUNT3_DCOMP_AMOUNT3__SHIFT 0 +static inline uint32_t CNA_DCOMP_AMOUNT3_DCOMP_AMOUNT3(uint32_t val) +{ + return ((val) << CNA_DCOMP_AMOUNT3_DCOMP_AMOUNT3__SHIFT) & CNA_DCOMP_AMOUNT3_DCOMP_AMOUNT3__MASK; +} + +#define REG_CNA_DCOMP_AMOUNT4 0x00001150 +#define CNA_DCOMP_AMOUNT4_DCOMP_AMOUNT4__MASK 0xffffffff +#define CNA_DCOMP_AMOUNT4_DCOMP_AMOUNT4__SHIFT 0 +static inline uint32_t CNA_DCOMP_AMOUNT4_DCOMP_AMOUNT4(uint32_t val) +{ + return ((val) << CNA_DCOMP_AMOUNT4_DCOMP_AMOUNT4__SHIFT) & CNA_DCOMP_AMOUNT4_DCOMP_AMOUNT4__MASK; +} + +#define REG_CNA_DCOMP_AMOUNT5 0x00001154 +#define CNA_DCOMP_AMOUNT5_DCOMP_AMOUNT5__MASK 0xffffffff +#define CNA_DCOMP_AMOUNT5_DCOMP_AMOUNT5__SHIFT 0 +static inline uint32_t CNA_DCOMP_AMOUNT5_DCOMP_AMOUNT5(uint32_t val) +{ + return ((val) << CNA_DCOMP_AMOUNT5_DCOMP_AMOUNT5__SHIFT) & CNA_DCOMP_AMOUNT5_DCOMP_AMOUNT5__MASK; +} + +#define REG_CNA_DCOMP_AMOUNT6 0x00001158 +#define CNA_DCOMP_AMOUNT6_DCOMP_AMOUNT6__MASK 0xffffffff +#define CNA_DCOMP_AMOUNT6_DCOMP_AMOUNT6__SHIFT 0 +static inline uint32_t CNA_DCOMP_AMOUNT6_DCOMP_AMOUNT6(uint32_t val) +{ + return ((val) << CNA_DCOMP_AMOUNT6_DCOMP_AMOUNT6__SHIFT) & CNA_DCOMP_AMOUNT6_DCOMP_AMOUNT6__MASK; +} + +#define REG_CNA_DCOMP_AMOUNT7 0x0000115c +#define CNA_DCOMP_AMOUNT7_DCOMP_AMOUNT7__MASK 0xffffffff +#define CNA_DCOMP_AMOUNT7_DCOMP_AMOUNT7__SHIFT 0 +static inline uint32_t CNA_DCOMP_AMOUNT7_DCOMP_AMOUNT7(uint32_t val) +{ + return ((val) << CNA_DCOMP_AMOUNT7_DCOMP_AMOUNT7__SHIFT) & CNA_DCOMP_AMOUNT7_DCOMP_AMOUNT7__MASK; +} + +#define REG_CNA_DCOMP_AMOUNT8 0x00001160 +#define CNA_DCOMP_AMOUNT8_DCOMP_AMOUNT8__MASK 0xffffffff +#define CNA_DCOMP_AMOUNT8_DCOMP_AMOUNT8__SHIFT 0 +static inline uint32_t 
CNA_DCOMP_AMOUNT8_DCOMP_AMOUNT8(uint32_t val) +{ + return ((val) << CNA_DCOMP_AMOUNT8_DCOMP_AMOUNT8__SHIFT) & CNA_DCOMP_AMOUNT8_DCOMP_AMOUNT8__MASK; +} + +#define REG_CNA_DCOMP_AMOUNT9 0x00001164 +#define CNA_DCOMP_AMOUNT9_DCOMP_AMOUNT9__MASK 0xffffffff +#define CNA_DCOMP_AMOUNT9_DCOMP_AMOUNT9__SHIFT 0 +static inline uint32_t CNA_DCOMP_AMOUNT9_DCOMP_AMOUNT9(uint32_t val) +{ + return ((val) << CNA_DCOMP_AMOUNT9_DCOMP_AMOUNT9__SHIFT) & CNA_DCOMP_AMOUNT9_DCOMP_AMOUNT9__MASK; +} + +#define REG_CNA_DCOMP_AMOUNT10 0x00001168 +#define CNA_DCOMP_AMOUNT10_DCOMP_AMOUNT10__MASK 0xffffffff +#define CNA_DCOMP_AMOUNT10_DCOMP_AMOUNT10__SHIFT 0 +static inline uint32_t CNA_DCOMP_AMOUNT10_DCOMP_AMOUNT10(uint32_t val) +{ + return ((val) << CNA_DCOMP_AMOUNT10_DCOMP_AMOUNT10__SHIFT) & CNA_DCOMP_AMOUNT10_DCOMP_AMOUNT10__MASK; +} + +#define REG_CNA_DCOMP_AMOUNT11 0x0000116c +#define CNA_DCOMP_AMOUNT11_DCOMP_AMOUNT11__MASK 0xffffffff +#define CNA_DCOMP_AMOUNT11_DCOMP_AMOUNT11__SHIFT 0 +static inline uint32_t CNA_DCOMP_AMOUNT11_DCOMP_AMOUNT11(uint32_t val) +{ + return ((val) << CNA_DCOMP_AMOUNT11_DCOMP_AMOUNT11__SHIFT) & CNA_DCOMP_AMOUNT11_DCOMP_AMOUNT11__MASK; +} + +#define REG_CNA_DCOMP_AMOUNT12 0x00001170 +#define CNA_DCOMP_AMOUNT12_DCOMP_AMOUNT12__MASK 0xffffffff +#define CNA_DCOMP_AMOUNT12_DCOMP_AMOUNT12__SHIFT 0 +static inline uint32_t CNA_DCOMP_AMOUNT12_DCOMP_AMOUNT12(uint32_t val) +{ + return ((val) << CNA_DCOMP_AMOUNT12_DCOMP_AMOUNT12__SHIFT) & CNA_DCOMP_AMOUNT12_DCOMP_AMOUNT12__MASK; +} + +#define REG_CNA_DCOMP_AMOUNT13 0x00001174 +#define CNA_DCOMP_AMOUNT13_DCOMP_AMOUNT13__MASK 0xffffffff +#define CNA_DCOMP_AMOUNT13_DCOMP_AMOUNT13__SHIFT 0 +static inline uint32_t CNA_DCOMP_AMOUNT13_DCOMP_AMOUNT13(uint32_t val) +{ + return ((val) << CNA_DCOMP_AMOUNT13_DCOMP_AMOUNT13__SHIFT) & CNA_DCOMP_AMOUNT13_DCOMP_AMOUNT13__MASK; +} + +#define REG_CNA_DCOMP_AMOUNT14 0x00001178 +#define CNA_DCOMP_AMOUNT14_DCOMP_AMOUNT14__MASK 0xffffffff +#define CNA_DCOMP_AMOUNT14_DCOMP_AMOUNT14__SHIFT 0 
+static inline uint32_t CNA_DCOMP_AMOUNT14_DCOMP_AMOUNT14(uint32_t val) +{ + return ((val) << CNA_DCOMP_AMOUNT14_DCOMP_AMOUNT14__SHIFT) & CNA_DCOMP_AMOUNT14_DCOMP_AMOUNT14__MASK; +} + +#define REG_CNA_DCOMP_AMOUNT15 0x0000117c +#define CNA_DCOMP_AMOUNT15_DCOMP_AMOUNT15__MASK 0xffffffff +#define CNA_DCOMP_AMOUNT15_DCOMP_AMOUNT15__SHIFT 0 +static inline uint32_t CNA_DCOMP_AMOUNT15_DCOMP_AMOUNT15(uint32_t val) +{ + return ((val) << CNA_DCOMP_AMOUNT15_DCOMP_AMOUNT15__SHIFT) & CNA_DCOMP_AMOUNT15_DCOMP_AMOUNT15__MASK; +} + +#define REG_CNA_CVT_CON5 0x00001180 +#define CNA_CVT_CON5_PER_CHANNEL_CVT_EN__MASK 0xffffffff +#define CNA_CVT_CON5_PER_CHANNEL_CVT_EN__SHIFT 0 +static inline uint32_t CNA_CVT_CON5_PER_CHANNEL_CVT_EN(uint32_t val) +{ + return ((val) << CNA_CVT_CON5_PER_CHANNEL_CVT_EN__SHIFT) & CNA_CVT_CON5_PER_CHANNEL_CVT_EN__MASK; +} + +#define REG_CNA_PAD_CON1 0x00001184 +#define CNA_PAD_CON1_PAD_VALUE__MASK 0xffffffff +#define CNA_PAD_CON1_PAD_VALUE__SHIFT 0 +static inline uint32_t CNA_PAD_CON1_PAD_VALUE(uint32_t val) +{ + return ((val) << CNA_PAD_CON1_PAD_VALUE__SHIFT) & CNA_PAD_CON1_PAD_VALUE__MASK; +} + +#define REG_CORE_S_STATUS 0x00003000 +#define CORE_S_STATUS_RESERVED_0__MASK 0xfffc0000 +#define CORE_S_STATUS_RESERVED_0__SHIFT 18 +static inline uint32_t CORE_S_STATUS_RESERVED_0(uint32_t val) +{ + return ((val) << CORE_S_STATUS_RESERVED_0__SHIFT) & CORE_S_STATUS_RESERVED_0__MASK; +} +#define CORE_S_STATUS_STATUS_1__MASK 0x00030000 +#define CORE_S_STATUS_STATUS_1__SHIFT 16 +static inline uint32_t CORE_S_STATUS_STATUS_1(uint32_t val) +{ + return ((val) << CORE_S_STATUS_STATUS_1__SHIFT) & CORE_S_STATUS_STATUS_1__MASK; +} +#define CORE_S_STATUS_RESERVED_1__MASK 0x0000fffc +#define CORE_S_STATUS_RESERVED_1__SHIFT 2 +static inline uint32_t CORE_S_STATUS_RESERVED_1(uint32_t val) +{ + return ((val) << CORE_S_STATUS_RESERVED_1__SHIFT) & CORE_S_STATUS_RESERVED_1__MASK; +} +#define CORE_S_STATUS_STATUS_0__MASK 0x00000003 +#define CORE_S_STATUS_STATUS_0__SHIFT 0 +static 
inline uint32_t CORE_S_STATUS_STATUS_0(uint32_t val) +{ + return ((val) << CORE_S_STATUS_STATUS_0__SHIFT) & CORE_S_STATUS_STATUS_0__MASK; +} + +#define REG_CORE_S_POINTER 0x00003004 +#define CORE_S_POINTER_RESERVED_0__MASK 0xfffe0000 +#define CORE_S_POINTER_RESERVED_0__SHIFT 17 +static inline uint32_t CORE_S_POINTER_RESERVED_0(uint32_t val) +{ + return ((val) << CORE_S_POINTER_RESERVED_0__SHIFT) & CORE_S_POINTER_RESERVED_0__MASK; +} +#define CORE_S_POINTER_EXECUTER__MASK 0x00010000 +#define CORE_S_POINTER_EXECUTER__SHIFT 16 +static inline uint32_t CORE_S_POINTER_EXECUTER(uint32_t val) +{ + return ((val) << CORE_S_POINTER_EXECUTER__SHIFT) & CORE_S_POINTER_EXECUTER__MASK; +} +#define CORE_S_POINTER_RESERVED_1__MASK 0x0000ffc0 +#define CORE_S_POINTER_RESERVED_1__SHIFT 6 +static inline uint32_t CORE_S_POINTER_RESERVED_1(uint32_t val) +{ + return ((val) << CORE_S_POINTER_RESERVED_1__SHIFT) & CORE_S_POINTER_RESERVED_1__MASK; +} +#define CORE_S_POINTER_EXECUTER_PP_CLEAR__MASK 0x00000020 +#define CORE_S_POINTER_EXECUTER_PP_CLEAR__SHIFT 5 +static inline uint32_t CORE_S_POINTER_EXECUTER_PP_CLEAR(uint32_t val) +{ + return ((val) << CORE_S_POINTER_EXECUTER_PP_CLEAR__SHIFT) & CORE_S_POINTER_EXECUTER_PP_CLEAR__MASK; +} +#define CORE_S_POINTER_POINTER_PP_CLEAR__MASK 0x00000010 +#define CORE_S_POINTER_POINTER_PP_CLEAR__SHIFT 4 +static inline uint32_t CORE_S_POINTER_POINTER_PP_CLEAR(uint32_t val) +{ + return ((val) << CORE_S_POINTER_POINTER_PP_CLEAR__SHIFT) & CORE_S_POINTER_POINTER_PP_CLEAR__MASK; +} +#define CORE_S_POINTER_POINTER_PP_MODE__MASK 0x00000008 +#define CORE_S_POINTER_POINTER_PP_MODE__SHIFT 3 +static inline uint32_t CORE_S_POINTER_POINTER_PP_MODE(uint32_t val) +{ + return ((val) << CORE_S_POINTER_POINTER_PP_MODE__SHIFT) & CORE_S_POINTER_POINTER_PP_MODE__MASK; +} +#define CORE_S_POINTER_EXECUTER_PP_EN__MASK 0x00000004 +#define CORE_S_POINTER_EXECUTER_PP_EN__SHIFT 2 +static inline uint32_t CORE_S_POINTER_EXECUTER_PP_EN(uint32_t val) +{ + return ((val) << 
CORE_S_POINTER_EXECUTER_PP_EN__SHIFT) & CORE_S_POINTER_EXECUTER_PP_EN__MASK; +} +#define CORE_S_POINTER_POINTER_PP_EN__MASK 0x00000002 +#define CORE_S_POINTER_POINTER_PP_EN__SHIFT 1 +static inline uint32_t CORE_S_POINTER_POINTER_PP_EN(uint32_t val) +{ + return ((val) << CORE_S_POINTER_POINTER_PP_EN__SHIFT) & CORE_S_POINTER_POINTER_PP_EN__MASK; +} +#define CORE_S_POINTER_POINTER__MASK 0x00000001 +#define CORE_S_POINTER_POINTER__SHIFT 0 +static inline uint32_t CORE_S_POINTER_POINTER(uint32_t val) +{ + return ((val) << CORE_S_POINTER_POINTER__SHIFT) & CORE_S_POINTER_POINTER__MASK; +} + +#define REG_CORE_OPERATION_ENABLE 0x00003008 +#define CORE_OPERATION_ENABLE_RESERVED_0__MASK 0xfffffffe +#define CORE_OPERATION_ENABLE_RESERVED_0__SHIFT 1 +static inline uint32_t CORE_OPERATION_ENABLE_RESERVED_0(uint32_t val) +{ + return ((val) << CORE_OPERATION_ENABLE_RESERVED_0__SHIFT) & CORE_OPERATION_ENABLE_RESERVED_0__MASK; +} +#define CORE_OPERATION_ENABLE_OP_EN__MASK 0x00000001 +#define CORE_OPERATION_ENABLE_OP_EN__SHIFT 0 +static inline uint32_t CORE_OPERATION_ENABLE_OP_EN(uint32_t val) +{ + return ((val) << CORE_OPERATION_ENABLE_OP_EN__SHIFT) & CORE_OPERATION_ENABLE_OP_EN__MASK; +} + +#define REG_CORE_MAC_GATING 0x0000300c +#define CORE_MAC_GATING_RESERVED_0__MASK 0xf8000000 +#define CORE_MAC_GATING_RESERVED_0__SHIFT 27 +static inline uint32_t CORE_MAC_GATING_RESERVED_0(uint32_t val) +{ + return ((val) << CORE_MAC_GATING_RESERVED_0__SHIFT) & CORE_MAC_GATING_RESERVED_0__MASK; +} +#define CORE_MAC_GATING_SLCG_OP_EN__MASK 0x07ffffff +#define CORE_MAC_GATING_SLCG_OP_EN__SHIFT 0 +static inline uint32_t CORE_MAC_GATING_SLCG_OP_EN(uint32_t val) +{ + return ((val) << CORE_MAC_GATING_SLCG_OP_EN__SHIFT) & CORE_MAC_GATING_SLCG_OP_EN__MASK; +} + +#define REG_CORE_MISC_CFG 0x00003010 +#define CORE_MISC_CFG_RESERVED_0__MASK 0xfff00000 +#define CORE_MISC_CFG_RESERVED_0__SHIFT 20 +static inline uint32_t CORE_MISC_CFG_RESERVED_0(uint32_t val) +{ + return ((val) << 
CORE_MISC_CFG_RESERVED_0__SHIFT) & CORE_MISC_CFG_RESERVED_0__MASK; +} +#define CORE_MISC_CFG_SOFT_GATING__MASK 0x000fc000 +#define CORE_MISC_CFG_SOFT_GATING__SHIFT 14 +static inline uint32_t CORE_MISC_CFG_SOFT_GATING(uint32_t val) +{ + return ((val) << CORE_MISC_CFG_SOFT_GATING__SHIFT) & CORE_MISC_CFG_SOFT_GATING__MASK; +} +#define CORE_MISC_CFG_RESERVED_1__MASK 0x00003800 +#define CORE_MISC_CFG_RESERVED_1__SHIFT 11 +static inline uint32_t CORE_MISC_CFG_RESERVED_1(uint32_t val) +{ + return ((val) << CORE_MISC_CFG_RESERVED_1__SHIFT) & CORE_MISC_CFG_RESERVED_1__MASK; +} +#define CORE_MISC_CFG_PROC_PRECISION__MASK 0x00000700 +#define CORE_MISC_CFG_PROC_PRECISION__SHIFT 8 +static inline uint32_t CORE_MISC_CFG_PROC_PRECISION(uint32_t val) +{ + return ((val) << CORE_MISC_CFG_PROC_PRECISION__SHIFT) & CORE_MISC_CFG_PROC_PRECISION__MASK; +} +#define CORE_MISC_CFG_RESERVED_2__MASK 0x000000fc +#define CORE_MISC_CFG_RESERVED_2__SHIFT 2 +static inline uint32_t CORE_MISC_CFG_RESERVED_2(uint32_t val) +{ + return ((val) << CORE_MISC_CFG_RESERVED_2__SHIFT) & CORE_MISC_CFG_RESERVED_2__MASK; +} +#define CORE_MISC_CFG_DW_EN__MASK 0x00000002 +#define CORE_MISC_CFG_DW_EN__SHIFT 1 +static inline uint32_t CORE_MISC_CFG_DW_EN(uint32_t val) +{ + return ((val) << CORE_MISC_CFG_DW_EN__SHIFT) & CORE_MISC_CFG_DW_EN__MASK; +} +#define CORE_MISC_CFG_QD_EN__MASK 0x00000001 +#define CORE_MISC_CFG_QD_EN__SHIFT 0 +static inline uint32_t CORE_MISC_CFG_QD_EN(uint32_t val) +{ + return ((val) << CORE_MISC_CFG_QD_EN__SHIFT) & CORE_MISC_CFG_QD_EN__MASK; +} + +#define REG_CORE_DATAOUT_SIZE_0 0x00003014 +#define CORE_DATAOUT_SIZE_0_DATAOUT_HEIGHT__MASK 0xffff0000 +#define CORE_DATAOUT_SIZE_0_DATAOUT_HEIGHT__SHIFT 16 +static inline uint32_t CORE_DATAOUT_SIZE_0_DATAOUT_HEIGHT(uint32_t val) +{ + return ((val) << CORE_DATAOUT_SIZE_0_DATAOUT_HEIGHT__SHIFT) & CORE_DATAOUT_SIZE_0_DATAOUT_HEIGHT__MASK; +} +#define CORE_DATAOUT_SIZE_0_DATAOUT_WIDTH__MASK 0x0000ffff +#define CORE_DATAOUT_SIZE_0_DATAOUT_WIDTH__SHIFT 0 
+static inline uint32_t CORE_DATAOUT_SIZE_0_DATAOUT_WIDTH(uint32_t val) +{ + return ((val) << CORE_DATAOUT_SIZE_0_DATAOUT_WIDTH__SHIFT) & CORE_DATAOUT_SIZE_0_DATAOUT_WIDTH__MASK; +} + +#define REG_CORE_DATAOUT_SIZE_1 0x00003018 +#define CORE_DATAOUT_SIZE_1_RESERVED_0__MASK 0xffff0000 +#define CORE_DATAOUT_SIZE_1_RESERVED_0__SHIFT 16 +static inline uint32_t CORE_DATAOUT_SIZE_1_RESERVED_0(uint32_t val) +{ + return ((val) << CORE_DATAOUT_SIZE_1_RESERVED_0__SHIFT) & CORE_DATAOUT_SIZE_1_RESERVED_0__MASK; +} +#define CORE_DATAOUT_SIZE_1_DATAOUT_CHANNEL__MASK 0x0000ffff +#define CORE_DATAOUT_SIZE_1_DATAOUT_CHANNEL__SHIFT 0 +static inline uint32_t CORE_DATAOUT_SIZE_1_DATAOUT_CHANNEL(uint32_t val) +{ + return ((val) << CORE_DATAOUT_SIZE_1_DATAOUT_CHANNEL__SHIFT) & CORE_DATAOUT_SIZE_1_DATAOUT_CHANNEL__MASK; +} + +#define REG_CORE_CLIP_TRUNCATE 0x0000301c +#define CORE_CLIP_TRUNCATE_RESERVED_0__MASK 0xffffff80 +#define CORE_CLIP_TRUNCATE_RESERVED_0__SHIFT 7 +static inline uint32_t CORE_CLIP_TRUNCATE_RESERVED_0(uint32_t val) +{ + return ((val) << CORE_CLIP_TRUNCATE_RESERVED_0__SHIFT) & CORE_CLIP_TRUNCATE_RESERVED_0__MASK; +} +#define CORE_CLIP_TRUNCATE_ROUND_TYPE__MASK 0x00000040 +#define CORE_CLIP_TRUNCATE_ROUND_TYPE__SHIFT 6 +static inline uint32_t CORE_CLIP_TRUNCATE_ROUND_TYPE(uint32_t val) +{ + return ((val) << CORE_CLIP_TRUNCATE_ROUND_TYPE__SHIFT) & CORE_CLIP_TRUNCATE_ROUND_TYPE__MASK; +} +#define CORE_CLIP_TRUNCATE_RESERVED_1__MASK 0x00000020 +#define CORE_CLIP_TRUNCATE_RESERVED_1__SHIFT 5 +static inline uint32_t CORE_CLIP_TRUNCATE_RESERVED_1(uint32_t val) +{ + return ((val) << CORE_CLIP_TRUNCATE_RESERVED_1__SHIFT) & CORE_CLIP_TRUNCATE_RESERVED_1__MASK; +} +#define CORE_CLIP_TRUNCATE_CLIP_TRUNCATE__MASK 0x0000001f +#define CORE_CLIP_TRUNCATE_CLIP_TRUNCATE__SHIFT 0 +static inline uint32_t CORE_CLIP_TRUNCATE_CLIP_TRUNCATE(uint32_t val) +{ + return ((val) << CORE_CLIP_TRUNCATE_CLIP_TRUNCATE__SHIFT) & CORE_CLIP_TRUNCATE_CLIP_TRUNCATE__MASK; +} + +#define REG_DPU_S_STATUS 
0x00004000 +#define DPU_S_STATUS_RESERVED_0__MASK 0xfffc0000 +#define DPU_S_STATUS_RESERVED_0__SHIFT 18 +static inline uint32_t DPU_S_STATUS_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_S_STATUS_RESERVED_0__SHIFT) & DPU_S_STATUS_RESERVED_0__MASK; +} +#define DPU_S_STATUS_STATUS_1__MASK 0x00030000 +#define DPU_S_STATUS_STATUS_1__SHIFT 16 +static inline uint32_t DPU_S_STATUS_STATUS_1(uint32_t val) +{ + return ((val) << DPU_S_STATUS_STATUS_1__SHIFT) & DPU_S_STATUS_STATUS_1__MASK; +} +#define DPU_S_STATUS_RESERVED_1__MASK 0x0000fffc +#define DPU_S_STATUS_RESERVED_1__SHIFT 2 +static inline uint32_t DPU_S_STATUS_RESERVED_1(uint32_t val) +{ + return ((val) << DPU_S_STATUS_RESERVED_1__SHIFT) & DPU_S_STATUS_RESERVED_1__MASK; +} +#define DPU_S_STATUS_STATUS_0__MASK 0x00000003 +#define DPU_S_STATUS_STATUS_0__SHIFT 0 +static inline uint32_t DPU_S_STATUS_STATUS_0(uint32_t val) +{ + return ((val) << DPU_S_STATUS_STATUS_0__SHIFT) & DPU_S_STATUS_STATUS_0__MASK; +} + +#define REG_DPU_S_POINTER 0x00004004 +#define DPU_S_POINTER_RESERVED_0__MASK 0xfffe0000 +#define DPU_S_POINTER_RESERVED_0__SHIFT 17 +static inline uint32_t DPU_S_POINTER_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_S_POINTER_RESERVED_0__SHIFT) & DPU_S_POINTER_RESERVED_0__MASK; +} +#define DPU_S_POINTER_EXECUTER__MASK 0x00010000 +#define DPU_S_POINTER_EXECUTER__SHIFT 16 +static inline uint32_t DPU_S_POINTER_EXECUTER(uint32_t val) +{ + return ((val) << DPU_S_POINTER_EXECUTER__SHIFT) & DPU_S_POINTER_EXECUTER__MASK; +} +#define DPU_S_POINTER_RESERVED_1__MASK 0x0000ffc0 +#define DPU_S_POINTER_RESERVED_1__SHIFT 6 +static inline uint32_t DPU_S_POINTER_RESERVED_1(uint32_t val) +{ + return ((val) << DPU_S_POINTER_RESERVED_1__SHIFT) & DPU_S_POINTER_RESERVED_1__MASK; +} +#define DPU_S_POINTER_EXECUTER_PP_CLEAR__MASK 0x00000020 +#define DPU_S_POINTER_EXECUTER_PP_CLEAR__SHIFT 5 +static inline uint32_t DPU_S_POINTER_EXECUTER_PP_CLEAR(uint32_t val) +{ + return ((val) << DPU_S_POINTER_EXECUTER_PP_CLEAR__SHIFT) & 
DPU_S_POINTER_EXECUTER_PP_CLEAR__MASK; +} +#define DPU_S_POINTER_POINTER_PP_CLEAR__MASK 0x00000010 +#define DPU_S_POINTER_POINTER_PP_CLEAR__SHIFT 4 +static inline uint32_t DPU_S_POINTER_POINTER_PP_CLEAR(uint32_t val) +{ + return ((val) << DPU_S_POINTER_POINTER_PP_CLEAR__SHIFT) & DPU_S_POINTER_POINTER_PP_CLEAR__MASK; +} +#define DPU_S_POINTER_POINTER_PP_MODE__MASK 0x00000008 +#define DPU_S_POINTER_POINTER_PP_MODE__SHIFT 3 +static inline uint32_t DPU_S_POINTER_POINTER_PP_MODE(uint32_t val) +{ + return ((val) << DPU_S_POINTER_POINTER_PP_MODE__SHIFT) & DPU_S_POINTER_POINTER_PP_MODE__MASK; +} +#define DPU_S_POINTER_EXECUTER_PP_EN__MASK 0x00000004 +#define DPU_S_POINTER_EXECUTER_PP_EN__SHIFT 2 +static inline uint32_t DPU_S_POINTER_EXECUTER_PP_EN(uint32_t val) +{ + return ((val) << DPU_S_POINTER_EXECUTER_PP_EN__SHIFT) & DPU_S_POINTER_EXECUTER_PP_EN__MASK; +} +#define DPU_S_POINTER_POINTER_PP_EN__MASK 0x00000002 +#define DPU_S_POINTER_POINTER_PP_EN__SHIFT 1 +static inline uint32_t DPU_S_POINTER_POINTER_PP_EN(uint32_t val) +{ + return ((val) << DPU_S_POINTER_POINTER_PP_EN__SHIFT) & DPU_S_POINTER_POINTER_PP_EN__MASK; +} +#define DPU_S_POINTER_POINTER__MASK 0x00000001 +#define DPU_S_POINTER_POINTER__SHIFT 0 +static inline uint32_t DPU_S_POINTER_POINTER(uint32_t val) +{ + return ((val) << DPU_S_POINTER_POINTER__SHIFT) & DPU_S_POINTER_POINTER__MASK; +} + +#define REG_DPU_OPERATION_ENABLE 0x00004008 +#define DPU_OPERATION_ENABLE_RESERVED_0__MASK 0xfffffffe +#define DPU_OPERATION_ENABLE_RESERVED_0__SHIFT 1 +static inline uint32_t DPU_OPERATION_ENABLE_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_OPERATION_ENABLE_RESERVED_0__SHIFT) & DPU_OPERATION_ENABLE_RESERVED_0__MASK; +} +#define DPU_OPERATION_ENABLE_OP_EN__MASK 0x00000001 +#define DPU_OPERATION_ENABLE_OP_EN__SHIFT 0 +static inline uint32_t DPU_OPERATION_ENABLE_OP_EN(uint32_t val) +{ + return ((val) << DPU_OPERATION_ENABLE_OP_EN__SHIFT) & DPU_OPERATION_ENABLE_OP_EN__MASK; +} + +#define REG_DPU_FEATURE_MODE_CFG 0x0000400c 
/*
 * Auto-generated DPU register field definitions (cgit diff residue; the
 * '+' characters are diff markers and wrapping is not source structure).
 * Covered here: the bit fields of REG_DPU_FEATURE_MODE_CFG (0x400c),
 * REG_DPU_DATA_FORMAT (0x4010: input/output/processing precision and the
 * BS/BN/EW negative-shift fields), REG_DPU_OFFSET_PEND (0x4014),
 * REG_DPU_DST_BASE_ADDR (0x4020), REG_DPU_DST_SURF_STRIDE (0x4024,
 * 16-byte-aligned stride in bits 31:4), the output data-cube geometry
 * registers REG_DPU_DATA_CUBE_WIDTH/HEIGHT/NOTCH_ADDR/CHANNEL
 * (0x4030-0x403c, 13-bit dimensions), and the start of REG_DPU_BS_CFG
 * (0x4040: bias/scale stage ALU algorithm, source, ReLU and bypass
 * controls).  Reserved fields are modeled explicitly by the generator.
 * Do not edit the constants by hand; regenerate instead.
 */
+#define DPU_FEATURE_MODE_CFG_COMB_USE__MASK 0x80000000 +#define DPU_FEATURE_MODE_CFG_COMB_USE__SHIFT 31 +static inline uint32_t DPU_FEATURE_MODE_CFG_COMB_USE(uint32_t val) +{ + return ((val) << DPU_FEATURE_MODE_CFG_COMB_USE__SHIFT) & DPU_FEATURE_MODE_CFG_COMB_USE__MASK; +} +#define DPU_FEATURE_MODE_CFG_TP_EN__MASK 0x40000000 +#define DPU_FEATURE_MODE_CFG_TP_EN__SHIFT 30 +static inline uint32_t DPU_FEATURE_MODE_CFG_TP_EN(uint32_t val) +{ + return ((val) << DPU_FEATURE_MODE_CFG_TP_EN__SHIFT) & DPU_FEATURE_MODE_CFG_TP_EN__MASK; +} +#define DPU_FEATURE_MODE_CFG_RGP_TYPE__MASK 0x3c000000 +#define DPU_FEATURE_MODE_CFG_RGP_TYPE__SHIFT 26 +static inline uint32_t DPU_FEATURE_MODE_CFG_RGP_TYPE(uint32_t val) +{ + return ((val) << DPU_FEATURE_MODE_CFG_RGP_TYPE__SHIFT) & DPU_FEATURE_MODE_CFG_RGP_TYPE__MASK; +} +#define DPU_FEATURE_MODE_CFG_NONALIGN__MASK 0x02000000 +#define DPU_FEATURE_MODE_CFG_NONALIGN__SHIFT 25 +static inline uint32_t DPU_FEATURE_MODE_CFG_NONALIGN(uint32_t val) +{ + return ((val) << DPU_FEATURE_MODE_CFG_NONALIGN__SHIFT) & DPU_FEATURE_MODE_CFG_NONALIGN__MASK; +} +#define DPU_FEATURE_MODE_CFG_SURF_LEN__MASK 0x01fffe00 +#define DPU_FEATURE_MODE_CFG_SURF_LEN__SHIFT 9 +static inline uint32_t DPU_FEATURE_MODE_CFG_SURF_LEN(uint32_t val) +{ + return ((val) << DPU_FEATURE_MODE_CFG_SURF_LEN__SHIFT) & DPU_FEATURE_MODE_CFG_SURF_LEN__MASK; +} +#define DPU_FEATURE_MODE_CFG_BURST_LEN__MASK 0x000001e0 +#define DPU_FEATURE_MODE_CFG_BURST_LEN__SHIFT 5 +static inline uint32_t DPU_FEATURE_MODE_CFG_BURST_LEN(uint32_t val) +{ + return ((val) << DPU_FEATURE_MODE_CFG_BURST_LEN__SHIFT) & DPU_FEATURE_MODE_CFG_BURST_LEN__MASK; +} +#define DPU_FEATURE_MODE_CFG_CONV_MODE__MASK 0x00000018 +#define DPU_FEATURE_MODE_CFG_CONV_MODE__SHIFT 3 +static inline uint32_t DPU_FEATURE_MODE_CFG_CONV_MODE(uint32_t val) +{ + return ((val) << DPU_FEATURE_MODE_CFG_CONV_MODE__SHIFT) & DPU_FEATURE_MODE_CFG_CONV_MODE__MASK; +} +#define DPU_FEATURE_MODE_CFG_OUTPUT_MODE__MASK 0x00000006 +#define 
DPU_FEATURE_MODE_CFG_OUTPUT_MODE__SHIFT 1 +static inline uint32_t DPU_FEATURE_MODE_CFG_OUTPUT_MODE(uint32_t val) +{ + return ((val) << DPU_FEATURE_MODE_CFG_OUTPUT_MODE__SHIFT) & DPU_FEATURE_MODE_CFG_OUTPUT_MODE__MASK; +} +#define DPU_FEATURE_MODE_CFG_FLYING_MODE__MASK 0x00000001 +#define DPU_FEATURE_MODE_CFG_FLYING_MODE__SHIFT 0 +static inline uint32_t DPU_FEATURE_MODE_CFG_FLYING_MODE(uint32_t val) +{ + return ((val) << DPU_FEATURE_MODE_CFG_FLYING_MODE__SHIFT) & DPU_FEATURE_MODE_CFG_FLYING_MODE__MASK; +} + +#define REG_DPU_DATA_FORMAT 0x00004010 +#define DPU_DATA_FORMAT_OUT_PRECISION__MASK 0xe0000000 +#define DPU_DATA_FORMAT_OUT_PRECISION__SHIFT 29 +static inline uint32_t DPU_DATA_FORMAT_OUT_PRECISION(uint32_t val) +{ + return ((val) << DPU_DATA_FORMAT_OUT_PRECISION__SHIFT) & DPU_DATA_FORMAT_OUT_PRECISION__MASK; +} +#define DPU_DATA_FORMAT_IN_PRECISION__MASK 0x1c000000 +#define DPU_DATA_FORMAT_IN_PRECISION__SHIFT 26 +static inline uint32_t DPU_DATA_FORMAT_IN_PRECISION(uint32_t val) +{ + return ((val) << DPU_DATA_FORMAT_IN_PRECISION__SHIFT) & DPU_DATA_FORMAT_IN_PRECISION__MASK; +} +#define DPU_DATA_FORMAT_EW_TRUNCATE_NEG__MASK 0x03ff0000 +#define DPU_DATA_FORMAT_EW_TRUNCATE_NEG__SHIFT 16 +static inline uint32_t DPU_DATA_FORMAT_EW_TRUNCATE_NEG(uint32_t val) +{ + return ((val) << DPU_DATA_FORMAT_EW_TRUNCATE_NEG__SHIFT) & DPU_DATA_FORMAT_EW_TRUNCATE_NEG__MASK; +} +#define DPU_DATA_FORMAT_BN_MUL_SHIFT_VALUE_NEG__MASK 0x0000fc00 +#define DPU_DATA_FORMAT_BN_MUL_SHIFT_VALUE_NEG__SHIFT 10 +static inline uint32_t DPU_DATA_FORMAT_BN_MUL_SHIFT_VALUE_NEG(uint32_t val) +{ + return ((val) << DPU_DATA_FORMAT_BN_MUL_SHIFT_VALUE_NEG__SHIFT) & DPU_DATA_FORMAT_BN_MUL_SHIFT_VALUE_NEG__MASK; +} +#define DPU_DATA_FORMAT_BS_MUL_SHIFT_VALUE_NEG__MASK 0x000003f0 +#define DPU_DATA_FORMAT_BS_MUL_SHIFT_VALUE_NEG__SHIFT 4 +static inline uint32_t DPU_DATA_FORMAT_BS_MUL_SHIFT_VALUE_NEG(uint32_t val) +{ + return ((val) << DPU_DATA_FORMAT_BS_MUL_SHIFT_VALUE_NEG__SHIFT) & 
DPU_DATA_FORMAT_BS_MUL_SHIFT_VALUE_NEG__MASK; +} +#define DPU_DATA_FORMAT_MC_SURF_OUT__MASK 0x00000008 +#define DPU_DATA_FORMAT_MC_SURF_OUT__SHIFT 3 +static inline uint32_t DPU_DATA_FORMAT_MC_SURF_OUT(uint32_t val) +{ + return ((val) << DPU_DATA_FORMAT_MC_SURF_OUT__SHIFT) & DPU_DATA_FORMAT_MC_SURF_OUT__MASK; +} +#define DPU_DATA_FORMAT_PROC_PRECISION__MASK 0x00000007 +#define DPU_DATA_FORMAT_PROC_PRECISION__SHIFT 0 +static inline uint32_t DPU_DATA_FORMAT_PROC_PRECISION(uint32_t val) +{ + return ((val) << DPU_DATA_FORMAT_PROC_PRECISION__SHIFT) & DPU_DATA_FORMAT_PROC_PRECISION__MASK; +} + +#define REG_DPU_OFFSET_PEND 0x00004014 +#define DPU_OFFSET_PEND_RESERVED_0__MASK 0xffff0000 +#define DPU_OFFSET_PEND_RESERVED_0__SHIFT 16 +static inline uint32_t DPU_OFFSET_PEND_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_OFFSET_PEND_RESERVED_0__SHIFT) & DPU_OFFSET_PEND_RESERVED_0__MASK; +} +#define DPU_OFFSET_PEND_OFFSET_PEND__MASK 0x0000ffff +#define DPU_OFFSET_PEND_OFFSET_PEND__SHIFT 0 +static inline uint32_t DPU_OFFSET_PEND_OFFSET_PEND(uint32_t val) +{ + return ((val) << DPU_OFFSET_PEND_OFFSET_PEND__SHIFT) & DPU_OFFSET_PEND_OFFSET_PEND__MASK; +} + +#define REG_DPU_DST_BASE_ADDR 0x00004020 +#define DPU_DST_BASE_ADDR_DST_BASE_ADDR__MASK 0xffffffff +#define DPU_DST_BASE_ADDR_DST_BASE_ADDR__SHIFT 0 +static inline uint32_t DPU_DST_BASE_ADDR_DST_BASE_ADDR(uint32_t val) +{ + return ((val) << DPU_DST_BASE_ADDR_DST_BASE_ADDR__SHIFT) & DPU_DST_BASE_ADDR_DST_BASE_ADDR__MASK; +} + +#define REG_DPU_DST_SURF_STRIDE 0x00004024 +#define DPU_DST_SURF_STRIDE_DST_SURF_STRIDE__MASK 0xfffffff0 +#define DPU_DST_SURF_STRIDE_DST_SURF_STRIDE__SHIFT 4 +static inline uint32_t DPU_DST_SURF_STRIDE_DST_SURF_STRIDE(uint32_t val) +{ + return ((val) << DPU_DST_SURF_STRIDE_DST_SURF_STRIDE__SHIFT) & DPU_DST_SURF_STRIDE_DST_SURF_STRIDE__MASK; +} +#define DPU_DST_SURF_STRIDE_RESERVED_0__MASK 0x0000000f +#define DPU_DST_SURF_STRIDE_RESERVED_0__SHIFT 0 +static inline uint32_t 
DPU_DST_SURF_STRIDE_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_DST_SURF_STRIDE_RESERVED_0__SHIFT) & DPU_DST_SURF_STRIDE_RESERVED_0__MASK; +} + +#define REG_DPU_DATA_CUBE_WIDTH 0x00004030 +#define DPU_DATA_CUBE_WIDTH_RESERVED_0__MASK 0xffffe000 +#define DPU_DATA_CUBE_WIDTH_RESERVED_0__SHIFT 13 +static inline uint32_t DPU_DATA_CUBE_WIDTH_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_DATA_CUBE_WIDTH_RESERVED_0__SHIFT) & DPU_DATA_CUBE_WIDTH_RESERVED_0__MASK; +} +#define DPU_DATA_CUBE_WIDTH_WIDTH__MASK 0x00001fff +#define DPU_DATA_CUBE_WIDTH_WIDTH__SHIFT 0 +static inline uint32_t DPU_DATA_CUBE_WIDTH_WIDTH(uint32_t val) +{ + return ((val) << DPU_DATA_CUBE_WIDTH_WIDTH__SHIFT) & DPU_DATA_CUBE_WIDTH_WIDTH__MASK; +} + +#define REG_DPU_DATA_CUBE_HEIGHT 0x00004034 +#define DPU_DATA_CUBE_HEIGHT_RESERVED_0__MASK 0xfe000000 +#define DPU_DATA_CUBE_HEIGHT_RESERVED_0__SHIFT 25 +static inline uint32_t DPU_DATA_CUBE_HEIGHT_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_DATA_CUBE_HEIGHT_RESERVED_0__SHIFT) & DPU_DATA_CUBE_HEIGHT_RESERVED_0__MASK; +} +#define DPU_DATA_CUBE_HEIGHT_MINMAX_CTL__MASK 0x01c00000 +#define DPU_DATA_CUBE_HEIGHT_MINMAX_CTL__SHIFT 22 +static inline uint32_t DPU_DATA_CUBE_HEIGHT_MINMAX_CTL(uint32_t val) +{ + return ((val) << DPU_DATA_CUBE_HEIGHT_MINMAX_CTL__SHIFT) & DPU_DATA_CUBE_HEIGHT_MINMAX_CTL__MASK; +} +#define DPU_DATA_CUBE_HEIGHT_RESERVED_1__MASK 0x003fe000 +#define DPU_DATA_CUBE_HEIGHT_RESERVED_1__SHIFT 13 +static inline uint32_t DPU_DATA_CUBE_HEIGHT_RESERVED_1(uint32_t val) +{ + return ((val) << DPU_DATA_CUBE_HEIGHT_RESERVED_1__SHIFT) & DPU_DATA_CUBE_HEIGHT_RESERVED_1__MASK; +} +#define DPU_DATA_CUBE_HEIGHT_HEIGHT__MASK 0x00001fff +#define DPU_DATA_CUBE_HEIGHT_HEIGHT__SHIFT 0 +static inline uint32_t DPU_DATA_CUBE_HEIGHT_HEIGHT(uint32_t val) +{ + return ((val) << DPU_DATA_CUBE_HEIGHT_HEIGHT__SHIFT) & DPU_DATA_CUBE_HEIGHT_HEIGHT__MASK; +} + +#define REG_DPU_DATA_CUBE_NOTCH_ADDR 0x00004038 +#define DPU_DATA_CUBE_NOTCH_ADDR_RESERVED_0__MASK 
0xe0000000 +#define DPU_DATA_CUBE_NOTCH_ADDR_RESERVED_0__SHIFT 29 +static inline uint32_t DPU_DATA_CUBE_NOTCH_ADDR_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_DATA_CUBE_NOTCH_ADDR_RESERVED_0__SHIFT) & DPU_DATA_CUBE_NOTCH_ADDR_RESERVED_0__MASK; +} +#define DPU_DATA_CUBE_NOTCH_ADDR_NOTCH_ADDR_1__MASK 0x1fff0000 +#define DPU_DATA_CUBE_NOTCH_ADDR_NOTCH_ADDR_1__SHIFT 16 +static inline uint32_t DPU_DATA_CUBE_NOTCH_ADDR_NOTCH_ADDR_1(uint32_t val) +{ + return ((val) << DPU_DATA_CUBE_NOTCH_ADDR_NOTCH_ADDR_1__SHIFT) & DPU_DATA_CUBE_NOTCH_ADDR_NOTCH_ADDR_1__MASK; +} +#define DPU_DATA_CUBE_NOTCH_ADDR_RESERVED_1__MASK 0x0000e000 +#define DPU_DATA_CUBE_NOTCH_ADDR_RESERVED_1__SHIFT 13 +static inline uint32_t DPU_DATA_CUBE_NOTCH_ADDR_RESERVED_1(uint32_t val) +{ + return ((val) << DPU_DATA_CUBE_NOTCH_ADDR_RESERVED_1__SHIFT) & DPU_DATA_CUBE_NOTCH_ADDR_RESERVED_1__MASK; +} +#define DPU_DATA_CUBE_NOTCH_ADDR_NOTCH_ADDR_0__MASK 0x00001fff +#define DPU_DATA_CUBE_NOTCH_ADDR_NOTCH_ADDR_0__SHIFT 0 +static inline uint32_t DPU_DATA_CUBE_NOTCH_ADDR_NOTCH_ADDR_0(uint32_t val) +{ + return ((val) << DPU_DATA_CUBE_NOTCH_ADDR_NOTCH_ADDR_0__SHIFT) & DPU_DATA_CUBE_NOTCH_ADDR_NOTCH_ADDR_0__MASK; +} + +#define REG_DPU_DATA_CUBE_CHANNEL 0x0000403c +#define DPU_DATA_CUBE_CHANNEL_RESERVED_0__MASK 0xe0000000 +#define DPU_DATA_CUBE_CHANNEL_RESERVED_0__SHIFT 29 +static inline uint32_t DPU_DATA_CUBE_CHANNEL_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_DATA_CUBE_CHANNEL_RESERVED_0__SHIFT) & DPU_DATA_CUBE_CHANNEL_RESERVED_0__MASK; +} +#define DPU_DATA_CUBE_CHANNEL_ORIG_CHANNEL__MASK 0x1fff0000 +#define DPU_DATA_CUBE_CHANNEL_ORIG_CHANNEL__SHIFT 16 +static inline uint32_t DPU_DATA_CUBE_CHANNEL_ORIG_CHANNEL(uint32_t val) +{ + return ((val) << DPU_DATA_CUBE_CHANNEL_ORIG_CHANNEL__SHIFT) & DPU_DATA_CUBE_CHANNEL_ORIG_CHANNEL__MASK; +} +#define DPU_DATA_CUBE_CHANNEL_RESERVED_1__MASK 0x0000e000 +#define DPU_DATA_CUBE_CHANNEL_RESERVED_1__SHIFT 13 +static inline uint32_t 
DPU_DATA_CUBE_CHANNEL_RESERVED_1(uint32_t val) +{ + return ((val) << DPU_DATA_CUBE_CHANNEL_RESERVED_1__SHIFT) & DPU_DATA_CUBE_CHANNEL_RESERVED_1__MASK; +} +#define DPU_DATA_CUBE_CHANNEL_CHANNEL__MASK 0x00001fff +#define DPU_DATA_CUBE_CHANNEL_CHANNEL__SHIFT 0 +static inline uint32_t DPU_DATA_CUBE_CHANNEL_CHANNEL(uint32_t val) +{ + return ((val) << DPU_DATA_CUBE_CHANNEL_CHANNEL__SHIFT) & DPU_DATA_CUBE_CHANNEL_CHANNEL__MASK; +} + +#define REG_DPU_BS_CFG 0x00004040 +#define DPU_BS_CFG_RESERVED_0__MASK 0xfff00000 +#define DPU_BS_CFG_RESERVED_0__SHIFT 20 +static inline uint32_t DPU_BS_CFG_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_BS_CFG_RESERVED_0__SHIFT) & DPU_BS_CFG_RESERVED_0__MASK; +} +#define DPU_BS_CFG_BS_ALU_ALGO__MASK 0x000f0000 +#define DPU_BS_CFG_BS_ALU_ALGO__SHIFT 16 +static inline uint32_t DPU_BS_CFG_BS_ALU_ALGO(uint32_t val) +{ + return ((val) << DPU_BS_CFG_BS_ALU_ALGO__SHIFT) & DPU_BS_CFG_BS_ALU_ALGO__MASK; +} +#define DPU_BS_CFG_RESERVED_1__MASK 0x0000fe00 +#define DPU_BS_CFG_RESERVED_1__SHIFT 9 +static inline uint32_t DPU_BS_CFG_RESERVED_1(uint32_t val) +{ + return ((val) << DPU_BS_CFG_RESERVED_1__SHIFT) & DPU_BS_CFG_RESERVED_1__MASK; +} +#define DPU_BS_CFG_BS_ALU_SRC__MASK 0x00000100 +#define DPU_BS_CFG_BS_ALU_SRC__SHIFT 8 +static inline uint32_t DPU_BS_CFG_BS_ALU_SRC(uint32_t val) +{ + return ((val) << DPU_BS_CFG_BS_ALU_SRC__SHIFT) & DPU_BS_CFG_BS_ALU_SRC__MASK; +} +#define DPU_BS_CFG_BS_RELUX_EN__MASK 0x00000080 +#define DPU_BS_CFG_BS_RELUX_EN__SHIFT 7 +static inline uint32_t DPU_BS_CFG_BS_RELUX_EN(uint32_t val) +{ + return ((val) << DPU_BS_CFG_BS_RELUX_EN__SHIFT) & DPU_BS_CFG_BS_RELUX_EN__MASK; +} +#define DPU_BS_CFG_BS_RELU_BYPASS__MASK 0x00000040 +#define DPU_BS_CFG_BS_RELU_BYPASS__SHIFT 6 +static inline uint32_t DPU_BS_CFG_BS_RELU_BYPASS(uint32_t val) +{ + return ((val) << DPU_BS_CFG_BS_RELU_BYPASS__SHIFT) & DPU_BS_CFG_BS_RELU_BYPASS__MASK; +} +#define DPU_BS_CFG_BS_MUL_PRELU__MASK 0x00000020 +#define DPU_BS_CFG_BS_MUL_PRELU__SHIFT 5 
+static inline uint32_t DPU_BS_CFG_BS_MUL_PRELU(uint32_t val) +{ + return ((val) << DPU_BS_CFG_BS_MUL_PRELU__SHIFT) & DPU_BS_CFG_BS_MUL_PRELU__MASK; +} +#define DPU_BS_CFG_BS_MUL_BYPASS__MASK 0x00000010 +#define DPU_BS_CFG_BS_MUL_BYPASS__SHIFT 4 +static inline uint32_t DPU_BS_CFG_BS_MUL_BYPASS(uint32_t val) +{ + return ((val) << DPU_BS_CFG_BS_MUL_BYPASS__SHIFT) & DPU_BS_CFG_BS_MUL_BYPASS__MASK; +} +#define DPU_BS_CFG_RESERVED_2__MASK 0x0000000c +#define DPU_BS_CFG_RESERVED_2__SHIFT 2 +static inline uint32_t DPU_BS_CFG_RESERVED_2(uint32_t val) +{ + return ((val) << DPU_BS_CFG_RESERVED_2__SHIFT) & DPU_BS_CFG_RESERVED_2__MASK; +} +#define DPU_BS_CFG_BS_ALU_BYPASS__MASK 0x00000002 +#define DPU_BS_CFG_BS_ALU_BYPASS__SHIFT 1 +static inline uint32_t DPU_BS_CFG_BS_ALU_BYPASS(uint32_t val) +{ + return ((val) << DPU_BS_CFG_BS_ALU_BYPASS__SHIFT) & DPU_BS_CFG_BS_ALU_BYPASS__MASK; +} +#define DPU_BS_CFG_BS_BYPASS__MASK 0x00000001 +#define DPU_BS_CFG_BS_BYPASS__SHIFT 0 +static inline uint32_t DPU_BS_CFG_BS_BYPASS(uint32_t val) +{ + return ((val) << DPU_BS_CFG_BS_BYPASS__SHIFT) & DPU_BS_CFG_BS_BYPASS__MASK; +} + +#define REG_DPU_BS_ALU_CFG 0x00004044 +#define DPU_BS_ALU_CFG_BS_ALU_OPERAND__MASK 0xffffffff +#define DPU_BS_ALU_CFG_BS_ALU_OPERAND__SHIFT 0 +static inline uint32_t DPU_BS_ALU_CFG_BS_ALU_OPERAND(uint32_t val) +{ + return ((val) << DPU_BS_ALU_CFG_BS_ALU_OPERAND__SHIFT) & DPU_BS_ALU_CFG_BS_ALU_OPERAND__MASK; +} + +#define REG_DPU_BS_MUL_CFG 0x00004048 +#define DPU_BS_MUL_CFG_BS_MUL_OPERAND__MASK 0xffff0000 +#define DPU_BS_MUL_CFG_BS_MUL_OPERAND__SHIFT 16 +static inline uint32_t DPU_BS_MUL_CFG_BS_MUL_OPERAND(uint32_t val) +{ + return ((val) << DPU_BS_MUL_CFG_BS_MUL_OPERAND__SHIFT) & DPU_BS_MUL_CFG_BS_MUL_OPERAND__MASK; +} +#define DPU_BS_MUL_CFG_RESERVED_0__MASK 0x0000c000 +#define DPU_BS_MUL_CFG_RESERVED_0__SHIFT 14 +static inline uint32_t DPU_BS_MUL_CFG_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_BS_MUL_CFG_RESERVED_0__SHIFT) & DPU_BS_MUL_CFG_RESERVED_0__MASK; +} 
/*
 * Auto-generated DPU register field definitions, continued (cgit diff
 * residue; '+' are diff markers).  Covered here: the remaining
 * REG_DPU_BS_MUL_CFG fields, REG_DPU_BS_RELUX_CMP_VALUE (0x404c),
 * REG_DPU_BS_OW_CFG/REG_DPU_BS_OW_OP (0x4050/0x4054, output-writer
 * element sizes and operand), the write-DMA cube size registers
 * REG_DPU_WDMA_SIZE_0/1 (0x4058/0x405c), the batch-norm stage registers
 * REG_DPU_BN_CFG/BN_ALU_CFG/BN_MUL_CFG/BN_RELUX_CMP_VALUE (0x4060-0x406c,
 * mirroring the BS_* stage layout), the element-wise stage
 * REG_DPU_EW_CFG (0x4070) with its converter registers
 * REG_DPU_EW_CVT_OFFSET/SCALE_VALUE and REG_DPU_EW_RELUX_CMP_VALUE
 * (0x4074-0x407c), and the output converter REG_DPU_OUT_CVT_OFFSET/
 * SCALE/SHIFT head (0x4080-0x4088).  Same generated pattern throughout:
 * __MASK/__SHIFT pair plus (val << __SHIFT) & __MASK encoder.  Do not
 * hand-edit the constants; regenerate instead.
 */
+#define DPU_BS_MUL_CFG_BS_MUL_SHIFT_VALUE__MASK 0x00003f00 +#define DPU_BS_MUL_CFG_BS_MUL_SHIFT_VALUE__SHIFT 8 +static inline uint32_t DPU_BS_MUL_CFG_BS_MUL_SHIFT_VALUE(uint32_t val) +{ + return ((val) << DPU_BS_MUL_CFG_BS_MUL_SHIFT_VALUE__SHIFT) & DPU_BS_MUL_CFG_BS_MUL_SHIFT_VALUE__MASK; +} +#define DPU_BS_MUL_CFG_RESERVED_1__MASK 0x000000fc +#define DPU_BS_MUL_CFG_RESERVED_1__SHIFT 2 +static inline uint32_t DPU_BS_MUL_CFG_RESERVED_1(uint32_t val) +{ + return ((val) << DPU_BS_MUL_CFG_RESERVED_1__SHIFT) & DPU_BS_MUL_CFG_RESERVED_1__MASK; +} +#define DPU_BS_MUL_CFG_BS_TRUNCATE_SRC__MASK 0x00000002 +#define DPU_BS_MUL_CFG_BS_TRUNCATE_SRC__SHIFT 1 +static inline uint32_t DPU_BS_MUL_CFG_BS_TRUNCATE_SRC(uint32_t val) +{ + return ((val) << DPU_BS_MUL_CFG_BS_TRUNCATE_SRC__SHIFT) & DPU_BS_MUL_CFG_BS_TRUNCATE_SRC__MASK; +} +#define DPU_BS_MUL_CFG_BS_MUL_SRC__MASK 0x00000001 +#define DPU_BS_MUL_CFG_BS_MUL_SRC__SHIFT 0 +static inline uint32_t DPU_BS_MUL_CFG_BS_MUL_SRC(uint32_t val) +{ + return ((val) << DPU_BS_MUL_CFG_BS_MUL_SRC__SHIFT) & DPU_BS_MUL_CFG_BS_MUL_SRC__MASK; +} + +#define REG_DPU_BS_RELUX_CMP_VALUE 0x0000404c +#define DPU_BS_RELUX_CMP_VALUE_BS_RELUX_CMP_DAT__MASK 0xffffffff +#define DPU_BS_RELUX_CMP_VALUE_BS_RELUX_CMP_DAT__SHIFT 0 +static inline uint32_t DPU_BS_RELUX_CMP_VALUE_BS_RELUX_CMP_DAT(uint32_t val) +{ + return ((val) << DPU_BS_RELUX_CMP_VALUE_BS_RELUX_CMP_DAT__SHIFT) & DPU_BS_RELUX_CMP_VALUE_BS_RELUX_CMP_DAT__MASK; +} + +#define REG_DPU_BS_OW_CFG 0x00004050 +#define DPU_BS_OW_CFG_RGP_CNTER__MASK 0xf0000000 +#define DPU_BS_OW_CFG_RGP_CNTER__SHIFT 28 +static inline uint32_t DPU_BS_OW_CFG_RGP_CNTER(uint32_t val) +{ + return ((val) << DPU_BS_OW_CFG_RGP_CNTER__SHIFT) & DPU_BS_OW_CFG_RGP_CNTER__MASK; +} +#define DPU_BS_OW_CFG_TP_ORG_EN__MASK 0x08000000 +#define DPU_BS_OW_CFG_TP_ORG_EN__SHIFT 27 +static inline uint32_t DPU_BS_OW_CFG_TP_ORG_EN(uint32_t val) +{ + return ((val) << DPU_BS_OW_CFG_TP_ORG_EN__SHIFT) & DPU_BS_OW_CFG_TP_ORG_EN__MASK; +} +#define 
DPU_BS_OW_CFG_RESERVED_0__MASK 0x07fff800 +#define DPU_BS_OW_CFG_RESERVED_0__SHIFT 11 +static inline uint32_t DPU_BS_OW_CFG_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_BS_OW_CFG_RESERVED_0__SHIFT) & DPU_BS_OW_CFG_RESERVED_0__MASK; +} +#define DPU_BS_OW_CFG_SIZE_E_2__MASK 0x00000700 +#define DPU_BS_OW_CFG_SIZE_E_2__SHIFT 8 +static inline uint32_t DPU_BS_OW_CFG_SIZE_E_2(uint32_t val) +{ + return ((val) << DPU_BS_OW_CFG_SIZE_E_2__SHIFT) & DPU_BS_OW_CFG_SIZE_E_2__MASK; +} +#define DPU_BS_OW_CFG_SIZE_E_1__MASK 0x000000e0 +#define DPU_BS_OW_CFG_SIZE_E_1__SHIFT 5 +static inline uint32_t DPU_BS_OW_CFG_SIZE_E_1(uint32_t val) +{ + return ((val) << DPU_BS_OW_CFG_SIZE_E_1__SHIFT) & DPU_BS_OW_CFG_SIZE_E_1__MASK; +} +#define DPU_BS_OW_CFG_SIZE_E_0__MASK 0x0000001c +#define DPU_BS_OW_CFG_SIZE_E_0__SHIFT 2 +static inline uint32_t DPU_BS_OW_CFG_SIZE_E_0(uint32_t val) +{ + return ((val) << DPU_BS_OW_CFG_SIZE_E_0__SHIFT) & DPU_BS_OW_CFG_SIZE_E_0__MASK; +} +#define DPU_BS_OW_CFG_OD_BYPASS__MASK 0x00000002 +#define DPU_BS_OW_CFG_OD_BYPASS__SHIFT 1 +static inline uint32_t DPU_BS_OW_CFG_OD_BYPASS(uint32_t val) +{ + return ((val) << DPU_BS_OW_CFG_OD_BYPASS__SHIFT) & DPU_BS_OW_CFG_OD_BYPASS__MASK; +} +#define DPU_BS_OW_CFG_OW_SRC__MASK 0x00000001 +#define DPU_BS_OW_CFG_OW_SRC__SHIFT 0 +static inline uint32_t DPU_BS_OW_CFG_OW_SRC(uint32_t val) +{ + return ((val) << DPU_BS_OW_CFG_OW_SRC__SHIFT) & DPU_BS_OW_CFG_OW_SRC__MASK; +} + +#define REG_DPU_BS_OW_OP 0x00004054 +#define DPU_BS_OW_OP_RESERVED_0__MASK 0xffff0000 +#define DPU_BS_OW_OP_RESERVED_0__SHIFT 16 +static inline uint32_t DPU_BS_OW_OP_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_BS_OW_OP_RESERVED_0__SHIFT) & DPU_BS_OW_OP_RESERVED_0__MASK; +} +#define DPU_BS_OW_OP_OW_OP__MASK 0x0000ffff +#define DPU_BS_OW_OP_OW_OP__SHIFT 0 +static inline uint32_t DPU_BS_OW_OP_OW_OP(uint32_t val) +{ + return ((val) << DPU_BS_OW_OP_OW_OP__SHIFT) & DPU_BS_OW_OP_OW_OP__MASK; +} + +#define REG_DPU_WDMA_SIZE_0 0x00004058 +#define 
DPU_WDMA_SIZE_0_RESERVED_0__MASK 0xf0000000 +#define DPU_WDMA_SIZE_0_RESERVED_0__SHIFT 28 +static inline uint32_t DPU_WDMA_SIZE_0_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_WDMA_SIZE_0_RESERVED_0__SHIFT) & DPU_WDMA_SIZE_0_RESERVED_0__MASK; +} +#define DPU_WDMA_SIZE_0_TP_PRECISION__MASK 0x08000000 +#define DPU_WDMA_SIZE_0_TP_PRECISION__SHIFT 27 +static inline uint32_t DPU_WDMA_SIZE_0_TP_PRECISION(uint32_t val) +{ + return ((val) << DPU_WDMA_SIZE_0_TP_PRECISION__SHIFT) & DPU_WDMA_SIZE_0_TP_PRECISION__MASK; +} +#define DPU_WDMA_SIZE_0_SIZE_C_WDMA__MASK 0x07ff0000 +#define DPU_WDMA_SIZE_0_SIZE_C_WDMA__SHIFT 16 +static inline uint32_t DPU_WDMA_SIZE_0_SIZE_C_WDMA(uint32_t val) +{ + return ((val) << DPU_WDMA_SIZE_0_SIZE_C_WDMA__SHIFT) & DPU_WDMA_SIZE_0_SIZE_C_WDMA__MASK; +} +#define DPU_WDMA_SIZE_0_RESERVED_1__MASK 0x0000e000 +#define DPU_WDMA_SIZE_0_RESERVED_1__SHIFT 13 +static inline uint32_t DPU_WDMA_SIZE_0_RESERVED_1(uint32_t val) +{ + return ((val) << DPU_WDMA_SIZE_0_RESERVED_1__SHIFT) & DPU_WDMA_SIZE_0_RESERVED_1__MASK; +} +#define DPU_WDMA_SIZE_0_CHANNEL_WDMA__MASK 0x00001fff +#define DPU_WDMA_SIZE_0_CHANNEL_WDMA__SHIFT 0 +static inline uint32_t DPU_WDMA_SIZE_0_CHANNEL_WDMA(uint32_t val) +{ + return ((val) << DPU_WDMA_SIZE_0_CHANNEL_WDMA__SHIFT) & DPU_WDMA_SIZE_0_CHANNEL_WDMA__MASK; +} + +#define REG_DPU_WDMA_SIZE_1 0x0000405c +#define DPU_WDMA_SIZE_1_RESERVED_0__MASK 0xe0000000 +#define DPU_WDMA_SIZE_1_RESERVED_0__SHIFT 29 +static inline uint32_t DPU_WDMA_SIZE_1_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_WDMA_SIZE_1_RESERVED_0__SHIFT) & DPU_WDMA_SIZE_1_RESERVED_0__MASK; +} +#define DPU_WDMA_SIZE_1_HEIGHT_WDMA__MASK 0x1fff0000 +#define DPU_WDMA_SIZE_1_HEIGHT_WDMA__SHIFT 16 +static inline uint32_t DPU_WDMA_SIZE_1_HEIGHT_WDMA(uint32_t val) +{ + return ((val) << DPU_WDMA_SIZE_1_HEIGHT_WDMA__SHIFT) & DPU_WDMA_SIZE_1_HEIGHT_WDMA__MASK; +} +#define DPU_WDMA_SIZE_1_RESERVED_1__MASK 0x0000e000 +#define DPU_WDMA_SIZE_1_RESERVED_1__SHIFT 13 +static inline 
uint32_t DPU_WDMA_SIZE_1_RESERVED_1(uint32_t val) +{ + return ((val) << DPU_WDMA_SIZE_1_RESERVED_1__SHIFT) & DPU_WDMA_SIZE_1_RESERVED_1__MASK; +} +#define DPU_WDMA_SIZE_1_WIDTH_WDMA__MASK 0x00001fff +#define DPU_WDMA_SIZE_1_WIDTH_WDMA__SHIFT 0 +static inline uint32_t DPU_WDMA_SIZE_1_WIDTH_WDMA(uint32_t val) +{ + return ((val) << DPU_WDMA_SIZE_1_WIDTH_WDMA__SHIFT) & DPU_WDMA_SIZE_1_WIDTH_WDMA__MASK; +} + +#define REG_DPU_BN_CFG 0x00004060 +#define DPU_BN_CFG_RESERVED_0__MASK 0xfff00000 +#define DPU_BN_CFG_RESERVED_0__SHIFT 20 +static inline uint32_t DPU_BN_CFG_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_BN_CFG_RESERVED_0__SHIFT) & DPU_BN_CFG_RESERVED_0__MASK; +} +#define DPU_BN_CFG_BN_ALU_ALGO__MASK 0x000f0000 +#define DPU_BN_CFG_BN_ALU_ALGO__SHIFT 16 +static inline uint32_t DPU_BN_CFG_BN_ALU_ALGO(uint32_t val) +{ + return ((val) << DPU_BN_CFG_BN_ALU_ALGO__SHIFT) & DPU_BN_CFG_BN_ALU_ALGO__MASK; +} +#define DPU_BN_CFG_RESERVED_1__MASK 0x0000fe00 +#define DPU_BN_CFG_RESERVED_1__SHIFT 9 +static inline uint32_t DPU_BN_CFG_RESERVED_1(uint32_t val) +{ + return ((val) << DPU_BN_CFG_RESERVED_1__SHIFT) & DPU_BN_CFG_RESERVED_1__MASK; +} +#define DPU_BN_CFG_BN_ALU_SRC__MASK 0x00000100 +#define DPU_BN_CFG_BN_ALU_SRC__SHIFT 8 +static inline uint32_t DPU_BN_CFG_BN_ALU_SRC(uint32_t val) +{ + return ((val) << DPU_BN_CFG_BN_ALU_SRC__SHIFT) & DPU_BN_CFG_BN_ALU_SRC__MASK; +} +#define DPU_BN_CFG_BN_RELUX_EN__MASK 0x00000080 +#define DPU_BN_CFG_BN_RELUX_EN__SHIFT 7 +static inline uint32_t DPU_BN_CFG_BN_RELUX_EN(uint32_t val) +{ + return ((val) << DPU_BN_CFG_BN_RELUX_EN__SHIFT) & DPU_BN_CFG_BN_RELUX_EN__MASK; +} +#define DPU_BN_CFG_BN_RELU_BYPASS__MASK 0x00000040 +#define DPU_BN_CFG_BN_RELU_BYPASS__SHIFT 6 +static inline uint32_t DPU_BN_CFG_BN_RELU_BYPASS(uint32_t val) +{ + return ((val) << DPU_BN_CFG_BN_RELU_BYPASS__SHIFT) & DPU_BN_CFG_BN_RELU_BYPASS__MASK; +} +#define DPU_BN_CFG_BN_MUL_PRELU__MASK 0x00000020 +#define DPU_BN_CFG_BN_MUL_PRELU__SHIFT 5 +static inline uint32_t 
DPU_BN_CFG_BN_MUL_PRELU(uint32_t val) +{ + return ((val) << DPU_BN_CFG_BN_MUL_PRELU__SHIFT) & DPU_BN_CFG_BN_MUL_PRELU__MASK; +} +#define DPU_BN_CFG_BN_MUL_BYPASS__MASK 0x00000010 +#define DPU_BN_CFG_BN_MUL_BYPASS__SHIFT 4 +static inline uint32_t DPU_BN_CFG_BN_MUL_BYPASS(uint32_t val) +{ + return ((val) << DPU_BN_CFG_BN_MUL_BYPASS__SHIFT) & DPU_BN_CFG_BN_MUL_BYPASS__MASK; +} +#define DPU_BN_CFG_RESERVED_2__MASK 0x0000000c +#define DPU_BN_CFG_RESERVED_2__SHIFT 2 +static inline uint32_t DPU_BN_CFG_RESERVED_2(uint32_t val) +{ + return ((val) << DPU_BN_CFG_RESERVED_2__SHIFT) & DPU_BN_CFG_RESERVED_2__MASK; +} +#define DPU_BN_CFG_BN_ALU_BYPASS__MASK 0x00000002 +#define DPU_BN_CFG_BN_ALU_BYPASS__SHIFT 1 +static inline uint32_t DPU_BN_CFG_BN_ALU_BYPASS(uint32_t val) +{ + return ((val) << DPU_BN_CFG_BN_ALU_BYPASS__SHIFT) & DPU_BN_CFG_BN_ALU_BYPASS__MASK; +} +#define DPU_BN_CFG_BN_BYPASS__MASK 0x00000001 +#define DPU_BN_CFG_BN_BYPASS__SHIFT 0 +static inline uint32_t DPU_BN_CFG_BN_BYPASS(uint32_t val) +{ + return ((val) << DPU_BN_CFG_BN_BYPASS__SHIFT) & DPU_BN_CFG_BN_BYPASS__MASK; +} + +#define REG_DPU_BN_ALU_CFG 0x00004064 +#define DPU_BN_ALU_CFG_BN_ALU_OPERAND__MASK 0xffffffff +#define DPU_BN_ALU_CFG_BN_ALU_OPERAND__SHIFT 0 +static inline uint32_t DPU_BN_ALU_CFG_BN_ALU_OPERAND(uint32_t val) +{ + return ((val) << DPU_BN_ALU_CFG_BN_ALU_OPERAND__SHIFT) & DPU_BN_ALU_CFG_BN_ALU_OPERAND__MASK; +} + +#define REG_DPU_BN_MUL_CFG 0x00004068 +#define DPU_BN_MUL_CFG_BN_MUL_OPERAND__MASK 0xffff0000 +#define DPU_BN_MUL_CFG_BN_MUL_OPERAND__SHIFT 16 +static inline uint32_t DPU_BN_MUL_CFG_BN_MUL_OPERAND(uint32_t val) +{ + return ((val) << DPU_BN_MUL_CFG_BN_MUL_OPERAND__SHIFT) & DPU_BN_MUL_CFG_BN_MUL_OPERAND__MASK; +} +#define DPU_BN_MUL_CFG_RESERVED_0__MASK 0x0000c000 +#define DPU_BN_MUL_CFG_RESERVED_0__SHIFT 14 +static inline uint32_t DPU_BN_MUL_CFG_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_BN_MUL_CFG_RESERVED_0__SHIFT) & DPU_BN_MUL_CFG_RESERVED_0__MASK; +} +#define 
DPU_BN_MUL_CFG_BN_MUL_SHIFT_VALUE__MASK 0x00003f00 +#define DPU_BN_MUL_CFG_BN_MUL_SHIFT_VALUE__SHIFT 8 +static inline uint32_t DPU_BN_MUL_CFG_BN_MUL_SHIFT_VALUE(uint32_t val) +{ + return ((val) << DPU_BN_MUL_CFG_BN_MUL_SHIFT_VALUE__SHIFT) & DPU_BN_MUL_CFG_BN_MUL_SHIFT_VALUE__MASK; +} +#define DPU_BN_MUL_CFG_RESERVED_1__MASK 0x000000fc +#define DPU_BN_MUL_CFG_RESERVED_1__SHIFT 2 +static inline uint32_t DPU_BN_MUL_CFG_RESERVED_1(uint32_t val) +{ + return ((val) << DPU_BN_MUL_CFG_RESERVED_1__SHIFT) & DPU_BN_MUL_CFG_RESERVED_1__MASK; +} +#define DPU_BN_MUL_CFG_BN_TRUNCATE_SRC__MASK 0x00000002 +#define DPU_BN_MUL_CFG_BN_TRUNCATE_SRC__SHIFT 1 +static inline uint32_t DPU_BN_MUL_CFG_BN_TRUNCATE_SRC(uint32_t val) +{ + return ((val) << DPU_BN_MUL_CFG_BN_TRUNCATE_SRC__SHIFT) & DPU_BN_MUL_CFG_BN_TRUNCATE_SRC__MASK; +} +#define DPU_BN_MUL_CFG_BN_MUL_SRC__MASK 0x00000001 +#define DPU_BN_MUL_CFG_BN_MUL_SRC__SHIFT 0 +static inline uint32_t DPU_BN_MUL_CFG_BN_MUL_SRC(uint32_t val) +{ + return ((val) << DPU_BN_MUL_CFG_BN_MUL_SRC__SHIFT) & DPU_BN_MUL_CFG_BN_MUL_SRC__MASK; +} + +#define REG_DPU_BN_RELUX_CMP_VALUE 0x0000406c +#define DPU_BN_RELUX_CMP_VALUE_BN_RELUX_CMP_DAT__MASK 0xffffffff +#define DPU_BN_RELUX_CMP_VALUE_BN_RELUX_CMP_DAT__SHIFT 0 +static inline uint32_t DPU_BN_RELUX_CMP_VALUE_BN_RELUX_CMP_DAT(uint32_t val) +{ + return ((val) << DPU_BN_RELUX_CMP_VALUE_BN_RELUX_CMP_DAT__SHIFT) & DPU_BN_RELUX_CMP_VALUE_BN_RELUX_CMP_DAT__MASK; +} + +#define REG_DPU_EW_CFG 0x00004070 +#define DPU_EW_CFG_EW_CVT_TYPE__MASK 0x80000000 +#define DPU_EW_CFG_EW_CVT_TYPE__SHIFT 31 +static inline uint32_t DPU_EW_CFG_EW_CVT_TYPE(uint32_t val) +{ + return ((val) << DPU_EW_CFG_EW_CVT_TYPE__SHIFT) & DPU_EW_CFG_EW_CVT_TYPE__MASK; +} +#define DPU_EW_CFG_EW_CVT_ROUND__MASK 0x40000000 +#define DPU_EW_CFG_EW_CVT_ROUND__SHIFT 30 +static inline uint32_t DPU_EW_CFG_EW_CVT_ROUND(uint32_t val) +{ + return ((val) << DPU_EW_CFG_EW_CVT_ROUND__SHIFT) & DPU_EW_CFG_EW_CVT_ROUND__MASK; +} +#define 
DPU_EW_CFG_EW_DATA_MODE__MASK 0x30000000 +#define DPU_EW_CFG_EW_DATA_MODE__SHIFT 28 +static inline uint32_t DPU_EW_CFG_EW_DATA_MODE(uint32_t val) +{ + return ((val) << DPU_EW_CFG_EW_DATA_MODE__SHIFT) & DPU_EW_CFG_EW_DATA_MODE__MASK; +} +#define DPU_EW_CFG_RESERVED_0__MASK 0x0f000000 +#define DPU_EW_CFG_RESERVED_0__SHIFT 24 +static inline uint32_t DPU_EW_CFG_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_EW_CFG_RESERVED_0__SHIFT) & DPU_EW_CFG_RESERVED_0__MASK; +} +#define DPU_EW_CFG_EDATA_SIZE__MASK 0x00c00000 +#define DPU_EW_CFG_EDATA_SIZE__SHIFT 22 +static inline uint32_t DPU_EW_CFG_EDATA_SIZE(uint32_t val) +{ + return ((val) << DPU_EW_CFG_EDATA_SIZE__SHIFT) & DPU_EW_CFG_EDATA_SIZE__MASK; +} +#define DPU_EW_CFG_EW_EQUAL_EN__MASK 0x00200000 +#define DPU_EW_CFG_EW_EQUAL_EN__SHIFT 21 +static inline uint32_t DPU_EW_CFG_EW_EQUAL_EN(uint32_t val) +{ + return ((val) << DPU_EW_CFG_EW_EQUAL_EN__SHIFT) & DPU_EW_CFG_EW_EQUAL_EN__MASK; +} +#define DPU_EW_CFG_EW_BINARY_EN__MASK 0x00100000 +#define DPU_EW_CFG_EW_BINARY_EN__SHIFT 20 +static inline uint32_t DPU_EW_CFG_EW_BINARY_EN(uint32_t val) +{ + return ((val) << DPU_EW_CFG_EW_BINARY_EN__SHIFT) & DPU_EW_CFG_EW_BINARY_EN__MASK; +} +#define DPU_EW_CFG_EW_ALU_ALGO__MASK 0x000f0000 +#define DPU_EW_CFG_EW_ALU_ALGO__SHIFT 16 +static inline uint32_t DPU_EW_CFG_EW_ALU_ALGO(uint32_t val) +{ + return ((val) << DPU_EW_CFG_EW_ALU_ALGO__SHIFT) & DPU_EW_CFG_EW_ALU_ALGO__MASK; +} +#define DPU_EW_CFG_RESERVED_1__MASK 0x0000f800 +#define DPU_EW_CFG_RESERVED_1__SHIFT 11 +static inline uint32_t DPU_EW_CFG_RESERVED_1(uint32_t val) +{ + return ((val) << DPU_EW_CFG_RESERVED_1__SHIFT) & DPU_EW_CFG_RESERVED_1__MASK; +} +#define DPU_EW_CFG_EW_RELUX_EN__MASK 0x00000400 +#define DPU_EW_CFG_EW_RELUX_EN__SHIFT 10 +static inline uint32_t DPU_EW_CFG_EW_RELUX_EN(uint32_t val) +{ + return ((val) << DPU_EW_CFG_EW_RELUX_EN__SHIFT) & DPU_EW_CFG_EW_RELUX_EN__MASK; +} +#define DPU_EW_CFG_EW_RELU_BYPASS__MASK 0x00000200 +#define DPU_EW_CFG_EW_RELU_BYPASS__SHIFT 
9 +static inline uint32_t DPU_EW_CFG_EW_RELU_BYPASS(uint32_t val) +{ + return ((val) << DPU_EW_CFG_EW_RELU_BYPASS__SHIFT) & DPU_EW_CFG_EW_RELU_BYPASS__MASK; +} +#define DPU_EW_CFG_EW_OP_CVT_BYPASS__MASK 0x00000100 +#define DPU_EW_CFG_EW_OP_CVT_BYPASS__SHIFT 8 +static inline uint32_t DPU_EW_CFG_EW_OP_CVT_BYPASS(uint32_t val) +{ + return ((val) << DPU_EW_CFG_EW_OP_CVT_BYPASS__SHIFT) & DPU_EW_CFG_EW_OP_CVT_BYPASS__MASK; +} +#define DPU_EW_CFG_EW_LUT_BYPASS__MASK 0x00000080 +#define DPU_EW_CFG_EW_LUT_BYPASS__SHIFT 7 +static inline uint32_t DPU_EW_CFG_EW_LUT_BYPASS(uint32_t val) +{ + return ((val) << DPU_EW_CFG_EW_LUT_BYPASS__SHIFT) & DPU_EW_CFG_EW_LUT_BYPASS__MASK; +} +#define DPU_EW_CFG_EW_OP_SRC__MASK 0x00000040 +#define DPU_EW_CFG_EW_OP_SRC__SHIFT 6 +static inline uint32_t DPU_EW_CFG_EW_OP_SRC(uint32_t val) +{ + return ((val) << DPU_EW_CFG_EW_OP_SRC__SHIFT) & DPU_EW_CFG_EW_OP_SRC__MASK; +} +#define DPU_EW_CFG_EW_MUL_PRELU__MASK 0x00000020 +#define DPU_EW_CFG_EW_MUL_PRELU__SHIFT 5 +static inline uint32_t DPU_EW_CFG_EW_MUL_PRELU(uint32_t val) +{ + return ((val) << DPU_EW_CFG_EW_MUL_PRELU__SHIFT) & DPU_EW_CFG_EW_MUL_PRELU__MASK; +} +#define DPU_EW_CFG_RESERVED_2__MASK 0x00000018 +#define DPU_EW_CFG_RESERVED_2__SHIFT 3 +static inline uint32_t DPU_EW_CFG_RESERVED_2(uint32_t val) +{ + return ((val) << DPU_EW_CFG_RESERVED_2__SHIFT) & DPU_EW_CFG_RESERVED_2__MASK; +} +#define DPU_EW_CFG_EW_OP_TYPE__MASK 0x00000004 +#define DPU_EW_CFG_EW_OP_TYPE__SHIFT 2 +static inline uint32_t DPU_EW_CFG_EW_OP_TYPE(uint32_t val) +{ + return ((val) << DPU_EW_CFG_EW_OP_TYPE__SHIFT) & DPU_EW_CFG_EW_OP_TYPE__MASK; +} +#define DPU_EW_CFG_EW_OP_BYPASS__MASK 0x00000002 +#define DPU_EW_CFG_EW_OP_BYPASS__SHIFT 1 +static inline uint32_t DPU_EW_CFG_EW_OP_BYPASS(uint32_t val) +{ + return ((val) << DPU_EW_CFG_EW_OP_BYPASS__SHIFT) & DPU_EW_CFG_EW_OP_BYPASS__MASK; +} +#define DPU_EW_CFG_EW_BYPASS__MASK 0x00000001 +#define DPU_EW_CFG_EW_BYPASS__SHIFT 0 +static inline uint32_t DPU_EW_CFG_EW_BYPASS(uint32_t 
val) +{ + return ((val) << DPU_EW_CFG_EW_BYPASS__SHIFT) & DPU_EW_CFG_EW_BYPASS__MASK; +} + +#define REG_DPU_EW_CVT_OFFSET_VALUE 0x00004074 +#define DPU_EW_CVT_OFFSET_VALUE_EW_OP_CVT_OFFSET__MASK 0xffffffff +#define DPU_EW_CVT_OFFSET_VALUE_EW_OP_CVT_OFFSET__SHIFT 0 +static inline uint32_t DPU_EW_CVT_OFFSET_VALUE_EW_OP_CVT_OFFSET(uint32_t val) +{ + return ((val) << DPU_EW_CVT_OFFSET_VALUE_EW_OP_CVT_OFFSET__SHIFT) & DPU_EW_CVT_OFFSET_VALUE_EW_OP_CVT_OFFSET__MASK; +} + +#define REG_DPU_EW_CVT_SCALE_VALUE 0x00004078 +#define DPU_EW_CVT_SCALE_VALUE_EW_TRUNCATE__MASK 0xffc00000 +#define DPU_EW_CVT_SCALE_VALUE_EW_TRUNCATE__SHIFT 22 +static inline uint32_t DPU_EW_CVT_SCALE_VALUE_EW_TRUNCATE(uint32_t val) +{ + return ((val) << DPU_EW_CVT_SCALE_VALUE_EW_TRUNCATE__SHIFT) & DPU_EW_CVT_SCALE_VALUE_EW_TRUNCATE__MASK; +} +#define DPU_EW_CVT_SCALE_VALUE_EW_OP_CVT_SHIFT__MASK 0x003f0000 +#define DPU_EW_CVT_SCALE_VALUE_EW_OP_CVT_SHIFT__SHIFT 16 +static inline uint32_t DPU_EW_CVT_SCALE_VALUE_EW_OP_CVT_SHIFT(uint32_t val) +{ + return ((val) << DPU_EW_CVT_SCALE_VALUE_EW_OP_CVT_SHIFT__SHIFT) & DPU_EW_CVT_SCALE_VALUE_EW_OP_CVT_SHIFT__MASK; +} +#define DPU_EW_CVT_SCALE_VALUE_EW_OP_CVT_SCALE__MASK 0x0000ffff +#define DPU_EW_CVT_SCALE_VALUE_EW_OP_CVT_SCALE__SHIFT 0 +static inline uint32_t DPU_EW_CVT_SCALE_VALUE_EW_OP_CVT_SCALE(uint32_t val) +{ + return ((val) << DPU_EW_CVT_SCALE_VALUE_EW_OP_CVT_SCALE__SHIFT) & DPU_EW_CVT_SCALE_VALUE_EW_OP_CVT_SCALE__MASK; +} + +#define REG_DPU_EW_RELUX_CMP_VALUE 0x0000407c +#define DPU_EW_RELUX_CMP_VALUE_EW_RELUX_CMP_DAT__MASK 0xffffffff +#define DPU_EW_RELUX_CMP_VALUE_EW_RELUX_CMP_DAT__SHIFT 0 +static inline uint32_t DPU_EW_RELUX_CMP_VALUE_EW_RELUX_CMP_DAT(uint32_t val) +{ + return ((val) << DPU_EW_RELUX_CMP_VALUE_EW_RELUX_CMP_DAT__SHIFT) & DPU_EW_RELUX_CMP_VALUE_EW_RELUX_CMP_DAT__MASK; +} + +#define REG_DPU_OUT_CVT_OFFSET 0x00004080 +#define DPU_OUT_CVT_OFFSET_OUT_CVT_OFFSET__MASK 0xffffffff +#define DPU_OUT_CVT_OFFSET_OUT_CVT_OFFSET__SHIFT 0 +static 
inline uint32_t DPU_OUT_CVT_OFFSET_OUT_CVT_OFFSET(uint32_t val) +{ + return ((val) << DPU_OUT_CVT_OFFSET_OUT_CVT_OFFSET__SHIFT) & DPU_OUT_CVT_OFFSET_OUT_CVT_OFFSET__MASK; +} + +#define REG_DPU_OUT_CVT_SCALE 0x00004084 +#define DPU_OUT_CVT_SCALE_RESERVED_0__MASK 0xfffe0000 +#define DPU_OUT_CVT_SCALE_RESERVED_0__SHIFT 17 +static inline uint32_t DPU_OUT_CVT_SCALE_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_OUT_CVT_SCALE_RESERVED_0__SHIFT) & DPU_OUT_CVT_SCALE_RESERVED_0__MASK; +} +#define DPU_OUT_CVT_SCALE_FP32TOFP16_EN__MASK 0x00010000 +#define DPU_OUT_CVT_SCALE_FP32TOFP16_EN__SHIFT 16 +static inline uint32_t DPU_OUT_CVT_SCALE_FP32TOFP16_EN(uint32_t val) +{ + return ((val) << DPU_OUT_CVT_SCALE_FP32TOFP16_EN__SHIFT) & DPU_OUT_CVT_SCALE_FP32TOFP16_EN__MASK; +} +#define DPU_OUT_CVT_SCALE_OUT_CVT_SCALE__MASK 0x0000ffff +#define DPU_OUT_CVT_SCALE_OUT_CVT_SCALE__SHIFT 0 +static inline uint32_t DPU_OUT_CVT_SCALE_OUT_CVT_SCALE(uint32_t val) +{ + return ((val) << DPU_OUT_CVT_SCALE_OUT_CVT_SCALE__SHIFT) & DPU_OUT_CVT_SCALE_OUT_CVT_SCALE__MASK; +} + +#define REG_DPU_OUT_CVT_SHIFT 0x00004088 +#define DPU_OUT_CVT_SHIFT_CVT_TYPE__MASK 0x80000000 +#define DPU_OUT_CVT_SHIFT_CVT_TYPE__SHIFT 31 +static inline uint32_t DPU_OUT_CVT_SHIFT_CVT_TYPE(uint32_t val) +{ + return ((val) << DPU_OUT_CVT_SHIFT_CVT_TYPE__SHIFT) & DPU_OUT_CVT_SHIFT_CVT_TYPE__MASK; +} +#define DPU_OUT_CVT_SHIFT_CVT_ROUND__MASK 0x40000000 +#define DPU_OUT_CVT_SHIFT_CVT_ROUND__SHIFT 30 +static inline uint32_t DPU_OUT_CVT_SHIFT_CVT_ROUND(uint32_t val) +{ + return ((val) << DPU_OUT_CVT_SHIFT_CVT_ROUND__SHIFT) & DPU_OUT_CVT_SHIFT_CVT_ROUND__MASK; +} +#define DPU_OUT_CVT_SHIFT_RESERVED_0__MASK 0x3ff00000 +#define DPU_OUT_CVT_SHIFT_RESERVED_0__SHIFT 20 +static inline uint32_t DPU_OUT_CVT_SHIFT_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_OUT_CVT_SHIFT_RESERVED_0__SHIFT) & DPU_OUT_CVT_SHIFT_RESERVED_0__MASK; +} +#define DPU_OUT_CVT_SHIFT_MINUS_EXP__MASK 0x000ff000 +#define DPU_OUT_CVT_SHIFT_MINUS_EXP__SHIFT 12 
+static inline uint32_t DPU_OUT_CVT_SHIFT_MINUS_EXP(uint32_t val) +{ + return ((val) << DPU_OUT_CVT_SHIFT_MINUS_EXP__SHIFT) & DPU_OUT_CVT_SHIFT_MINUS_EXP__MASK; +} +#define DPU_OUT_CVT_SHIFT_OUT_CVT_SHIFT__MASK 0x00000fff +#define DPU_OUT_CVT_SHIFT_OUT_CVT_SHIFT__SHIFT 0 +static inline uint32_t DPU_OUT_CVT_SHIFT_OUT_CVT_SHIFT(uint32_t val) +{ + return ((val) << DPU_OUT_CVT_SHIFT_OUT_CVT_SHIFT__SHIFT) & DPU_OUT_CVT_SHIFT_OUT_CVT_SHIFT__MASK; +} + +#define REG_DPU_EW_OP_VALUE_0 0x00004090 +#define DPU_EW_OP_VALUE_0_EW_OPERAND_0__MASK 0xffffffff +#define DPU_EW_OP_VALUE_0_EW_OPERAND_0__SHIFT 0 +static inline uint32_t DPU_EW_OP_VALUE_0_EW_OPERAND_0(uint32_t val) +{ + return ((val) << DPU_EW_OP_VALUE_0_EW_OPERAND_0__SHIFT) & DPU_EW_OP_VALUE_0_EW_OPERAND_0__MASK; +} + +#define REG_DPU_EW_OP_VALUE_1 0x00004094 +#define DPU_EW_OP_VALUE_1_EW_OPERAND_1__MASK 0xffffffff +#define DPU_EW_OP_VALUE_1_EW_OPERAND_1__SHIFT 0 +static inline uint32_t DPU_EW_OP_VALUE_1_EW_OPERAND_1(uint32_t val) +{ + return ((val) << DPU_EW_OP_VALUE_1_EW_OPERAND_1__SHIFT) & DPU_EW_OP_VALUE_1_EW_OPERAND_1__MASK; +} + +#define REG_DPU_EW_OP_VALUE_2 0x00004098 +#define DPU_EW_OP_VALUE_2_EW_OPERAND_2__MASK 0xffffffff +#define DPU_EW_OP_VALUE_2_EW_OPERAND_2__SHIFT 0 +static inline uint32_t DPU_EW_OP_VALUE_2_EW_OPERAND_2(uint32_t val) +{ + return ((val) << DPU_EW_OP_VALUE_2_EW_OPERAND_2__SHIFT) & DPU_EW_OP_VALUE_2_EW_OPERAND_2__MASK; +} + +#define REG_DPU_EW_OP_VALUE_3 0x0000409c +#define DPU_EW_OP_VALUE_3_EW_OPERAND_3__MASK 0xffffffff +#define DPU_EW_OP_VALUE_3_EW_OPERAND_3__SHIFT 0 +static inline uint32_t DPU_EW_OP_VALUE_3_EW_OPERAND_3(uint32_t val) +{ + return ((val) << DPU_EW_OP_VALUE_3_EW_OPERAND_3__SHIFT) & DPU_EW_OP_VALUE_3_EW_OPERAND_3__MASK; +} + +#define REG_DPU_EW_OP_VALUE_4 0x000040a0 +#define DPU_EW_OP_VALUE_4_EW_OPERAND_4__MASK 0xffffffff +#define DPU_EW_OP_VALUE_4_EW_OPERAND_4__SHIFT 0 +static inline uint32_t DPU_EW_OP_VALUE_4_EW_OPERAND_4(uint32_t val) +{ + return ((val) << 
DPU_EW_OP_VALUE_4_EW_OPERAND_4__SHIFT) & DPU_EW_OP_VALUE_4_EW_OPERAND_4__MASK; +} + +#define REG_DPU_EW_OP_VALUE_5 0x000040a4 +#define DPU_EW_OP_VALUE_5_EW_OPERAND_5__MASK 0xffffffff +#define DPU_EW_OP_VALUE_5_EW_OPERAND_5__SHIFT 0 +static inline uint32_t DPU_EW_OP_VALUE_5_EW_OPERAND_5(uint32_t val) +{ + return ((val) << DPU_EW_OP_VALUE_5_EW_OPERAND_5__SHIFT) & DPU_EW_OP_VALUE_5_EW_OPERAND_5__MASK; +} + +#define REG_DPU_EW_OP_VALUE_6 0x000040a8 +#define DPU_EW_OP_VALUE_6_EW_OPERAND_6__MASK 0xffffffff +#define DPU_EW_OP_VALUE_6_EW_OPERAND_6__SHIFT 0 +static inline uint32_t DPU_EW_OP_VALUE_6_EW_OPERAND_6(uint32_t val) +{ + return ((val) << DPU_EW_OP_VALUE_6_EW_OPERAND_6__SHIFT) & DPU_EW_OP_VALUE_6_EW_OPERAND_6__MASK; +} + +#define REG_DPU_EW_OP_VALUE_7 0x000040ac +#define DPU_EW_OP_VALUE_7_EW_OPERAND_7__MASK 0xffffffff +#define DPU_EW_OP_VALUE_7_EW_OPERAND_7__SHIFT 0 +static inline uint32_t DPU_EW_OP_VALUE_7_EW_OPERAND_7(uint32_t val) +{ + return ((val) << DPU_EW_OP_VALUE_7_EW_OPERAND_7__SHIFT) & DPU_EW_OP_VALUE_7_EW_OPERAND_7__MASK; +} + +#define REG_DPU_SURFACE_ADD 0x000040c0 +#define DPU_SURFACE_ADD_SURF_ADD__MASK 0xfffffff0 +#define DPU_SURFACE_ADD_SURF_ADD__SHIFT 4 +static inline uint32_t DPU_SURFACE_ADD_SURF_ADD(uint32_t val) +{ + return ((val) << DPU_SURFACE_ADD_SURF_ADD__SHIFT) & DPU_SURFACE_ADD_SURF_ADD__MASK; +} +#define DPU_SURFACE_ADD_RESERVED_0__MASK 0x0000000f +#define DPU_SURFACE_ADD_RESERVED_0__SHIFT 0 +static inline uint32_t DPU_SURFACE_ADD_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_SURFACE_ADD_RESERVED_0__SHIFT) & DPU_SURFACE_ADD_RESERVED_0__MASK; +} + +#define REG_DPU_LUT_ACCESS_CFG 0x00004100 +#define DPU_LUT_ACCESS_CFG_RESERVED_0__MASK 0xfffc0000 +#define DPU_LUT_ACCESS_CFG_RESERVED_0__SHIFT 18 +static inline uint32_t DPU_LUT_ACCESS_CFG_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_LUT_ACCESS_CFG_RESERVED_0__SHIFT) & DPU_LUT_ACCESS_CFG_RESERVED_0__MASK; +} +#define DPU_LUT_ACCESS_CFG_LUT_ACCESS_TYPE__MASK 0x00020000 +#define 
DPU_LUT_ACCESS_CFG_LUT_ACCESS_TYPE__SHIFT 17 +static inline uint32_t DPU_LUT_ACCESS_CFG_LUT_ACCESS_TYPE(uint32_t val) +{ + return ((val) << DPU_LUT_ACCESS_CFG_LUT_ACCESS_TYPE__SHIFT) & DPU_LUT_ACCESS_CFG_LUT_ACCESS_TYPE__MASK; +} +#define DPU_LUT_ACCESS_CFG_LUT_TABLE_ID__MASK 0x00010000 +#define DPU_LUT_ACCESS_CFG_LUT_TABLE_ID__SHIFT 16 +static inline uint32_t DPU_LUT_ACCESS_CFG_LUT_TABLE_ID(uint32_t val) +{ + return ((val) << DPU_LUT_ACCESS_CFG_LUT_TABLE_ID__SHIFT) & DPU_LUT_ACCESS_CFG_LUT_TABLE_ID__MASK; +} +#define DPU_LUT_ACCESS_CFG_RESERVED_1__MASK 0x0000fc00 +#define DPU_LUT_ACCESS_CFG_RESERVED_1__SHIFT 10 +static inline uint32_t DPU_LUT_ACCESS_CFG_RESERVED_1(uint32_t val) +{ + return ((val) << DPU_LUT_ACCESS_CFG_RESERVED_1__SHIFT) & DPU_LUT_ACCESS_CFG_RESERVED_1__MASK; +} +#define DPU_LUT_ACCESS_CFG_LUT_ADDR__MASK 0x000003ff +#define DPU_LUT_ACCESS_CFG_LUT_ADDR__SHIFT 0 +static inline uint32_t DPU_LUT_ACCESS_CFG_LUT_ADDR(uint32_t val) +{ + return ((val) << DPU_LUT_ACCESS_CFG_LUT_ADDR__SHIFT) & DPU_LUT_ACCESS_CFG_LUT_ADDR__MASK; +} + +#define REG_DPU_LUT_ACCESS_DATA 0x00004104 +#define DPU_LUT_ACCESS_DATA_RESERVED_0__MASK 0xffff0000 +#define DPU_LUT_ACCESS_DATA_RESERVED_0__SHIFT 16 +static inline uint32_t DPU_LUT_ACCESS_DATA_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_LUT_ACCESS_DATA_RESERVED_0__SHIFT) & DPU_LUT_ACCESS_DATA_RESERVED_0__MASK; +} +#define DPU_LUT_ACCESS_DATA_LUT_ACCESS_DATA__MASK 0x0000ffff +#define DPU_LUT_ACCESS_DATA_LUT_ACCESS_DATA__SHIFT 0 +static inline uint32_t DPU_LUT_ACCESS_DATA_LUT_ACCESS_DATA(uint32_t val) +{ + return ((val) << DPU_LUT_ACCESS_DATA_LUT_ACCESS_DATA__SHIFT) & DPU_LUT_ACCESS_DATA_LUT_ACCESS_DATA__MASK; +} + +#define REG_DPU_LUT_CFG 0x00004108 +#define DPU_LUT_CFG_RESERVED_0__MASK 0xffffff00 +#define DPU_LUT_CFG_RESERVED_0__SHIFT 8 +static inline uint32_t DPU_LUT_CFG_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_LUT_CFG_RESERVED_0__SHIFT) & DPU_LUT_CFG_RESERVED_0__MASK; +} +#define DPU_LUT_CFG_LUT_CAL_SEL__MASK 
0x00000080 +#define DPU_LUT_CFG_LUT_CAL_SEL__SHIFT 7 +static inline uint32_t DPU_LUT_CFG_LUT_CAL_SEL(uint32_t val) +{ + return ((val) << DPU_LUT_CFG_LUT_CAL_SEL__SHIFT) & DPU_LUT_CFG_LUT_CAL_SEL__MASK; +} +#define DPU_LUT_CFG_LUT_HYBRID_PRIORITY__MASK 0x00000040 +#define DPU_LUT_CFG_LUT_HYBRID_PRIORITY__SHIFT 6 +static inline uint32_t DPU_LUT_CFG_LUT_HYBRID_PRIORITY(uint32_t val) +{ + return ((val) << DPU_LUT_CFG_LUT_HYBRID_PRIORITY__SHIFT) & DPU_LUT_CFG_LUT_HYBRID_PRIORITY__MASK; +} +#define DPU_LUT_CFG_LUT_OFLOW_PRIORITY__MASK 0x00000020 +#define DPU_LUT_CFG_LUT_OFLOW_PRIORITY__SHIFT 5 +static inline uint32_t DPU_LUT_CFG_LUT_OFLOW_PRIORITY(uint32_t val) +{ + return ((val) << DPU_LUT_CFG_LUT_OFLOW_PRIORITY__SHIFT) & DPU_LUT_CFG_LUT_OFLOW_PRIORITY__MASK; +} +#define DPU_LUT_CFG_LUT_UFLOW_PRIORITY__MASK 0x00000010 +#define DPU_LUT_CFG_LUT_UFLOW_PRIORITY__SHIFT 4 +static inline uint32_t DPU_LUT_CFG_LUT_UFLOW_PRIORITY(uint32_t val) +{ + return ((val) << DPU_LUT_CFG_LUT_UFLOW_PRIORITY__SHIFT) & DPU_LUT_CFG_LUT_UFLOW_PRIORITY__MASK; +} +#define DPU_LUT_CFG_LUT_LO_LE_MUX__MASK 0x0000000c +#define DPU_LUT_CFG_LUT_LO_LE_MUX__SHIFT 2 +static inline uint32_t DPU_LUT_CFG_LUT_LO_LE_MUX(uint32_t val) +{ + return ((val) << DPU_LUT_CFG_LUT_LO_LE_MUX__SHIFT) & DPU_LUT_CFG_LUT_LO_LE_MUX__MASK; +} +#define DPU_LUT_CFG_LUT_EXPAND_EN__MASK 0x00000002 +#define DPU_LUT_CFG_LUT_EXPAND_EN__SHIFT 1 +static inline uint32_t DPU_LUT_CFG_LUT_EXPAND_EN(uint32_t val) +{ + return ((val) << DPU_LUT_CFG_LUT_EXPAND_EN__SHIFT) & DPU_LUT_CFG_LUT_EXPAND_EN__MASK; +} +#define DPU_LUT_CFG_LUT_ROAD_SEL__MASK 0x00000001 +#define DPU_LUT_CFG_LUT_ROAD_SEL__SHIFT 0 +static inline uint32_t DPU_LUT_CFG_LUT_ROAD_SEL(uint32_t val) +{ + return ((val) << DPU_LUT_CFG_LUT_ROAD_SEL__SHIFT) & DPU_LUT_CFG_LUT_ROAD_SEL__MASK; +} + +#define REG_DPU_LUT_INFO 0x0000410c +#define DPU_LUT_INFO_RESERVED_0__MASK 0xff000000 +#define DPU_LUT_INFO_RESERVED_0__SHIFT 24 +static inline uint32_t DPU_LUT_INFO_RESERVED_0(uint32_t val) 
+{ + return ((val) << DPU_LUT_INFO_RESERVED_0__SHIFT) & DPU_LUT_INFO_RESERVED_0__MASK; +} +#define DPU_LUT_INFO_LUT_LO_INDEX_SELECT__MASK 0x00ff0000 +#define DPU_LUT_INFO_LUT_LO_INDEX_SELECT__SHIFT 16 +static inline uint32_t DPU_LUT_INFO_LUT_LO_INDEX_SELECT(uint32_t val) +{ + return ((val) << DPU_LUT_INFO_LUT_LO_INDEX_SELECT__SHIFT) & DPU_LUT_INFO_LUT_LO_INDEX_SELECT__MASK; +} +#define DPU_LUT_INFO_LUT_LE_INDEX_SELECT__MASK 0x0000ff00 +#define DPU_LUT_INFO_LUT_LE_INDEX_SELECT__SHIFT 8 +static inline uint32_t DPU_LUT_INFO_LUT_LE_INDEX_SELECT(uint32_t val) +{ + return ((val) << DPU_LUT_INFO_LUT_LE_INDEX_SELECT__SHIFT) & DPU_LUT_INFO_LUT_LE_INDEX_SELECT__MASK; +} +#define DPU_LUT_INFO_RESERVED_1__MASK 0x000000ff +#define DPU_LUT_INFO_RESERVED_1__SHIFT 0 +static inline uint32_t DPU_LUT_INFO_RESERVED_1(uint32_t val) +{ + return ((val) << DPU_LUT_INFO_RESERVED_1__SHIFT) & DPU_LUT_INFO_RESERVED_1__MASK; +} + +#define REG_DPU_LUT_LE_START 0x00004110 +#define DPU_LUT_LE_START_LUT_LE_START__MASK 0xffffffff +#define DPU_LUT_LE_START_LUT_LE_START__SHIFT 0 +static inline uint32_t DPU_LUT_LE_START_LUT_LE_START(uint32_t val) +{ + return ((val) << DPU_LUT_LE_START_LUT_LE_START__SHIFT) & DPU_LUT_LE_START_LUT_LE_START__MASK; +} + +#define REG_DPU_LUT_LE_END 0x00004114 +#define DPU_LUT_LE_END_LUT_LE_END__MASK 0xffffffff +#define DPU_LUT_LE_END_LUT_LE_END__SHIFT 0 +static inline uint32_t DPU_LUT_LE_END_LUT_LE_END(uint32_t val) +{ + return ((val) << DPU_LUT_LE_END_LUT_LE_END__SHIFT) & DPU_LUT_LE_END_LUT_LE_END__MASK; +} + +#define REG_DPU_LUT_LO_START 0x00004118 +#define DPU_LUT_LO_START_LUT_LO_START__MASK 0xffffffff +#define DPU_LUT_LO_START_LUT_LO_START__SHIFT 0 +static inline uint32_t DPU_LUT_LO_START_LUT_LO_START(uint32_t val) +{ + return ((val) << DPU_LUT_LO_START_LUT_LO_START__SHIFT) & DPU_LUT_LO_START_LUT_LO_START__MASK; +} + +#define REG_DPU_LUT_LO_END 0x0000411c +#define DPU_LUT_LO_END_LUT_LO_END__MASK 0xffffffff +#define DPU_LUT_LO_END_LUT_LO_END__SHIFT 0 +static inline 
uint32_t DPU_LUT_LO_END_LUT_LO_END(uint32_t val) +{ + return ((val) << DPU_LUT_LO_END_LUT_LO_END__SHIFT) & DPU_LUT_LO_END_LUT_LO_END__MASK; +} + +#define REG_DPU_LUT_LE_SLOPE_SCALE 0x00004120 +#define DPU_LUT_LE_SLOPE_SCALE_LUT_LE_SLOPE_OFLOW_SCALE__MASK 0xffff0000 +#define DPU_LUT_LE_SLOPE_SCALE_LUT_LE_SLOPE_OFLOW_SCALE__SHIFT 16 +static inline uint32_t DPU_LUT_LE_SLOPE_SCALE_LUT_LE_SLOPE_OFLOW_SCALE(uint32_t val) +{ + return ((val) << DPU_LUT_LE_SLOPE_SCALE_LUT_LE_SLOPE_OFLOW_SCALE__SHIFT) & DPU_LUT_LE_SLOPE_SCALE_LUT_LE_SLOPE_OFLOW_SCALE__MASK; +} +#define DPU_LUT_LE_SLOPE_SCALE_LUT_LE_SLOPE_UFLOW_SCALE__MASK 0x0000ffff +#define DPU_LUT_LE_SLOPE_SCALE_LUT_LE_SLOPE_UFLOW_SCALE__SHIFT 0 +static inline uint32_t DPU_LUT_LE_SLOPE_SCALE_LUT_LE_SLOPE_UFLOW_SCALE(uint32_t val) +{ + return ((val) << DPU_LUT_LE_SLOPE_SCALE_LUT_LE_SLOPE_UFLOW_SCALE__SHIFT) & DPU_LUT_LE_SLOPE_SCALE_LUT_LE_SLOPE_UFLOW_SCALE__MASK; +} + +#define REG_DPU_LUT_LE_SLOPE_SHIFT 0x00004124 +#define DPU_LUT_LE_SLOPE_SHIFT_RESERVED_0__MASK 0xfffffc00 +#define DPU_LUT_LE_SLOPE_SHIFT_RESERVED_0__SHIFT 10 +static inline uint32_t DPU_LUT_LE_SLOPE_SHIFT_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_LUT_LE_SLOPE_SHIFT_RESERVED_0__SHIFT) & DPU_LUT_LE_SLOPE_SHIFT_RESERVED_0__MASK; +} +#define DPU_LUT_LE_SLOPE_SHIFT_LUT_LE_SLOPE_OFLOW_SHIFT__MASK 0x000003e0 +#define DPU_LUT_LE_SLOPE_SHIFT_LUT_LE_SLOPE_OFLOW_SHIFT__SHIFT 5 +static inline uint32_t DPU_LUT_LE_SLOPE_SHIFT_LUT_LE_SLOPE_OFLOW_SHIFT(uint32_t val) +{ + return ((val) << DPU_LUT_LE_SLOPE_SHIFT_LUT_LE_SLOPE_OFLOW_SHIFT__SHIFT) & DPU_LUT_LE_SLOPE_SHIFT_LUT_LE_SLOPE_OFLOW_SHIFT__MASK; +} +#define DPU_LUT_LE_SLOPE_SHIFT_LUT_LE_SLOPE_UFLOW_SHIFT__MASK 0x0000001f +#define DPU_LUT_LE_SLOPE_SHIFT_LUT_LE_SLOPE_UFLOW_SHIFT__SHIFT 0 +static inline uint32_t DPU_LUT_LE_SLOPE_SHIFT_LUT_LE_SLOPE_UFLOW_SHIFT(uint32_t val) +{ + return ((val) << DPU_LUT_LE_SLOPE_SHIFT_LUT_LE_SLOPE_UFLOW_SHIFT__SHIFT) & DPU_LUT_LE_SLOPE_SHIFT_LUT_LE_SLOPE_UFLOW_SHIFT__MASK; +} + 
+#define REG_DPU_LUT_LO_SLOPE_SCALE 0x00004128 +#define DPU_LUT_LO_SLOPE_SCALE_LUT_LO_SLOPE_OFLOW_SCALE__MASK 0xffff0000 +#define DPU_LUT_LO_SLOPE_SCALE_LUT_LO_SLOPE_OFLOW_SCALE__SHIFT 16 +static inline uint32_t DPU_LUT_LO_SLOPE_SCALE_LUT_LO_SLOPE_OFLOW_SCALE(uint32_t val) +{ + return ((val) << DPU_LUT_LO_SLOPE_SCALE_LUT_LO_SLOPE_OFLOW_SCALE__SHIFT) & DPU_LUT_LO_SLOPE_SCALE_LUT_LO_SLOPE_OFLOW_SCALE__MASK; +} +#define DPU_LUT_LO_SLOPE_SCALE_LUT_LO_SLOPE_UFLOW_SCALE__MASK 0x0000ffff +#define DPU_LUT_LO_SLOPE_SCALE_LUT_LO_SLOPE_UFLOW_SCALE__SHIFT 0 +static inline uint32_t DPU_LUT_LO_SLOPE_SCALE_LUT_LO_SLOPE_UFLOW_SCALE(uint32_t val) +{ + return ((val) << DPU_LUT_LO_SLOPE_SCALE_LUT_LO_SLOPE_UFLOW_SCALE__SHIFT) & DPU_LUT_LO_SLOPE_SCALE_LUT_LO_SLOPE_UFLOW_SCALE__MASK; +} + +#define REG_DPU_LUT_LO_SLOPE_SHIFT 0x0000412c +#define DPU_LUT_LO_SLOPE_SHIFT_RESERVED_0__MASK 0xfffffc00 +#define DPU_LUT_LO_SLOPE_SHIFT_RESERVED_0__SHIFT 10 +static inline uint32_t DPU_LUT_LO_SLOPE_SHIFT_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_LUT_LO_SLOPE_SHIFT_RESERVED_0__SHIFT) & DPU_LUT_LO_SLOPE_SHIFT_RESERVED_0__MASK; +} +#define DPU_LUT_LO_SLOPE_SHIFT_LUT_LO_SLOPE_OFLOW_SHIFT__MASK 0x000003e0 +#define DPU_LUT_LO_SLOPE_SHIFT_LUT_LO_SLOPE_OFLOW_SHIFT__SHIFT 5 +static inline uint32_t DPU_LUT_LO_SLOPE_SHIFT_LUT_LO_SLOPE_OFLOW_SHIFT(uint32_t val) +{ + return ((val) << DPU_LUT_LO_SLOPE_SHIFT_LUT_LO_SLOPE_OFLOW_SHIFT__SHIFT) & DPU_LUT_LO_SLOPE_SHIFT_LUT_LO_SLOPE_OFLOW_SHIFT__MASK; +} +#define DPU_LUT_LO_SLOPE_SHIFT_LUT_LO_SLOPE_UFLOW_SHIFT__MASK 0x0000001f +#define DPU_LUT_LO_SLOPE_SHIFT_LUT_LO_SLOPE_UFLOW_SHIFT__SHIFT 0 +static inline uint32_t DPU_LUT_LO_SLOPE_SHIFT_LUT_LO_SLOPE_UFLOW_SHIFT(uint32_t val) +{ + return ((val) << DPU_LUT_LO_SLOPE_SHIFT_LUT_LO_SLOPE_UFLOW_SHIFT__SHIFT) & DPU_LUT_LO_SLOPE_SHIFT_LUT_LO_SLOPE_UFLOW_SHIFT__MASK; +} + +#define REG_DPU_RDMA_RDMA_S_STATUS 0x00005000 +#define DPU_RDMA_RDMA_S_STATUS_RESERVED_0__MASK 0xfffc0000 +#define 
DPU_RDMA_RDMA_S_STATUS_RESERVED_0__SHIFT 18 +static inline uint32_t DPU_RDMA_RDMA_S_STATUS_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_S_STATUS_RESERVED_0__SHIFT) & DPU_RDMA_RDMA_S_STATUS_RESERVED_0__MASK; +} +#define DPU_RDMA_RDMA_S_STATUS_STATUS_1__MASK 0x00030000 +#define DPU_RDMA_RDMA_S_STATUS_STATUS_1__SHIFT 16 +static inline uint32_t DPU_RDMA_RDMA_S_STATUS_STATUS_1(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_S_STATUS_STATUS_1__SHIFT) & DPU_RDMA_RDMA_S_STATUS_STATUS_1__MASK; +} +#define DPU_RDMA_RDMA_S_STATUS_RESERVED_1__MASK 0x0000fffc +#define DPU_RDMA_RDMA_S_STATUS_RESERVED_1__SHIFT 2 +static inline uint32_t DPU_RDMA_RDMA_S_STATUS_RESERVED_1(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_S_STATUS_RESERVED_1__SHIFT) & DPU_RDMA_RDMA_S_STATUS_RESERVED_1__MASK; +} +#define DPU_RDMA_RDMA_S_STATUS_STATUS_0__MASK 0x00000003 +#define DPU_RDMA_RDMA_S_STATUS_STATUS_0__SHIFT 0 +static inline uint32_t DPU_RDMA_RDMA_S_STATUS_STATUS_0(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_S_STATUS_STATUS_0__SHIFT) & DPU_RDMA_RDMA_S_STATUS_STATUS_0__MASK; +} + +#define REG_DPU_RDMA_RDMA_S_POINTER 0x00005004 +#define DPU_RDMA_RDMA_S_POINTER_RESERVED_0__MASK 0xfffe0000 +#define DPU_RDMA_RDMA_S_POINTER_RESERVED_0__SHIFT 17 +static inline uint32_t DPU_RDMA_RDMA_S_POINTER_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_S_POINTER_RESERVED_0__SHIFT) & DPU_RDMA_RDMA_S_POINTER_RESERVED_0__MASK; +} +#define DPU_RDMA_RDMA_S_POINTER_EXECUTER__MASK 0x00010000 +#define DPU_RDMA_RDMA_S_POINTER_EXECUTER__SHIFT 16 +static inline uint32_t DPU_RDMA_RDMA_S_POINTER_EXECUTER(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_S_POINTER_EXECUTER__SHIFT) & DPU_RDMA_RDMA_S_POINTER_EXECUTER__MASK; +} +#define DPU_RDMA_RDMA_S_POINTER_RESERVED_1__MASK 0x0000ffc0 +#define DPU_RDMA_RDMA_S_POINTER_RESERVED_1__SHIFT 6 +static inline uint32_t DPU_RDMA_RDMA_S_POINTER_RESERVED_1(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_S_POINTER_RESERVED_1__SHIFT) & 
DPU_RDMA_RDMA_S_POINTER_RESERVED_1__MASK; +} +#define DPU_RDMA_RDMA_S_POINTER_EXECUTER_PP_CLEAR__MASK 0x00000020 +#define DPU_RDMA_RDMA_S_POINTER_EXECUTER_PP_CLEAR__SHIFT 5 +static inline uint32_t DPU_RDMA_RDMA_S_POINTER_EXECUTER_PP_CLEAR(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_S_POINTER_EXECUTER_PP_CLEAR__SHIFT) & DPU_RDMA_RDMA_S_POINTER_EXECUTER_PP_CLEAR__MASK; +} +#define DPU_RDMA_RDMA_S_POINTER_POINTER_PP_CLEAR__MASK 0x00000010 +#define DPU_RDMA_RDMA_S_POINTER_POINTER_PP_CLEAR__SHIFT 4 +static inline uint32_t DPU_RDMA_RDMA_S_POINTER_POINTER_PP_CLEAR(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_S_POINTER_POINTER_PP_CLEAR__SHIFT) & DPU_RDMA_RDMA_S_POINTER_POINTER_PP_CLEAR__MASK; +} +#define DPU_RDMA_RDMA_S_POINTER_POINTER_PP_MODE__MASK 0x00000008 +#define DPU_RDMA_RDMA_S_POINTER_POINTER_PP_MODE__SHIFT 3 +static inline uint32_t DPU_RDMA_RDMA_S_POINTER_POINTER_PP_MODE(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_S_POINTER_POINTER_PP_MODE__SHIFT) & DPU_RDMA_RDMA_S_POINTER_POINTER_PP_MODE__MASK; +} +#define DPU_RDMA_RDMA_S_POINTER_EXECUTER_PP_EN__MASK 0x00000004 +#define DPU_RDMA_RDMA_S_POINTER_EXECUTER_PP_EN__SHIFT 2 +static inline uint32_t DPU_RDMA_RDMA_S_POINTER_EXECUTER_PP_EN(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_S_POINTER_EXECUTER_PP_EN__SHIFT) & DPU_RDMA_RDMA_S_POINTER_EXECUTER_PP_EN__MASK; +} +#define DPU_RDMA_RDMA_S_POINTER_POINTER_PP_EN__MASK 0x00000002 +#define DPU_RDMA_RDMA_S_POINTER_POINTER_PP_EN__SHIFT 1 +static inline uint32_t DPU_RDMA_RDMA_S_POINTER_POINTER_PP_EN(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_S_POINTER_POINTER_PP_EN__SHIFT) & DPU_RDMA_RDMA_S_POINTER_POINTER_PP_EN__MASK; +} +#define DPU_RDMA_RDMA_S_POINTER_POINTER__MASK 0x00000001 +#define DPU_RDMA_RDMA_S_POINTER_POINTER__SHIFT 0 +static inline uint32_t DPU_RDMA_RDMA_S_POINTER_POINTER(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_S_POINTER_POINTER__SHIFT) & DPU_RDMA_RDMA_S_POINTER_POINTER__MASK; +} + +#define REG_DPU_RDMA_RDMA_OPERATION_ENABLE 
0x00005008 +#define DPU_RDMA_RDMA_OPERATION_ENABLE_RESERVED_0__MASK 0xfffffffe +#define DPU_RDMA_RDMA_OPERATION_ENABLE_RESERVED_0__SHIFT 1 +static inline uint32_t DPU_RDMA_RDMA_OPERATION_ENABLE_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_OPERATION_ENABLE_RESERVED_0__SHIFT) & DPU_RDMA_RDMA_OPERATION_ENABLE_RESERVED_0__MASK; +} +#define DPU_RDMA_RDMA_OPERATION_ENABLE_OP_EN__MASK 0x00000001 +#define DPU_RDMA_RDMA_OPERATION_ENABLE_OP_EN__SHIFT 0 +static inline uint32_t DPU_RDMA_RDMA_OPERATION_ENABLE_OP_EN(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_OPERATION_ENABLE_OP_EN__SHIFT) & DPU_RDMA_RDMA_OPERATION_ENABLE_OP_EN__MASK; +} + +#define REG_DPU_RDMA_RDMA_DATA_CUBE_WIDTH 0x0000500c +#define DPU_RDMA_RDMA_DATA_CUBE_WIDTH_RESERVED_0__MASK 0xffffe000 +#define DPU_RDMA_RDMA_DATA_CUBE_WIDTH_RESERVED_0__SHIFT 13 +static inline uint32_t DPU_RDMA_RDMA_DATA_CUBE_WIDTH_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_DATA_CUBE_WIDTH_RESERVED_0__SHIFT) & DPU_RDMA_RDMA_DATA_CUBE_WIDTH_RESERVED_0__MASK; +} +#define DPU_RDMA_RDMA_DATA_CUBE_WIDTH_WIDTH__MASK 0x00001fff +#define DPU_RDMA_RDMA_DATA_CUBE_WIDTH_WIDTH__SHIFT 0 +static inline uint32_t DPU_RDMA_RDMA_DATA_CUBE_WIDTH_WIDTH(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_DATA_CUBE_WIDTH_WIDTH__SHIFT) & DPU_RDMA_RDMA_DATA_CUBE_WIDTH_WIDTH__MASK; +} + +#define REG_DPU_RDMA_RDMA_DATA_CUBE_HEIGHT 0x00005010 +#define DPU_RDMA_RDMA_DATA_CUBE_HEIGHT_RESERVED_0__MASK 0xe0000000 +#define DPU_RDMA_RDMA_DATA_CUBE_HEIGHT_RESERVED_0__SHIFT 29 +static inline uint32_t DPU_RDMA_RDMA_DATA_CUBE_HEIGHT_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_DATA_CUBE_HEIGHT_RESERVED_0__SHIFT) & DPU_RDMA_RDMA_DATA_CUBE_HEIGHT_RESERVED_0__MASK; +} +#define DPU_RDMA_RDMA_DATA_CUBE_HEIGHT_EW_LINE_NOTCH_ADDR__MASK 0x1fff0000 +#define DPU_RDMA_RDMA_DATA_CUBE_HEIGHT_EW_LINE_NOTCH_ADDR__SHIFT 16 +static inline uint32_t DPU_RDMA_RDMA_DATA_CUBE_HEIGHT_EW_LINE_NOTCH_ADDR(uint32_t val) +{ + return ((val) << 
DPU_RDMA_RDMA_DATA_CUBE_HEIGHT_EW_LINE_NOTCH_ADDR__SHIFT) & DPU_RDMA_RDMA_DATA_CUBE_HEIGHT_EW_LINE_NOTCH_ADDR__MASK; +} +#define DPU_RDMA_RDMA_DATA_CUBE_HEIGHT_RESERVED_1__MASK 0x0000e000 +#define DPU_RDMA_RDMA_DATA_CUBE_HEIGHT_RESERVED_1__SHIFT 13 +static inline uint32_t DPU_RDMA_RDMA_DATA_CUBE_HEIGHT_RESERVED_1(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_DATA_CUBE_HEIGHT_RESERVED_1__SHIFT) & DPU_RDMA_RDMA_DATA_CUBE_HEIGHT_RESERVED_1__MASK; +} +#define DPU_RDMA_RDMA_DATA_CUBE_HEIGHT_HEIGHT__MASK 0x00001fff +#define DPU_RDMA_RDMA_DATA_CUBE_HEIGHT_HEIGHT__SHIFT 0 +static inline uint32_t DPU_RDMA_RDMA_DATA_CUBE_HEIGHT_HEIGHT(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_DATA_CUBE_HEIGHT_HEIGHT__SHIFT) & DPU_RDMA_RDMA_DATA_CUBE_HEIGHT_HEIGHT__MASK; +} + +#define REG_DPU_RDMA_RDMA_DATA_CUBE_CHANNEL 0x00005014 +#define DPU_RDMA_RDMA_DATA_CUBE_CHANNEL_RESERVED_0__MASK 0xffffe000 +#define DPU_RDMA_RDMA_DATA_CUBE_CHANNEL_RESERVED_0__SHIFT 13 +static inline uint32_t DPU_RDMA_RDMA_DATA_CUBE_CHANNEL_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_DATA_CUBE_CHANNEL_RESERVED_0__SHIFT) & DPU_RDMA_RDMA_DATA_CUBE_CHANNEL_RESERVED_0__MASK; +} +#define DPU_RDMA_RDMA_DATA_CUBE_CHANNEL_CHANNEL__MASK 0x00001fff +#define DPU_RDMA_RDMA_DATA_CUBE_CHANNEL_CHANNEL__SHIFT 0 +static inline uint32_t DPU_RDMA_RDMA_DATA_CUBE_CHANNEL_CHANNEL(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_DATA_CUBE_CHANNEL_CHANNEL__SHIFT) & DPU_RDMA_RDMA_DATA_CUBE_CHANNEL_CHANNEL__MASK; +} + +#define REG_DPU_RDMA_RDMA_SRC_BASE_ADDR 0x00005018 +#define DPU_RDMA_RDMA_SRC_BASE_ADDR_SRC_BASE_ADDR__MASK 0xffffffff +#define DPU_RDMA_RDMA_SRC_BASE_ADDR_SRC_BASE_ADDR__SHIFT 0 +static inline uint32_t DPU_RDMA_RDMA_SRC_BASE_ADDR_SRC_BASE_ADDR(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_SRC_BASE_ADDR_SRC_BASE_ADDR__SHIFT) & DPU_RDMA_RDMA_SRC_BASE_ADDR_SRC_BASE_ADDR__MASK; +} + +#define REG_DPU_RDMA_RDMA_BRDMA_CFG 0x0000501c +#define DPU_RDMA_RDMA_BRDMA_CFG_RESERVED_0__MASK 0xffffffe0 +#define 
DPU_RDMA_RDMA_BRDMA_CFG_RESERVED_0__SHIFT 5 +static inline uint32_t DPU_RDMA_RDMA_BRDMA_CFG_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_BRDMA_CFG_RESERVED_0__SHIFT) & DPU_RDMA_RDMA_BRDMA_CFG_RESERVED_0__MASK; +} +#define DPU_RDMA_RDMA_BRDMA_CFG_BRDMA_DATA_USE__MASK 0x0000001e +#define DPU_RDMA_RDMA_BRDMA_CFG_BRDMA_DATA_USE__SHIFT 1 +static inline uint32_t DPU_RDMA_RDMA_BRDMA_CFG_BRDMA_DATA_USE(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_BRDMA_CFG_BRDMA_DATA_USE__SHIFT) & DPU_RDMA_RDMA_BRDMA_CFG_BRDMA_DATA_USE__MASK; +} +#define DPU_RDMA_RDMA_BRDMA_CFG_RESERVED_1__MASK 0x00000001 +#define DPU_RDMA_RDMA_BRDMA_CFG_RESERVED_1__SHIFT 0 +static inline uint32_t DPU_RDMA_RDMA_BRDMA_CFG_RESERVED_1(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_BRDMA_CFG_RESERVED_1__SHIFT) & DPU_RDMA_RDMA_BRDMA_CFG_RESERVED_1__MASK; +} + +#define REG_DPU_RDMA_RDMA_BS_BASE_ADDR 0x00005020 +#define DPU_RDMA_RDMA_BS_BASE_ADDR_BS_BASE_ADDR__MASK 0xffffffff +#define DPU_RDMA_RDMA_BS_BASE_ADDR_BS_BASE_ADDR__SHIFT 0 +static inline uint32_t DPU_RDMA_RDMA_BS_BASE_ADDR_BS_BASE_ADDR(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_BS_BASE_ADDR_BS_BASE_ADDR__SHIFT) & DPU_RDMA_RDMA_BS_BASE_ADDR_BS_BASE_ADDR__MASK; +} + +#define REG_DPU_RDMA_RDMA_NRDMA_CFG 0x00005028 +#define DPU_RDMA_RDMA_NRDMA_CFG_RESERVED_0__MASK 0xffffffe0 +#define DPU_RDMA_RDMA_NRDMA_CFG_RESERVED_0__SHIFT 5 +static inline uint32_t DPU_RDMA_RDMA_NRDMA_CFG_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_NRDMA_CFG_RESERVED_0__SHIFT) & DPU_RDMA_RDMA_NRDMA_CFG_RESERVED_0__MASK; +} +#define DPU_RDMA_RDMA_NRDMA_CFG_NRDMA_DATA_USE__MASK 0x0000001e +#define DPU_RDMA_RDMA_NRDMA_CFG_NRDMA_DATA_USE__SHIFT 1 +static inline uint32_t DPU_RDMA_RDMA_NRDMA_CFG_NRDMA_DATA_USE(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_NRDMA_CFG_NRDMA_DATA_USE__SHIFT) & DPU_RDMA_RDMA_NRDMA_CFG_NRDMA_DATA_USE__MASK; +} +#define DPU_RDMA_RDMA_NRDMA_CFG_RESERVED_1__MASK 0x00000001 +#define 
DPU_RDMA_RDMA_NRDMA_CFG_RESERVED_1__SHIFT 0 +static inline uint32_t DPU_RDMA_RDMA_NRDMA_CFG_RESERVED_1(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_NRDMA_CFG_RESERVED_1__SHIFT) & DPU_RDMA_RDMA_NRDMA_CFG_RESERVED_1__MASK; +} + +#define REG_DPU_RDMA_RDMA_BN_BASE_ADDR 0x0000502c +#define DPU_RDMA_RDMA_BN_BASE_ADDR_BN_BASE_ADDR__MASK 0xffffffff +#define DPU_RDMA_RDMA_BN_BASE_ADDR_BN_BASE_ADDR__SHIFT 0 +static inline uint32_t DPU_RDMA_RDMA_BN_BASE_ADDR_BN_BASE_ADDR(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_BN_BASE_ADDR_BN_BASE_ADDR__SHIFT) & DPU_RDMA_RDMA_BN_BASE_ADDR_BN_BASE_ADDR__MASK; +} + +#define REG_DPU_RDMA_RDMA_ERDMA_CFG 0x00005034 +#define DPU_RDMA_RDMA_ERDMA_CFG_ERDMA_DATA_MODE__MASK 0xc0000000 +#define DPU_RDMA_RDMA_ERDMA_CFG_ERDMA_DATA_MODE__SHIFT 30 +static inline uint32_t DPU_RDMA_RDMA_ERDMA_CFG_ERDMA_DATA_MODE(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_ERDMA_CFG_ERDMA_DATA_MODE__SHIFT) & DPU_RDMA_RDMA_ERDMA_CFG_ERDMA_DATA_MODE__MASK; +} +#define DPU_RDMA_RDMA_ERDMA_CFG_ERDMA_SURF_MODE__MASK 0x20000000 +#define DPU_RDMA_RDMA_ERDMA_CFG_ERDMA_SURF_MODE__SHIFT 29 +static inline uint32_t DPU_RDMA_RDMA_ERDMA_CFG_ERDMA_SURF_MODE(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_ERDMA_CFG_ERDMA_SURF_MODE__SHIFT) & DPU_RDMA_RDMA_ERDMA_CFG_ERDMA_SURF_MODE__MASK; +} +#define DPU_RDMA_RDMA_ERDMA_CFG_ERDMA_NONALIGN__MASK 0x10000000 +#define DPU_RDMA_RDMA_ERDMA_CFG_ERDMA_NONALIGN__SHIFT 28 +static inline uint32_t DPU_RDMA_RDMA_ERDMA_CFG_ERDMA_NONALIGN(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_ERDMA_CFG_ERDMA_NONALIGN__SHIFT) & DPU_RDMA_RDMA_ERDMA_CFG_ERDMA_NONALIGN__MASK; +} +#define DPU_RDMA_RDMA_ERDMA_CFG_RESERVED_0__MASK 0x0ffffff0 +#define DPU_RDMA_RDMA_ERDMA_CFG_RESERVED_0__SHIFT 4 +static inline uint32_t DPU_RDMA_RDMA_ERDMA_CFG_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_ERDMA_CFG_RESERVED_0__SHIFT) & DPU_RDMA_RDMA_ERDMA_CFG_RESERVED_0__MASK; +} +#define DPU_RDMA_RDMA_ERDMA_CFG_ERDMA_DATA_SIZE__MASK 0x0000000c +#define 
DPU_RDMA_RDMA_ERDMA_CFG_ERDMA_DATA_SIZE__SHIFT 2 +static inline uint32_t DPU_RDMA_RDMA_ERDMA_CFG_ERDMA_DATA_SIZE(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_ERDMA_CFG_ERDMA_DATA_SIZE__SHIFT) & DPU_RDMA_RDMA_ERDMA_CFG_ERDMA_DATA_SIZE__MASK; +} +#define DPU_RDMA_RDMA_ERDMA_CFG_OV4K_BYPASS__MASK 0x00000002 +#define DPU_RDMA_RDMA_ERDMA_CFG_OV4K_BYPASS__SHIFT 1 +static inline uint32_t DPU_RDMA_RDMA_ERDMA_CFG_OV4K_BYPASS(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_ERDMA_CFG_OV4K_BYPASS__SHIFT) & DPU_RDMA_RDMA_ERDMA_CFG_OV4K_BYPASS__MASK; +} +#define DPU_RDMA_RDMA_ERDMA_CFG_ERDMA_DISABLE__MASK 0x00000001 +#define DPU_RDMA_RDMA_ERDMA_CFG_ERDMA_DISABLE__SHIFT 0 +static inline uint32_t DPU_RDMA_RDMA_ERDMA_CFG_ERDMA_DISABLE(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_ERDMA_CFG_ERDMA_DISABLE__SHIFT) & DPU_RDMA_RDMA_ERDMA_CFG_ERDMA_DISABLE__MASK; +} + +#define REG_DPU_RDMA_RDMA_EW_BASE_ADDR 0x00005038 +#define DPU_RDMA_RDMA_EW_BASE_ADDR_EW_BASE_ADDR__MASK 0xffffffff +#define DPU_RDMA_RDMA_EW_BASE_ADDR_EW_BASE_ADDR__SHIFT 0 +static inline uint32_t DPU_RDMA_RDMA_EW_BASE_ADDR_EW_BASE_ADDR(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_EW_BASE_ADDR_EW_BASE_ADDR__SHIFT) & DPU_RDMA_RDMA_EW_BASE_ADDR_EW_BASE_ADDR__MASK; +} + +#define REG_DPU_RDMA_RDMA_EW_SURF_STRIDE 0x00005040 +#define DPU_RDMA_RDMA_EW_SURF_STRIDE_EW_SURF_STRIDE__MASK 0xfffffff0 +#define DPU_RDMA_RDMA_EW_SURF_STRIDE_EW_SURF_STRIDE__SHIFT 4 +static inline uint32_t DPU_RDMA_RDMA_EW_SURF_STRIDE_EW_SURF_STRIDE(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_EW_SURF_STRIDE_EW_SURF_STRIDE__SHIFT) & DPU_RDMA_RDMA_EW_SURF_STRIDE_EW_SURF_STRIDE__MASK; +} +#define DPU_RDMA_RDMA_EW_SURF_STRIDE_RESERVED_0__MASK 0x0000000f +#define DPU_RDMA_RDMA_EW_SURF_STRIDE_RESERVED_0__SHIFT 0 +static inline uint32_t DPU_RDMA_RDMA_EW_SURF_STRIDE_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_EW_SURF_STRIDE_RESERVED_0__SHIFT) & DPU_RDMA_RDMA_EW_SURF_STRIDE_RESERVED_0__MASK; +} + +#define 
REG_DPU_RDMA_RDMA_FEATURE_MODE_CFG 0x00005044 +#define DPU_RDMA_RDMA_FEATURE_MODE_CFG_RESERVED_0__MASK 0xfffc0000 +#define DPU_RDMA_RDMA_FEATURE_MODE_CFG_RESERVED_0__SHIFT 18 +static inline uint32_t DPU_RDMA_RDMA_FEATURE_MODE_CFG_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_FEATURE_MODE_CFG_RESERVED_0__SHIFT) & DPU_RDMA_RDMA_FEATURE_MODE_CFG_RESERVED_0__MASK; +} +#define DPU_RDMA_RDMA_FEATURE_MODE_CFG_IN_PRECISION__MASK 0x00038000 +#define DPU_RDMA_RDMA_FEATURE_MODE_CFG_IN_PRECISION__SHIFT 15 +static inline uint32_t DPU_RDMA_RDMA_FEATURE_MODE_CFG_IN_PRECISION(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_FEATURE_MODE_CFG_IN_PRECISION__SHIFT) & DPU_RDMA_RDMA_FEATURE_MODE_CFG_IN_PRECISION__MASK; +} +#define DPU_RDMA_RDMA_FEATURE_MODE_CFG_BURST_LEN__MASK 0x00007800 +#define DPU_RDMA_RDMA_FEATURE_MODE_CFG_BURST_LEN__SHIFT 11 +static inline uint32_t DPU_RDMA_RDMA_FEATURE_MODE_CFG_BURST_LEN(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_FEATURE_MODE_CFG_BURST_LEN__SHIFT) & DPU_RDMA_RDMA_FEATURE_MODE_CFG_BURST_LEN__MASK; +} +#define DPU_RDMA_RDMA_FEATURE_MODE_CFG_COMB_USE__MASK 0x00000700 +#define DPU_RDMA_RDMA_FEATURE_MODE_CFG_COMB_USE__SHIFT 8 +static inline uint32_t DPU_RDMA_RDMA_FEATURE_MODE_CFG_COMB_USE(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_FEATURE_MODE_CFG_COMB_USE__SHIFT) & DPU_RDMA_RDMA_FEATURE_MODE_CFG_COMB_USE__MASK; +} +#define DPU_RDMA_RDMA_FEATURE_MODE_CFG_PROC_PRECISION__MASK 0x000000e0 +#define DPU_RDMA_RDMA_FEATURE_MODE_CFG_PROC_PRECISION__SHIFT 5 +static inline uint32_t DPU_RDMA_RDMA_FEATURE_MODE_CFG_PROC_PRECISION(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_FEATURE_MODE_CFG_PROC_PRECISION__SHIFT) & DPU_RDMA_RDMA_FEATURE_MODE_CFG_PROC_PRECISION__MASK; +} +#define DPU_RDMA_RDMA_FEATURE_MODE_CFG_MRDMA_DISABLE__MASK 0x00000010 +#define DPU_RDMA_RDMA_FEATURE_MODE_CFG_MRDMA_DISABLE__SHIFT 4 +static inline uint32_t DPU_RDMA_RDMA_FEATURE_MODE_CFG_MRDMA_DISABLE(uint32_t val) +{ + return ((val) << 
DPU_RDMA_RDMA_FEATURE_MODE_CFG_MRDMA_DISABLE__SHIFT) & DPU_RDMA_RDMA_FEATURE_MODE_CFG_MRDMA_DISABLE__MASK; +} +#define DPU_RDMA_RDMA_FEATURE_MODE_CFG_MRDMA_FP16TOFP32_EN__MASK 0x00000008 +#define DPU_RDMA_RDMA_FEATURE_MODE_CFG_MRDMA_FP16TOFP32_EN__SHIFT 3 +static inline uint32_t DPU_RDMA_RDMA_FEATURE_MODE_CFG_MRDMA_FP16TOFP32_EN(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_FEATURE_MODE_CFG_MRDMA_FP16TOFP32_EN__SHIFT) & DPU_RDMA_RDMA_FEATURE_MODE_CFG_MRDMA_FP16TOFP32_EN__MASK; +} +#define DPU_RDMA_RDMA_FEATURE_MODE_CFG_CONV_MODE__MASK 0x00000006 +#define DPU_RDMA_RDMA_FEATURE_MODE_CFG_CONV_MODE__SHIFT 1 +static inline uint32_t DPU_RDMA_RDMA_FEATURE_MODE_CFG_CONV_MODE(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_FEATURE_MODE_CFG_CONV_MODE__SHIFT) & DPU_RDMA_RDMA_FEATURE_MODE_CFG_CONV_MODE__MASK; +} +#define DPU_RDMA_RDMA_FEATURE_MODE_CFG_FLYING_MODE__MASK 0x00000001 +#define DPU_RDMA_RDMA_FEATURE_MODE_CFG_FLYING_MODE__SHIFT 0 +static inline uint32_t DPU_RDMA_RDMA_FEATURE_MODE_CFG_FLYING_MODE(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_FEATURE_MODE_CFG_FLYING_MODE__SHIFT) & DPU_RDMA_RDMA_FEATURE_MODE_CFG_FLYING_MODE__MASK; +} + +#define REG_DPU_RDMA_RDMA_SRC_DMA_CFG 0x00005048 +#define DPU_RDMA_RDMA_SRC_DMA_CFG_LINE_NOTCH_ADDR__MASK 0xfff80000 +#define DPU_RDMA_RDMA_SRC_DMA_CFG_LINE_NOTCH_ADDR__SHIFT 19 +static inline uint32_t DPU_RDMA_RDMA_SRC_DMA_CFG_LINE_NOTCH_ADDR(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_SRC_DMA_CFG_LINE_NOTCH_ADDR__SHIFT) & DPU_RDMA_RDMA_SRC_DMA_CFG_LINE_NOTCH_ADDR__MASK; +} +#define DPU_RDMA_RDMA_SRC_DMA_CFG_RESERVED_0__MASK 0x0007c000 +#define DPU_RDMA_RDMA_SRC_DMA_CFG_RESERVED_0__SHIFT 14 +static inline uint32_t DPU_RDMA_RDMA_SRC_DMA_CFG_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_SRC_DMA_CFG_RESERVED_0__SHIFT) & DPU_RDMA_RDMA_SRC_DMA_CFG_RESERVED_0__MASK; +} +#define DPU_RDMA_RDMA_SRC_DMA_CFG_POOLING_METHOD__MASK 0x00002000 +#define DPU_RDMA_RDMA_SRC_DMA_CFG_POOLING_METHOD__SHIFT 13 +static inline 
uint32_t DPU_RDMA_RDMA_SRC_DMA_CFG_POOLING_METHOD(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_SRC_DMA_CFG_POOLING_METHOD__SHIFT) & DPU_RDMA_RDMA_SRC_DMA_CFG_POOLING_METHOD__MASK; +} +#define DPU_RDMA_RDMA_SRC_DMA_CFG_UNPOOLING_EN__MASK 0x00001000 +#define DPU_RDMA_RDMA_SRC_DMA_CFG_UNPOOLING_EN__SHIFT 12 +static inline uint32_t DPU_RDMA_RDMA_SRC_DMA_CFG_UNPOOLING_EN(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_SRC_DMA_CFG_UNPOOLING_EN__SHIFT) & DPU_RDMA_RDMA_SRC_DMA_CFG_UNPOOLING_EN__MASK; +} +#define DPU_RDMA_RDMA_SRC_DMA_CFG_KERNEL_STRIDE_HEIGHT__MASK 0x00000e00 +#define DPU_RDMA_RDMA_SRC_DMA_CFG_KERNEL_STRIDE_HEIGHT__SHIFT 9 +static inline uint32_t DPU_RDMA_RDMA_SRC_DMA_CFG_KERNEL_STRIDE_HEIGHT(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_SRC_DMA_CFG_KERNEL_STRIDE_HEIGHT__SHIFT) & DPU_RDMA_RDMA_SRC_DMA_CFG_KERNEL_STRIDE_HEIGHT__MASK; +} +#define DPU_RDMA_RDMA_SRC_DMA_CFG_KERNEL_STRIDE_WIDTH__MASK 0x000001c0 +#define DPU_RDMA_RDMA_SRC_DMA_CFG_KERNEL_STRIDE_WIDTH__SHIFT 6 +static inline uint32_t DPU_RDMA_RDMA_SRC_DMA_CFG_KERNEL_STRIDE_WIDTH(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_SRC_DMA_CFG_KERNEL_STRIDE_WIDTH__SHIFT) & DPU_RDMA_RDMA_SRC_DMA_CFG_KERNEL_STRIDE_WIDTH__MASK; +} +#define DPU_RDMA_RDMA_SRC_DMA_CFG_KERNEL_HEIGHT__MASK 0x00000038 +#define DPU_RDMA_RDMA_SRC_DMA_CFG_KERNEL_HEIGHT__SHIFT 3 +static inline uint32_t DPU_RDMA_RDMA_SRC_DMA_CFG_KERNEL_HEIGHT(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_SRC_DMA_CFG_KERNEL_HEIGHT__SHIFT) & DPU_RDMA_RDMA_SRC_DMA_CFG_KERNEL_HEIGHT__MASK; +} +#define DPU_RDMA_RDMA_SRC_DMA_CFG_KERNEL_WIDTH__MASK 0x00000007 +#define DPU_RDMA_RDMA_SRC_DMA_CFG_KERNEL_WIDTH__SHIFT 0 +static inline uint32_t DPU_RDMA_RDMA_SRC_DMA_CFG_KERNEL_WIDTH(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_SRC_DMA_CFG_KERNEL_WIDTH__SHIFT) & DPU_RDMA_RDMA_SRC_DMA_CFG_KERNEL_WIDTH__MASK; +} + +#define REG_DPU_RDMA_RDMA_SURF_NOTCH 0x0000504c +#define DPU_RDMA_RDMA_SURF_NOTCH_SURF_NOTCH_ADDR__MASK 0xfffffff0 +#define 
DPU_RDMA_RDMA_SURF_NOTCH_SURF_NOTCH_ADDR__SHIFT 4 +static inline uint32_t DPU_RDMA_RDMA_SURF_NOTCH_SURF_NOTCH_ADDR(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_SURF_NOTCH_SURF_NOTCH_ADDR__SHIFT) & DPU_RDMA_RDMA_SURF_NOTCH_SURF_NOTCH_ADDR__MASK; +} +#define DPU_RDMA_RDMA_SURF_NOTCH_RESERVED_0__MASK 0x0000000f +#define DPU_RDMA_RDMA_SURF_NOTCH_RESERVED_0__SHIFT 0 +static inline uint32_t DPU_RDMA_RDMA_SURF_NOTCH_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_SURF_NOTCH_RESERVED_0__SHIFT) & DPU_RDMA_RDMA_SURF_NOTCH_RESERVED_0__MASK; +} + +#define REG_DPU_RDMA_RDMA_PAD_CFG 0x00005064 +#define DPU_RDMA_RDMA_PAD_CFG_PAD_VALUE__MASK 0xffff0000 +#define DPU_RDMA_RDMA_PAD_CFG_PAD_VALUE__SHIFT 16 +static inline uint32_t DPU_RDMA_RDMA_PAD_CFG_PAD_VALUE(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_PAD_CFG_PAD_VALUE__SHIFT) & DPU_RDMA_RDMA_PAD_CFG_PAD_VALUE__MASK; +} +#define DPU_RDMA_RDMA_PAD_CFG_RESERVED_0__MASK 0x0000ff80 +#define DPU_RDMA_RDMA_PAD_CFG_RESERVED_0__SHIFT 7 +static inline uint32_t DPU_RDMA_RDMA_PAD_CFG_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_PAD_CFG_RESERVED_0__SHIFT) & DPU_RDMA_RDMA_PAD_CFG_RESERVED_0__MASK; +} +#define DPU_RDMA_RDMA_PAD_CFG_PAD_TOP__MASK 0x00000070 +#define DPU_RDMA_RDMA_PAD_CFG_PAD_TOP__SHIFT 4 +static inline uint32_t DPU_RDMA_RDMA_PAD_CFG_PAD_TOP(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_PAD_CFG_PAD_TOP__SHIFT) & DPU_RDMA_RDMA_PAD_CFG_PAD_TOP__MASK; +} +#define DPU_RDMA_RDMA_PAD_CFG_RESERVED_1__MASK 0x00000008 +#define DPU_RDMA_RDMA_PAD_CFG_RESERVED_1__SHIFT 3 +static inline uint32_t DPU_RDMA_RDMA_PAD_CFG_RESERVED_1(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_PAD_CFG_RESERVED_1__SHIFT) & DPU_RDMA_RDMA_PAD_CFG_RESERVED_1__MASK; +} +#define DPU_RDMA_RDMA_PAD_CFG_PAD_LEFT__MASK 0x00000007 +#define DPU_RDMA_RDMA_PAD_CFG_PAD_LEFT__SHIFT 0 +static inline uint32_t DPU_RDMA_RDMA_PAD_CFG_PAD_LEFT(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_PAD_CFG_PAD_LEFT__SHIFT) & 
DPU_RDMA_RDMA_PAD_CFG_PAD_LEFT__MASK; +} + +#define REG_DPU_RDMA_RDMA_WEIGHT 0x00005068 +#define DPU_RDMA_RDMA_WEIGHT_E_WEIGHT__MASK 0xff000000 +#define DPU_RDMA_RDMA_WEIGHT_E_WEIGHT__SHIFT 24 +static inline uint32_t DPU_RDMA_RDMA_WEIGHT_E_WEIGHT(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_WEIGHT_E_WEIGHT__SHIFT) & DPU_RDMA_RDMA_WEIGHT_E_WEIGHT__MASK; +} +#define DPU_RDMA_RDMA_WEIGHT_N_WEIGHT__MASK 0x00ff0000 +#define DPU_RDMA_RDMA_WEIGHT_N_WEIGHT__SHIFT 16 +static inline uint32_t DPU_RDMA_RDMA_WEIGHT_N_WEIGHT(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_WEIGHT_N_WEIGHT__SHIFT) & DPU_RDMA_RDMA_WEIGHT_N_WEIGHT__MASK; +} +#define DPU_RDMA_RDMA_WEIGHT_B_WEIGHT__MASK 0x0000ff00 +#define DPU_RDMA_RDMA_WEIGHT_B_WEIGHT__SHIFT 8 +static inline uint32_t DPU_RDMA_RDMA_WEIGHT_B_WEIGHT(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_WEIGHT_B_WEIGHT__SHIFT) & DPU_RDMA_RDMA_WEIGHT_B_WEIGHT__MASK; +} +#define DPU_RDMA_RDMA_WEIGHT_M_WEIGHT__MASK 0x000000ff +#define DPU_RDMA_RDMA_WEIGHT_M_WEIGHT__SHIFT 0 +static inline uint32_t DPU_RDMA_RDMA_WEIGHT_M_WEIGHT(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_WEIGHT_M_WEIGHT__SHIFT) & DPU_RDMA_RDMA_WEIGHT_M_WEIGHT__MASK; +} + +#define REG_DPU_RDMA_RDMA_EW_SURF_NOTCH 0x0000506c +#define DPU_RDMA_RDMA_EW_SURF_NOTCH_EW_SURF_NOTCH__MASK 0xfffffff0 +#define DPU_RDMA_RDMA_EW_SURF_NOTCH_EW_SURF_NOTCH__SHIFT 4 +static inline uint32_t DPU_RDMA_RDMA_EW_SURF_NOTCH_EW_SURF_NOTCH(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_EW_SURF_NOTCH_EW_SURF_NOTCH__SHIFT) & DPU_RDMA_RDMA_EW_SURF_NOTCH_EW_SURF_NOTCH__MASK; +} +#define DPU_RDMA_RDMA_EW_SURF_NOTCH_RESERVED_0__MASK 0x0000000f +#define DPU_RDMA_RDMA_EW_SURF_NOTCH_RESERVED_0__SHIFT 0 +static inline uint32_t DPU_RDMA_RDMA_EW_SURF_NOTCH_RESERVED_0(uint32_t val) +{ + return ((val) << DPU_RDMA_RDMA_EW_SURF_NOTCH_RESERVED_0__SHIFT) & DPU_RDMA_RDMA_EW_SURF_NOTCH_RESERVED_0__MASK; +} + +#define REG_PPU_S_STATUS 0x00006000 +#define PPU_S_STATUS_RESERVED_0__MASK 0xfffc0000 +#define 
PPU_S_STATUS_RESERVED_0__SHIFT 18 +static inline uint32_t PPU_S_STATUS_RESERVED_0(uint32_t val) +{ + return ((val) << PPU_S_STATUS_RESERVED_0__SHIFT) & PPU_S_STATUS_RESERVED_0__MASK; +} +#define PPU_S_STATUS_STATUS_1__MASK 0x00030000 +#define PPU_S_STATUS_STATUS_1__SHIFT 16 +static inline uint32_t PPU_S_STATUS_STATUS_1(uint32_t val) +{ + return ((val) << PPU_S_STATUS_STATUS_1__SHIFT) & PPU_S_STATUS_STATUS_1__MASK; +} +#define PPU_S_STATUS_RESERVED_1__MASK 0x0000fffc +#define PPU_S_STATUS_RESERVED_1__SHIFT 2 +static inline uint32_t PPU_S_STATUS_RESERVED_1(uint32_t val) +{ + return ((val) << PPU_S_STATUS_RESERVED_1__SHIFT) & PPU_S_STATUS_RESERVED_1__MASK; +} +#define PPU_S_STATUS_STATUS_0__MASK 0x00000003 +#define PPU_S_STATUS_STATUS_0__SHIFT 0 +static inline uint32_t PPU_S_STATUS_STATUS_0(uint32_t val) +{ + return ((val) << PPU_S_STATUS_STATUS_0__SHIFT) & PPU_S_STATUS_STATUS_0__MASK; +} + +#define REG_PPU_S_POINTER 0x00006004 +#define PPU_S_POINTER_RESERVED_0__MASK 0xfffe0000 +#define PPU_S_POINTER_RESERVED_0__SHIFT 17 +static inline uint32_t PPU_S_POINTER_RESERVED_0(uint32_t val) +{ + return ((val) << PPU_S_POINTER_RESERVED_0__SHIFT) & PPU_S_POINTER_RESERVED_0__MASK; +} +#define PPU_S_POINTER_EXECUTER__MASK 0x00010000 +#define PPU_S_POINTER_EXECUTER__SHIFT 16 +static inline uint32_t PPU_S_POINTER_EXECUTER(uint32_t val) +{ + return ((val) << PPU_S_POINTER_EXECUTER__SHIFT) & PPU_S_POINTER_EXECUTER__MASK; +} +#define PPU_S_POINTER_RESERVED_1__MASK 0x0000ffc0 +#define PPU_S_POINTER_RESERVED_1__SHIFT 6 +static inline uint32_t PPU_S_POINTER_RESERVED_1(uint32_t val) +{ + return ((val) << PPU_S_POINTER_RESERVED_1__SHIFT) & PPU_S_POINTER_RESERVED_1__MASK; +} +#define PPU_S_POINTER_EXECUTER_PP_CLEAR__MASK 0x00000020 +#define PPU_S_POINTER_EXECUTER_PP_CLEAR__SHIFT 5 +static inline uint32_t PPU_S_POINTER_EXECUTER_PP_CLEAR(uint32_t val) +{ + return ((val) << PPU_S_POINTER_EXECUTER_PP_CLEAR__SHIFT) & PPU_S_POINTER_EXECUTER_PP_CLEAR__MASK; +} +#define 
PPU_S_POINTER_POINTER_PP_CLEAR__MASK 0x00000010 +#define PPU_S_POINTER_POINTER_PP_CLEAR__SHIFT 4 +static inline uint32_t PPU_S_POINTER_POINTER_PP_CLEAR(uint32_t val) +{ + return ((val) << PPU_S_POINTER_POINTER_PP_CLEAR__SHIFT) & PPU_S_POINTER_POINTER_PP_CLEAR__MASK; +} +#define PPU_S_POINTER_POINTER_PP_MODE__MASK 0x00000008 +#define PPU_S_POINTER_POINTER_PP_MODE__SHIFT 3 +static inline uint32_t PPU_S_POINTER_POINTER_PP_MODE(uint32_t val) +{ + return ((val) << PPU_S_POINTER_POINTER_PP_MODE__SHIFT) & PPU_S_POINTER_POINTER_PP_MODE__MASK; +} +#define PPU_S_POINTER_EXECUTER_PP_EN__MASK 0x00000004 +#define PPU_S_POINTER_EXECUTER_PP_EN__SHIFT 2 +static inline uint32_t PPU_S_POINTER_EXECUTER_PP_EN(uint32_t val) +{ + return ((val) << PPU_S_POINTER_EXECUTER_PP_EN__SHIFT) & PPU_S_POINTER_EXECUTER_PP_EN__MASK; +} +#define PPU_S_POINTER_POINTER_PP_EN__MASK 0x00000002 +#define PPU_S_POINTER_POINTER_PP_EN__SHIFT 1 +static inline uint32_t PPU_S_POINTER_POINTER_PP_EN(uint32_t val) +{ + return ((val) << PPU_S_POINTER_POINTER_PP_EN__SHIFT) & PPU_S_POINTER_POINTER_PP_EN__MASK; +} +#define PPU_S_POINTER_POINTER__MASK 0x00000001 +#define PPU_S_POINTER_POINTER__SHIFT 0 +static inline uint32_t PPU_S_POINTER_POINTER(uint32_t val) +{ + return ((val) << PPU_S_POINTER_POINTER__SHIFT) & PPU_S_POINTER_POINTER__MASK; +} + +#define REG_PPU_OPERATION_ENABLE 0x00006008 +#define PPU_OPERATION_ENABLE_RESERVED_0__MASK 0xfffffffe +#define PPU_OPERATION_ENABLE_RESERVED_0__SHIFT 1 +static inline uint32_t PPU_OPERATION_ENABLE_RESERVED_0(uint32_t val) +{ + return ((val) << PPU_OPERATION_ENABLE_RESERVED_0__SHIFT) & PPU_OPERATION_ENABLE_RESERVED_0__MASK; +} +#define PPU_OPERATION_ENABLE_OP_EN__MASK 0x00000001 +#define PPU_OPERATION_ENABLE_OP_EN__SHIFT 0 +static inline uint32_t PPU_OPERATION_ENABLE_OP_EN(uint32_t val) +{ + return ((val) << PPU_OPERATION_ENABLE_OP_EN__SHIFT) & PPU_OPERATION_ENABLE_OP_EN__MASK; +} + +#define REG_PPU_DATA_CUBE_IN_WIDTH 0x0000600c +#define PPU_DATA_CUBE_IN_WIDTH_RESERVED_0__MASK 
0xffffe000 +#define PPU_DATA_CUBE_IN_WIDTH_RESERVED_0__SHIFT 13 +static inline uint32_t PPU_DATA_CUBE_IN_WIDTH_RESERVED_0(uint32_t val) +{ + return ((val) << PPU_DATA_CUBE_IN_WIDTH_RESERVED_0__SHIFT) & PPU_DATA_CUBE_IN_WIDTH_RESERVED_0__MASK; +} +#define PPU_DATA_CUBE_IN_WIDTH_CUBE_IN_WIDTH__MASK 0x00001fff +#define PPU_DATA_CUBE_IN_WIDTH_CUBE_IN_WIDTH__SHIFT 0 +static inline uint32_t PPU_DATA_CUBE_IN_WIDTH_CUBE_IN_WIDTH(uint32_t val) +{ + return ((val) << PPU_DATA_CUBE_IN_WIDTH_CUBE_IN_WIDTH__SHIFT) & PPU_DATA_CUBE_IN_WIDTH_CUBE_IN_WIDTH__MASK; +} + +#define REG_PPU_DATA_CUBE_IN_HEIGHT 0x00006010 +#define PPU_DATA_CUBE_IN_HEIGHT_RESERVED_0__MASK 0xffffe000 +#define PPU_DATA_CUBE_IN_HEIGHT_RESERVED_0__SHIFT 13 +static inline uint32_t PPU_DATA_CUBE_IN_HEIGHT_RESERVED_0(uint32_t val) +{ + return ((val) << PPU_DATA_CUBE_IN_HEIGHT_RESERVED_0__SHIFT) & PPU_DATA_CUBE_IN_HEIGHT_RESERVED_0__MASK; +} +#define PPU_DATA_CUBE_IN_HEIGHT_CUBE_IN_HEIGHT__MASK 0x00001fff +#define PPU_DATA_CUBE_IN_HEIGHT_CUBE_IN_HEIGHT__SHIFT 0 +static inline uint32_t PPU_DATA_CUBE_IN_HEIGHT_CUBE_IN_HEIGHT(uint32_t val) +{ + return ((val) << PPU_DATA_CUBE_IN_HEIGHT_CUBE_IN_HEIGHT__SHIFT) & PPU_DATA_CUBE_IN_HEIGHT_CUBE_IN_HEIGHT__MASK; +} + +#define REG_PPU_DATA_CUBE_IN_CHANNEL 0x00006014 +#define PPU_DATA_CUBE_IN_CHANNEL_RESERVED_0__MASK 0xffffe000 +#define PPU_DATA_CUBE_IN_CHANNEL_RESERVED_0__SHIFT 13 +static inline uint32_t PPU_DATA_CUBE_IN_CHANNEL_RESERVED_0(uint32_t val) +{ + return ((val) << PPU_DATA_CUBE_IN_CHANNEL_RESERVED_0__SHIFT) & PPU_DATA_CUBE_IN_CHANNEL_RESERVED_0__MASK; +} +#define PPU_DATA_CUBE_IN_CHANNEL_CUBE_IN_CHANNEL__MASK 0x00001fff +#define PPU_DATA_CUBE_IN_CHANNEL_CUBE_IN_CHANNEL__SHIFT 0 +static inline uint32_t PPU_DATA_CUBE_IN_CHANNEL_CUBE_IN_CHANNEL(uint32_t val) +{ + return ((val) << PPU_DATA_CUBE_IN_CHANNEL_CUBE_IN_CHANNEL__SHIFT) & PPU_DATA_CUBE_IN_CHANNEL_CUBE_IN_CHANNEL__MASK; +} + +#define REG_PPU_DATA_CUBE_OUT_WIDTH 0x00006018 +#define 
PPU_DATA_CUBE_OUT_WIDTH_RESERVED_0__MASK 0xffffe000 +#define PPU_DATA_CUBE_OUT_WIDTH_RESERVED_0__SHIFT 13 +static inline uint32_t PPU_DATA_CUBE_OUT_WIDTH_RESERVED_0(uint32_t val) +{ + return ((val) << PPU_DATA_CUBE_OUT_WIDTH_RESERVED_0__SHIFT) & PPU_DATA_CUBE_OUT_WIDTH_RESERVED_0__MASK; +} +#define PPU_DATA_CUBE_OUT_WIDTH_CUBE_OUT_WIDTH__MASK 0x00001fff +#define PPU_DATA_CUBE_OUT_WIDTH_CUBE_OUT_WIDTH__SHIFT 0 +static inline uint32_t PPU_DATA_CUBE_OUT_WIDTH_CUBE_OUT_WIDTH(uint32_t val) +{ + return ((val) << PPU_DATA_CUBE_OUT_WIDTH_CUBE_OUT_WIDTH__SHIFT) & PPU_DATA_CUBE_OUT_WIDTH_CUBE_OUT_WIDTH__MASK; +} + +#define REG_PPU_DATA_CUBE_OUT_HEIGHT 0x0000601c +#define PPU_DATA_CUBE_OUT_HEIGHT_RESERVED_0__MASK 0xffffe000 +#define PPU_DATA_CUBE_OUT_HEIGHT_RESERVED_0__SHIFT 13 +static inline uint32_t PPU_DATA_CUBE_OUT_HEIGHT_RESERVED_0(uint32_t val) +{ + return ((val) << PPU_DATA_CUBE_OUT_HEIGHT_RESERVED_0__SHIFT) & PPU_DATA_CUBE_OUT_HEIGHT_RESERVED_0__MASK; +} +#define PPU_DATA_CUBE_OUT_HEIGHT_CUBE_OUT_HEIGHT__MASK 0x00001fff +#define PPU_DATA_CUBE_OUT_HEIGHT_CUBE_OUT_HEIGHT__SHIFT 0 +static inline uint32_t PPU_DATA_CUBE_OUT_HEIGHT_CUBE_OUT_HEIGHT(uint32_t val) +{ + return ((val) << PPU_DATA_CUBE_OUT_HEIGHT_CUBE_OUT_HEIGHT__SHIFT) & PPU_DATA_CUBE_OUT_HEIGHT_CUBE_OUT_HEIGHT__MASK; +} + +#define REG_PPU_DATA_CUBE_OUT_CHANNEL 0x00006020 +#define PPU_DATA_CUBE_OUT_CHANNEL_RESERVED_0__MASK 0xffffe000 +#define PPU_DATA_CUBE_OUT_CHANNEL_RESERVED_0__SHIFT 13 +static inline uint32_t PPU_DATA_CUBE_OUT_CHANNEL_RESERVED_0(uint32_t val) +{ + return ((val) << PPU_DATA_CUBE_OUT_CHANNEL_RESERVED_0__SHIFT) & PPU_DATA_CUBE_OUT_CHANNEL_RESERVED_0__MASK; +} +#define PPU_DATA_CUBE_OUT_CHANNEL_CUBE_OUT_CHANNEL__MASK 0x00001fff +#define PPU_DATA_CUBE_OUT_CHANNEL_CUBE_OUT_CHANNEL__SHIFT 0 +static inline uint32_t PPU_DATA_CUBE_OUT_CHANNEL_CUBE_OUT_CHANNEL(uint32_t val) +{ + return ((val) << PPU_DATA_CUBE_OUT_CHANNEL_CUBE_OUT_CHANNEL__SHIFT) & PPU_DATA_CUBE_OUT_CHANNEL_CUBE_OUT_CHANNEL__MASK; +} + 
+#define REG_PPU_OPERATION_MODE_CFG 0x00006024 +#define PPU_OPERATION_MODE_CFG_RESERVED_0__MASK 0x80000000 +#define PPU_OPERATION_MODE_CFG_RESERVED_0__SHIFT 31 +static inline uint32_t PPU_OPERATION_MODE_CFG_RESERVED_0(uint32_t val) +{ + return ((val) << PPU_OPERATION_MODE_CFG_RESERVED_0__SHIFT) & PPU_OPERATION_MODE_CFG_RESERVED_0__MASK; +} +#define PPU_OPERATION_MODE_CFG_INDEX_EN__MASK 0x40000000 +#define PPU_OPERATION_MODE_CFG_INDEX_EN__SHIFT 30 +static inline uint32_t PPU_OPERATION_MODE_CFG_INDEX_EN(uint32_t val) +{ + return ((val) << PPU_OPERATION_MODE_CFG_INDEX_EN__SHIFT) & PPU_OPERATION_MODE_CFG_INDEX_EN__MASK; +} +#define PPU_OPERATION_MODE_CFG_RESERVED_1__MASK 0x20000000 +#define PPU_OPERATION_MODE_CFG_RESERVED_1__SHIFT 29 +static inline uint32_t PPU_OPERATION_MODE_CFG_RESERVED_1(uint32_t val) +{ + return ((val) << PPU_OPERATION_MODE_CFG_RESERVED_1__SHIFT) & PPU_OPERATION_MODE_CFG_RESERVED_1__MASK; +} +#define PPU_OPERATION_MODE_CFG_NOTCH_ADDR__MASK 0x1fff0000 +#define PPU_OPERATION_MODE_CFG_NOTCH_ADDR__SHIFT 16 +static inline uint32_t PPU_OPERATION_MODE_CFG_NOTCH_ADDR(uint32_t val) +{ + return ((val) << PPU_OPERATION_MODE_CFG_NOTCH_ADDR__SHIFT) & PPU_OPERATION_MODE_CFG_NOTCH_ADDR__MASK; +} +#define PPU_OPERATION_MODE_CFG_RESERVED_2__MASK 0x0000ff00 +#define PPU_OPERATION_MODE_CFG_RESERVED_2__SHIFT 8 +static inline uint32_t PPU_OPERATION_MODE_CFG_RESERVED_2(uint32_t val) +{ + return ((val) << PPU_OPERATION_MODE_CFG_RESERVED_2__SHIFT) & PPU_OPERATION_MODE_CFG_RESERVED_2__MASK; +} +#define PPU_OPERATION_MODE_CFG_USE_CNT__MASK 0x000000e0 +#define PPU_OPERATION_MODE_CFG_USE_CNT__SHIFT 5 +static inline uint32_t PPU_OPERATION_MODE_CFG_USE_CNT(uint32_t val) +{ + return ((val) << PPU_OPERATION_MODE_CFG_USE_CNT__SHIFT) & PPU_OPERATION_MODE_CFG_USE_CNT__MASK; +} +#define PPU_OPERATION_MODE_CFG_FLYING_MODE__MASK 0x00000010 +#define PPU_OPERATION_MODE_CFG_FLYING_MODE__SHIFT 4 +static inline uint32_t PPU_OPERATION_MODE_CFG_FLYING_MODE(uint32_t val) +{ + return ((val) << 
PPU_OPERATION_MODE_CFG_FLYING_MODE__SHIFT) & PPU_OPERATION_MODE_CFG_FLYING_MODE__MASK; +} +#define PPU_OPERATION_MODE_CFG_RESERVED_3__MASK 0x0000000c +#define PPU_OPERATION_MODE_CFG_RESERVED_3__SHIFT 2 +static inline uint32_t PPU_OPERATION_MODE_CFG_RESERVED_3(uint32_t val) +{ + return ((val) << PPU_OPERATION_MODE_CFG_RESERVED_3__SHIFT) & PPU_OPERATION_MODE_CFG_RESERVED_3__MASK; +} +#define PPU_OPERATION_MODE_CFG_POOLING_METHOD__MASK 0x00000003 +#define PPU_OPERATION_MODE_CFG_POOLING_METHOD__SHIFT 0 +static inline uint32_t PPU_OPERATION_MODE_CFG_POOLING_METHOD(uint32_t val) +{ + return ((val) << PPU_OPERATION_MODE_CFG_POOLING_METHOD__SHIFT) & PPU_OPERATION_MODE_CFG_POOLING_METHOD__MASK; +} + +#define REG_PPU_POOLING_KERNEL_CFG 0x00006034 +#define PPU_POOLING_KERNEL_CFG_RESERVED_0__MASK 0xff000000 +#define PPU_POOLING_KERNEL_CFG_RESERVED_0__SHIFT 24 +static inline uint32_t PPU_POOLING_KERNEL_CFG_RESERVED_0(uint32_t val) +{ + return ((val) << PPU_POOLING_KERNEL_CFG_RESERVED_0__SHIFT) & PPU_POOLING_KERNEL_CFG_RESERVED_0__MASK; +} +#define PPU_POOLING_KERNEL_CFG_KERNEL_STRIDE_HEIGHT__MASK 0x00f00000 +#define PPU_POOLING_KERNEL_CFG_KERNEL_STRIDE_HEIGHT__SHIFT 20 +static inline uint32_t PPU_POOLING_KERNEL_CFG_KERNEL_STRIDE_HEIGHT(uint32_t val) +{ + return ((val) << PPU_POOLING_KERNEL_CFG_KERNEL_STRIDE_HEIGHT__SHIFT) & PPU_POOLING_KERNEL_CFG_KERNEL_STRIDE_HEIGHT__MASK; +} +#define PPU_POOLING_KERNEL_CFG_KERNEL_STRIDE_WIDTH__MASK 0x000f0000 +#define PPU_POOLING_KERNEL_CFG_KERNEL_STRIDE_WIDTH__SHIFT 16 +static inline uint32_t PPU_POOLING_KERNEL_CFG_KERNEL_STRIDE_WIDTH(uint32_t val) +{ + return ((val) << PPU_POOLING_KERNEL_CFG_KERNEL_STRIDE_WIDTH__SHIFT) & PPU_POOLING_KERNEL_CFG_KERNEL_STRIDE_WIDTH__MASK; +} +#define PPU_POOLING_KERNEL_CFG_RESERVED_1__MASK 0x0000f000 +#define PPU_POOLING_KERNEL_CFG_RESERVED_1__SHIFT 12 +static inline uint32_t PPU_POOLING_KERNEL_CFG_RESERVED_1(uint32_t val) +{ + return ((val) << PPU_POOLING_KERNEL_CFG_RESERVED_1__SHIFT) & 
PPU_POOLING_KERNEL_CFG_RESERVED_1__MASK; +} +#define PPU_POOLING_KERNEL_CFG_KERNEL_HEIGHT__MASK 0x00000f00 +#define PPU_POOLING_KERNEL_CFG_KERNEL_HEIGHT__SHIFT 8 +static inline uint32_t PPU_POOLING_KERNEL_CFG_KERNEL_HEIGHT(uint32_t val) +{ + return ((val) << PPU_POOLING_KERNEL_CFG_KERNEL_HEIGHT__SHIFT) & PPU_POOLING_KERNEL_CFG_KERNEL_HEIGHT__MASK; +} +#define PPU_POOLING_KERNEL_CFG_RESERVED_2__MASK 0x000000f0 +#define PPU_POOLING_KERNEL_CFG_RESERVED_2__SHIFT 4 +static inline uint32_t PPU_POOLING_KERNEL_CFG_RESERVED_2(uint32_t val) +{ + return ((val) << PPU_POOLING_KERNEL_CFG_RESERVED_2__SHIFT) & PPU_POOLING_KERNEL_CFG_RESERVED_2__MASK; +} +#define PPU_POOLING_KERNEL_CFG_KERNEL_WIDTH__MASK 0x0000000f +#define PPU_POOLING_KERNEL_CFG_KERNEL_WIDTH__SHIFT 0 +static inline uint32_t PPU_POOLING_KERNEL_CFG_KERNEL_WIDTH(uint32_t val) +{ + return ((val) << PPU_POOLING_KERNEL_CFG_KERNEL_WIDTH__SHIFT) & PPU_POOLING_KERNEL_CFG_KERNEL_WIDTH__MASK; +} + +#define REG_PPU_RECIP_KERNEL_WIDTH 0x00006038 +#define PPU_RECIP_KERNEL_WIDTH_RESERVED_0__MASK 0xfffe0000 +#define PPU_RECIP_KERNEL_WIDTH_RESERVED_0__SHIFT 17 +static inline uint32_t PPU_RECIP_KERNEL_WIDTH_RESERVED_0(uint32_t val) +{ + return ((val) << PPU_RECIP_KERNEL_WIDTH_RESERVED_0__SHIFT) & PPU_RECIP_KERNEL_WIDTH_RESERVED_0__MASK; +} +#define PPU_RECIP_KERNEL_WIDTH_RECIP_KERNEL_WIDTH__MASK 0x0001ffff +#define PPU_RECIP_KERNEL_WIDTH_RECIP_KERNEL_WIDTH__SHIFT 0 +static inline uint32_t PPU_RECIP_KERNEL_WIDTH_RECIP_KERNEL_WIDTH(uint32_t val) +{ + return ((val) << PPU_RECIP_KERNEL_WIDTH_RECIP_KERNEL_WIDTH__SHIFT) & PPU_RECIP_KERNEL_WIDTH_RECIP_KERNEL_WIDTH__MASK; +} + +#define REG_PPU_RECIP_KERNEL_HEIGHT 0x0000603c +#define PPU_RECIP_KERNEL_HEIGHT_RESERVED_0__MASK 0xfffe0000 +#define PPU_RECIP_KERNEL_HEIGHT_RESERVED_0__SHIFT 17 +static inline uint32_t PPU_RECIP_KERNEL_HEIGHT_RESERVED_0(uint32_t val) +{ + return ((val) << PPU_RECIP_KERNEL_HEIGHT_RESERVED_0__SHIFT) & PPU_RECIP_KERNEL_HEIGHT_RESERVED_0__MASK; +} +#define 
PPU_RECIP_KERNEL_HEIGHT_RECIP_KERNEL_HEIGHT__MASK 0x0001ffff +#define PPU_RECIP_KERNEL_HEIGHT_RECIP_KERNEL_HEIGHT__SHIFT 0 +static inline uint32_t PPU_RECIP_KERNEL_HEIGHT_RECIP_KERNEL_HEIGHT(uint32_t val) +{ + return ((val) << PPU_RECIP_KERNEL_HEIGHT_RECIP_KERNEL_HEIGHT__SHIFT) & PPU_RECIP_KERNEL_HEIGHT_RECIP_KERNEL_HEIGHT__MASK; +} + +#define REG_PPU_POOLING_PADDING_CFG 0x00006040 +#define PPU_POOLING_PADDING_CFG_RESERVED_0__MASK 0xffff8000 +#define PPU_POOLING_PADDING_CFG_RESERVED_0__SHIFT 15 +static inline uint32_t PPU_POOLING_PADDING_CFG_RESERVED_0(uint32_t val) +{ + return ((val) << PPU_POOLING_PADDING_CFG_RESERVED_0__SHIFT) & PPU_POOLING_PADDING_CFG_RESERVED_0__MASK; +} +#define PPU_POOLING_PADDING_CFG_PAD_BOTTOM__MASK 0x00007000 +#define PPU_POOLING_PADDING_CFG_PAD_BOTTOM__SHIFT 12 +static inline uint32_t PPU_POOLING_PADDING_CFG_PAD_BOTTOM(uint32_t val) +{ + return ((val) << PPU_POOLING_PADDING_CFG_PAD_BOTTOM__SHIFT) & PPU_POOLING_PADDING_CFG_PAD_BOTTOM__MASK; +} +#define PPU_POOLING_PADDING_CFG_RESERVED_1__MASK 0x00000800 +#define PPU_POOLING_PADDING_CFG_RESERVED_1__SHIFT 11 +static inline uint32_t PPU_POOLING_PADDING_CFG_RESERVED_1(uint32_t val) +{ + return ((val) << PPU_POOLING_PADDING_CFG_RESERVED_1__SHIFT) & PPU_POOLING_PADDING_CFG_RESERVED_1__MASK; +} +#define PPU_POOLING_PADDING_CFG_PAD_RIGHT__MASK 0x00000700 +#define PPU_POOLING_PADDING_CFG_PAD_RIGHT__SHIFT 8 +static inline uint32_t PPU_POOLING_PADDING_CFG_PAD_RIGHT(uint32_t val) +{ + return ((val) << PPU_POOLING_PADDING_CFG_PAD_RIGHT__SHIFT) & PPU_POOLING_PADDING_CFG_PAD_RIGHT__MASK; +} +#define PPU_POOLING_PADDING_CFG_RESERVED_2__MASK 0x00000080 +#define PPU_POOLING_PADDING_CFG_RESERVED_2__SHIFT 7 +static inline uint32_t PPU_POOLING_PADDING_CFG_RESERVED_2(uint32_t val) +{ + return ((val) << PPU_POOLING_PADDING_CFG_RESERVED_2__SHIFT) & PPU_POOLING_PADDING_CFG_RESERVED_2__MASK; +} +#define PPU_POOLING_PADDING_CFG_PAD_TOP__MASK 0x00000070 +#define PPU_POOLING_PADDING_CFG_PAD_TOP__SHIFT 4 +static 
inline uint32_t PPU_POOLING_PADDING_CFG_PAD_TOP(uint32_t val) +{ + return ((val) << PPU_POOLING_PADDING_CFG_PAD_TOP__SHIFT) & PPU_POOLING_PADDING_CFG_PAD_TOP__MASK; +} +#define PPU_POOLING_PADDING_CFG_RESERVED_3__MASK 0x00000008 +#define PPU_POOLING_PADDING_CFG_RESERVED_3__SHIFT 3 +static inline uint32_t PPU_POOLING_PADDING_CFG_RESERVED_3(uint32_t val) +{ + return ((val) << PPU_POOLING_PADDING_CFG_RESERVED_3__SHIFT) & PPU_POOLING_PADDING_CFG_RESERVED_3__MASK; +} +#define PPU_POOLING_PADDING_CFG_PAD_LEFT__MASK 0x00000007 +#define PPU_POOLING_PADDING_CFG_PAD_LEFT__SHIFT 0 +static inline uint32_t PPU_POOLING_PADDING_CFG_PAD_LEFT(uint32_t val) +{ + return ((val) << PPU_POOLING_PADDING_CFG_PAD_LEFT__SHIFT) & PPU_POOLING_PADDING_CFG_PAD_LEFT__MASK; +} + +#define REG_PPU_PADDING_VALUE_1_CFG 0x00006044 +#define PPU_PADDING_VALUE_1_CFG_PAD_VALUE_0__MASK 0xffffffff +#define PPU_PADDING_VALUE_1_CFG_PAD_VALUE_0__SHIFT 0 +static inline uint32_t PPU_PADDING_VALUE_1_CFG_PAD_VALUE_0(uint32_t val) +{ + return ((val) << PPU_PADDING_VALUE_1_CFG_PAD_VALUE_0__SHIFT) & PPU_PADDING_VALUE_1_CFG_PAD_VALUE_0__MASK; +} + +#define REG_PPU_PADDING_VALUE_2_CFG 0x00006048 +#define PPU_PADDING_VALUE_2_CFG_RESERVED_0__MASK 0xfffffff8 +#define PPU_PADDING_VALUE_2_CFG_RESERVED_0__SHIFT 3 +static inline uint32_t PPU_PADDING_VALUE_2_CFG_RESERVED_0(uint32_t val) +{ + return ((val) << PPU_PADDING_VALUE_2_CFG_RESERVED_0__SHIFT) & PPU_PADDING_VALUE_2_CFG_RESERVED_0__MASK; +} +#define PPU_PADDING_VALUE_2_CFG_PAD_VALUE_1__MASK 0x00000007 +#define PPU_PADDING_VALUE_2_CFG_PAD_VALUE_1__SHIFT 0 +static inline uint32_t PPU_PADDING_VALUE_2_CFG_PAD_VALUE_1(uint32_t val) +{ + return ((val) << PPU_PADDING_VALUE_2_CFG_PAD_VALUE_1__SHIFT) & PPU_PADDING_VALUE_2_CFG_PAD_VALUE_1__MASK; +} + +#define REG_PPU_DST_BASE_ADDR 0x00006070 +#define PPU_DST_BASE_ADDR_DST_BASE_ADDR__MASK 0xfffffff0 +#define PPU_DST_BASE_ADDR_DST_BASE_ADDR__SHIFT 4 +static inline uint32_t PPU_DST_BASE_ADDR_DST_BASE_ADDR(uint32_t val) +{ + return 
((val) << PPU_DST_BASE_ADDR_DST_BASE_ADDR__SHIFT) & PPU_DST_BASE_ADDR_DST_BASE_ADDR__MASK; +} +#define PPU_DST_BASE_ADDR_RESERVED_0__MASK 0x0000000f +#define PPU_DST_BASE_ADDR_RESERVED_0__SHIFT 0 +static inline uint32_t PPU_DST_BASE_ADDR_RESERVED_0(uint32_t val) +{ + return ((val) << PPU_DST_BASE_ADDR_RESERVED_0__SHIFT) & PPU_DST_BASE_ADDR_RESERVED_0__MASK; +} + +#define REG_PPU_DST_SURF_STRIDE 0x0000607c +#define PPU_DST_SURF_STRIDE_DST_SURF_STRIDE__MASK 0xfffffff0 +#define PPU_DST_SURF_STRIDE_DST_SURF_STRIDE__SHIFT 4 +static inline uint32_t PPU_DST_SURF_STRIDE_DST_SURF_STRIDE(uint32_t val) +{ + return ((val) << PPU_DST_SURF_STRIDE_DST_SURF_STRIDE__SHIFT) & PPU_DST_SURF_STRIDE_DST_SURF_STRIDE__MASK; +} +#define PPU_DST_SURF_STRIDE_RESERVED_0__MASK 0x0000000f +#define PPU_DST_SURF_STRIDE_RESERVED_0__SHIFT 0 +static inline uint32_t PPU_DST_SURF_STRIDE_RESERVED_0(uint32_t val) +{ + return ((val) << PPU_DST_SURF_STRIDE_RESERVED_0__SHIFT) & PPU_DST_SURF_STRIDE_RESERVED_0__MASK; +} + +#define REG_PPU_DATA_FORMAT 0x00006084 +#define PPU_DATA_FORMAT_INDEX_ADD__MASK 0xfffffff0 +#define PPU_DATA_FORMAT_INDEX_ADD__SHIFT 4 +static inline uint32_t PPU_DATA_FORMAT_INDEX_ADD(uint32_t val) +{ + return ((val) << PPU_DATA_FORMAT_INDEX_ADD__SHIFT) & PPU_DATA_FORMAT_INDEX_ADD__MASK; +} +#define PPU_DATA_FORMAT_DPU_FLYIN__MASK 0x00000008 +#define PPU_DATA_FORMAT_DPU_FLYIN__SHIFT 3 +static inline uint32_t PPU_DATA_FORMAT_DPU_FLYIN(uint32_t val) +{ + return ((val) << PPU_DATA_FORMAT_DPU_FLYIN__SHIFT) & PPU_DATA_FORMAT_DPU_FLYIN__MASK; +} +#define PPU_DATA_FORMAT_PROC_PRECISION__MASK 0x00000007 +#define PPU_DATA_FORMAT_PROC_PRECISION__SHIFT 0 +static inline uint32_t PPU_DATA_FORMAT_PROC_PRECISION(uint32_t val) +{ + return ((val) << PPU_DATA_FORMAT_PROC_PRECISION__SHIFT) & PPU_DATA_FORMAT_PROC_PRECISION__MASK; +} + +#define REG_PPU_MISC_CTRL 0x000060dc +#define PPU_MISC_CTRL_SURF_LEN__MASK 0xffff0000 +#define PPU_MISC_CTRL_SURF_LEN__SHIFT 16 +static inline uint32_t 
PPU_MISC_CTRL_SURF_LEN(uint32_t val) +{ + return ((val) << PPU_MISC_CTRL_SURF_LEN__SHIFT) & PPU_MISC_CTRL_SURF_LEN__MASK; +} +#define PPU_MISC_CTRL_RESERVED_0__MASK 0x0000fe00 +#define PPU_MISC_CTRL_RESERVED_0__SHIFT 9 +static inline uint32_t PPU_MISC_CTRL_RESERVED_0(uint32_t val) +{ + return ((val) << PPU_MISC_CTRL_RESERVED_0__SHIFT) & PPU_MISC_CTRL_RESERVED_0__MASK; +} +#define PPU_MISC_CTRL_MC_SURF_OUT__MASK 0x00000100 +#define PPU_MISC_CTRL_MC_SURF_OUT__SHIFT 8 +static inline uint32_t PPU_MISC_CTRL_MC_SURF_OUT(uint32_t val) +{ + return ((val) << PPU_MISC_CTRL_MC_SURF_OUT__SHIFT) & PPU_MISC_CTRL_MC_SURF_OUT__MASK; +} +#define PPU_MISC_CTRL_NONALIGN__MASK 0x00000080 +#define PPU_MISC_CTRL_NONALIGN__SHIFT 7 +static inline uint32_t PPU_MISC_CTRL_NONALIGN(uint32_t val) +{ + return ((val) << PPU_MISC_CTRL_NONALIGN__SHIFT) & PPU_MISC_CTRL_NONALIGN__MASK; +} +#define PPU_MISC_CTRL_RESERVED_1__MASK 0x00000070 +#define PPU_MISC_CTRL_RESERVED_1__SHIFT 4 +static inline uint32_t PPU_MISC_CTRL_RESERVED_1(uint32_t val) +{ + return ((val) << PPU_MISC_CTRL_RESERVED_1__SHIFT) & PPU_MISC_CTRL_RESERVED_1__MASK; +} +#define PPU_MISC_CTRL_BURST_LEN__MASK 0x0000000f +#define PPU_MISC_CTRL_BURST_LEN__SHIFT 0 +static inline uint32_t PPU_MISC_CTRL_BURST_LEN(uint32_t val) +{ + return ((val) << PPU_MISC_CTRL_BURST_LEN__SHIFT) & PPU_MISC_CTRL_BURST_LEN__MASK; +} + +#define REG_PPU_RDMA_RDMA_S_STATUS 0x00007000 +#define PPU_RDMA_RDMA_S_STATUS_RESERVED_0__MASK 0xfffc0000 +#define PPU_RDMA_RDMA_S_STATUS_RESERVED_0__SHIFT 18 +static inline uint32_t PPU_RDMA_RDMA_S_STATUS_RESERVED_0(uint32_t val) +{ + return ((val) << PPU_RDMA_RDMA_S_STATUS_RESERVED_0__SHIFT) & PPU_RDMA_RDMA_S_STATUS_RESERVED_0__MASK; +} +#define PPU_RDMA_RDMA_S_STATUS_STATUS_1__MASK 0x00030000 +#define PPU_RDMA_RDMA_S_STATUS_STATUS_1__SHIFT 16 +static inline uint32_t PPU_RDMA_RDMA_S_STATUS_STATUS_1(uint32_t val) +{ + return ((val) << PPU_RDMA_RDMA_S_STATUS_STATUS_1__SHIFT) & PPU_RDMA_RDMA_S_STATUS_STATUS_1__MASK; +} +#define 
PPU_RDMA_RDMA_S_STATUS_RESERVED_1__MASK 0x0000fffc +#define PPU_RDMA_RDMA_S_STATUS_RESERVED_1__SHIFT 2 +static inline uint32_t PPU_RDMA_RDMA_S_STATUS_RESERVED_1(uint32_t val) +{ + return ((val) << PPU_RDMA_RDMA_S_STATUS_RESERVED_1__SHIFT) & PPU_RDMA_RDMA_S_STATUS_RESERVED_1__MASK; +} +#define PPU_RDMA_RDMA_S_STATUS_STATUS_0__MASK 0x00000003 +#define PPU_RDMA_RDMA_S_STATUS_STATUS_0__SHIFT 0 +static inline uint32_t PPU_RDMA_RDMA_S_STATUS_STATUS_0(uint32_t val) +{ + return ((val) << PPU_RDMA_RDMA_S_STATUS_STATUS_0__SHIFT) & PPU_RDMA_RDMA_S_STATUS_STATUS_0__MASK; +} + +#define REG_PPU_RDMA_RDMA_S_POINTER 0x00007004 +#define PPU_RDMA_RDMA_S_POINTER_RESERVED_0__MASK 0xfffe0000 +#define PPU_RDMA_RDMA_S_POINTER_RESERVED_0__SHIFT 17 +static inline uint32_t PPU_RDMA_RDMA_S_POINTER_RESERVED_0(uint32_t val) +{ + return ((val) << PPU_RDMA_RDMA_S_POINTER_RESERVED_0__SHIFT) & PPU_RDMA_RDMA_S_POINTER_RESERVED_0__MASK; +} +#define PPU_RDMA_RDMA_S_POINTER_EXECUTER__MASK 0x00010000 +#define PPU_RDMA_RDMA_S_POINTER_EXECUTER__SHIFT 16 +static inline uint32_t PPU_RDMA_RDMA_S_POINTER_EXECUTER(uint32_t val) +{ + return ((val) << PPU_RDMA_RDMA_S_POINTER_EXECUTER__SHIFT) & PPU_RDMA_RDMA_S_POINTER_EXECUTER__MASK; +} +#define PPU_RDMA_RDMA_S_POINTER_RESERVED_1__MASK 0x0000ffc0 +#define PPU_RDMA_RDMA_S_POINTER_RESERVED_1__SHIFT 6 +static inline uint32_t PPU_RDMA_RDMA_S_POINTER_RESERVED_1(uint32_t val) +{ + return ((val) << PPU_RDMA_RDMA_S_POINTER_RESERVED_1__SHIFT) & PPU_RDMA_RDMA_S_POINTER_RESERVED_1__MASK; +} +#define PPU_RDMA_RDMA_S_POINTER_EXECUTER_PP_CLEAR__MASK 0x00000020 +#define PPU_RDMA_RDMA_S_POINTER_EXECUTER_PP_CLEAR__SHIFT 5 +static inline uint32_t PPU_RDMA_RDMA_S_POINTER_EXECUTER_PP_CLEAR(uint32_t val) +{ + return ((val) << PPU_RDMA_RDMA_S_POINTER_EXECUTER_PP_CLEAR__SHIFT) & PPU_RDMA_RDMA_S_POINTER_EXECUTER_PP_CLEAR__MASK; +} +#define PPU_RDMA_RDMA_S_POINTER_POINTER_PP_CLEAR__MASK 0x00000010 +#define PPU_RDMA_RDMA_S_POINTER_POINTER_PP_CLEAR__SHIFT 4 +static inline uint32_t 
PPU_RDMA_RDMA_S_POINTER_POINTER_PP_CLEAR(uint32_t val) +{ + return ((val) << PPU_RDMA_RDMA_S_POINTER_POINTER_PP_CLEAR__SHIFT) & PPU_RDMA_RDMA_S_POINTER_POINTER_PP_CLEAR__MASK; +} +#define PPU_RDMA_RDMA_S_POINTER_POINTER_PP_MODE__MASK 0x00000008 +#define PPU_RDMA_RDMA_S_POINTER_POINTER_PP_MODE__SHIFT 3 +static inline uint32_t PPU_RDMA_RDMA_S_POINTER_POINTER_PP_MODE(uint32_t val) +{ + return ((val) << PPU_RDMA_RDMA_S_POINTER_POINTER_PP_MODE__SHIFT) & PPU_RDMA_RDMA_S_POINTER_POINTER_PP_MODE__MASK; +} +#define PPU_RDMA_RDMA_S_POINTER_EXECUTER_PP_EN__MASK 0x00000004 +#define PPU_RDMA_RDMA_S_POINTER_EXECUTER_PP_EN__SHIFT 2 +static inline uint32_t PPU_RDMA_RDMA_S_POINTER_EXECUTER_PP_EN(uint32_t val) +{ + return ((val) << PPU_RDMA_RDMA_S_POINTER_EXECUTER_PP_EN__SHIFT) & PPU_RDMA_RDMA_S_POINTER_EXECUTER_PP_EN__MASK; +} +#define PPU_RDMA_RDMA_S_POINTER_POINTER_PP_EN__MASK 0x00000002 +#define PPU_RDMA_RDMA_S_POINTER_POINTER_PP_EN__SHIFT 1 +static inline uint32_t PPU_RDMA_RDMA_S_POINTER_POINTER_PP_EN(uint32_t val) +{ + return ((val) << PPU_RDMA_RDMA_S_POINTER_POINTER_PP_EN__SHIFT) & PPU_RDMA_RDMA_S_POINTER_POINTER_PP_EN__MASK; +} +#define PPU_RDMA_RDMA_S_POINTER_POINTER__MASK 0x00000001 +#define PPU_RDMA_RDMA_S_POINTER_POINTER__SHIFT 0 +static inline uint32_t PPU_RDMA_RDMA_S_POINTER_POINTER(uint32_t val) +{ + return ((val) << PPU_RDMA_RDMA_S_POINTER_POINTER__SHIFT) & PPU_RDMA_RDMA_S_POINTER_POINTER__MASK; +} + +#define REG_PPU_RDMA_RDMA_OPERATION_ENABLE 0x00007008 +#define PPU_RDMA_RDMA_OPERATION_ENABLE_RESERVED_0__MASK 0xfffffffe +#define PPU_RDMA_RDMA_OPERATION_ENABLE_RESERVED_0__SHIFT 1 +static inline uint32_t PPU_RDMA_RDMA_OPERATION_ENABLE_RESERVED_0(uint32_t val) +{ + return ((val) << PPU_RDMA_RDMA_OPERATION_ENABLE_RESERVED_0__SHIFT) & PPU_RDMA_RDMA_OPERATION_ENABLE_RESERVED_0__MASK; +} +#define PPU_RDMA_RDMA_OPERATION_ENABLE_OP_EN__MASK 0x00000001 +#define PPU_RDMA_RDMA_OPERATION_ENABLE_OP_EN__SHIFT 0 +static inline uint32_t PPU_RDMA_RDMA_OPERATION_ENABLE_OP_EN(uint32_t 
val) +{ + return ((val) << PPU_RDMA_RDMA_OPERATION_ENABLE_OP_EN__SHIFT) & PPU_RDMA_RDMA_OPERATION_ENABLE_OP_EN__MASK; +} + +#define REG_PPU_RDMA_RDMA_CUBE_IN_WIDTH 0x0000700c +#define PPU_RDMA_RDMA_CUBE_IN_WIDTH_RESERVED_0__MASK 0xffffe000 +#define PPU_RDMA_RDMA_CUBE_IN_WIDTH_RESERVED_0__SHIFT 13 +static inline uint32_t PPU_RDMA_RDMA_CUBE_IN_WIDTH_RESERVED_0(uint32_t val) +{ + return ((val) << PPU_RDMA_RDMA_CUBE_IN_WIDTH_RESERVED_0__SHIFT) & PPU_RDMA_RDMA_CUBE_IN_WIDTH_RESERVED_0__MASK; +} +#define PPU_RDMA_RDMA_CUBE_IN_WIDTH_CUBE_IN_WIDTH__MASK 0x00001fff +#define PPU_RDMA_RDMA_CUBE_IN_WIDTH_CUBE_IN_WIDTH__SHIFT 0 +static inline uint32_t PPU_RDMA_RDMA_CUBE_IN_WIDTH_CUBE_IN_WIDTH(uint32_t val) +{ + return ((val) << PPU_RDMA_RDMA_CUBE_IN_WIDTH_CUBE_IN_WIDTH__SHIFT) & PPU_RDMA_RDMA_CUBE_IN_WIDTH_CUBE_IN_WIDTH__MASK; +} + +#define REG_PPU_RDMA_RDMA_CUBE_IN_HEIGHT 0x00007010 +#define PPU_RDMA_RDMA_CUBE_IN_HEIGHT_RESERVED_0__MASK 0xffffe000 +#define PPU_RDMA_RDMA_CUBE_IN_HEIGHT_RESERVED_0__SHIFT 13 +static inline uint32_t PPU_RDMA_RDMA_CUBE_IN_HEIGHT_RESERVED_0(uint32_t val) +{ + return ((val) << PPU_RDMA_RDMA_CUBE_IN_HEIGHT_RESERVED_0__SHIFT) & PPU_RDMA_RDMA_CUBE_IN_HEIGHT_RESERVED_0__MASK; +} +#define PPU_RDMA_RDMA_CUBE_IN_HEIGHT_CUBE_IN_HEIGHT__MASK 0x00001fff +#define PPU_RDMA_RDMA_CUBE_IN_HEIGHT_CUBE_IN_HEIGHT__SHIFT 0 +static inline uint32_t PPU_RDMA_RDMA_CUBE_IN_HEIGHT_CUBE_IN_HEIGHT(uint32_t val) +{ + return ((val) << PPU_RDMA_RDMA_CUBE_IN_HEIGHT_CUBE_IN_HEIGHT__SHIFT) & PPU_RDMA_RDMA_CUBE_IN_HEIGHT_CUBE_IN_HEIGHT__MASK; +} + +#define REG_PPU_RDMA_RDMA_CUBE_IN_CHANNEL 0x00007014 +#define PPU_RDMA_RDMA_CUBE_IN_CHANNEL_RESERVED_0__MASK 0xffffe000 +#define PPU_RDMA_RDMA_CUBE_IN_CHANNEL_RESERVED_0__SHIFT 13 +static inline uint32_t PPU_RDMA_RDMA_CUBE_IN_CHANNEL_RESERVED_0(uint32_t val) +{ + return ((val) << PPU_RDMA_RDMA_CUBE_IN_CHANNEL_RESERVED_0__SHIFT) & PPU_RDMA_RDMA_CUBE_IN_CHANNEL_RESERVED_0__MASK; +} +#define PPU_RDMA_RDMA_CUBE_IN_CHANNEL_CUBE_IN_CHANNEL__MASK 
0x00001fff +#define PPU_RDMA_RDMA_CUBE_IN_CHANNEL_CUBE_IN_CHANNEL__SHIFT 0 +static inline uint32_t PPU_RDMA_RDMA_CUBE_IN_CHANNEL_CUBE_IN_CHANNEL(uint32_t val) +{ + return ((val) << PPU_RDMA_RDMA_CUBE_IN_CHANNEL_CUBE_IN_CHANNEL__SHIFT) & PPU_RDMA_RDMA_CUBE_IN_CHANNEL_CUBE_IN_CHANNEL__MASK; +} + +#define REG_PPU_RDMA_RDMA_SRC_BASE_ADDR 0x0000701c +#define PPU_RDMA_RDMA_SRC_BASE_ADDR_SRC_BASE_ADDR__MASK 0xffffffff +#define PPU_RDMA_RDMA_SRC_BASE_ADDR_SRC_BASE_ADDR__SHIFT 0 +static inline uint32_t PPU_RDMA_RDMA_SRC_BASE_ADDR_SRC_BASE_ADDR(uint32_t val) +{ + return ((val) << PPU_RDMA_RDMA_SRC_BASE_ADDR_SRC_BASE_ADDR__SHIFT) & PPU_RDMA_RDMA_SRC_BASE_ADDR_SRC_BASE_ADDR__MASK; +} + +#define REG_PPU_RDMA_RDMA_SRC_LINE_STRIDE 0x00007024 +#define PPU_RDMA_RDMA_SRC_LINE_STRIDE_SRC_LINE_STRIDE__MASK 0xfffffff0 +#define PPU_RDMA_RDMA_SRC_LINE_STRIDE_SRC_LINE_STRIDE__SHIFT 4 +static inline uint32_t PPU_RDMA_RDMA_SRC_LINE_STRIDE_SRC_LINE_STRIDE(uint32_t val) +{ + return ((val) << PPU_RDMA_RDMA_SRC_LINE_STRIDE_SRC_LINE_STRIDE__SHIFT) & PPU_RDMA_RDMA_SRC_LINE_STRIDE_SRC_LINE_STRIDE__MASK; +} +#define PPU_RDMA_RDMA_SRC_LINE_STRIDE_RESERVED_0__MASK 0x0000000f +#define PPU_RDMA_RDMA_SRC_LINE_STRIDE_RESERVED_0__SHIFT 0 +static inline uint32_t PPU_RDMA_RDMA_SRC_LINE_STRIDE_RESERVED_0(uint32_t val) +{ + return ((val) << PPU_RDMA_RDMA_SRC_LINE_STRIDE_RESERVED_0__SHIFT) & PPU_RDMA_RDMA_SRC_LINE_STRIDE_RESERVED_0__MASK; +} + +#define REG_PPU_RDMA_RDMA_SRC_SURF_STRIDE 0x00007028 +#define PPU_RDMA_RDMA_SRC_SURF_STRIDE_SRC_SURF_STRIDE__MASK 0xfffffff0 +#define PPU_RDMA_RDMA_SRC_SURF_STRIDE_SRC_SURF_STRIDE__SHIFT 4 +static inline uint32_t PPU_RDMA_RDMA_SRC_SURF_STRIDE_SRC_SURF_STRIDE(uint32_t val) +{ + return ((val) << PPU_RDMA_RDMA_SRC_SURF_STRIDE_SRC_SURF_STRIDE__SHIFT) & PPU_RDMA_RDMA_SRC_SURF_STRIDE_SRC_SURF_STRIDE__MASK; +} +#define PPU_RDMA_RDMA_SRC_SURF_STRIDE_RESERVED_0__MASK 0x0000000f +#define PPU_RDMA_RDMA_SRC_SURF_STRIDE_RESERVED_0__SHIFT 0 +static inline uint32_t 
PPU_RDMA_RDMA_SRC_SURF_STRIDE_RESERVED_0(uint32_t val) +{ + return ((val) << PPU_RDMA_RDMA_SRC_SURF_STRIDE_RESERVED_0__SHIFT) & PPU_RDMA_RDMA_SRC_SURF_STRIDE_RESERVED_0__MASK; +} + +#define REG_PPU_RDMA_RDMA_DATA_FORMAT 0x00007030 +#define PPU_RDMA_RDMA_DATA_FORMAT_RESERVED_0__MASK 0xfffffffc +#define PPU_RDMA_RDMA_DATA_FORMAT_RESERVED_0__SHIFT 2 +static inline uint32_t PPU_RDMA_RDMA_DATA_FORMAT_RESERVED_0(uint32_t val) +{ + return ((val) << PPU_RDMA_RDMA_DATA_FORMAT_RESERVED_0__SHIFT) & PPU_RDMA_RDMA_DATA_FORMAT_RESERVED_0__MASK; +} +#define PPU_RDMA_RDMA_DATA_FORMAT_IN_PRECISION__MASK 0x00000003 +#define PPU_RDMA_RDMA_DATA_FORMAT_IN_PRECISION__SHIFT 0 +static inline uint32_t PPU_RDMA_RDMA_DATA_FORMAT_IN_PRECISION(uint32_t val) +{ + return ((val) << PPU_RDMA_RDMA_DATA_FORMAT_IN_PRECISION__SHIFT) & PPU_RDMA_RDMA_DATA_FORMAT_IN_PRECISION__MASK; +} + +#define REG_DDMA_CFG_OUTSTANDING 0x00008000 +#define DDMA_CFG_OUTSTANDING_RESERVED_0__MASK 0xffff0000 +#define DDMA_CFG_OUTSTANDING_RESERVED_0__SHIFT 16 +static inline uint32_t DDMA_CFG_OUTSTANDING_RESERVED_0(uint32_t val) +{ + return ((val) << DDMA_CFG_OUTSTANDING_RESERVED_0__SHIFT) & DDMA_CFG_OUTSTANDING_RESERVED_0__MASK; +} +#define DDMA_CFG_OUTSTANDING_WR_OS_CNT__MASK 0x0000ff00 +#define DDMA_CFG_OUTSTANDING_WR_OS_CNT__SHIFT 8 +static inline uint32_t DDMA_CFG_OUTSTANDING_WR_OS_CNT(uint32_t val) +{ + return ((val) << DDMA_CFG_OUTSTANDING_WR_OS_CNT__SHIFT) & DDMA_CFG_OUTSTANDING_WR_OS_CNT__MASK; +} +#define DDMA_CFG_OUTSTANDING_RD_OS_CNT__MASK 0x000000ff +#define DDMA_CFG_OUTSTANDING_RD_OS_CNT__SHIFT 0 +static inline uint32_t DDMA_CFG_OUTSTANDING_RD_OS_CNT(uint32_t val) +{ + return ((val) << DDMA_CFG_OUTSTANDING_RD_OS_CNT__SHIFT) & DDMA_CFG_OUTSTANDING_RD_OS_CNT__MASK; +} + +#define REG_DDMA_RD_WEIGHT_0 0x00008004 +#define DDMA_RD_WEIGHT_0_RD_WEIGHT_PDP__MASK 0xff000000 +#define DDMA_RD_WEIGHT_0_RD_WEIGHT_PDP__SHIFT 24 +static inline uint32_t DDMA_RD_WEIGHT_0_RD_WEIGHT_PDP(uint32_t val) +{ + return ((val) << 
DDMA_RD_WEIGHT_0_RD_WEIGHT_PDP__SHIFT) & DDMA_RD_WEIGHT_0_RD_WEIGHT_PDP__MASK; +} +#define DDMA_RD_WEIGHT_0_RD_WEIGHT_DPU__MASK 0x00ff0000 +#define DDMA_RD_WEIGHT_0_RD_WEIGHT_DPU__SHIFT 16 +static inline uint32_t DDMA_RD_WEIGHT_0_RD_WEIGHT_DPU(uint32_t val) +{ + return ((val) << DDMA_RD_WEIGHT_0_RD_WEIGHT_DPU__SHIFT) & DDMA_RD_WEIGHT_0_RD_WEIGHT_DPU__MASK; +} +#define DDMA_RD_WEIGHT_0_RD_WEIGHT_KERNEL__MASK 0x0000ff00 +#define DDMA_RD_WEIGHT_0_RD_WEIGHT_KERNEL__SHIFT 8 +static inline uint32_t DDMA_RD_WEIGHT_0_RD_WEIGHT_KERNEL(uint32_t val) +{ + return ((val) << DDMA_RD_WEIGHT_0_RD_WEIGHT_KERNEL__SHIFT) & DDMA_RD_WEIGHT_0_RD_WEIGHT_KERNEL__MASK; +} +#define DDMA_RD_WEIGHT_0_RD_WEIGHT_FEATURE__MASK 0x000000ff +#define DDMA_RD_WEIGHT_0_RD_WEIGHT_FEATURE__SHIFT 0 +static inline uint32_t DDMA_RD_WEIGHT_0_RD_WEIGHT_FEATURE(uint32_t val) +{ + return ((val) << DDMA_RD_WEIGHT_0_RD_WEIGHT_FEATURE__SHIFT) & DDMA_RD_WEIGHT_0_RD_WEIGHT_FEATURE__MASK; +} + +#define REG_DDMA_WR_WEIGHT_0 0x00008008 +#define DDMA_WR_WEIGHT_0_RESERVED_0__MASK 0xffff0000 +#define DDMA_WR_WEIGHT_0_RESERVED_0__SHIFT 16 +static inline uint32_t DDMA_WR_WEIGHT_0_RESERVED_0(uint32_t val) +{ + return ((val) << DDMA_WR_WEIGHT_0_RESERVED_0__SHIFT) & DDMA_WR_WEIGHT_0_RESERVED_0__MASK; +} +#define DDMA_WR_WEIGHT_0_WR_WEIGHT_PDP__MASK 0x0000ff00 +#define DDMA_WR_WEIGHT_0_WR_WEIGHT_PDP__SHIFT 8 +static inline uint32_t DDMA_WR_WEIGHT_0_WR_WEIGHT_PDP(uint32_t val) +{ + return ((val) << DDMA_WR_WEIGHT_0_WR_WEIGHT_PDP__SHIFT) & DDMA_WR_WEIGHT_0_WR_WEIGHT_PDP__MASK; +} +#define DDMA_WR_WEIGHT_0_WR_WEIGHT_DPU__MASK 0x000000ff +#define DDMA_WR_WEIGHT_0_WR_WEIGHT_DPU__SHIFT 0 +static inline uint32_t DDMA_WR_WEIGHT_0_WR_WEIGHT_DPU(uint32_t val) +{ + return ((val) << DDMA_WR_WEIGHT_0_WR_WEIGHT_DPU__SHIFT) & DDMA_WR_WEIGHT_0_WR_WEIGHT_DPU__MASK; +} + +#define REG_DDMA_CFG_ID_ERROR 0x0000800c +#define DDMA_CFG_ID_ERROR_RESERVED_0__MASK 0xfffffc00 +#define DDMA_CFG_ID_ERROR_RESERVED_0__SHIFT 10 +static inline uint32_t 
DDMA_CFG_ID_ERROR_RESERVED_0(uint32_t val) +{ + return ((val) << DDMA_CFG_ID_ERROR_RESERVED_0__SHIFT) & DDMA_CFG_ID_ERROR_RESERVED_0__MASK; +} +#define DDMA_CFG_ID_ERROR_WR_RESP_ID__MASK 0x000003c0 +#define DDMA_CFG_ID_ERROR_WR_RESP_ID__SHIFT 6 +static inline uint32_t DDMA_CFG_ID_ERROR_WR_RESP_ID(uint32_t val) +{ + return ((val) << DDMA_CFG_ID_ERROR_WR_RESP_ID__SHIFT) & DDMA_CFG_ID_ERROR_WR_RESP_ID__MASK; +} +#define DDMA_CFG_ID_ERROR_RESERVED_1__MASK 0x00000020 +#define DDMA_CFG_ID_ERROR_RESERVED_1__SHIFT 5 +static inline uint32_t DDMA_CFG_ID_ERROR_RESERVED_1(uint32_t val) +{ + return ((val) << DDMA_CFG_ID_ERROR_RESERVED_1__SHIFT) & DDMA_CFG_ID_ERROR_RESERVED_1__MASK; +} +#define DDMA_CFG_ID_ERROR_RD_RESP_ID__MASK 0x0000001f +#define DDMA_CFG_ID_ERROR_RD_RESP_ID__SHIFT 0 +static inline uint32_t DDMA_CFG_ID_ERROR_RD_RESP_ID(uint32_t val) +{ + return ((val) << DDMA_CFG_ID_ERROR_RD_RESP_ID__SHIFT) & DDMA_CFG_ID_ERROR_RD_RESP_ID__MASK; +} + +#define REG_DDMA_RD_WEIGHT_1 0x00008010 +#define DDMA_RD_WEIGHT_1_RESERVED_0__MASK 0xffffff00 +#define DDMA_RD_WEIGHT_1_RESERVED_0__SHIFT 8 +static inline uint32_t DDMA_RD_WEIGHT_1_RESERVED_0(uint32_t val) +{ + return ((val) << DDMA_RD_WEIGHT_1_RESERVED_0__SHIFT) & DDMA_RD_WEIGHT_1_RESERVED_0__MASK; +} +#define DDMA_RD_WEIGHT_1_RD_WEIGHT_PC__MASK 0x000000ff +#define DDMA_RD_WEIGHT_1_RD_WEIGHT_PC__SHIFT 0 +static inline uint32_t DDMA_RD_WEIGHT_1_RD_WEIGHT_PC(uint32_t val) +{ + return ((val) << DDMA_RD_WEIGHT_1_RD_WEIGHT_PC__SHIFT) & DDMA_RD_WEIGHT_1_RD_WEIGHT_PC__MASK; +} + +#define REG_DDMA_CFG_DMA_FIFO_CLR 0x00008014 +#define DDMA_CFG_DMA_FIFO_CLR_RESERVED_0__MASK 0xfffffffe +#define DDMA_CFG_DMA_FIFO_CLR_RESERVED_0__SHIFT 1 +static inline uint32_t DDMA_CFG_DMA_FIFO_CLR_RESERVED_0(uint32_t val) +{ + return ((val) << DDMA_CFG_DMA_FIFO_CLR_RESERVED_0__SHIFT) & DDMA_CFG_DMA_FIFO_CLR_RESERVED_0__MASK; +} +#define DDMA_CFG_DMA_FIFO_CLR_DMA_FIFO_CLR__MASK 0x00000001 +#define DDMA_CFG_DMA_FIFO_CLR_DMA_FIFO_CLR__SHIFT 0 +static inline 
uint32_t DDMA_CFG_DMA_FIFO_CLR_DMA_FIFO_CLR(uint32_t val) +{ + return ((val) << DDMA_CFG_DMA_FIFO_CLR_DMA_FIFO_CLR__SHIFT) & DDMA_CFG_DMA_FIFO_CLR_DMA_FIFO_CLR__MASK; +} + +#define REG_DDMA_CFG_DMA_ARB 0x00008018 +#define DDMA_CFG_DMA_ARB_RESERVED_0__MASK 0xfffffc00 +#define DDMA_CFG_DMA_ARB_RESERVED_0__SHIFT 10 +static inline uint32_t DDMA_CFG_DMA_ARB_RESERVED_0(uint32_t val) +{ + return ((val) << DDMA_CFG_DMA_ARB_RESERVED_0__SHIFT) & DDMA_CFG_DMA_ARB_RESERVED_0__MASK; +} +#define DDMA_CFG_DMA_ARB_WR_ARBIT_MODEL__MASK 0x00000200 +#define DDMA_CFG_DMA_ARB_WR_ARBIT_MODEL__SHIFT 9 +static inline uint32_t DDMA_CFG_DMA_ARB_WR_ARBIT_MODEL(uint32_t val) +{ + return ((val) << DDMA_CFG_DMA_ARB_WR_ARBIT_MODEL__SHIFT) & DDMA_CFG_DMA_ARB_WR_ARBIT_MODEL__MASK; +} +#define DDMA_CFG_DMA_ARB_RD_ARBIT_MODEL__MASK 0x00000100 +#define DDMA_CFG_DMA_ARB_RD_ARBIT_MODEL__SHIFT 8 +static inline uint32_t DDMA_CFG_DMA_ARB_RD_ARBIT_MODEL(uint32_t val) +{ + return ((val) << DDMA_CFG_DMA_ARB_RD_ARBIT_MODEL__SHIFT) & DDMA_CFG_DMA_ARB_RD_ARBIT_MODEL__MASK; +} +#define DDMA_CFG_DMA_ARB_RESERVED_1__MASK 0x00000080 +#define DDMA_CFG_DMA_ARB_RESERVED_1__SHIFT 7 +static inline uint32_t DDMA_CFG_DMA_ARB_RESERVED_1(uint32_t val) +{ + return ((val) << DDMA_CFG_DMA_ARB_RESERVED_1__SHIFT) & DDMA_CFG_DMA_ARB_RESERVED_1__MASK; +} +#define DDMA_CFG_DMA_ARB_WR_FIX_ARB__MASK 0x00000070 +#define DDMA_CFG_DMA_ARB_WR_FIX_ARB__SHIFT 4 +static inline uint32_t DDMA_CFG_DMA_ARB_WR_FIX_ARB(uint32_t val) +{ + return ((val) << DDMA_CFG_DMA_ARB_WR_FIX_ARB__SHIFT) & DDMA_CFG_DMA_ARB_WR_FIX_ARB__MASK; +} +#define DDMA_CFG_DMA_ARB_RESERVED_2__MASK 0x00000008 +#define DDMA_CFG_DMA_ARB_RESERVED_2__SHIFT 3 +static inline uint32_t DDMA_CFG_DMA_ARB_RESERVED_2(uint32_t val) +{ + return ((val) << DDMA_CFG_DMA_ARB_RESERVED_2__SHIFT) & DDMA_CFG_DMA_ARB_RESERVED_2__MASK; +} +#define DDMA_CFG_DMA_ARB_RD_FIX_ARB__MASK 0x00000007 +#define DDMA_CFG_DMA_ARB_RD_FIX_ARB__SHIFT 0 +static inline uint32_t DDMA_CFG_DMA_ARB_RD_FIX_ARB(uint32_t 
val) +{ + return ((val) << DDMA_CFG_DMA_ARB_RD_FIX_ARB__SHIFT) & DDMA_CFG_DMA_ARB_RD_FIX_ARB__MASK; +} + +#define REG_DDMA_CFG_DMA_RD_QOS 0x00008020 +#define DDMA_CFG_DMA_RD_QOS_RESERVED_0__MASK 0xfffffc00 +#define DDMA_CFG_DMA_RD_QOS_RESERVED_0__SHIFT 10 +static inline uint32_t DDMA_CFG_DMA_RD_QOS_RESERVED_0(uint32_t val) +{ + return ((val) << DDMA_CFG_DMA_RD_QOS_RESERVED_0__SHIFT) & DDMA_CFG_DMA_RD_QOS_RESERVED_0__MASK; +} +#define DDMA_CFG_DMA_RD_QOS_RD_PC_QOS__MASK 0x00000300 +#define DDMA_CFG_DMA_RD_QOS_RD_PC_QOS__SHIFT 8 +static inline uint32_t DDMA_CFG_DMA_RD_QOS_RD_PC_QOS(uint32_t val) +{ + return ((val) << DDMA_CFG_DMA_RD_QOS_RD_PC_QOS__SHIFT) & DDMA_CFG_DMA_RD_QOS_RD_PC_QOS__MASK; +} +#define DDMA_CFG_DMA_RD_QOS_RD_PPU_QOS__MASK 0x000000c0 +#define DDMA_CFG_DMA_RD_QOS_RD_PPU_QOS__SHIFT 6 +static inline uint32_t DDMA_CFG_DMA_RD_QOS_RD_PPU_QOS(uint32_t val) +{ + return ((val) << DDMA_CFG_DMA_RD_QOS_RD_PPU_QOS__SHIFT) & DDMA_CFG_DMA_RD_QOS_RD_PPU_QOS__MASK; +} +#define DDMA_CFG_DMA_RD_QOS_RD_DPU_QOS__MASK 0x00000030 +#define DDMA_CFG_DMA_RD_QOS_RD_DPU_QOS__SHIFT 4 +static inline uint32_t DDMA_CFG_DMA_RD_QOS_RD_DPU_QOS(uint32_t val) +{ + return ((val) << DDMA_CFG_DMA_RD_QOS_RD_DPU_QOS__SHIFT) & DDMA_CFG_DMA_RD_QOS_RD_DPU_QOS__MASK; +} +#define DDMA_CFG_DMA_RD_QOS_RD_KERNEL_QOS__MASK 0x0000000c +#define DDMA_CFG_DMA_RD_QOS_RD_KERNEL_QOS__SHIFT 2 +static inline uint32_t DDMA_CFG_DMA_RD_QOS_RD_KERNEL_QOS(uint32_t val) +{ + return ((val) << DDMA_CFG_DMA_RD_QOS_RD_KERNEL_QOS__SHIFT) & DDMA_CFG_DMA_RD_QOS_RD_KERNEL_QOS__MASK; +} +#define DDMA_CFG_DMA_RD_QOS_RD_FEATURE_QOS__MASK 0x00000003 +#define DDMA_CFG_DMA_RD_QOS_RD_FEATURE_QOS__SHIFT 0 +static inline uint32_t DDMA_CFG_DMA_RD_QOS_RD_FEATURE_QOS(uint32_t val) +{ + return ((val) << DDMA_CFG_DMA_RD_QOS_RD_FEATURE_QOS__SHIFT) & DDMA_CFG_DMA_RD_QOS_RD_FEATURE_QOS__MASK; +} + +#define REG_DDMA_CFG_DMA_RD_CFG 0x00008024 +#define DDMA_CFG_DMA_RD_CFG_RESERVED_0__MASK 0xffffe000 +#define 
DDMA_CFG_DMA_RD_CFG_RESERVED_0__SHIFT 13 +static inline uint32_t DDMA_CFG_DMA_RD_CFG_RESERVED_0(uint32_t val) +{ + return ((val) << DDMA_CFG_DMA_RD_CFG_RESERVED_0__SHIFT) & DDMA_CFG_DMA_RD_CFG_RESERVED_0__MASK; +} +#define DDMA_CFG_DMA_RD_CFG_RD_ARLOCK__MASK 0x00001000 +#define DDMA_CFG_DMA_RD_CFG_RD_ARLOCK__SHIFT 12 +static inline uint32_t DDMA_CFG_DMA_RD_CFG_RD_ARLOCK(uint32_t val) +{ + return ((val) << DDMA_CFG_DMA_RD_CFG_RD_ARLOCK__SHIFT) & DDMA_CFG_DMA_RD_CFG_RD_ARLOCK__MASK; +} +#define DDMA_CFG_DMA_RD_CFG_RD_ARCACHE__MASK 0x00000f00 +#define DDMA_CFG_DMA_RD_CFG_RD_ARCACHE__SHIFT 8 +static inline uint32_t DDMA_CFG_DMA_RD_CFG_RD_ARCACHE(uint32_t val) +{ + return ((val) << DDMA_CFG_DMA_RD_CFG_RD_ARCACHE__SHIFT) & DDMA_CFG_DMA_RD_CFG_RD_ARCACHE__MASK; +} +#define DDMA_CFG_DMA_RD_CFG_RD_ARPROT__MASK 0x000000e0 +#define DDMA_CFG_DMA_RD_CFG_RD_ARPROT__SHIFT 5 +static inline uint32_t DDMA_CFG_DMA_RD_CFG_RD_ARPROT(uint32_t val) +{ + return ((val) << DDMA_CFG_DMA_RD_CFG_RD_ARPROT__SHIFT) & DDMA_CFG_DMA_RD_CFG_RD_ARPROT__MASK; +} +#define DDMA_CFG_DMA_RD_CFG_RD_ARBURST__MASK 0x00000018 +#define DDMA_CFG_DMA_RD_CFG_RD_ARBURST__SHIFT 3 +static inline uint32_t DDMA_CFG_DMA_RD_CFG_RD_ARBURST(uint32_t val) +{ + return ((val) << DDMA_CFG_DMA_RD_CFG_RD_ARBURST__SHIFT) & DDMA_CFG_DMA_RD_CFG_RD_ARBURST__MASK; +} +#define DDMA_CFG_DMA_RD_CFG_RD_ARSIZE__MASK 0x00000007 +#define DDMA_CFG_DMA_RD_CFG_RD_ARSIZE__SHIFT 0 +static inline uint32_t DDMA_CFG_DMA_RD_CFG_RD_ARSIZE(uint32_t val) +{ + return ((val) << DDMA_CFG_DMA_RD_CFG_RD_ARSIZE__SHIFT) & DDMA_CFG_DMA_RD_CFG_RD_ARSIZE__MASK; +} + +#define REG_DDMA_CFG_DMA_WR_CFG 0x00008028 +#define DDMA_CFG_DMA_WR_CFG_RESERVED_0__MASK 0xffffe000 +#define DDMA_CFG_DMA_WR_CFG_RESERVED_0__SHIFT 13 +static inline uint32_t DDMA_CFG_DMA_WR_CFG_RESERVED_0(uint32_t val) +{ + return ((val) << DDMA_CFG_DMA_WR_CFG_RESERVED_0__SHIFT) & DDMA_CFG_DMA_WR_CFG_RESERVED_0__MASK; +} +#define DDMA_CFG_DMA_WR_CFG_WR_AWLOCK__MASK 0x00001000 +#define 
DDMA_CFG_DMA_WR_CFG_WR_AWLOCK__SHIFT 12 +static inline uint32_t DDMA_CFG_DMA_WR_CFG_WR_AWLOCK(uint32_t val) +{ + return ((val) << DDMA_CFG_DMA_WR_CFG_WR_AWLOCK__SHIFT) & DDMA_CFG_DMA_WR_CFG_WR_AWLOCK__MASK; +} +#define DDMA_CFG_DMA_WR_CFG_WR_AWCACHE__MASK 0x00000f00 +#define DDMA_CFG_DMA_WR_CFG_WR_AWCACHE__SHIFT 8 +static inline uint32_t DDMA_CFG_DMA_WR_CFG_WR_AWCACHE(uint32_t val) +{ + return ((val) << DDMA_CFG_DMA_WR_CFG_WR_AWCACHE__SHIFT) & DDMA_CFG_DMA_WR_CFG_WR_AWCACHE__MASK; +} +#define DDMA_CFG_DMA_WR_CFG_WR_AWPROT__MASK 0x000000e0 +#define DDMA_CFG_DMA_WR_CFG_WR_AWPROT__SHIFT 5 +static inline uint32_t DDMA_CFG_DMA_WR_CFG_WR_AWPROT(uint32_t val) +{ + return ((val) << DDMA_CFG_DMA_WR_CFG_WR_AWPROT__SHIFT) & DDMA_CFG_DMA_WR_CFG_WR_AWPROT__MASK; +} +#define DDMA_CFG_DMA_WR_CFG_WR_AWBURST__MASK 0x00000018 +#define DDMA_CFG_DMA_WR_CFG_WR_AWBURST__SHIFT 3 +static inline uint32_t DDMA_CFG_DMA_WR_CFG_WR_AWBURST(uint32_t val) +{ + return ((val) << DDMA_CFG_DMA_WR_CFG_WR_AWBURST__SHIFT) & DDMA_CFG_DMA_WR_CFG_WR_AWBURST__MASK; +} +#define DDMA_CFG_DMA_WR_CFG_WR_AWSIZE__MASK 0x00000007 +#define DDMA_CFG_DMA_WR_CFG_WR_AWSIZE__SHIFT 0 +static inline uint32_t DDMA_CFG_DMA_WR_CFG_WR_AWSIZE(uint32_t val) +{ + return ((val) << DDMA_CFG_DMA_WR_CFG_WR_AWSIZE__SHIFT) & DDMA_CFG_DMA_WR_CFG_WR_AWSIZE__MASK; +} + +#define REG_DDMA_CFG_DMA_WSTRB 0x0000802c +#define DDMA_CFG_DMA_WSTRB_WR_WSTRB__MASK 0xffffffff +#define DDMA_CFG_DMA_WSTRB_WR_WSTRB__SHIFT 0 +static inline uint32_t DDMA_CFG_DMA_WSTRB_WR_WSTRB(uint32_t val) +{ + return ((val) << DDMA_CFG_DMA_WSTRB_WR_WSTRB__SHIFT) & DDMA_CFG_DMA_WSTRB_WR_WSTRB__MASK; +} + +#define REG_DDMA_CFG_STATUS 0x00008030 +#define DDMA_CFG_STATUS_RESERVED_0__MASK 0xfffffe00 +#define DDMA_CFG_STATUS_RESERVED_0__SHIFT 9 +static inline uint32_t DDMA_CFG_STATUS_RESERVED_0(uint32_t val) +{ + return ((val) << DDMA_CFG_STATUS_RESERVED_0__SHIFT) & DDMA_CFG_STATUS_RESERVED_0__MASK; +} +#define DDMA_CFG_STATUS_IDEL__MASK 0x00000100 +#define 
DDMA_CFG_STATUS_IDEL__SHIFT 8 +static inline uint32_t DDMA_CFG_STATUS_IDEL(uint32_t val) +{ + return ((val) << DDMA_CFG_STATUS_IDEL__SHIFT) & DDMA_CFG_STATUS_IDEL__MASK; +} +#define DDMA_CFG_STATUS_RESERVED_1__MASK 0x000000ff +#define DDMA_CFG_STATUS_RESERVED_1__SHIFT 0 +static inline uint32_t DDMA_CFG_STATUS_RESERVED_1(uint32_t val) +{ + return ((val) << DDMA_CFG_STATUS_RESERVED_1__SHIFT) & DDMA_CFG_STATUS_RESERVED_1__MASK; +} + +#define REG_SDMA_CFG_OUTSTANDING 0x00009000 +#define SDMA_CFG_OUTSTANDING_RESERVED_0__MASK 0xffff0000 +#define SDMA_CFG_OUTSTANDING_RESERVED_0__SHIFT 16 +static inline uint32_t SDMA_CFG_OUTSTANDING_RESERVED_0(uint32_t val) +{ + return ((val) << SDMA_CFG_OUTSTANDING_RESERVED_0__SHIFT) & SDMA_CFG_OUTSTANDING_RESERVED_0__MASK; +} +#define SDMA_CFG_OUTSTANDING_WR_OS_CNT__MASK 0x0000ff00 +#define SDMA_CFG_OUTSTANDING_WR_OS_CNT__SHIFT 8 +static inline uint32_t SDMA_CFG_OUTSTANDING_WR_OS_CNT(uint32_t val) +{ + return ((val) << SDMA_CFG_OUTSTANDING_WR_OS_CNT__SHIFT) & SDMA_CFG_OUTSTANDING_WR_OS_CNT__MASK; +} +#define SDMA_CFG_OUTSTANDING_RD_OS_CNT__MASK 0x000000ff +#define SDMA_CFG_OUTSTANDING_RD_OS_CNT__SHIFT 0 +static inline uint32_t SDMA_CFG_OUTSTANDING_RD_OS_CNT(uint32_t val) +{ + return ((val) << SDMA_CFG_OUTSTANDING_RD_OS_CNT__SHIFT) & SDMA_CFG_OUTSTANDING_RD_OS_CNT__MASK; +} + +#define REG_SDMA_RD_WEIGHT_0 0x00009004 +#define SDMA_RD_WEIGHT_0_RD_WEIGHT_PDP__MASK 0xff000000 +#define SDMA_RD_WEIGHT_0_RD_WEIGHT_PDP__SHIFT 24 +static inline uint32_t SDMA_RD_WEIGHT_0_RD_WEIGHT_PDP(uint32_t val) +{ + return ((val) << SDMA_RD_WEIGHT_0_RD_WEIGHT_PDP__SHIFT) & SDMA_RD_WEIGHT_0_RD_WEIGHT_PDP__MASK; +} +#define SDMA_RD_WEIGHT_0_RD_WEIGHT_DPU__MASK 0x00ff0000 +#define SDMA_RD_WEIGHT_0_RD_WEIGHT_DPU__SHIFT 16 +static inline uint32_t SDMA_RD_WEIGHT_0_RD_WEIGHT_DPU(uint32_t val) +{ + return ((val) << SDMA_RD_WEIGHT_0_RD_WEIGHT_DPU__SHIFT) & SDMA_RD_WEIGHT_0_RD_WEIGHT_DPU__MASK; +} +#define SDMA_RD_WEIGHT_0_RD_WEIGHT_KERNEL__MASK 0x0000ff00 +#define 
SDMA_RD_WEIGHT_0_RD_WEIGHT_KERNEL__SHIFT 8 +static inline uint32_t SDMA_RD_WEIGHT_0_RD_WEIGHT_KERNEL(uint32_t val) +{ + return ((val) << SDMA_RD_WEIGHT_0_RD_WEIGHT_KERNEL__SHIFT) & SDMA_RD_WEIGHT_0_RD_WEIGHT_KERNEL__MASK; +} +#define SDMA_RD_WEIGHT_0_RD_WEIGHT_FEATURE__MASK 0x000000ff +#define SDMA_RD_WEIGHT_0_RD_WEIGHT_FEATURE__SHIFT 0 +static inline uint32_t SDMA_RD_WEIGHT_0_RD_WEIGHT_FEATURE(uint32_t val) +{ + return ((val) << SDMA_RD_WEIGHT_0_RD_WEIGHT_FEATURE__SHIFT) & SDMA_RD_WEIGHT_0_RD_WEIGHT_FEATURE__MASK; +} + +#define REG_SDMA_WR_WEIGHT_0 0x00009008 +#define SDMA_WR_WEIGHT_0_RESERVED_0__MASK 0xffff0000 +#define SDMA_WR_WEIGHT_0_RESERVED_0__SHIFT 16 +static inline uint32_t SDMA_WR_WEIGHT_0_RESERVED_0(uint32_t val) +{ + return ((val) << SDMA_WR_WEIGHT_0_RESERVED_0__SHIFT) & SDMA_WR_WEIGHT_0_RESERVED_0__MASK; +} +#define SDMA_WR_WEIGHT_0_WR_WEIGHT_PDP__MASK 0x0000ff00 +#define SDMA_WR_WEIGHT_0_WR_WEIGHT_PDP__SHIFT 8 +static inline uint32_t SDMA_WR_WEIGHT_0_WR_WEIGHT_PDP(uint32_t val) +{ + return ((val) << SDMA_WR_WEIGHT_0_WR_WEIGHT_PDP__SHIFT) & SDMA_WR_WEIGHT_0_WR_WEIGHT_PDP__MASK; +} +#define SDMA_WR_WEIGHT_0_WR_WEIGHT_DPU__MASK 0x000000ff +#define SDMA_WR_WEIGHT_0_WR_WEIGHT_DPU__SHIFT 0 +static inline uint32_t SDMA_WR_WEIGHT_0_WR_WEIGHT_DPU(uint32_t val) +{ + return ((val) << SDMA_WR_WEIGHT_0_WR_WEIGHT_DPU__SHIFT) & SDMA_WR_WEIGHT_0_WR_WEIGHT_DPU__MASK; +} + +#define REG_SDMA_CFG_ID_ERROR 0x0000900c +#define SDMA_CFG_ID_ERROR_RESERVED_0__MASK 0xfffffc00 +#define SDMA_CFG_ID_ERROR_RESERVED_0__SHIFT 10 +static inline uint32_t SDMA_CFG_ID_ERROR_RESERVED_0(uint32_t val) +{ + return ((val) << SDMA_CFG_ID_ERROR_RESERVED_0__SHIFT) & SDMA_CFG_ID_ERROR_RESERVED_0__MASK; +} +#define SDMA_CFG_ID_ERROR_WR_RESP_ID__MASK 0x000003c0 +#define SDMA_CFG_ID_ERROR_WR_RESP_ID__SHIFT 6 +static inline uint32_t SDMA_CFG_ID_ERROR_WR_RESP_ID(uint32_t val) +{ + return ((val) << SDMA_CFG_ID_ERROR_WR_RESP_ID__SHIFT) & SDMA_CFG_ID_ERROR_WR_RESP_ID__MASK; +} +#define 
SDMA_CFG_ID_ERROR_RESERVED_1__MASK 0x00000020 +#define SDMA_CFG_ID_ERROR_RESERVED_1__SHIFT 5 +static inline uint32_t SDMA_CFG_ID_ERROR_RESERVED_1(uint32_t val) +{ + return ((val) << SDMA_CFG_ID_ERROR_RESERVED_1__SHIFT) & SDMA_CFG_ID_ERROR_RESERVED_1__MASK; +} +#define SDMA_CFG_ID_ERROR_RD_RESP_ID__MASK 0x0000001f +#define SDMA_CFG_ID_ERROR_RD_RESP_ID__SHIFT 0 +static inline uint32_t SDMA_CFG_ID_ERROR_RD_RESP_ID(uint32_t val) +{ + return ((val) << SDMA_CFG_ID_ERROR_RD_RESP_ID__SHIFT) & SDMA_CFG_ID_ERROR_RD_RESP_ID__MASK; +} + +#define REG_SDMA_RD_WEIGHT_1 0x00009010 +#define SDMA_RD_WEIGHT_1_RESERVED_0__MASK 0xffffff00 +#define SDMA_RD_WEIGHT_1_RESERVED_0__SHIFT 8 +static inline uint32_t SDMA_RD_WEIGHT_1_RESERVED_0(uint32_t val) +{ + return ((val) << SDMA_RD_WEIGHT_1_RESERVED_0__SHIFT) & SDMA_RD_WEIGHT_1_RESERVED_0__MASK; +} +#define SDMA_RD_WEIGHT_1_RD_WEIGHT_PC__MASK 0x000000ff +#define SDMA_RD_WEIGHT_1_RD_WEIGHT_PC__SHIFT 0 +static inline uint32_t SDMA_RD_WEIGHT_1_RD_WEIGHT_PC(uint32_t val) +{ + return ((val) << SDMA_RD_WEIGHT_1_RD_WEIGHT_PC__SHIFT) & SDMA_RD_WEIGHT_1_RD_WEIGHT_PC__MASK; +} + +#define REG_SDMA_CFG_DMA_FIFO_CLR 0x00009014 +#define SDMA_CFG_DMA_FIFO_CLR_RESERVED_0__MASK 0xfffffffe +#define SDMA_CFG_DMA_FIFO_CLR_RESERVED_0__SHIFT 1 +static inline uint32_t SDMA_CFG_DMA_FIFO_CLR_RESERVED_0(uint32_t val) +{ + return ((val) << SDMA_CFG_DMA_FIFO_CLR_RESERVED_0__SHIFT) & SDMA_CFG_DMA_FIFO_CLR_RESERVED_0__MASK; +} +#define SDMA_CFG_DMA_FIFO_CLR_DMA_FIFO_CLR__MASK 0x00000001 +#define SDMA_CFG_DMA_FIFO_CLR_DMA_FIFO_CLR__SHIFT 0 +static inline uint32_t SDMA_CFG_DMA_FIFO_CLR_DMA_FIFO_CLR(uint32_t val) +{ + return ((val) << SDMA_CFG_DMA_FIFO_CLR_DMA_FIFO_CLR__SHIFT) & SDMA_CFG_DMA_FIFO_CLR_DMA_FIFO_CLR__MASK; +} + +#define REG_SDMA_CFG_DMA_ARB 0x00009018 +#define SDMA_CFG_DMA_ARB_RESERVED_0__MASK 0xfffffc00 +#define SDMA_CFG_DMA_ARB_RESERVED_0__SHIFT 10 +static inline uint32_t SDMA_CFG_DMA_ARB_RESERVED_0(uint32_t val) +{ + return ((val) << 
SDMA_CFG_DMA_ARB_RESERVED_0__SHIFT) & SDMA_CFG_DMA_ARB_RESERVED_0__MASK; +} +#define SDMA_CFG_DMA_ARB_WR_ARBIT_MODEL__MASK 0x00000200 +#define SDMA_CFG_DMA_ARB_WR_ARBIT_MODEL__SHIFT 9 +static inline uint32_t SDMA_CFG_DMA_ARB_WR_ARBIT_MODEL(uint32_t val) +{ + return ((val) << SDMA_CFG_DMA_ARB_WR_ARBIT_MODEL__SHIFT) & SDMA_CFG_DMA_ARB_WR_ARBIT_MODEL__MASK; +} +#define SDMA_CFG_DMA_ARB_RD_ARBIT_MODEL__MASK 0x00000100 +#define SDMA_CFG_DMA_ARB_RD_ARBIT_MODEL__SHIFT 8 +static inline uint32_t SDMA_CFG_DMA_ARB_RD_ARBIT_MODEL(uint32_t val) +{ + return ((val) << SDMA_CFG_DMA_ARB_RD_ARBIT_MODEL__SHIFT) & SDMA_CFG_DMA_ARB_RD_ARBIT_MODEL__MASK; +} +#define SDMA_CFG_DMA_ARB_RESERVED_1__MASK 0x00000080 +#define SDMA_CFG_DMA_ARB_RESERVED_1__SHIFT 7 +static inline uint32_t SDMA_CFG_DMA_ARB_RESERVED_1(uint32_t val) +{ + return ((val) << SDMA_CFG_DMA_ARB_RESERVED_1__SHIFT) & SDMA_CFG_DMA_ARB_RESERVED_1__MASK; +} +#define SDMA_CFG_DMA_ARB_WR_FIX_ARB__MASK 0x00000070 +#define SDMA_CFG_DMA_ARB_WR_FIX_ARB__SHIFT 4 +static inline uint32_t SDMA_CFG_DMA_ARB_WR_FIX_ARB(uint32_t val) +{ + return ((val) << SDMA_CFG_DMA_ARB_WR_FIX_ARB__SHIFT) & SDMA_CFG_DMA_ARB_WR_FIX_ARB__MASK; +} +#define SDMA_CFG_DMA_ARB_RESERVED_2__MASK 0x00000008 +#define SDMA_CFG_DMA_ARB_RESERVED_2__SHIFT 3 +static inline uint32_t SDMA_CFG_DMA_ARB_RESERVED_2(uint32_t val) +{ + return ((val) << SDMA_CFG_DMA_ARB_RESERVED_2__SHIFT) & SDMA_CFG_DMA_ARB_RESERVED_2__MASK; +} +#define SDMA_CFG_DMA_ARB_RD_FIX_ARB__MASK 0x00000007 +#define SDMA_CFG_DMA_ARB_RD_FIX_ARB__SHIFT 0 +static inline uint32_t SDMA_CFG_DMA_ARB_RD_FIX_ARB(uint32_t val) +{ + return ((val) << SDMA_CFG_DMA_ARB_RD_FIX_ARB__SHIFT) & SDMA_CFG_DMA_ARB_RD_FIX_ARB__MASK; +} + +#define REG_SDMA_CFG_DMA_RD_QOS 0x00009020 +#define SDMA_CFG_DMA_RD_QOS_RESERVED_0__MASK 0xfffffc00 +#define SDMA_CFG_DMA_RD_QOS_RESERVED_0__SHIFT 10 +static inline uint32_t SDMA_CFG_DMA_RD_QOS_RESERVED_0(uint32_t val) +{ + return ((val) << SDMA_CFG_DMA_RD_QOS_RESERVED_0__SHIFT) & 
SDMA_CFG_DMA_RD_QOS_RESERVED_0__MASK; +} +#define SDMA_CFG_DMA_RD_QOS_RD_PC_QOS__MASK 0x00000300 +#define SDMA_CFG_DMA_RD_QOS_RD_PC_QOS__SHIFT 8 +static inline uint32_t SDMA_CFG_DMA_RD_QOS_RD_PC_QOS(uint32_t val) +{ + return ((val) << SDMA_CFG_DMA_RD_QOS_RD_PC_QOS__SHIFT) & SDMA_CFG_DMA_RD_QOS_RD_PC_QOS__MASK; +} +#define SDMA_CFG_DMA_RD_QOS_RD_PPU_QOS__MASK 0x000000c0 +#define SDMA_CFG_DMA_RD_QOS_RD_PPU_QOS__SHIFT 6 +static inline uint32_t SDMA_CFG_DMA_RD_QOS_RD_PPU_QOS(uint32_t val) +{ + return ((val) << SDMA_CFG_DMA_RD_QOS_RD_PPU_QOS__SHIFT) & SDMA_CFG_DMA_RD_QOS_RD_PPU_QOS__MASK; +} +#define SDMA_CFG_DMA_RD_QOS_RD_DPU_QOS__MASK 0x00000030 +#define SDMA_CFG_DMA_RD_QOS_RD_DPU_QOS__SHIFT 4 +static inline uint32_t SDMA_CFG_DMA_RD_QOS_RD_DPU_QOS(uint32_t val) +{ + return ((val) << SDMA_CFG_DMA_RD_QOS_RD_DPU_QOS__SHIFT) & SDMA_CFG_DMA_RD_QOS_RD_DPU_QOS__MASK; +} +#define SDMA_CFG_DMA_RD_QOS_RD_KERNEL_QOS__MASK 0x0000000c +#define SDMA_CFG_DMA_RD_QOS_RD_KERNEL_QOS__SHIFT 2 +static inline uint32_t SDMA_CFG_DMA_RD_QOS_RD_KERNEL_QOS(uint32_t val) +{ + return ((val) << SDMA_CFG_DMA_RD_QOS_RD_KERNEL_QOS__SHIFT) & SDMA_CFG_DMA_RD_QOS_RD_KERNEL_QOS__MASK; +} +#define SDMA_CFG_DMA_RD_QOS_RD_FEATURE_QOS__MASK 0x00000003 +#define SDMA_CFG_DMA_RD_QOS_RD_FEATURE_QOS__SHIFT 0 +static inline uint32_t SDMA_CFG_DMA_RD_QOS_RD_FEATURE_QOS(uint32_t val) +{ + return ((val) << SDMA_CFG_DMA_RD_QOS_RD_FEATURE_QOS__SHIFT) & SDMA_CFG_DMA_RD_QOS_RD_FEATURE_QOS__MASK; +} + +#define REG_SDMA_CFG_DMA_RD_CFG 0x00009024 +#define SDMA_CFG_DMA_RD_CFG_RESERVED_0__MASK 0xffffe000 +#define SDMA_CFG_DMA_RD_CFG_RESERVED_0__SHIFT 13 +static inline uint32_t SDMA_CFG_DMA_RD_CFG_RESERVED_0(uint32_t val) +{ + return ((val) << SDMA_CFG_DMA_RD_CFG_RESERVED_0__SHIFT) & SDMA_CFG_DMA_RD_CFG_RESERVED_0__MASK; +} +#define SDMA_CFG_DMA_RD_CFG_RD_ARLOCK__MASK 0x00001000 +#define SDMA_CFG_DMA_RD_CFG_RD_ARLOCK__SHIFT 12 +static inline uint32_t SDMA_CFG_DMA_RD_CFG_RD_ARLOCK(uint32_t val) +{ + return ((val) << 
SDMA_CFG_DMA_RD_CFG_RD_ARLOCK__SHIFT) & SDMA_CFG_DMA_RD_CFG_RD_ARLOCK__MASK; +} +#define SDMA_CFG_DMA_RD_CFG_RD_ARCACHE__MASK 0x00000f00 +#define SDMA_CFG_DMA_RD_CFG_RD_ARCACHE__SHIFT 8 +static inline uint32_t SDMA_CFG_DMA_RD_CFG_RD_ARCACHE(uint32_t val) +{ + return ((val) << SDMA_CFG_DMA_RD_CFG_RD_ARCACHE__SHIFT) & SDMA_CFG_DMA_RD_CFG_RD_ARCACHE__MASK; +} +#define SDMA_CFG_DMA_RD_CFG_RD_ARPROT__MASK 0x000000e0 +#define SDMA_CFG_DMA_RD_CFG_RD_ARPROT__SHIFT 5 +static inline uint32_t SDMA_CFG_DMA_RD_CFG_RD_ARPROT(uint32_t val) +{ + return ((val) << SDMA_CFG_DMA_RD_CFG_RD_ARPROT__SHIFT) & SDMA_CFG_DMA_RD_CFG_RD_ARPROT__MASK; +} +#define SDMA_CFG_DMA_RD_CFG_RD_ARBURST__MASK 0x00000018 +#define SDMA_CFG_DMA_RD_CFG_RD_ARBURST__SHIFT 3 +static inline uint32_t SDMA_CFG_DMA_RD_CFG_RD_ARBURST(uint32_t val) +{ + return ((val) << SDMA_CFG_DMA_RD_CFG_RD_ARBURST__SHIFT) & SDMA_CFG_DMA_RD_CFG_RD_ARBURST__MASK; +} +#define SDMA_CFG_DMA_RD_CFG_RD_ARSIZE__MASK 0x00000007 +#define SDMA_CFG_DMA_RD_CFG_RD_ARSIZE__SHIFT 0 +static inline uint32_t SDMA_CFG_DMA_RD_CFG_RD_ARSIZE(uint32_t val) +{ + return ((val) << SDMA_CFG_DMA_RD_CFG_RD_ARSIZE__SHIFT) & SDMA_CFG_DMA_RD_CFG_RD_ARSIZE__MASK; +} + +#define REG_SDMA_CFG_DMA_WR_CFG 0x00009028 +#define SDMA_CFG_DMA_WR_CFG_RESERVED_0__MASK 0xffffe000 +#define SDMA_CFG_DMA_WR_CFG_RESERVED_0__SHIFT 13 +static inline uint32_t SDMA_CFG_DMA_WR_CFG_RESERVED_0(uint32_t val) +{ + return ((val) << SDMA_CFG_DMA_WR_CFG_RESERVED_0__SHIFT) & SDMA_CFG_DMA_WR_CFG_RESERVED_0__MASK; +} +#define SDMA_CFG_DMA_WR_CFG_WR_AWLOCK__MASK 0x00001000 +#define SDMA_CFG_DMA_WR_CFG_WR_AWLOCK__SHIFT 12 +static inline uint32_t SDMA_CFG_DMA_WR_CFG_WR_AWLOCK(uint32_t val) +{ + return ((val) << SDMA_CFG_DMA_WR_CFG_WR_AWLOCK__SHIFT) & SDMA_CFG_DMA_WR_CFG_WR_AWLOCK__MASK; +} +#define SDMA_CFG_DMA_WR_CFG_WR_AWCACHE__MASK 0x00000f00 +#define SDMA_CFG_DMA_WR_CFG_WR_AWCACHE__SHIFT 8 +static inline uint32_t SDMA_CFG_DMA_WR_CFG_WR_AWCACHE(uint32_t val) +{ + return ((val) << 
SDMA_CFG_DMA_WR_CFG_WR_AWCACHE__SHIFT) & SDMA_CFG_DMA_WR_CFG_WR_AWCACHE__MASK; +} +#define SDMA_CFG_DMA_WR_CFG_WR_AWPROT__MASK 0x000000e0 +#define SDMA_CFG_DMA_WR_CFG_WR_AWPROT__SHIFT 5 +static inline uint32_t SDMA_CFG_DMA_WR_CFG_WR_AWPROT(uint32_t val) +{ + return ((val) << SDMA_CFG_DMA_WR_CFG_WR_AWPROT__SHIFT) & SDMA_CFG_DMA_WR_CFG_WR_AWPROT__MASK; +} +#define SDMA_CFG_DMA_WR_CFG_WR_AWBURST__MASK 0x00000018 +#define SDMA_CFG_DMA_WR_CFG_WR_AWBURST__SHIFT 3 +static inline uint32_t SDMA_CFG_DMA_WR_CFG_WR_AWBURST(uint32_t val) +{ + return ((val) << SDMA_CFG_DMA_WR_CFG_WR_AWBURST__SHIFT) & SDMA_CFG_DMA_WR_CFG_WR_AWBURST__MASK; +} +#define SDMA_CFG_DMA_WR_CFG_WR_AWSIZE__MASK 0x00000007 +#define SDMA_CFG_DMA_WR_CFG_WR_AWSIZE__SHIFT 0 +static inline uint32_t SDMA_CFG_DMA_WR_CFG_WR_AWSIZE(uint32_t val) +{ + return ((val) << SDMA_CFG_DMA_WR_CFG_WR_AWSIZE__SHIFT) & SDMA_CFG_DMA_WR_CFG_WR_AWSIZE__MASK; +} + +#define REG_SDMA_CFG_DMA_WSTRB 0x0000902c +#define SDMA_CFG_DMA_WSTRB_WR_WSTRB__MASK 0xffffffff +#define SDMA_CFG_DMA_WSTRB_WR_WSTRB__SHIFT 0 +static inline uint32_t SDMA_CFG_DMA_WSTRB_WR_WSTRB(uint32_t val) +{ + return ((val) << SDMA_CFG_DMA_WSTRB_WR_WSTRB__SHIFT) & SDMA_CFG_DMA_WSTRB_WR_WSTRB__MASK; +} + +#define REG_SDMA_CFG_STATUS 0x00009030 +#define SDMA_CFG_STATUS_RESERVED_0__MASK 0xfffffe00 +#define SDMA_CFG_STATUS_RESERVED_0__SHIFT 9 +static inline uint32_t SDMA_CFG_STATUS_RESERVED_0(uint32_t val) +{ + return ((val) << SDMA_CFG_STATUS_RESERVED_0__SHIFT) & SDMA_CFG_STATUS_RESERVED_0__MASK; +} +#define SDMA_CFG_STATUS_IDEL__MASK 0x00000100 +#define SDMA_CFG_STATUS_IDEL__SHIFT 8 +static inline uint32_t SDMA_CFG_STATUS_IDEL(uint32_t val) +{ + return ((val) << SDMA_CFG_STATUS_IDEL__SHIFT) & SDMA_CFG_STATUS_IDEL__MASK; +} +#define SDMA_CFG_STATUS_RESERVED_1__MASK 0x000000ff +#define SDMA_CFG_STATUS_RESERVED_1__SHIFT 0 +static inline uint32_t SDMA_CFG_STATUS_RESERVED_1(uint32_t val) +{ + return ((val) << SDMA_CFG_STATUS_RESERVED_1__SHIFT) & 
SDMA_CFG_STATUS_RESERVED_1__MASK; +} + +#define REG_GLOBAL_OPERATION_ENABLE 0x0000f008 +#define GLOBAL_OPERATION_ENABLE_RESERVED_0__MASK 0xffffff80 +#define GLOBAL_OPERATION_ENABLE_RESERVED_0__SHIFT 7 +static inline uint32_t GLOBAL_OPERATION_ENABLE_RESERVED_0(uint32_t val) +{ + return ((val) << GLOBAL_OPERATION_ENABLE_RESERVED_0__SHIFT) & GLOBAL_OPERATION_ENABLE_RESERVED_0__MASK; +} +#define GLOBAL_OPERATION_ENABLE_PPU_RDMA_OP_EN__MASK 0x00000040 +#define GLOBAL_OPERATION_ENABLE_PPU_RDMA_OP_EN__SHIFT 6 +static inline uint32_t GLOBAL_OPERATION_ENABLE_PPU_RDMA_OP_EN(uint32_t val) +{ + return ((val) << GLOBAL_OPERATION_ENABLE_PPU_RDMA_OP_EN__SHIFT) & GLOBAL_OPERATION_ENABLE_PPU_RDMA_OP_EN__MASK; +} +#define GLOBAL_OPERATION_ENABLE_PPU_OP_EN__MASK 0x00000020 +#define GLOBAL_OPERATION_ENABLE_PPU_OP_EN__SHIFT 5 +static inline uint32_t GLOBAL_OPERATION_ENABLE_PPU_OP_EN(uint32_t val) +{ + return ((val) << GLOBAL_OPERATION_ENABLE_PPU_OP_EN__SHIFT) & GLOBAL_OPERATION_ENABLE_PPU_OP_EN__MASK; +} +#define GLOBAL_OPERATION_ENABLE_DPU_RDMA_OP_EN__MASK 0x00000010 +#define GLOBAL_OPERATION_ENABLE_DPU_RDMA_OP_EN__SHIFT 4 +static inline uint32_t GLOBAL_OPERATION_ENABLE_DPU_RDMA_OP_EN(uint32_t val) +{ + return ((val) << GLOBAL_OPERATION_ENABLE_DPU_RDMA_OP_EN__SHIFT) & GLOBAL_OPERATION_ENABLE_DPU_RDMA_OP_EN__MASK; +} +#define GLOBAL_OPERATION_ENABLE_DPU_OP_EN__MASK 0x00000008 +#define GLOBAL_OPERATION_ENABLE_DPU_OP_EN__SHIFT 3 +static inline uint32_t GLOBAL_OPERATION_ENABLE_DPU_OP_EN(uint32_t val) +{ + return ((val) << GLOBAL_OPERATION_ENABLE_DPU_OP_EN__SHIFT) & GLOBAL_OPERATION_ENABLE_DPU_OP_EN__MASK; +} +#define GLOBAL_OPERATION_ENABLE_CORE_OP_EN__MASK 0x00000004 +#define GLOBAL_OPERATION_ENABLE_CORE_OP_EN__SHIFT 2 +static inline uint32_t GLOBAL_OPERATION_ENABLE_CORE_OP_EN(uint32_t val) +{ + return ((val) << GLOBAL_OPERATION_ENABLE_CORE_OP_EN__SHIFT) & GLOBAL_OPERATION_ENABLE_CORE_OP_EN__MASK; +} +#define GLOBAL_OPERATION_ENABLE_RESERVED_1__MASK 0x00000002 +#define 
GLOBAL_OPERATION_ENABLE_RESERVED_1__SHIFT 1 +static inline uint32_t GLOBAL_OPERATION_ENABLE_RESERVED_1(uint32_t val) +{ + return ((val) << GLOBAL_OPERATION_ENABLE_RESERVED_1__SHIFT) & GLOBAL_OPERATION_ENABLE_RESERVED_1__MASK; +} +#define GLOBAL_OPERATION_ENABLE_CNA_OP_EN__MASK 0x00000001 +#define GLOBAL_OPERATION_ENABLE_CNA_OP_EN__SHIFT 0 +static inline uint32_t GLOBAL_OPERATION_ENABLE_CNA_OP_EN(uint32_t val) +{ + return ((val) << GLOBAL_OPERATION_ENABLE_CNA_OP_EN__SHIFT) & GLOBAL_OPERATION_ENABLE_CNA_OP_EN__MASK; +} + +#endif /* __ROCKET_REGISTERS_XML__ */ |
