515.49.14

Liam Middlebrook 2022-08-19 12:23:00 -07:00
parent 54b4babe66
commit f848abf44e
No known key found for this signature in database
GPG Key ID: CB7884892194D6D2
11 changed files with 367 additions and 28 deletions

View File

@ -1,7 +1,7 @@
# NVIDIA Linux Open GPU Kernel Module Source
This is the source release of the NVIDIA Linux open GPU kernel modules,
version 515.49.10.
version 515.49.14.
## How to Build
@ -17,7 +17,7 @@ as root:
Note that the kernel modules built here must be used with gsp.bin
firmware and user-space NVIDIA GPU driver components from a corresponding
515.49.10 driver release. This can be achieved by installing
515.49.14 driver release. This can be achieved by installing
the NVIDIA GPU driver from the .run file using the `--no-kernel-modules`
option. E.g.,
@ -167,7 +167,7 @@ for the target kernel.
## Compatible GPUs
The open-gpu-kernel-modules can be used on any Turing or later GPU
(see the table below). However, in the 515.49.10 release,
(see the table below). However, in the 515.49.14 release,
GeForce and Workstation support is still considered alpha-quality.
To enable use of the open kernel modules on GeForce and Workstation GPUs,
@ -175,7 +175,7 @@ set the "NVreg_OpenRmEnableUnsupportedGpus" nvidia.ko kernel module
parameter to 1. For more details, see the NVIDIA GPU driver end user
README here:
https://us.download.nvidia.com/XFree86/Linux-x86_64/515.49.10/README/kernel_open.html
https://us.download.nvidia.com/XFree86/Linux-x86_64/515.49.14/README/kernel_open.html
In the below table, if three IDs are listed, the first is the PCI Device
ID, the second is the PCI Subsystem Vendor ID, and the third is the PCI

View File

@ -72,7 +72,7 @@ EXTRA_CFLAGS += -I$(src)/common/inc
EXTRA_CFLAGS += -I$(src)
EXTRA_CFLAGS += -Wall -MD $(DEFINES) $(INCLUDES) -Wno-cast-qual -Wno-error -Wno-format-extra-args
EXTRA_CFLAGS += -D__KERNEL__ -DMODULE -DNVRM
EXTRA_CFLAGS += -DNV_VERSION_STRING=\"515.49.10\"
EXTRA_CFLAGS += -DNV_VERSION_STRING=\"515.49.14\"
EXTRA_CFLAGS += -Wno-unused-function

View File

@ -43,18 +43,18 @@
#endif
#if defined(NV_LINUX) || defined(NV_BSD) || defined(NV_SUNOS)
#define NV_BUILD_BRANCH_VERSION "rel/gpu_drv/r515/VK516_10-209"
#define NV_BUILD_CHANGELIST_NUM (31587802)
#define NV_BUILD_BRANCH_VERSION "rel/gpu_drv/r515/VK516_10-213"
#define NV_BUILD_CHANGELIST_NUM (31711086)
#define NV_BUILD_TYPE "Official"
#define NV_BUILD_NAME "rel/gpu_drv/r515/VK516_10-209"
#define NV_LAST_OFFICIAL_CHANGELIST_NUM (31587802)
#define NV_BUILD_NAME "rel/gpu_drv/r515/VK516_10-213"
#define NV_LAST_OFFICIAL_CHANGELIST_NUM (31711086)
#else /* Windows builds */
#define NV_BUILD_BRANCH_VERSION "VK516_10-10"
#define NV_BUILD_CHANGELIST_NUM (31587802)
#define NV_BUILD_BRANCH_VERSION "VK516_10-14"
#define NV_BUILD_CHANGELIST_NUM (31711086)
#define NV_BUILD_TYPE "Official"
#define NV_BUILD_NAME "516.89"
#define NV_LAST_OFFICIAL_CHANGELIST_NUM (31587802)
#define NV_BUILD_NAME "517.11"
#define NV_LAST_OFFICIAL_CHANGELIST_NUM (31711086)
#define NV_BUILD_BRANCH_BASE_VERSION R515
#endif
// End buildmeister python edited section

View File

@ -4,7 +4,7 @@
#if defined(NV_LINUX) || defined(NV_BSD) || defined(NV_SUNOS) || defined(NV_VMWARE) || defined(NV_QNX) || defined(NV_INTEGRITY) || \
(defined(RMCFG_FEATURE_PLATFORM_GSP) && RMCFG_FEATURE_PLATFORM_GSP == 1)
#define NV_VERSION_STRING "515.49.10"
#define NV_VERSION_STRING "515.49.14"
#else

View File

@ -43,6 +43,14 @@ typedef NvBool (*NVShutDownHeadsTestFunc)(
void nvShutDownHeads(NVDevEvoPtr pDevEvo, NVShutDownHeadsTestFunc pTestFunc);
NVVBlankCallbackPtr nvRegisterVBlankCallback(NVDispEvoPtr pDispEvo,
NvU32 head,
NVVBlankCallbackProc pCallback,
void *pUserData);
void nvUnregisterVBlankCallback(NVDispEvoPtr pDispEvo,
NvU32 head,
NVVBlankCallbackPtr pCallback);
#ifdef __cplusplus
};
#endif

View File

@ -134,6 +134,7 @@ typedef struct _NVVblankSyncObjectRec *NVVblankSyncObjectPtr;
typedef struct _NVDispHeadStateEvoRec *NVDispHeadStateEvoPtr;
typedef struct _NVDispEvoRec *NVDispEvoPtr;
typedef struct _NVParsedEdidEvoRec *NVParsedEdidEvoPtr;
typedef struct _NVVBlankCallbackRec *NVVBlankCallbackPtr;
typedef struct _NVDpyEvoRec *NVDpyEvoPtr;
typedef struct _NVLutSurfaceEvo *NVLutSurfaceEvoPtr;
typedef struct _NVFrameLockEvo *NVFrameLockEvoPtr;
@ -1567,6 +1568,9 @@ typedef struct _NVDispHeadStateEvoRec {
NvU8 numVblankSyncObjectsCreated;
NVVblankSyncObjectRec vblankSyncObjects[NVKMS_MAX_VBLANK_SYNC_OBJECTS_PER_HEAD];
NVDispHeadAudioStateEvoRec audio;
NvU32 rmVBlankCallbackHandle;
NVListRec vblankCallbackList;
} NVDispHeadStateEvoRec;
typedef struct _NVDispEvoRec {
@ -1682,6 +1686,16 @@ typedef struct _NVParsedEdidEvoRec {
char serialNumberString[NVT_EDID_LDD_PAYLOAD_SIZE+1];
} NVParsedEdidEvoRec;
typedef void (*NVVBlankCallbackProc)(NVDispEvoRec *pDispEvo,
const NvU32 head,
NVVBlankCallbackPtr pCallbackData);
typedef struct _NVVBlankCallbackRec {
NVListRec vblankCallbackListEntry;
NVVBlankCallbackProc pCallback;
void *pUserData;
} NVVBlankCallbackRec;
typedef struct _NVDpyEvoRec {
NVListRec dpyListEntry;
NVDpyId id;

View File

@ -270,6 +270,7 @@ enum NvKmsIoctlCommand {
NVKMS_IOCTL_EXPORT_VRR_SEMAPHORE_SURFACE,
NVKMS_IOCTL_ENABLE_VBLANK_SYNC_OBJECT,
NVKMS_IOCTL_DISABLE_VBLANK_SYNC_OBJECT,
NVKMS_IOCTL_NOTIFY_VBLANK,
};
@ -4060,4 +4061,32 @@ struct NvKmsDisableVblankSyncObjectParams {
struct NvKmsDisableVblankSyncObjectReply reply; /*! out */
};
/*!
* NVKMS_IOCTL_NOTIFY_VBLANK:
*
* Register a unicast event fd to be notified when the next vblank event occurs
* on the specified head. This is a one-shot notification, and in order to be
* notified of subsequent vblank events the caller must clear and re-register
* the unicast event fd.
*/
struct NvKmsNotifyVblankRequest {
NvKmsDeviceHandle deviceHandle;
NvKmsDispHandle dispHandle;
NvU32 head;
struct {
int fd;
} unicastEvent;
};
struct NvKmsNotifyVblankReply {
NvU32 padding;
};
struct NvKmsNotifyVblankParams {
struct NvKmsNotifyVblankRequest request; /*! in */
struct NvKmsNotifyVblankReply reply; /*! out */
};
#endif /* NVKMS_API_H */
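
For context, a minimal client-side sketch of the one-shot flow described above. This is not part of the commit: the nvKmsIoctl() wrapper and the file-descriptor plumbing through the nvidia-modeset device are assumed here purely for illustration; only the NvKmsNotifyVblankParams layout and the single-shot semantics come from the diff.

#include <poll.h>
#include "nvkms-api.h"   /* NvKmsNotifyVblankParams, NVKMS_IOCTL_NOTIFY_VBLANK */

/* Hypothetical helper that marshals an NVKMS ioctl; not part of this change. */
extern NvBool nvKmsIoctl(int nvkmsFd, NvU32 cmd, void *params, size_t size);

static NvBool WaitForNextVblank(int nvkmsFd, int unicastEventFd,
                                NvKmsDeviceHandle deviceHandle,
                                NvKmsDispHandle dispHandle, NvU32 head)
{
    struct NvKmsNotifyVblankParams params = { 0 };
    struct pollfd pfd = { .fd = unicastEventFd, .events = POLLIN };

    params.request.deviceHandle = deviceHandle;
    params.request.dispHandle = dispHandle;
    params.request.head = head;
    params.request.unicastEvent.fd = unicastEventFd;

    /* Arm the one-shot notification for the next vblank on this head. */
    if (!nvKmsIoctl(nvkmsFd, NVKMS_IOCTL_NOTIFY_VBLANK, &params, sizeof(params))) {
        return NV_FALSE;
    }

    /* Block until the vblank event is signalled on the unicast event fd. */
    if (poll(&pfd, 1, -1) != 1) {
        return NV_FALSE;
    }

    /* The notification is single-shot: to observe the next vblank, clear the
     * fd and issue NVKMS_IOCTL_NOTIFY_VBLANK again. */
    return NV_TRUE;
}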

View File

@ -932,7 +932,35 @@ tryAgain:
return bResult;
}
static void VBlankCallbackDeferredWork(void *dataPtr, NvU32 data32)
{
NVVBlankCallbackPtr pVBlankCallbackTmp = NULL;
NVVBlankCallbackPtr pVBlankCallback = NULL;
NVDispEvoPtr pDispEvo = dataPtr;
NvU32 head = data32;
if (!nvHeadIsActive(pDispEvo, head)) {
return;
}
nvListForEachEntry_safe(pVBlankCallback,
pVBlankCallbackTmp,
&pDispEvo->headState[head].vblankCallbackList,
vblankCallbackListEntry) {
pVBlankCallback->pCallback(pDispEvo, head, pVBlankCallback);
}
}
static void VBlankCallback(void *pParam1, void *pParam2)
{
const NvU32 head = (NvU32)(NvUPtr)pParam2;
(void) nvkms_alloc_timer_with_ref_ptr(
VBlankCallbackDeferredWork,
pParam1, /* ref_ptr to pDispEvo */
head, /* dataU32 */
0); /* timeout: schedule the work immediately */
}
/*!
* Validate the proposed configuration on the specified disp.
@ -2625,6 +2653,71 @@ done:
return ret;
}
/*!
* Register a callback to activate when vblank is reached on a given head.
*
* \param[in,out] pDispEvo The display engine to register the callback on.
* \param[in] head The head to register the callback on.
* \param[in] pCallback The function to call when vblank is reached on the
* provided pDispEvo+head combination.
* \param[in] pUserData A pointer to caller-provided custom data.
*
* \return Returns a pointer to an NVVBlankCallbackRec structure if the
* registration was successful. Otherwise, returns NULL.
*/
NVVBlankCallbackPtr nvRegisterVBlankCallback(NVDispEvoPtr pDispEvo,
NvU32 head,
NVVBlankCallbackProc pCallback,
void *pUserData)
{
NVVBlankCallbackPtr pVBlankCallback = NULL;
pVBlankCallback = nvCalloc(1, sizeof(*pVBlankCallback));
if (pVBlankCallback == NULL) {
return NULL;
}
pVBlankCallback->pCallback = pCallback;
pVBlankCallback->pUserData = pUserData;
nvListAppend(&pVBlankCallback->vblankCallbackListEntry,
&pDispEvo->headState[head].vblankCallbackList);
// If this is the first entry in the list, register the vblank callback
if (pDispEvo->headState[head].rmVBlankCallbackHandle == 0) {
pDispEvo->headState[head].rmVBlankCallbackHandle =
nvRmAddVBlankCallback(pDispEvo,
head,
VBlankCallback);
}
return pVBlankCallback;
}
/*!
* Un-register a vblank callback for a given head.
*
* \param[in,out] pDispEvo The display engine the callback was registered on.
* \param[in] head The head the callback was registered on.
* \param[in] pCallback A pointer to the NVVBlankCallbackRec to un-register.
*
*/
void nvUnregisterVBlankCallback(NVDispEvoPtr pDispEvo,
NvU32 head,
NVVBlankCallbackPtr pCallback)
{
nvListDel(&pCallback->vblankCallbackListEntry);
nvFree(pCallback);
// If there are no more callbacks, disable the RM-level callback
if (nvListIsEmpty(&pDispEvo->headState[head].vblankCallbackList)) {
nvRmRemoveVBlankCallback(pDispEvo,
pDispEvo->headState[head].rmVBlankCallbackHandle);
pDispEvo->headState[head].rmVBlankCallbackHandle = 0;
}
}
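
As a usage illustration of the pair above (not part of the commit: MyVblankCounter and the My* functions are hypothetical names), an in-driver consumer could look like this:

/* Hypothetical consumer of nvRegisterVBlankCallback()/nvUnregisterVBlankCallback();
 * the My* names are illustrative only and do not appear in this change. */
typedef struct {
    NvU32 vblankCount;
} MyVblankCounter;

static void MyCountVblanks(NVDispEvoRec *pDispEvo,
                           const NvU32 head,
                           NVVBlankCallbackPtr pCallbackData)
{
    MyVblankCounter *pCounter = pCallbackData->pUserData;
    pCounter->vblankCount++;
}

static NVVBlankCallbackPtr MyStartCounting(NVDispEvoPtr pDispEvo,
                                           NvU32 head,
                                           MyVblankCounter *pCounter)
{
    /* The first callback registered on a head also arms the RM-level
     * vblank callback via nvRmAddVBlankCallback(). */
    return nvRegisterVBlankCallback(pDispEvo, head, MyCountVblanks, pCounter);
}

static void MyStopCounting(NVDispEvoPtr pDispEvo,
                           NvU32 head,
                           NVVBlankCallbackPtr pCallback)
{
    /* Removing the last callback on a head disarms the RM-level callback. */
    nvUnregisterVBlankCallback(pDispEvo, head, pCallback);
}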
/*!
* Perform a modeset that disables some or all heads.
*

View File

@ -238,6 +238,7 @@ static void FreeDisplay(NVDispEvoPtr pDispEvo)
for (head = 0; head < ARRAY_LEN(pDispEvo->pSwapGroup); head++) {
nvAssert(pDispEvo->pSwapGroup[head] == NULL);
nvAssert(nvListIsEmpty(&pDispEvo->headState[head].vblankCallbackList));
}
nvAssert(nvListIsEmpty(&pDispEvo->dpyList));
@ -269,6 +270,7 @@ static inline NVDispEvoPtr AllocDisplay(NVDevEvoPtr pDevEvo)
for (head = 0; head < ARRAY_LEN(pDispEvo->headState); head++) {
pDispEvo->headState[head].activeDpys = nvEmptyDpyIdList();
pDispEvo->headState[head].attributes = NV_EVO_DEFAULT_ATTRIBUTES_SET;
nvListInit(&pDispEvo->headState[head].vblankCallbackList);
}
pDispEvo->ref_ptr = nvkms_alloc_ref_ptr(pDispEvo);

View File

@ -110,6 +110,19 @@ enum NvKmsPerOpenType {
NvKmsPerOpenTypeUndefined,
};
enum NvKmsUnicastEventType {
/* Used by:
* NVKMS_IOCTL_JOIN_SWAP_GROUP */
NvKmsUnicastEventTypeDeferredRequest,
/* Used by:
* NVKMS_IOCTL_NOTIFY_VBLANK */
NvKmsUnicastEventTypeVblankNotification,
/* Undefined; this indicates the unicast fd is available for use. */
NvKmsUnicastEventTypeUndefined,
};
struct NvKmsPerOpenConnector {
NVConnectorEvoPtr pConnectorEvo;
NvKmsConnectorHandle nvKmsApiHandle;
@ -128,6 +141,7 @@ struct NvKmsPerOpenDisp {
NVEvoApiHandlesRec connectorHandles;
struct NvKmsPerOpenConnector connector[NVKMS_MAX_CONNECTORS_PER_DISP];
NVEvoApiHandlesRec vblankSyncObjectHandles[NVKMS_MAX_HEADS_PER_DISP];
NVEvoApiHandlesRec vblankCallbackHandles[NVKMS_MAX_HEADS_PER_DISP];
};
struct NvKmsPerOpenDev {
@ -183,13 +197,19 @@ struct NvKmsPerOpen {
* that object can generate events on the unicast event. Store a
* pointer to that object, so that we can clear the pointer when the
* unicast event NvKmsPerOpen is closed.
*
* So far, deferred request fifos with swap groups are the only
* users of unicast events. When we add more users, we can add an
* enum or similar to know which object type is using this unicast
* event.
*/
NVDeferredRequestFifoPtr pDeferredRequestFifo;
enum NvKmsUnicastEventType type;
union {
struct {
NVDeferredRequestFifoPtr pDeferredRequestFifo;
} deferred;
struct {
NvKmsGenericHandle hCallback;
struct NvKmsPerOpenDisp *pOpenDisp;
NvU32 head;
} vblankNotification;
} e;
} unicastEvent;
};
};
@ -647,6 +667,9 @@ static void ClearPerOpenDisp(
struct NvKmsPerOpenConnector *pOpenConnector;
NvKmsGenericHandle connector;
NVVBlankCallbackPtr pCallbackData;
NvKmsGenericHandle callback;
FreePerOpenFrameLock(pOpen, pOpenDisp);
FOR_ALL_POINTERS_IN_EVO_API_HANDLES(&pOpenDisp->connectorHandles,
@ -659,6 +682,12 @@ static void ClearPerOpenDisp(
for (NvU32 i = 0; i < NVKMS_MAX_HEADS_PER_DISP; i++) {
nvEvoDestroyApiHandles(&pOpenDisp->vblankSyncObjectHandles[i]);
FOR_ALL_POINTERS_IN_EVO_API_HANDLES(&pOpenDisp->vblankCallbackHandles[i],
pCallbackData, callback) {
nvRemoveUnicastEvent(pCallbackData->pUserData);
}
nvEvoDestroyApiHandles(&pOpenDisp->vblankCallbackHandles[i]);
}
nvEvoDestroyApiHandle(&pOpenDev->dispHandles, pOpenDisp->nvKmsApiHandle);
@ -724,6 +753,17 @@ static NvBool InitPerOpenDisp(
}
}
/* Initialize the vblankCallbackHandles for each head.
*
* The NVKMS_MAX_VBLANK_SYNC_OBJECTS_PER_HEAD limit doesn't strictly apply
* here, but the handle table needs some capacity bound. */
for (NvU32 i = 0; i < NVKMS_MAX_HEADS_PER_DISP; i++) {
if (!nvEvoInitApiHandles(&pOpenDisp->vblankCallbackHandles[i],
NVKMS_MAX_VBLANK_SYNC_OBJECTS_PER_HEAD)) {
goto fail;
}
}
if (!AllocPerOpenFrameLock(pOpen, pOpenDisp)) {
goto fail;
}
@ -1156,6 +1196,30 @@ static NvBool AssignNvKmsPerOpenType(struct NvKmsPerOpen *pOpen,
return TRUE;
}
/*!
* Return whether the PerOpen can be used as a unicast event.
*/
static inline NvBool PerOpenIsValidForUnicastEvent(
const struct NvKmsPerOpen *pOpen)
{
/* If the type is Undefined, it can be made a unicast event. */
if (pOpen->type == NvKmsPerOpenTypeUndefined) {
return TRUE;
}
/*
* If the type is already UnicastEvent but there is no active user, it can
* be made a unicast event.
*/
if ((pOpen->type == NvKmsPerOpenTypeUnicastEvent) &&
(pOpen->unicastEvent.type == NvKmsUnicastEventTypeUndefined)) {
return TRUE;
}
return FALSE;
}
/*!
* Allocate the specified device.
*/
@ -3390,6 +3454,70 @@ static NvBool DisableVblankSyncObject(
return TRUE;
}
static void NotifyVblankCallback(NVDispEvoRec *pDispEvo,
const NvU32 head,
NVVBlankCallbackPtr pCallbackData)
{
struct NvKmsPerOpen *pEventOpenFd = pCallbackData->pUserData;
/*
* NOTIFY_VBLANK events are single-shot, so notify the unicast FD, then
* immediately unregister the callback. The unregister step is done in
* nvRemoveUnicastEvent, which resets the unicast event data.
*/
nvSendUnicastEvent(pEventOpenFd);
nvRemoveUnicastEvent(pEventOpenFd);
}
static NvBool NotifyVblank(
struct NvKmsPerOpen *pOpen,
void *pParamsVoid)
{
struct NvKmsNotifyVblankParams *pParams = pParamsVoid;
struct NvKmsPerOpen *pEventOpenFd = NULL;
NVVBlankCallbackPtr pCallbackData = NULL;
struct NvKmsPerOpenDisp* pOpenDisp =
GetPerOpenDisp(pOpen, pParams->request.deviceHandle,
pParams->request.dispHandle);
NvU32 head = pParams->request.head;
pEventOpenFd = nvkms_get_per_open_data(pParams->request.unicastEvent.fd);
if (pEventOpenFd == NULL) {
return NV_FALSE;
}
if (!PerOpenIsValidForUnicastEvent(pEventOpenFd)) {
return NV_FALSE;
}
pEventOpenFd->type = NvKmsPerOpenTypeUnicastEvent;
pCallbackData = nvRegisterVBlankCallback(pOpenDisp->pDispEvo,
head,
NotifyVblankCallback,
pEventOpenFd);
if (pCallbackData == NULL) {
return NV_FALSE;
}
pEventOpenFd->unicastEvent.type = NvKmsUnicastEventTypeVblankNotification;
pEventOpenFd->unicastEvent.e.vblankNotification.pOpenDisp = pOpenDisp;
pEventOpenFd->unicastEvent.e.vblankNotification.head = head;
pEventOpenFd->unicastEvent.e.vblankNotification.hCallback
= nvEvoCreateApiHandle(&pOpenDisp->vblankCallbackHandles[head],
pCallbackData);
if (pEventOpenFd->unicastEvent.e.vblankNotification.hCallback == 0) {
nvUnregisterVBlankCallback(pOpenDisp->pDispEvo,
head,
pCallbackData);
return NV_FALSE;
}
return NV_TRUE;
}
/*!
* Perform the ioctl operation requested by the client.
*
@ -3502,6 +3630,7 @@ NvBool nvKmsIoctl(
ENTRY(NVKMS_IOCTL_EXPORT_VRR_SEMAPHORE_SURFACE, ExportVrrSemaphoreSurface),
ENTRY(NVKMS_IOCTL_ENABLE_VBLANK_SYNC_OBJECT, EnableVblankSyncObject),
ENTRY(NVKMS_IOCTL_DISABLE_VBLANK_SYNC_OBJECT, DisableVblankSyncObject),
ENTRY(NVKMS_IOCTL_NOTIFY_VBLANK, NotifyVblank),
};
struct NvKmsPerOpen *pOpen = pOpenVoid;
@ -3701,6 +3830,18 @@ static const char *ProcFsPerOpenTypeString(
return "unknown";
}
static const char *ProcFsUnicastEventTypeString(
enum NvKmsUnicastEventType type)
{
switch (type) {
case NvKmsUnicastEventTypeDeferredRequest: return "DeferredRequest";
case NvKmsUnicastEventTypeVblankNotification: return "VblankNotification";
case NvKmsUnicastEventTypeUndefined: return "undefined";
}
return "unknown";
}
static const char *ProcFsPerOpenClientTypeString(
enum NvKmsClientType clientType)
{
@ -3851,10 +3992,23 @@ ProcFsPrintClients(
pOpen->grantSwapGroup.pSwapGroup);
} else if (pOpen->type == NvKmsPerOpenTypeUnicastEvent) {
nvEvoLogInfoString(&infoString,
" pDeferredRequestFifo : %p",
pOpen->unicastEvent.pDeferredRequestFifo);
" unicastEvent type : %s",
ProcFsUnicastEventTypeString(pOpen->unicastEvent.type));
switch(pOpen->unicastEvent.type) {
case NvKmsUnicastEventTypeDeferredRequest:
nvEvoLogInfoString(&infoString,
" pDeferredRequestFifo : %p",
pOpen->unicastEvent.e.deferred.pDeferredRequestFifo);
break;
case NvKmsUnicastEventTypeVblankNotification:
nvEvoLogInfoString(&infoString,
" head : %x",
pOpen->unicastEvent.e.vblankNotification.head);
break;
default:
break;
}
}
nvEvoLogInfoString(&infoString, "");
@ -4612,6 +4766,7 @@ void nvSendUnicastEvent(struct NvKmsPerOpen *pOpen)
}
nvAssert(pOpen->type == NvKmsPerOpenTypeUnicastEvent);
nvAssert(pOpen->unicastEvent.type != NvKmsUnicastEventTypeUndefined);
nvkms_event_queue_changed(pOpen->pOpenKernel, TRUE);
}
@ -4619,6 +4774,10 @@ void nvSendUnicastEvent(struct NvKmsPerOpen *pOpen)
void nvRemoveUnicastEvent(struct NvKmsPerOpen *pOpen)
{
NVDeferredRequestFifoPtr pDeferredRequestFifo;
NvKmsGenericHandle callbackHandle;
NVVBlankCallbackPtr pCallbackData;
struct NvKmsPerOpenDisp *pOpenDisp;
NvU32 head;
if (pOpen == NULL) {
return;
@ -4626,12 +4785,46 @@ void nvRemoveUnicastEvent(struct NvKmsPerOpen *pOpen)
nvAssert(pOpen->type == NvKmsPerOpenTypeUnicastEvent);
pDeferredRequestFifo = pOpen->unicastEvent.pDeferredRequestFifo;
switch(pOpen->unicastEvent.type)
{
case NvKmsUnicastEventTypeDeferredRequest:
pDeferredRequestFifo =
pOpen->unicastEvent.e.deferred.pDeferredRequestFifo;
if (pDeferredRequestFifo != NULL) {
pDeferredRequestFifo->swapGroup.pOpenUnicastEvent = NULL;
pOpen->unicastEvent.pDeferredRequestFifo = NULL;
pDeferredRequestFifo->swapGroup.pOpenUnicastEvent = NULL;
pOpen->unicastEvent.e.deferred.pDeferredRequestFifo = NULL;
break;
case NvKmsUnicastEventTypeVblankNotification:
/* grab fields from the unicast fd */
callbackHandle =
pOpen->unicastEvent.e.vblankNotification.hCallback;
pOpenDisp =
pOpen->unicastEvent.e.vblankNotification.pOpenDisp;
head = pOpen->unicastEvent.e.vblankNotification.head;
/* Unregister the vblank callback */
pCallbackData =
nvEvoGetPointerFromApiHandle(&pOpenDisp->vblankCallbackHandles[head],
callbackHandle);
nvUnregisterVBlankCallback(pOpenDisp->pDispEvo,
head,
pCallbackData);
nvEvoDestroyApiHandle(&pOpenDisp->vblankCallbackHandles[head],
callbackHandle);
/* invalidate the pOpen data */
pOpen->unicastEvent.e.vblankNotification.hCallback = 0;
pOpen->unicastEvent.e.vblankNotification.pOpenDisp = NULL;
pOpen->unicastEvent.e.vblankNotification.head = NV_INVALID_HEAD;
break;
default:
nvAssert(!"Invalid Unicast Event Type!");
break;
}
pOpen->unicastEvent.type = NvKmsUnicastEventTypeUndefined;
}
static void AllocSurfaceCtxDmasForAllOpens(NVDevEvoRec *pDevEvo)

View File

@ -1,4 +1,4 @@
NVIDIA_VERSION = 515.49.10
NVIDIA_VERSION = 515.49.14
# This file.
VERSION_MK_FILE := $(lastword $(MAKEFILE_LIST))